_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
a5bda6d18d395d6e64591e2d1248a97e556afedf6bb362db5e2206b252fca6ae | int-index/ether | T8.hs | module Regression.T8 (test8) where
import Ether
import qualified Control.Monad.State as T
import Test.Tasty
import Test.Tasty.QuickCheck
-- | Phantom type used only as a state tag for ether's tagged 'MonadState'.
data Foo
-- | Second phantom state tag, distinguishing the 'Bool' state from 'Foo's 'Int'.
data Bar
-- | Plain-mtl computation over an 'Int' state: replaces the state with
-- its negation.
testMTL1 :: T.MonadState Int m => m ()
testMTL1 = T.get >>= T.put . negate
-- | Plain-mtl computation over a 'Bool' state: flips the stored flag.
testMTL2 :: T.MonadState Bool m => m ()
testMTL2 = T.get >>= T.put . not
-- | Runs both mtl computations against a single ether monad by attaching
-- a distinct tag to each one, then renders both final states.
testEther
    :: (MonadState Foo Int m, MonadState Bar Bool m)
    => m String
testEther = do
    -- Route the Int-state computation to the Foo-tagged state.
    tagAttach @Foo testMTL1
    -- Route the Bool-state computation to the Bar-tagged state.
    tagAttach @Bar testMTL2
    a <- gets @Foo show
    b <- gets @Bar show
    return (a ++ b)
-- | Reference result the property tests compare against: the rendered
-- negated Int followed by the rendered flipped Bool.
model :: Int -> Bool -> String
model a b = concat [show (negate a), show (not b)]
-- | Interpret the ether computation with the Foo/Int layer outermost
-- and the Bar/Bool layer inside.
runner1 a b action =
    evalState @Foo (evalStateT @Bar action b) a
-- | Same as 'runner1' but with the two state layers stacked the other
-- way around; the tests assert the order does not matter.
runner2 a b action =
    evalState @Bar (evalStateT @Foo action a) b
-- | Property group checking that both stacking orders agree with the
-- pure model and with each other.
test8 :: TestTree
test8 = testGroup "T8: Multiple tag attachments"  -- fixed typo "attachements"
    [ testProperty "runner₁ works"
        $ \a b -> property
        $ runner1 a b testEther == model a b
    , testProperty "runner₂ works"
        $ \a b -> property
        $ runner2 a b testEther == model a b
    , testProperty "runner₁ == runner₂"
        $ \a b -> property
        $ runner1 a b testEther == runner2 a b testEther
    ]
| null | https://raw.githubusercontent.com/int-index/ether/84c1d560da241c8111d1a3c98d9a896f0c62087b/test/Regression/T8.hs | haskell | module Regression.T8 (test8) where
import Ether
import qualified Control.Monad.State as T
import Test.Tasty
import Test.Tasty.QuickCheck
data Foo
data Bar
testMTL1 :: T.MonadState Int m => m ()
testMTL1 = T.modify negate
testMTL2 :: T.MonadState Bool m => m ()
testMTL2 = T.modify not
testEther
:: (MonadState Foo Int m, MonadState Bar Bool m)
=> m String
testEther = do
tagAttach @Foo testMTL1
tagAttach @Bar testMTL2
a <- gets @Foo show
b <- gets @Bar show
return (a ++ b)
model :: Int -> Bool -> String
model a b = show (negate a) ++ show (not b)
runner1 a b
= flip (evalState @Foo) a
. flip (evalStateT @Bar) b
runner2 a b
= flip (evalState @Bar) b
. flip (evalStateT @Foo) a
test8 :: TestTree
test8 = testGroup "T8: Multiple tag attachements"
[ testProperty "runner₁ works"
$ \a b -> property
$ runner1 a b testEther == model a b
, testProperty "runner₂ works"
$ \a b -> property
$ runner2 a b testEther == model a b
, testProperty "runner₁ == runner₂"
$ \a b -> property
$ runner1 a b testEther == runner2 a b testEther
]
| |
3aa9bbcf8e73bc1b8b5558f6a806716ba0f3176ba9fbf3ac2b21ca6e9eea5509 | Gandalf-/coreutils | Shuf.hs | {-# OPTIONS_GHC -Wno-unused-binds -Wno-unused-matches #-}
# LANGUAGE ExistentialQuantification #
# LANGUAGE LambdaCase #
module Coreutils.Shuf where
import Control.Monad
import GHC.Int (Int64)
import GHC.IO.Handle
import System.Console.GetOpt
import System.Exit
import System.IO
import System.Random
import System.Random.Shuffle
import Coreutils.Util
-- | Entry-point marker for the @shuf@ utility.
data Shuf = Shuf

instance Util Shuf where
    run _ = shufMain
-- | Which input mode was selected on the command line.
data Runtime
    = RunRange (Int, Int)  -- ^ shuffle the integers LO..HI ("-i LO-HI")
    | RunFile              -- ^ shuffle lines read from a file or stdin
    | RunEcho              -- ^ shuffle the command-line arguments ("-e")

-- | Parsed command-line options.
data Options = Options
    { optRuntime :: Runtime         -- ^ input-source selection
    , optHead    :: Maybe Int       -- ^ emit at most this many lines ("-n")
    , optOutput  :: Maybe FilePath  -- ^ write here instead of stdout ("-o")
    , optSource  :: Maybe FilePath  -- ^ file providing random bytes
    , optRepeat  :: Bool            -- ^ output lines may repeat ("-r")
    }

-- | An opened input together with its name and, when seekable, its size.
data File = File
    { _handle   :: Handle
    , _filename :: FilePath
    , _filesize :: Maybe Int64  -- ^ 'Nothing' when the handle is not seekable
    }
-- | Business logic
-- | Shuffle a list of lines with the given generator.
simpleShuf :: StdGen -> [String] -> [String]
simpleShuf gen items = shuffle' items (length items) gen
-- | IO
-- | Entry point: parse the argument list, report any option errors, and run.
shufMain :: [String] -> IO ()
shufMain args = do
    let (actions, arguments, errors) = getOpt RequireOrder options args
    -- getOpt collects malformed flags instead of failing; surface them all.
    unless (null errors) $ do
        mapM_ putStr errors
        exitFailure
    g <- getStdGen
    -- Fold each option action over the defaults; any action may veto with Left.
    case foldM (flip id) defaults actions of
        Left err -> die err
        Right opts ->
            setupOutput opts
-- | Existential wrapper so the runtime-selected shuffler has a single type.
data ShufBox = forall s. Shuffler s => SB s

-- | Pick the shuffler implementation matching the selected 'Runtime'.
getShuffler :: Options -> [String] -> ShufBox
getShuffler o args =
    case optRuntime o of
        RunEcho -> SB $ LineShuf args
        RunRange (l, h) -> SB $ RangeShuf l h
        RunFile -> undefined  -- NOTE(review): file mode not implemented yet

-- | A source of shuffled output lines.
class Shuffler a where
    -- | Produce the shuffled output lines from a random generator.
    shuf :: a -> StdGen -> [String]
    -- | 'Nothing' when well-formed, otherwise an error message.
    validate :: a -> Maybe String
-- | Run an integer-range shuffle.  NOTE(review): still a stub.
runRangeShuf :: Options -> (Int, Int) -> IO ()
runRangeShuf o (l, h) = undefined

-- | "-e" mode: treat the remaining arguments as input lines.
runArgShuf :: Options -> [String] -> IO ()
runArgShuf = runFileShuf

-- | Shuffle the lines of the first named file, or stdin when none given.
-- NOTE(review): the file is opened but its contents are never read yet.
runFileShuf :: Options -> [FilePath] -> IO ()
runFileShuf _ filenames = do
    file <- case filenames of
        [] -> getFile "-"
        (f:_) -> getFile f
    pure ()

-- | Shared driver: shuffle and apply the "-n" limit.  NOTE(review): stub.
runShuf :: Shuffler a => a -> Options -> [String]
runShuf a o = undefined
  where
    -- Truncate output when a head count was requested.
    limiter = case optHead o of
        Nothing -> id
        (Just l) -> take l
-- | Shuffler over the inclusive integer range [lo, hi].
data RangeShuf = RangeShuf Int Int

instance Shuffler RangeShuf where
    shuf (RangeShuf lo hi) gen =
        map show (shuffle' [lo .. hi] (hi - lo + 1) gen)
    validate (RangeShuf lo hi) =
        if lo <= hi
            then Nothing
            else Just "Invalid range"
-- | Shuffler over an explicit list of input lines.
newtype LineShuf = LineShuf [String]

instance Shuffler LineShuf where
    shuf (LineShuf items) gen = shuffle' items (length items) gen
    validate _ = Nothing
-- | Honour "-o FILE" by redirecting stdout to the file.
setupOutput :: Options -> IO ()
-- maybe change where stdout points so we can just print everywhere else
setupOutput o = case optOutput o of
    Nothing -> pure ()
    (Just f) -> do
        h <- openFile f WriteMode
        -- Point stdout at the file so the rest of the code can simply print.
        hDuplicateTo h stdout
-- | Open a named input, "-" meaning stdin, recording the size when seekable.
getFile :: FilePath -> IO File
-- take the filename, acquire a handle and determine it's size
getFile "-" = pure $ File stdin "-" Nothing
getFile name = do
    h <- openBinaryFile name ReadMode
    -- Pipes and terminals are not seekable, so their size is unknown.
    size <- hIsSeekable h >>= \case
        True -> Just . fromIntegral <$> hFileSize h
        False -> pure Nothing
    pure $ File h name size
-- | Options
-- | Parse a "LO-HI" range argument, e.g. @"1-10"@ -> @Just (1, 10)@.
--
-- Returns 'Nothing' when the separator is missing or either side is not
-- a bare integer.  Negative bounds are not supported, matching the
-- "LO-HI" syntax advertised in the option help.
readRange :: String -> Maybe (Int, Int)
readRange s = case break (== '-') s of
    (lo, '-' : hi)
        | [(l, "")] <- reads lo
        , [(h, "")] <- reads hi -> Just (l, h)
    _ -> Nothing
-- | Option values used when no flag overrides them: read a file (stdin),
-- no line limit, write to stdout, system randomness, no repetition.
defaults :: Options
defaults = Options
    { optRuntime = RunFile
    , optHead = Nothing
    , optOutput = Nothing
    , optSource = Nothing
    , optRepeat = False
    }
-- | Command-line flag descriptions.  Each action either updates the
-- accumulated 'Options' or aborts with an error message ('Left').
options :: [OptDescr (Options -> Either String Options)]
options =
    [ Option "e" ["echo"]
        (NoArg
            (\opt -> Right opt { optRuntime = RunEcho }))
        "treat each argument as an input line"
    , Option "i" ["input-range"]
        (ReqArg
            (\arg opt -> case readRange arg of
                Just r -> Right opt { optRuntime = RunRange r }
                _ -> Left $ "error: '" <> arg <> "' is not a range")
            "LO-HI")
        "treat each number LO through HI as input lines"
    , Option "n" ["head-count"]
        (ReqArg
            (\arg opt -> case reads arg of
                -- BUG FIX: optHead is 'Maybe Int', so the parsed count must
                -- be wrapped in 'Just'; previously the field assignment
                -- forced 'reads' to parse the literal string "Just N".
                [(n, "")] -> Right opt { optHead = Just n }
                _ -> Left $ "error: '" <> arg <> "' is not a number")
            "COUNT")
        "output at most COUNT lines"
    , Option "o" ["output"]
        (ReqArg
            (\arg opt -> Right opt { optOutput = Just arg})
            "FILE")
        "write results to FILE instead of stdout"
    , Option "" ["random-source"]
        (ReqArg
            (\arg opt -> Right opt { optSource = Just arg})
            "FILE")
        "get random bytes from FILE"
    , Option "r" ["repeat"]
        (NoArg
            (\opt -> Right opt { optRepeat = True }))
        "output lines can be repeated"
    , Option "h" ["help"]
        (NoArg
            -- Fixed: the usage header previously said "head" (copy-paste).
            (\_ -> Left $ usageInfo "shuf" options))
        "Show this help text"
    ]
| null | https://raw.githubusercontent.com/Gandalf-/coreutils/9ebe7b3897b97ee635d866f8cc651a74338001c8/Coreutils/Shuf.hs | haskell | # OPTIONS_GHC -Wno-unused-binds -Wno-unused-matches #
| Business logic
| IO
maybe change where stdout points so we can just print everywhere else
take the filename, acquire a handle and determine it's size
| Options | # LANGUAGE ExistentialQuantification #
# LANGUAGE LambdaCase #
module Coreutils.Shuf where
import Control.Monad
import GHC.Int (Int64)
import GHC.IO.Handle
import System.Console.GetOpt
import System.Exit
import System.IO
import System.Random
import System.Random.Shuffle
import Coreutils.Util
data Shuf = Shuf
instance Util Shuf where
run _ = shufMain
data Runtime = RunRange (Int, Int) | RunFile | RunEcho
data Options = Options
{ optRuntime :: Runtime
, optHead :: Maybe Int
, optOutput :: Maybe FilePath
, optSource :: Maybe FilePath
, optRepeat :: Bool
}
data File = File
{ _handle :: Handle
, _filename :: FilePath
, _filesize :: Maybe Int64
}
simpleShuf :: StdGen -> [String] -> [String]
simpleShuf g xs = shuffle' xs l g
where
l = length xs
shufMain :: [String] -> IO ()
shufMain args = do
let (actions, arguments, errors) = getOpt RequireOrder options args
unless (null errors) $ do
mapM_ putStr errors
exitFailure
g <- getStdGen
case foldM (flip id) defaults actions of
Left err -> die err
Right opts ->
setupOutput opts
data ShufBox = forall s. Shuffler s => SB s
getShuffler :: Options -> [String] -> ShufBox
getShuffler o args =
case optRuntime o of
RunEcho -> SB $ LineShuf args
RunRange (l, h) -> SB $ RangeShuf l h
RunFile -> undefined
class Shuffler a where
shuf :: a -> StdGen -> [String]
validate :: a -> Maybe String
runRangeShuf :: Options -> (Int, Int) -> IO ()
runRangeShuf o (l, h) = undefined
runArgShuf :: Options -> [String] -> IO ()
runArgShuf = runFileShuf
runFileShuf :: Options -> [FilePath] -> IO ()
runFileShuf _ filenames = do
file <- case filenames of
[] -> getFile "-"
(f:_) -> getFile f
pure ()
runShuf :: Shuffler a => a -> Options -> [String]
runShuf a o = undefined
where
limiter = case optHead o of
Nothing -> id
(Just l) -> take l
data RangeShuf = RangeShuf Int Int
instance Shuffler RangeShuf where
shuf (RangeShuf l h) =
map show . shuffle' vs size
where
vs = [l..h]
size = h - l + 1
validate (RangeShuf l h)
| l <= h = Nothing
| otherwise = Just "Invalid range"
newtype LineShuf = LineShuf [String]
instance Shuffler LineShuf where
shuf (LineShuf xs) = shuffle' xs size
where
size = length xs
validate _ = Nothing
setupOutput :: Options -> IO ()
setupOutput o = case optOutput o of
Nothing -> pure ()
(Just f) -> do
h <- openFile f WriteMode
hDuplicateTo h stdout
getFile :: FilePath -> IO File
getFile "-" = pure $ File stdin "-" Nothing
getFile name = do
h <- openBinaryFile name ReadMode
size <- hIsSeekable h >>= \case
True -> Just . fromIntegral <$> hFileSize h
False -> pure Nothing
pure $ File h name size
readRange :: String -> Maybe (Int, Int)
readRange = undefined
defaults :: Options
defaults = Options
{ optRuntime = RunFile
, optHead = Nothing
, optOutput = Nothing
, optSource = Nothing
, optRepeat = False
}
options :: [OptDescr (Options -> Either String Options)]
options =
[ Option "e" ["echo"]
(NoArg
(\opt -> Right opt { optRuntime = RunEcho }))
"treat each argument as an input line"
, Option "i" ["input-range"]
(ReqArg
(\arg opt -> case readRange arg of
Just r -> Right opt { optRuntime = RunRange r }
_ -> Left $ "error: '" <> arg <> "' is not a range")
"LO-HI")
"treat each number LO through HI as input lines"
, Option "n" ["head-count"]
(ReqArg
(\arg opt -> case reads arg of
[(n, "")] -> Right opt { optHead = n }
_ -> Left $ "error: '" <> arg <> "' is not a number")
"COUNT")
"output at most COUNT lines"
, Option "o" ["output"]
(ReqArg
(\arg opt -> Right opt { optOutput = Just arg})
"FILE")
"write results to FILE instead of stdout"
, Option "" ["random-source"]
(ReqArg
(\arg opt -> Right opt { optSource = Just arg})
"FILE")
"get random bytes from FILE"
, Option "r" ["repeat"]
(NoArg
(\opt -> Right opt { optRepeat = True }))
"output lines can be repeated"
, Option "h" ["help"]
(NoArg
(\_ -> Left $ usageInfo "head" options))
"Show this help text"
]
|
50f3e95988eab00262d2083f89a115efe2f9c8d7719cd4fd0d7b568d7e4714f3 | Shimuuar/histogram-fill | Tutorial.hs | -- |
-- Module : Data.Histogram.Tutorial
Copyright : Copyright ( c ) 2009 - 2018 , < >
-- License : BSD3
Maintainer : < >
-- Stability : experimental
--
--
= = 1 .
--
The first example illustrates one of the most common use - cases of a histogram , i.e.
--
-- * uniformly-spaced, equal-weight bins
--
* one - dimensional distribuded data
--
-- * binning range equal to the data range
--
-- We can write a helper function that populates a 'Histogram' from a
-- 'Foldable' container (e.g. an array, or a 'Vector', or a tree,
-- etc.) of 'Double's :
--
-- @
histo : : ( ' Foldable ' v , ' Unbox ' a , a ) = >
-- Int
-- -> v Double
- > ' Histogram ' ' BinD ' a
histo n v = ' fillBuilder ' buildr v
-- where
-- mi = minimum v
-- ma = maximum v
bins = ' binD ' mi n ma
buildr = ' mkSimple ' bins
-- @
--
We can now declare our first histogram with 4 bins and a list of data :
--
> > let h0 = histo 4 [ 1,2,3,5,1,-10,2,3,50,1,6,7,4,6,34,45,20,120,-80 ]
--
-- The @Show@ instance of 'Histogram' lets us see the histogram metadata :
--
-- > > h0
-- > # Histogram
> = 0.0
> # Overflows = 1.0
-- > # BinD
-- > # Base = -80.0
> # Step = 50.0
> # N = 4
> -55.0 1.0
> -5.0 13.0
> 45.0 4.0
> 95.0 0.0
--
-- Note : with this binning algorithm, the bin intervals are closed to
the left and open to the right , which is why the 120 element is
-- marked as an overlow.
--
-- Note 2: the output of `show` shouldn't generally be used as a form
-- of data serialization.
--
-- The data bin centers and bin counts can be retrieved with 'asList':
--
> >
-- > [(-55.0,1.0),(-5.0,13.0),(45.0,4.0),(95.0,0.0)]
module Data.Histogram.Tutorial where
import Data.Histogram
import Data.Histogram.Bin
import Data.Histogram.Fill (mkSimple, fillBuilder)
import Data.Vector.Unboxed (Unbox(..))
| null | https://raw.githubusercontent.com/Shimuuar/histogram-fill/3dff15027390cf64e7fc3fbaac34c28ffcdacbd6/histogram-fill/Data/Histogram/Tutorial.hs | haskell | |
Module : Data.Histogram.Tutorial
License : BSD3
Stability : experimental
* uniformly-spaced, equal-weight bins
* binning range equal to the data range
We can write a helper function that populates a 'Histogram' from a
'Foldable' container (e.g. an array, or a 'Vector', or a tree,
etc.) of 'Double's :
@
Int
-> v Double
where
mi = minimum v
ma = maximum v
@
The @Show@ instance of 'Histogram' lets us see the histogram metadata :
> > h0
> # Histogram
> # BinD
> # Base = -80.0
Note : with this binning algorithm, the bin intervals are closed to
marked as an overlow.
of data serialization.
The data bin centers and bin counts can be retrieved with 'asList':
> [(-55.0,1.0),(-5.0,13.0),(45.0,4.0),(95.0,0.0)] | Copyright : Copyright ( c ) 2009 - 2018 , < >
Maintainer : < >
= = 1 .
The first example illustrates one of the most common use - cases of a histogram , i.e.
* one - dimensional distribuded data
histo : : ( ' Foldable ' v , ' Unbox ' a , a ) = >
- > ' Histogram ' ' BinD ' a
histo n v = ' fillBuilder ' buildr v
bins = ' binD ' mi n ma
buildr = ' mkSimple ' bins
We can now declare our first histogram with 4 bins and a list of data :
> > let h0 = histo 4 [ 1,2,3,5,1,-10,2,3,50,1,6,7,4,6,34,45,20,120,-80 ]
> = 0.0
> # Overflows = 1.0
> # Step = 50.0
> # N = 4
> -55.0 1.0
> -5.0 13.0
> 45.0 4.0
> 95.0 0.0
the left and open to the right , which is why the 120 element is
Note 2 : the output of ` show ` should n't generally be used as a form
> >
module Data.Histogram.Tutorial where
import Data.Histogram
import Data.Histogram.Bin
import Data.Histogram.Fill (mkSimple, fillBuilder)
import Data.Vector.Unboxed (Unbox(..))
|
6642f7da979c36e9c0a4529164a40823dd4b0630ca0a7bba56640b9f45220fd4 | camlp5/camlp5 | location.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
(** Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
*)
val none : t
(** An arbitrary value of type [t]; describes an empty ghost range. *)
val in_file : string -> t
(** Return an empty ghost range located in a given file. *)
val init : Lexing.lexbuf -> string -> unit
(** Set the file name and line number of the [lexbuf] to be the start
of the named file. *)
val curr : Lexing.lexbuf -> t
(** Get the location of the current token from the [lexbuf]. *)
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
* [ rhs_loc n ] returns the location of the symbol at position [ n ] , starting
at 1 , in the current parser rule .
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val get_pos_info: Lexing.position -> string * int * int (* file, line, char *)
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val formatter_for_warnings : formatter ref
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val warning_printer : (t -> formatter -> Warnings.t -> unit) ref
(** Hook for intercepting warnings. *)
val default_warning_printer : t -> formatter -> Warnings.t -> unit
(** Original warning printer for use in hooks. *)
val highlight_locations: formatter -> t list -> bool
(** A value decorated with the source location it came from. *)
type 'a loc = {
  txt : 'a;
  loc : t;
}

(** Wrap a value with the dummy location {!none}. *)
val mknoloc : 'a -> 'a loc

(** Wrap a value with an explicit location. *)
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_compact: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val absolute_path: string -> string
val show_filename: string -> string
(** In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged. *)
val absname: bool ref
(* Support for located errors *)
type error =
{
loc: t;
msg: string;
sub: error list;
if_highlight: string; (* alternative message if locations are highlighted *)
}
exception Error of error
val print_error_prefix: formatter -> unit -> unit
(* print the prefix "Error:" possibly with style *)
val error: ?loc:t -> ?sub:error list -> ?if_highlight:string -> string -> error
val errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string
-> ('a, Format.formatter, unit, error) format4 -> 'a
val raise_errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string
-> ('a, Format.formatter, unit, 'b) format4 -> 'a
val error_of_printer: t -> (formatter -> 'a -> unit) -> 'a -> error
val error_of_printer_file: (formatter -> 'a -> unit) -> 'a -> error
val error_of_exn: exn -> error option
val register_error_of_exn: (exn -> error option) -> unit
(* Each compiler module which defines a custom type of exception
which can surface as a user-visible error should register
a "printer" for this exception using [register_error_of_exn].
The result of the printer is an [error] value containing
a location, a message, and optionally sub-messages (each of them
being located as well). *)
val report_error: formatter -> error -> unit
val error_reporter : (formatter -> error -> unit) ref
(** Hook for intercepting error reports. *)
val default_error_reporter : formatter -> error -> unit
(** Original error reporter for use in hooks. *)
val report_exception: formatter -> exn -> unit
(* Reraise the exception if it is unknown. *)
| null | https://raw.githubusercontent.com/camlp5/camlp5/15e03f56f55b2856dafe7dd3ca232799069f5dda/ocaml_stuff/4.05.0/parsing/location.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
* An arbitrary value of type [t]; describes an empty ghost range.
* Return an empty ghost range located in a given file.
* Set the file name and line number of the [lexbuf] to be the start
of the named file.
* Get the location of the current token from the [lexbuf].
file, line, char
* Hook for intercepting warnings.
* Original warning printer for use in hooks.
* In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged.
Support for located errors
alternative message if locations are highlighted
print the prefix "Error:" possibly with style
Each compiler module which defines a custom type of exception
which can surface as a user-visible error should register
a "printer" for this exception using [register_error_of_exn].
The result of the printer is an [error] value containing
a location, a message, and optionally sub-messages (each of them
being located as well).
* Hook for intercepting error reports.
* Original error reporter for use in hooks.
Reraise the exception if it is unknown. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
val none : t
val in_file : string -> t
val init : Lexing.lexbuf -> string -> unit
val curr : Lexing.lexbuf -> t
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
* [ rhs_loc n ] returns the location of the symbol at position [ n ] , starting
at 1 , in the current parser rule .
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val formatter_for_warnings : formatter ref
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val warning_printer : (t -> formatter -> Warnings.t -> unit) ref
val default_warning_printer : t -> formatter -> Warnings.t -> unit
val highlight_locations: formatter -> t list -> bool
type 'a loc = {
txt : 'a;
loc : t;
}
val mknoloc : 'a -> 'a loc
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_compact: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val absolute_path: string -> string
val show_filename: string -> string
val absname: bool ref
type error =
{
loc: t;
msg: string;
sub: error list;
}
exception Error of error
val print_error_prefix: formatter -> unit -> unit
val error: ?loc:t -> ?sub:error list -> ?if_highlight:string -> string -> error
val errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string
-> ('a, Format.formatter, unit, error) format4 -> 'a
val raise_errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string
-> ('a, Format.formatter, unit, 'b) format4 -> 'a
val error_of_printer: t -> (formatter -> 'a -> unit) -> 'a -> error
val error_of_printer_file: (formatter -> 'a -> unit) -> 'a -> error
val error_of_exn: exn -> error option
val register_error_of_exn: (exn -> error option) -> unit
val report_error: formatter -> error -> unit
val error_reporter : (formatter -> error -> unit) ref
val default_error_reporter : formatter -> error -> unit
val report_exception: formatter -> exn -> unit
|
a9acf722a8f88b792735766d34b507dfc35a9a3bcfd58af283bd1d13d04f7ffd | monadic-xyz/ipfs | Options.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
module Network.IPFS.Git.RemoteHelper.Options where
import Control.Applicative (liftA2, (<|>))
import Control.Exception.Safe (throwString)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Data.Function (on)
import Data.Git (withCurrentRepo)
import qualified Data.Git.Repository as Git
import Data.IPLD.CID (CID, cidFromText)
import Data.List (dropWhileEnd)
import Data.Monoid (Last(..))
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Generics.SOP as SOP
import GHC.Generics (Generic)
import GHC.Stack (HasCallStack)
import Network.URI
( parseURI
, uriAuthority
, uriPath
, uriRegName
, uriScheme
)
import Options.Applicative
( Parser
, ReadM
, argument
, eitherReader
, metavar
, strArgument
)
import Servant.Client (BaseUrl(..), Scheme(Http), parseBaseUrl)
import System.Environment (lookupEnv)
import Text.Read (readMaybe)
import Network.IPFS.Git.RemoteHelper.Generic
( HKD
, ginvalidate
, gvalidate
)
import Network.IPFS.Git.RemoteHelper.Internal (note)
-- | Arguments git passes to the remote helper: the remote's name and URL.
data Options = Options
    { optRemoteName :: String
    , optRemoteUrl :: RemoteUrl
    }

-- | A parsed remote URL, split into its scheme and the IPFS path part.
data RemoteUrl = RemoteUrl
    { remoteUrlScheme :: Text  -- ^ URI scheme, without the trailing colon
    , remoteUrlIpfsPath :: IpfsPath
    }

-- | Either an immutable IPFS content identifier or a mutable IPNS name.
data IpfsPath
    = IpfsPathIpfs CID
    | IpfsPathIpns Text
-- | Fully-resolved options ('SOP.I' means every field carries a value).
type IpfsOptions = IpfsOptions' SOP.I

-- | Higher-kinded options record: with @f = 'Last'@ it acts as a partial,
-- mergeable configuration; with @f = 'SOP.I'@ every field is concrete.
data IpfsOptions' f = IpfsOptions
    { ipfsApiUrl :: HKD f BaseUrl
    -- ^ URL of the IPFS daemon API.
    --
    -- Default: @http://localhost:5001@ (see 'defaultIpfsOptions')
    , ipfsMaxConns :: HKD f Int
    -- ^ Maximum number of concurrent connections to the IPFS daemon. Note that
    -- this is approximate.
    --
    -- Default: 30
    , ipfsMaxBlockSize :: HKD f Int
    -- ^ The maximum size of an IPFS block. This is configurable as there is no
    -- unambiguous documentation on what the actual value is. It may also be
    -- subject to change in the future.
    --
    -- Default: 2048000 (2MB)
    } deriving Generic
instance SOP.Generic (IpfsOptions' f)

-- | Right-biased, field-by-field merge of partial configurations.
instance Semigroup (IpfsOptions' Last) where
    x <> y = IpfsOptions
        { ipfsApiUrl = ipfsApiUrl x <> ipfsApiUrl y
        , ipfsMaxConns = ipfsMaxConns x <> ipfsMaxConns y
        , ipfsMaxBlockSize = ipfsMaxBlockSize x <> ipfsMaxBlockSize y
        }

instance Monoid (IpfsOptions' Last) where
    mempty = IpfsOptions mempty mempty mempty
    mappend = (<>)
-- | Baseline configuration: local daemon on port 5001, ~30 concurrent
-- connections, 2MB blocks.
defaultIpfsOptions :: IpfsOptions
defaultIpfsOptions = IpfsOptions
    { ipfsApiUrl = BaseUrl Http "localhost" 5001 mempty
    , ipfsMaxConns = 30
    , ipfsMaxBlockSize = 2048000
    }
-- | The two positional arguments git supplies: @REMOTE_NAME URL@.
parseOptions :: Parser Options
parseOptions =
    Options
        <$> strArgument (metavar "REMOTE_NAME")
        <*> argument remoteUrl (metavar "URL")
-- | Parse a remote URL of the form @scheme://ipfs/CID@ or @scheme://ipns/NAME@.
-- An empty path selects the well-known empty-repo CID.
remoteUrl :: ReadM RemoteUrl
remoteUrl = eitherReader $ \s -> do
    uri <- note "Invalid URI" $ parseURI s
    let path = dropWhile (== '/') $ uriPath uri
    ipfs <-
        case uriRegName <$> uriAuthority uri of
            -- IPNS names are kept opaque; anything else must parse as a CID.
            Just "ipns" -> pure . IpfsPathIpns . Text.pack $ path
            _ -> IpfsPathIpfs <$> cidFromString path
    pure RemoteUrl
        { remoteUrlScheme = Text.pack . dropWhileEnd (== ':') $ uriScheme uri
        , remoteUrlIpfsPath = ipfs
        }
  where
    cidFromString = cidFromText . \case
        [] -> emptyRepo
        xs -> Text.pack xs
    -- Well-known CID used as the default when the URL path is empty.
    emptyRepo = "QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn"
| Determine the ' IpfsOptions ' .
--
-- This must happen after 'parseOptions' in order to support per-remote
-- settings. The @GIT_DIR@ environment variable must be set and point to a valid
-- git repository (when the remote helper is invoked by git, this is the current
-- repository).
--
' IpfsOptions ' are configured using @git - config(2)@. The precedence rules
-- specified there apply. However, @$XDG_CONFIG_HOME/git/config@ and
-- @$(prefix)/etc/gitconfig@ (i.e. @--system@) are __not__ yet supported.
--
-- The available configuration keys are:
--
-- * @ipfs.apiurl@
-- * @ipfs.maxconnections@
-- * @ipfs.maxblocksize@
--
-- 'ipfsApiUrl' may be overridden per-remote using the key @remote.<remote
-- name>.ipfsapiurl@ (e.g. @remote.origin.ipfsapiurl@). If the environment
-- variable @IPFS_API_URL@, it will be used instead of any @git-config@
-- settings.
--
getIpfsOptions :: HasCallStack => Options -> IO IpfsOptions
getIpfsOptions Options { optRemoteName } = withCurrentRepo $ \r -> do
    ipfsApiUrl <-
        fmap Last . runMaybeT $ do
            -- Precedence: environment variable, then the per-remote git
            -- config key, then the global [ipfs] section.
            url <-
                MaybeT (lookupEnv "IPFS_API_URL")
                <|> MaybeT (Git.configGet r ("remote \"" <> optRemoteName <> "\"") "ipfsapiurl")
                <|> MaybeT (Git.configGet r "ipfs" "apiurl")
            parseBaseUrl url
    ipfsMaxConns <-
        Last . (>>= readMaybe) <$> Git.configGet r "ipfs" "maxconnections"
    ipfsMaxBlockSize <-
        Last . (>>= readMaybe) <$> Git.configGet r "ipfs" "maxblocksize"
    -- Overlay the partial, user-supplied settings on the defaults;
    -- validation can only fail if some field ends up unset, which the
    -- defaults rule out — hence the "impossible" error message.
    maybe (throwString "Das Unmögliche ist eingetreten: Invalid IpfsOptions")
        pure
        . getLast
        . gvalidate
        $ ginvalidate pure defaultIpfsOptions <> IpfsOptions {..}
| null | https://raw.githubusercontent.com/monadic-xyz/ipfs/b56c0af905e371c231d52f86f2586d473941afec/git-remote-ipfs/src/Network/IPFS/Git/RemoteHelper/Options.hs | haskell | # LANGUAGE OverloadedStrings #
Default: \":5001\"
this is approximate.
unambiguous documentation on what the actual value is. It may also be
subject to change in the future.
This must happen after 'parseOptions' in order to support per-remote
settings. The @GIT_DIR@ environment variable must be set and point to a valid
git repository (when the remote helper is invoked by git, this is the current
repository).
specified there apply. However, @$XDG_CONFIG_HOME/git/config@ and
@$(prefix)/etc/gitconfig@ (i.e. @--system@) are __not__ yet supported.
The available configuration keys are:
* @ipfs.apiurl@
* @ipfs.maxconnections@
* @ipfs.maxblocksize@
'ipfsApiUrl' may be overridden per-remote using the key @remote.<remote
name>.ipfsapiurl@ (e.g. @remote.origin.ipfsapiurl@). If the environment
variable @IPFS_API_URL@, it will be used instead of any @git-config@
settings.
| # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleInstances #
module Network.IPFS.Git.RemoteHelper.Options where
import Control.Applicative (liftA2, (<|>))
import Control.Exception.Safe (throwString)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Data.Function (on)
import Data.Git (withCurrentRepo)
import qualified Data.Git.Repository as Git
import Data.IPLD.CID (CID, cidFromText)
import Data.List (dropWhileEnd)
import Data.Monoid (Last(..))
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Generics.SOP as SOP
import GHC.Generics (Generic)
import GHC.Stack (HasCallStack)
import Network.URI
( parseURI
, uriAuthority
, uriPath
, uriRegName
, uriScheme
)
import Options.Applicative
( Parser
, ReadM
, argument
, eitherReader
, metavar
, strArgument
)
import Servant.Client (BaseUrl(..), Scheme(Http), parseBaseUrl)
import System.Environment (lookupEnv)
import Text.Read (readMaybe)
import Network.IPFS.Git.RemoteHelper.Generic
( HKD
, ginvalidate
, gvalidate
)
import Network.IPFS.Git.RemoteHelper.Internal (note)
data Options = Options
{ optRemoteName :: String
, optRemoteUrl :: RemoteUrl
}
data RemoteUrl = RemoteUrl
{ remoteUrlScheme :: Text
, remoteUrlIpfsPath :: IpfsPath
}
data IpfsPath
= IpfsPathIpfs CID
| IpfsPathIpns Text
type IpfsOptions = IpfsOptions' SOP.I
data IpfsOptions' f = IpfsOptions
{ ipfsApiUrl :: HKD f BaseUrl
^ URL of the IPFS daemon API .
, ipfsMaxConns :: HKD f Int
^ Maximum number of concurrent connections to the IPFS daemon . Note that
Default : 30
, ipfsMaxBlockSize :: HKD f Int
^ The maximum size of an IPFS block . This is configurable as there is no
Default : 2048000 ( 2 MB )
} deriving Generic
instance SOP.Generic (IpfsOptions' f)
instance Semigroup (IpfsOptions' Last) where
a <> b = IpfsOptions
{ ipfsApiUrl = on (<>) ipfsApiUrl a b
, ipfsMaxConns = on (<>) ipfsMaxConns a b
, ipfsMaxBlockSize = on (<>) ipfsMaxBlockSize a b
}
instance Monoid (IpfsOptions' Last) where
mempty = IpfsOptions mempty mempty mempty
mappend = (<>)
defaultIpfsOptions :: IpfsOptions
defaultIpfsOptions = IpfsOptions
{ ipfsApiUrl = BaseUrl Http "localhost" 5001 mempty
, ipfsMaxConns = 30
, ipfsMaxBlockSize = 2048000
}
parseOptions :: Parser Options
parseOptions = liftA2 Options
(strArgument (metavar "REMOTE_NAME"))
(argument remoteUrl (metavar "URL"))
remoteUrl :: ReadM RemoteUrl
remoteUrl = eitherReader $ \s -> do
uri <- note "Invalid URI" $ parseURI s
let path = dropWhile (== '/') $ uriPath uri
ipfs <-
case uriRegName <$> uriAuthority uri of
Just "ipns" -> pure . IpfsPathIpns . Text.pack $ path
_ -> IpfsPathIpfs <$> cidFromString path
pure RemoteUrl
{ remoteUrlScheme = Text.pack . dropWhileEnd (== ':') $ uriScheme uri
, remoteUrlIpfsPath = ipfs
}
where
cidFromString = cidFromText . \case
[] -> emptyRepo
xs -> Text.pack xs
emptyRepo = "QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn"
| Determine the ' IpfsOptions ' .
' IpfsOptions ' are configured using @git - config(2)@. The precedence rules
getIpfsOptions :: HasCallStack => Options -> IO IpfsOptions
getIpfsOptions Options { optRemoteName } = withCurrentRepo $ \r -> do
ipfsApiUrl <-
fmap Last . runMaybeT $ do
url <-
MaybeT (lookupEnv "IPFS_API_URL")
<|> MaybeT (Git.configGet r ("remote \"" <> optRemoteName <> "\"") "ipfsapiurl")
<|> MaybeT (Git.configGet r "ipfs" "apiurl")
parseBaseUrl url
ipfsMaxConns <-
Last . (>>= readMaybe) <$> Git.configGet r "ipfs" "maxconnections"
ipfsMaxBlockSize <-
Last . (>>= readMaybe) <$> Git.configGet r "ipfs" "maxblocksize"
maybe (throwString "Das Unmögliche ist eingetreten: Invalid IpfsOptions")
pure
. getLast
. gvalidate
$ ginvalidate pure defaultIpfsOptions <> IpfsOptions {..}
|
e7058815995e2881978064f380813b0f4b4695ce83f0bb559b580fd3ed5b5be0 | Dean177/reason-standard | Comparator.mli | * Comparator provide a way for custom data structures to be used with { ! Map}s and { ! Say we have a module [ Book ] which we want to be able to create a { ! Set } of
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
end
] }
First we need to make our module conform to the { ! S } signature .
This can be done by using the { ! make } function or the { ! Make } functor .
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
module ByIsbn = (
bookA bookB - >
String.compare bookA.isbn bookb.isbn
)
)
end
] }
Then we can create a Set
{ [
Set.ofList ( module Book . ByIsbn ) [
{ isbn="9788460767923 " ; title="Moby or The Whale " }
]
] }
Say we have a module [Book] which we want to be able to create a {!Set} of
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
end
]}
First we need to make our module conform to the {!S} signature.
This can be done by using the {!make} function or the {!Make} functor.
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
module ByIsbn = (
val Comparator.make ~compare:(fun bookA bookB ->
String.compare bookA.isbn bookb.isbn
)
)
end
]}
Then we can create a Set
{[
Set.ofList (module Book.ByIsbn) [
{ isbn="9788460767923"; title="Moby Dick or The Whale" }
]
]}
*)
module type T = sig
(** T represents the input for the {!Make} functor *)
type nonrec t
val compare : t -> t -> int
end
type ('a, 'identity) t
(** This just is an alias for {!t} *)
type ('a, 'identity) comparator = ('a, 'identity) t
module type S = sig
(** The output type of {!Make} and {!make}. *)
type t
type identity
val comparator : (t, identity) comparator
end
* A type alias that is useful typing functions which accept first class modules like { ! Map.empty } or { ! Set.ofArray }
type ('a, 'identity) s =
(module S with type identity = 'identity and type t = 'a)
* Create a new comparator by providing a compare function .
{ 2 Examples }
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
module = (
bookA bookB - >
String.compare bookA.title bookb.title )
)
end
let books = Set.empty ( module Book . )
] }
{2 Examples}
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
module ByTitle = (
val Comparator.make ~compare:(fun bookA bookB ->
String.compare bookA.title bookb.title)
)
end
let books = Set.empty (module Book.ByTitle)
]}
*)
val make : compare:('a -> 'a -> int) -> (module S with type t = 'a)
* Create a new comparator by providing a module which satisifies { ! T } .
{ 2 Examples }
{ [
module Book = struct
module T = struct
type t = {
: string ;
title : string ;
}
let compare bookA bookB =
String.compare bookA.isbn bookB.isbn
end
include T
include Comparator . Make(T )
end
let books = Set.empty ( module Book )
] }
{2 Examples}
{[
module Book = struct
module T = struct
type t = {
isbn: string;
title: string;
}
let compare bookA bookB =
String.compare bookA.isbn bookB.isbn
end
include T
include Comparator.Make(T)
end
let books = Set.empty (module Book)
]}
*)
module Make : functor (M: T) -> S with type t := M.t | null | https://raw.githubusercontent.com/Dean177/reason-standard/74ed165c988eb4a4ea54339e829ff1d59b186d15/bucklescript/src/Comparator.mli | ocaml | * T represents the input for the {!Make} functor
* This just is an alias for {!t}
* The output type of {!Make} and {!make}. | * Comparator provide a way for custom data structures to be used with { ! Map}s and { ! Say we have a module [ Book ] which we want to be able to create a { ! Set } of
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
end
] }
First we need to make our module conform to the { ! S } signature .
This can be done by using the { ! make } function or the { ! Make } functor .
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
module ByIsbn = (
bookA bookB - >
String.compare bookA.isbn bookb.isbn
)
)
end
] }
Then we can create a Set
{ [
Set.ofList ( module Book . ByIsbn ) [
{ isbn="9788460767923 " ; title="Moby or The Whale " }
]
] }
Say we have a module [Book] which we want to be able to create a {!Set} of
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
end
]}
First we need to make our module conform to the {!S} signature.
This can be done by using the {!make} function or the {!Make} functor.
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
module ByIsbn = (
val Comparator.make ~compare:(fun bookA bookB ->
String.compare bookA.isbn bookb.isbn
)
)
end
]}
Then we can create a Set
{[
Set.ofList (module Book.ByIsbn) [
{ isbn="9788460767923"; title="Moby Dick or The Whale" }
]
]}
*)
module type T = sig
type nonrec t
val compare : t -> t -> int
end
type ('a, 'identity) t
type ('a, 'identity) comparator = ('a, 'identity) t
module type S = sig
type t
type identity
val comparator : (t, identity) comparator
end
* A type alias that is useful typing functions which accept first class modules like { ! Map.empty } or { ! Set.ofArray }
type ('a, 'identity) s =
(module S with type identity = 'identity and type t = 'a)
* Create a new comparator by providing a compare function .
{ 2 Examples }
{ [
module Book = struct
type t = {
: string ;
title : string ;
}
module = (
bookA bookB - >
String.compare bookA.title bookb.title )
)
end
let books = Set.empty ( module Book . )
] }
{2 Examples}
{[
module Book = struct
type t = {
isbn: string;
title: string;
}
module ByTitle = (
val Comparator.make ~compare:(fun bookA bookB ->
String.compare bookA.title bookb.title)
)
end
let books = Set.empty (module Book.ByTitle)
]}
*)
val make : compare:('a -> 'a -> int) -> (module S with type t = 'a)
* Create a new comparator by providing a module which satisifies { ! T } .
{ 2 Examples }
{ [
module Book = struct
module T = struct
type t = {
: string ;
title : string ;
}
let compare bookA bookB =
String.compare bookA.isbn bookB.isbn
end
include T
include Comparator . Make(T )
end
let books = Set.empty ( module Book )
] }
{2 Examples}
{[
module Book = struct
module T = struct
type t = {
isbn: string;
title: string;
}
let compare bookA bookB =
String.compare bookA.isbn bookB.isbn
end
include T
include Comparator.Make(T)
end
let books = Set.empty (module Book)
]}
*)
module Make : functor (M: T) -> S with type t := M.t |
9f697c8186e7546bb7333c1734187441b34f83073a832cb5d7e549fd2881341e | chaoxu/fancy-walks | 43.hs | {-# OPTIONS_GHC -O2 #-}
perm :: [a] -> [[a]]
perm [] = [[]]
perm (x:xs) = concatMap (insertAll x []) $ perm xs
where
insertAll u prev [] = [prev ++ [u]]
insertAll u prev t@(v:vs) = (prev ++ u:t) : insertAll u (prev ++ [v]) vs
checkList = filter ((/=0).head) $ perm [0..9]
arrayToInt = foldl (\x y -> x * 10 + y) 0
list1 x = map (\n -> (x !! n * 10 + x !! (n+1)) * 10 + x !! (n+2)) [1..7]
list2 = [2,3,5,7,11,13,17]
check x = and $ zipWith (\x y -> x `mod` y == 0) (list1 x) list2
answer = map arrayToInt $ filter check checkList
problem_43 = sum answer
main = print problem_43
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/projecteuler.net/43.hs | haskell | # OPTIONS_GHC -O2 # |
perm :: [a] -> [[a]]
perm [] = [[]]
perm (x:xs) = concatMap (insertAll x []) $ perm xs
where
insertAll u prev [] = [prev ++ [u]]
insertAll u prev t@(v:vs) = (prev ++ u:t) : insertAll u (prev ++ [v]) vs
checkList = filter ((/=0).head) $ perm [0..9]
arrayToInt = foldl (\x y -> x * 10 + y) 0
list1 x = map (\n -> (x !! n * 10 + x !! (n+1)) * 10 + x !! (n+2)) [1..7]
list2 = [2,3,5,7,11,13,17]
check x = and $ zipWith (\x y -> x `mod` y == 0) (list1 x) list2
answer = map arrayToInt $ filter check checkList
problem_43 = sum answer
main = print problem_43
|
e7556c62321cc2d446895ccfe68547659027c8406caaa6a666846d7dc4bca536 | ocaml/ocaml-re | cset.ml |
RE - A regular expression library
Copyright ( C ) 2001
email :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation , with
linking exception ; either version 2.1 of the License , or ( at
your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
RE - A regular expression library
Copyright (C) 2001 Jerome Vouillon
email:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, with
linking exception; either version 2.1 of the License, or (at
your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
type c = int
type t = (c * c) list
let rec union l l' =
match l, l' with
_, [] -> l
| [], _ -> l'
| (c1, c2)::r, (c1', c2')::r' ->
if c2 + 1 < c1' then
(c1, c2)::union r l'
else if c2' + 1 < c1 then
(c1', c2')::union l r'
else if c2 < c2' then
union r ((min c1 c1', c2')::r')
else
union ((min c1 c1', c2)::r) r'
let rec inter l l' =
match l, l' with
_, [] -> []
| [], _ -> []
| (c1, c2)::r, (c1', c2')::r' ->
if c2 < c1' then
inter r l'
else if c2' < c1 then
inter l r'
else if c2 < c2' then
(max c1 c1', c2)::inter r l'
else
(max c1 c1', c2')::inter l r'
let rec diff l l' =
match l, l' with
_, [] -> l
| [], _ -> []
| (c1, c2)::r, (c1', c2')::r' ->
if c2 < c1' then
(c1, c2)::diff r l'
else if c2' < c1 then
diff l r'
else
let r'' = if c2' < c2 then (c2' + 1, c2) :: r else r in
if c1 < c1' then
(c1, c1' - 1)::diff r'' r'
else
diff r'' r'
let single c = [c, c]
let add c l = union (single c) l
let seq c c' = if c <= c' then [c, c'] else [c', c]
let rec offset o l =
match l with
[] -> []
| (c1, c2) :: r -> (c1 + o, c2 + o) :: offset o r
let empty = []
let rec mem (c : int) s =
match s with
[] -> false
| (c1, c2) :: rem -> if c <= c2 then c >= c1 else mem c rem
(****)
type hash = int
let rec hash_rec = function
| [] -> 0
| (i, j)::r -> i + 13 * j + 257 * hash_rec r
let hash l = (hash_rec l) land 0x3FFFFFFF
(****)
let print_one ch (c1, c2) =
if c1 = c2 then
Format.fprintf ch "%d" c1
else
Format.fprintf ch "%d-%d" c1 c2
let pp = Fmt.list print_one
let rec iter t ~f =
match t with
| [] -> ()
| (x, y)::xs ->
f x y;
iter xs ~f
let one_char = function
| [i, j] when i = j -> Some i
| _ -> None
module CSetMap = Map.Make (struct
type t = int * (int * int) list
let compare (i, u) (j, v) =
let c = compare i j in
if c <> 0
then c
else compare u v
end)
let fold_right t ~init ~f = List.fold_right f t init
let csingle c = single (Char.code c)
let cany = [0, 255]
let is_empty = function
| [] -> true
| _ -> false
let rec prepend s x l =
match s, l with
| [], _ -> l
| _r, [] -> []
| (_c, c') :: r, ([d, _d'], _x') :: _r' when c' < d -> prepend r x l
| (c, c') :: r, ([d, d'], x') :: r' ->
if c <= d then begin
if c' < d'
then ([d, c'], x @ x') :: prepend r x (([c' + 1, d'], x') :: r')
else ([d, d'], x @ x') :: prepend s x r'
end else begin
if c > d'
then ([d, d'], x') :: prepend s x r'
else ([d, c - 1], x') :: prepend s x (([c, d'], x') :: r')
end
| _ -> assert false
let pick = function
| [] -> invalid_arg "Re_cset.pick"
| (x, _)::_ -> x
| null | https://raw.githubusercontent.com/ocaml/ocaml-re/09c2745a2d8d1517b3d597396e82e122903b0017/lib/cset.ml | ocaml | **
** |
RE - A regular expression library
Copyright ( C ) 2001
email :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation , with
linking exception ; either version 2.1 of the License , or ( at
your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
RE - A regular expression library
Copyright (C) 2001 Jerome Vouillon
email:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, with
linking exception; either version 2.1 of the License, or (at
your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
type c = int
type t = (c * c) list
let rec union l l' =
match l, l' with
_, [] -> l
| [], _ -> l'
| (c1, c2)::r, (c1', c2')::r' ->
if c2 + 1 < c1' then
(c1, c2)::union r l'
else if c2' + 1 < c1 then
(c1', c2')::union l r'
else if c2 < c2' then
union r ((min c1 c1', c2')::r')
else
union ((min c1 c1', c2)::r) r'
let rec inter l l' =
match l, l' with
_, [] -> []
| [], _ -> []
| (c1, c2)::r, (c1', c2')::r' ->
if c2 < c1' then
inter r l'
else if c2' < c1 then
inter l r'
else if c2 < c2' then
(max c1 c1', c2)::inter r l'
else
(max c1 c1', c2')::inter l r'
let rec diff l l' =
match l, l' with
_, [] -> l
| [], _ -> []
| (c1, c2)::r, (c1', c2')::r' ->
if c2 < c1' then
(c1, c2)::diff r l'
else if c2' < c1 then
diff l r'
else
let r'' = if c2' < c2 then (c2' + 1, c2) :: r else r in
if c1 < c1' then
(c1, c1' - 1)::diff r'' r'
else
diff r'' r'
let single c = [c, c]
let add c l = union (single c) l
let seq c c' = if c <= c' then [c, c'] else [c', c]
let rec offset o l =
match l with
[] -> []
| (c1, c2) :: r -> (c1 + o, c2 + o) :: offset o r
let empty = []
let rec mem (c : int) s =
match s with
[] -> false
| (c1, c2) :: rem -> if c <= c2 then c >= c1 else mem c rem
type hash = int
let rec hash_rec = function
| [] -> 0
| (i, j)::r -> i + 13 * j + 257 * hash_rec r
let hash l = (hash_rec l) land 0x3FFFFFFF
let print_one ch (c1, c2) =
if c1 = c2 then
Format.fprintf ch "%d" c1
else
Format.fprintf ch "%d-%d" c1 c2
let pp = Fmt.list print_one
let rec iter t ~f =
match t with
| [] -> ()
| (x, y)::xs ->
f x y;
iter xs ~f
let one_char = function
| [i, j] when i = j -> Some i
| _ -> None
module CSetMap = Map.Make (struct
type t = int * (int * int) list
let compare (i, u) (j, v) =
let c = compare i j in
if c <> 0
then c
else compare u v
end)
let fold_right t ~init ~f = List.fold_right f t init
let csingle c = single (Char.code c)
let cany = [0, 255]
let is_empty = function
| [] -> true
| _ -> false
let rec prepend s x l =
match s, l with
| [], _ -> l
| _r, [] -> []
| (_c, c') :: r, ([d, _d'], _x') :: _r' when c' < d -> prepend r x l
| (c, c') :: r, ([d, d'], x') :: r' ->
if c <= d then begin
if c' < d'
then ([d, c'], x @ x') :: prepend r x (([c' + 1, d'], x') :: r')
else ([d, d'], x @ x') :: prepend s x r'
end else begin
if c > d'
then ([d, d'], x') :: prepend s x r'
else ([d, c - 1], x') :: prepend s x (([c, d'], x') :: r')
end
| _ -> assert false
let pick = function
| [] -> invalid_arg "Re_cset.pick"
| (x, _)::_ -> x
|
8e37f2545c938d36fe8a887961509b20e7aaa7b5c2a5f344b03d0b2d1d0982e1 | theosotr/buildfs | executor.ml |
* Copyright ( c ) 2018 - 2020
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , version 3 .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
* Copyright (c) 2018-2020 Thodoris Sotiropoulos
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
type option_status =
| Ok
| Err of string
type mode =
| Online
| Offline
module type ToolType =
sig
type tool_options
val validate_options : mode -> tool_options -> option_status
val construct_command : tool_options -> string array
module SysParser : Sys_parser.S
module TraceAnalyzer : Analyzer.S
module FaultDetector : Fault_detection.S with type tool_options = tool_options
end
module type S =
sig
type generic_options =
{mode: mode;
graph_file: string option;
graph_format: Graph.graph_format;
print_stats: bool;
trace_file: string option;
dump_tool_out: string option;
}
type tool_options
val online_analysis : generic_options -> tool_options -> unit
val offline_analysis : generic_options -> tool_options -> unit
end
module Make(T: ToolType) = struct
let syscalls = [
"access";
"chdir";
"chmod";
"chown";
"clone";
"close";
"dup";
"dup2";
"dup3";
"execve";
"fchdir";
"fchmodat";
"fchownat";
"fcntl";
"fork";
"getxattr";
"getcwd";
"lchown";
"lgetxattr";
"lremovexattr";
"lsetxattr";
"lstat";
"link";
"linkat";
"mkdir";
"mkdirat";
"mknod";
"open";
"openat";
"readlink";
"readlinkat";
"removexattr";
"rename";
"renameat";
"rmdir";
"stat";
"statfs";
"symlink";
"symlinkat";
"unlink";
"unlinkat";
"utime";
"utimensat";
"utimes";
"vfork";
"write";
"writev";
]
type generic_options =
{mode: mode;
graph_file: string option;
graph_format: Graph.graph_format;
print_stats: bool;
trace_file: string option;
dump_tool_out: string option;
}
type tool_options = T.tool_options
type read_point =
| File of string
| FileDesc of Unix.file_descr
let child_failed_status_code = 255
let make_executor_err msg =
raise (Errors.Error (Errors.ExecutorError, Some msg))
let string_of_unix_err err call params =
Printf.sprintf "%s: %s (%s)" (Unix.error_message err) call params
let trace_execution generic_options tool_options input =
let tool_cmd = T.construct_command tool_options in
let prog = "/usr/bin/strace" in
let fd_out = input |> Fd_send_recv.int_of_fd |> string_of_int in
let strace_cmd = [|
"strace";
"-s";
"300";
"-e";
(String.concat "," syscalls);
"-o";
("/dev/fd/" ^ fd_out);
"-f"; |]
in
let cmd = Array.append strace_cmd tool_cmd in
try
print_endline ("\x1b[0;32mInfo: Start tracing command: "
^ (String.concat " " (Array.to_list tool_cmd)) ^ " ...\x1b[0m");
let out =
match generic_options.dump_tool_out with
| None -> "/dev/null"
| Some tool_out -> tool_out
in
let fd = Unix.openfile out [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] 0o640 in
let _ = Unix.dup2 fd Unix.stdout in
let _ = Unix.dup2 fd Unix.stderr in
let _ = Unix.close fd in
ignore (Unix.execv prog cmd);
exit 254; (* We should never reach here. *)
with Unix.Unix_error (err, call, params) ->
(* Maybe strace is not installed in the system.
So, we pass the exception to err to the pipe
so that it can be read by the parent process. *)
let msg = string_of_unix_err err call params in
begin
ignore (Unix.write input (Bytes.of_string msg) 0 (String.length msg));
Unix.close input;
exit child_failed_status_code;
end
let analyze_trace_internal read_p debug_trace generic_options tool_options =
let stats, aout =
match read_p with
| File p ->
p
|> T.SysParser.parse_trace_file debug_trace
|> T.TraceAnalyzer.analyze_traces (Stats.init_stats ())
| FileDesc p ->
p
|> T.SysParser.parse_trace_fd debug_trace
|> T.TraceAnalyzer.analyze_traces (Stats.init_stats ())
in
T.FaultDetector.detect_faults
~print_stats: generic_options.print_stats
~graph_format: generic_options.graph_format
stats generic_options.graph_file tool_options aout
let online_analysis generic_options tool_options =
let output, input = Unix.pipe () in
(* We create a child process that is responsible for invoking
strace and run the build script in parallel. *)
match Unix.fork () with
| 0 ->
Unix.close output;
trace_execution generic_options tool_options input
| pid -> (
Unix.close input;
analyze_trace_internal
(FileDesc output) generic_options.trace_file generic_options tool_options;
try
Unix.kill pid Sys.sigkill;
Unix.close output;
with Unix.Unix_error _ -> ())
| exception Unix.Unix_error (err, call, params) ->
params |> string_of_unix_err err call |> make_executor_err
let offline_analysis generic_options tool_options =
match generic_options.trace_file with
| None -> make_executor_err "Offline analysis requires trace file"
| Some trace_file ->
analyze_trace_internal (File trace_file) None generic_options tool_options
end
| null | https://raw.githubusercontent.com/theosotr/buildfs/3cd287da0d1184934c1bcd28d301f2545196f44b/src/executor.ml | ocaml | We should never reach here.
Maybe strace is not installed in the system.
So, we pass the exception to err to the pipe
so that it can be read by the parent process.
We create a child process that is responsible for invoking
strace and run the build script in parallel. |
* Copyright ( c ) 2018 - 2020
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , version 3 .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
* Copyright (c) 2018-2020 Thodoris Sotiropoulos
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
type option_status =
| Ok
| Err of string
type mode =
| Online
| Offline
module type ToolType =
sig
type tool_options
val validate_options : mode -> tool_options -> option_status
val construct_command : tool_options -> string array
module SysParser : Sys_parser.S
module TraceAnalyzer : Analyzer.S
module FaultDetector : Fault_detection.S with type tool_options = tool_options
end
module type S =
sig
type generic_options =
{mode: mode;
graph_file: string option;
graph_format: Graph.graph_format;
print_stats: bool;
trace_file: string option;
dump_tool_out: string option;
}
type tool_options
val online_analysis : generic_options -> tool_options -> unit
val offline_analysis : generic_options -> tool_options -> unit
end
module Make(T: ToolType) = struct
let syscalls = [
"access";
"chdir";
"chmod";
"chown";
"clone";
"close";
"dup";
"dup2";
"dup3";
"execve";
"fchdir";
"fchmodat";
"fchownat";
"fcntl";
"fork";
"getxattr";
"getcwd";
"lchown";
"lgetxattr";
"lremovexattr";
"lsetxattr";
"lstat";
"link";
"linkat";
"mkdir";
"mkdirat";
"mknod";
"open";
"openat";
"readlink";
"readlinkat";
"removexattr";
"rename";
"renameat";
"rmdir";
"stat";
"statfs";
"symlink";
"symlinkat";
"unlink";
"unlinkat";
"utime";
"utimensat";
"utimes";
"vfork";
"write";
"writev";
]
type generic_options =
{mode: mode;
graph_file: string option;
graph_format: Graph.graph_format;
print_stats: bool;
trace_file: string option;
dump_tool_out: string option;
}
type tool_options = T.tool_options
type read_point =
| File of string
| FileDesc of Unix.file_descr
let child_failed_status_code = 255
let make_executor_err msg =
raise (Errors.Error (Errors.ExecutorError, Some msg))
let string_of_unix_err err call params =
Printf.sprintf "%s: %s (%s)" (Unix.error_message err) call params
let trace_execution generic_options tool_options input =
let tool_cmd = T.construct_command tool_options in
let prog = "/usr/bin/strace" in
let fd_out = input |> Fd_send_recv.int_of_fd |> string_of_int in
let strace_cmd = [|
"strace";
"-s";
"300";
"-e";
(String.concat "," syscalls);
"-o";
("/dev/fd/" ^ fd_out);
"-f"; |]
in
let cmd = Array.append strace_cmd tool_cmd in
try
print_endline ("\x1b[0;32mInfo: Start tracing command: "
^ (String.concat " " (Array.to_list tool_cmd)) ^ " ...\x1b[0m");
let out =
match generic_options.dump_tool_out with
| None -> "/dev/null"
| Some tool_out -> tool_out
in
let fd = Unix.openfile out [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] 0o640 in
let _ = Unix.dup2 fd Unix.stdout in
let _ = Unix.dup2 fd Unix.stderr in
let _ = Unix.close fd in
ignore (Unix.execv prog cmd);
with Unix.Unix_error (err, call, params) ->
let msg = string_of_unix_err err call params in
begin
ignore (Unix.write input (Bytes.of_string msg) 0 (String.length msg));
Unix.close input;
exit child_failed_status_code;
end
let analyze_trace_internal read_p debug_trace generic_options tool_options =
let stats, aout =
match read_p with
| File p ->
p
|> T.SysParser.parse_trace_file debug_trace
|> T.TraceAnalyzer.analyze_traces (Stats.init_stats ())
| FileDesc p ->
p
|> T.SysParser.parse_trace_fd debug_trace
|> T.TraceAnalyzer.analyze_traces (Stats.init_stats ())
in
T.FaultDetector.detect_faults
~print_stats: generic_options.print_stats
~graph_format: generic_options.graph_format
stats generic_options.graph_file tool_options aout
let online_analysis generic_options tool_options =
let output, input = Unix.pipe () in
match Unix.fork () with
| 0 ->
Unix.close output;
trace_execution generic_options tool_options input
| pid -> (
Unix.close input;
analyze_trace_internal
(FileDesc output) generic_options.trace_file generic_options tool_options;
try
Unix.kill pid Sys.sigkill;
Unix.close output;
with Unix.Unix_error _ -> ())
| exception Unix.Unix_error (err, call, params) ->
params |> string_of_unix_err err call |> make_executor_err
let offline_analysis generic_options tool_options =
match generic_options.trace_file with
| None -> make_executor_err "Offline analysis requires trace file"
| Some trace_file ->
analyze_trace_internal (File trace_file) None generic_options tool_options
end
|
f67b2ba028fe69b1043050718255d06b819bc53b796256bd206961aaf7b12a20 | pouriya/director | director_table.erl | %%% ------------------------------------------------------------------------------------------------
Director is available for use under the following license , commonly known as the 3 - clause ( or
%%% "modified") BSD license:
%%%
Copyright ( c ) 2018 - 2019 ,
%%% ()
%%% All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without modification, are permitted
%%% provided that the following conditions are met:
%%%
1 . Redistributions of source code must retain the above copyright notice , this list of
%%% conditions and the following disclaimer.
%%%
2 . Redistributions in binary form must reproduce the above copyright notice , this list of
%%% conditions and the following disclaimer in the documentation and/or other materials provided
%%% with the distribution.
%%%
3 . Neither the name of the copyright holder nor the names of its contributors may be used to
%%% endorse or promote products derived from this software without specific prior written
%%% permission.
%%%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
%%% IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
%%% FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
%%% SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
%%% OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
%%% POSSIBILITY OF SUCH DAMAGE.
%%% ------------------------------------------------------------------------------------------------
@author < >
%% @version 18.2.20
%% @hidden
%% @doc
%% API functions for keeping, updating and fetching
%% childspecs data.<br/>
%% director supports tho type of tables: list, ets.
%% @end
%% -------------------------------------------------------------------------------------------------
-module(director_table).
-author("").
%% -------------------------------------------------------------------------------------------------
%% Exports:
%% Director's API:
-export([create/2
,insert/3
,delete/3
,lookup_id/3
,lookup_pid/3
,lookup_appended/2
,combine_children/3
,separate_children/3
,count/2
,delete_table/2
,tab2list/2
,handle_message/3
,change_parent/3]).
%% Callback module API:
-export([count_children/2
,which_children/2
,get_childspec/3
,get_pid/3
,get_pids/2
,get_restart_count/3]).
%% -------------------------------------------------------------------------------------------------
Records & Macros & Includes :
%% Dependencies:
%% #?CHILD{}
-include("internal/director_child.hrl").
%% -------------------------------------------------------------------------------------------------
%% Behavior information:
-callback
create({'value', InitArgument::any()} | 'undefined') ->
{'ok', State::any()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
insert(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
delete(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_id(State::any(), Id::any()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_pid(State::any(), Pid::pid()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_appended(State::any()) ->
{'ok', [Child::#?CHILD{}] | []} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
count(State::any()) ->
{'ok', Count::non_neg_integer()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
tab2list(State::any()) ->
{'ok', [Child::#?CHILD{}] | []} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
delete_table(State::any()) ->
'ok' | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
handle_message(State::any(), Msg::any()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'unknown'} |
{'soft_error', NewState::any(), Reason::'unknown'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
change_parent(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} |
{'soft_error', Reason::'not_parent'} |
{'soft_error', NewState::any(), Reason::'not_parent'}|
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
%% -------------------------------------------------------------------------------------------------
%% Callback module API:
count_children(Mod, State) ->
case tab2list(Mod, State) of
{ok, Children} ->
Fun =
fun(#?CHILD{pid = Pid, type = Type}, {Specs, Actives, Sups, Workers}) ->
Actives2 =
if
erlang:is_pid(Pid) ->
Actives+1;
true ->
Actives
end,
{Sups2, Workers2} =
if
Type =:= supervisor ->
{Sups+1, Workers};
Type =:= worker ->
{Sups, Workers+1}
end,
{Specs+1, Actives2, Sups2, Workers2}
end,
{Specs, Actives, Sups, Workers} = lists:foldl(Fun, {0, 0, 0, 0}, Children),
[{specs, Specs}, {active, Actives}, {supervisors, Sups}, {workers, Workers}];
{hard_error, Rsn} ->
{error, Rsn}
end.
which_children(Mod, State) ->
case director_table:tab2list(Mod, State) of
{ok, Children} ->
[{Id, Pid, Type, Mods} || #?CHILD{id = Id
,pid = Pid
,type = Type
,modules = Mods} <- Children];
{hard_error, Rsn} ->
{error, Rsn}
end.
get_childspec(Mod, State, PidOrId) ->
SearchFunc =
if
erlang:is_pid(PidOrId) ->
lookup_pid;
true ->
lookup_id
end,
case ?MODULE:SearchFunc(Mod, State, PidOrId) of
{ok, Child} ->
{ok, director_child:child_to_childspec(Child)};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_pid(Mod, State, Id) ->
case lookup_id(Mod, State, Id) of
{ok, #?CHILD{pid = Pid}} ->
{ok, Pid};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_pids(Mod, State) ->
case director_table:tab2list(Mod, State) of
{ok, Children} ->
{ok, [{Id, Pid} || #?CHILD{id = Id, pid = Pid} <- Children, erlang:is_pid(Pid)]};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_restart_count(Mod, State, Id) ->
case lookup_id(Mod, State, Id) of
{ok, #?CHILD{restart_count = ResCount}} ->
{ok, ResCount};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
%% -------------------------------------------------------------------------------------------------
%% API functions:
create(Mod, InitArg) ->
try Mod:create(InitArg) of
{ok, _}=Ok ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}}
end.
delete_table(Mod, State) ->
try Mod:delete_table(State) of
ok ->
ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}}
end.
lookup_id(Mod, State, Id) ->
try Mod:lookup_id(State, Id) of
{ok, #?CHILD{}}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}}
end.
count(Mod, State) ->
try Mod:count(State) of
{ok, Count}=Ok when erlang:is_integer(Count) andalso Count > -1 ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, count}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, count}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, count}
,{arguments, [State]}]}}
end.
lookup_pid(Mod, State, Pid) ->
try Mod:lookup_pid(State, Pid) of
{ok, #?CHILD{}}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}}
end.
lookup_appended(Mod, State) ->
try Mod:lookup_appended(State) of
{ok, List}=Ok when erlang:is_list(List) ->
case validate_children(List) of
true ->
Ok;
false ->
{hard_error, {table_return, [{value, Ok}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
end;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
end.
insert(Mod, State, Child) ->
try Mod:insert(State, Child) of
{ok, _}=Ok ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}}
end.
delete(Mod, State, Child) ->
try Mod:delete(State, Child#?CHILD.id) of
{ok, _}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}}
end.
tab2list(Mod, State) ->
try Mod:tab2list(State) of
{ok, List}=Ok when erlang:is_list(List) ->
case validate_children(List) of
true ->
Ok;
false ->
{hard_error, {table_return, [{value, Ok}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
end;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
end.
combine_children(Mod, State, DefChildSpec) ->
case lookup_appended(Mod, State) of
{ok, Appended} ->
case validate_parent(Appended) of
true ->
Combine =
fun(Child) ->
ChildSpec = director_child:child_to_childspec(Child),
Combined = director_child:combine_child(ChildSpec, DefChildSpec),
director_child:childspec_to_child(Combined)
end,
CombinedChildren = [Combine(Child) || Child <- Appended],
case insert_children(Mod, State, CombinedChildren) of
{ok, _}=Ok ->
Ok;
{hard_error, _}=HErr ->
HErr
end;
false ->
{soft_error, State, not_parent}
end;
{hard_error, {Rsn, ErrParams}} ->
{hard_error, {Rsn, lists:keyreplace(function
,1
,ErrParams
,{function, combine_children})}}
end.
separate_children(Mod, State, DefChildSpec) ->
case lookup_appended(Mod, State) of
{ok, Appended} ->
case validate_parent(Appended) of
true ->
Separate =
fun(Child) ->
ChildSpec = director_child:child_to_childspec(Child),
Separated = director_child:separate_child(ChildSpec, DefChildSpec),
director_child:childspec_to_child(Separated)
end,
SeparatedChildren = [Separate(Child) || Child <- Appended],
case insert_children(Mod, State, SeparatedChildren) of
{ok, _}=Ok ->
Ok;
{hard_error, _}=HErr ->
HErr
end;
false ->
{soft_error, State, not_parent}
end;
{hard_error, {Rsn, ErrParams}} ->
{hard_error, {Rsn, lists:keyreplace(function
,1
,ErrParams
,{function, separate_children})}}
end.
handle_message(Mod, State, Msg) ->
try Mod:handle_message(State, Msg) of
{ok, _}=Ok ->
Ok;
{soft_error, unknown} ->
{soft_error, State, unknown};
{soft_error, _, unknown}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}}
end.
change_parent(Mod, State, Child) ->
try Mod:change_parent(State, Child) of
{ok, _}=Ok ->
Ok;
{soft_error, not_parent} ->
{soft_error, State, not_parent};
{soft_error, _, not_parent}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}}
end.
%% -------------------------------------------------------------------------------------------------
Internal functions :
insert_children(Mod, State, [Child|Children]) ->
case insert(Mod, State, Child) of
{ok, State2} ->
insert_children(Mod, State2, Children);
{hard_error, _}=Err ->
Err
end;
insert_children(_, State, []) ->
{ok, State}.
validate_children(Children) ->
lists:all(fun(Child) -> erlang:is_record(Child, ?CHILD) end, Children).
validate_parent(Children) ->
lists:all(fun(#?CHILD{supervisor = Sup}) -> Sup =:= erlang:self() end, Children). | null | https://raw.githubusercontent.com/pouriya/director/919c30db18c83330290fba9e68de330000978674/src/director_table.erl | erlang | ------------------------------------------------------------------------------------------------
"modified") BSD license:
()
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
conditions and the following disclaimer.
conditions and the following disclaimer in the documentation and/or other materials provided
with the distribution.
endorse or promote products derived from this software without specific prior written
permission.
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
------------------------------------------------------------------------------------------------
@version 18.2.20
@hidden
@doc
API functions for keeping, updating and fetching
childspecs data.<br/>
director supports tho type of tables: list, ets.
@end
-------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------
Exports:
Director's API:
Callback module API:
-------------------------------------------------------------------------------------------------
Dependencies:
#?CHILD{}
-------------------------------------------------------------------------------------------------
Behavior information:
-------------------------------------------------------------------------------------------------
Callback module API:
-------------------------------------------------------------------------------------------------
API functions:
------------------------------------------------------------------------------------------------- | Director is available for use under the following license , commonly known as the 3 - clause ( or
Copyright ( c ) 2018 - 2019 ,
1 . Redistributions of source code must retain the above copyright notice , this list of
2 . Redistributions in binary form must reproduce the above copyright notice , this list of
3 . Neither the name of the copyright holder nor the names of its contributors may be used to
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
@author < >
-module(director_table).
-author("").
-export([create/2
,insert/3
,delete/3
,lookup_id/3
,lookup_pid/3
,lookup_appended/2
,combine_children/3
,separate_children/3
,count/2
,delete_table/2
,tab2list/2
,handle_message/3
,change_parent/3]).
-export([count_children/2
,which_children/2
,get_childspec/3
,get_pid/3
,get_pids/2
,get_restart_count/3]).
Records & Macros & Includes :
-include("internal/director_child.hrl").
-callback
create({'value', InitArgument::any()} | 'undefined') ->
{'ok', State::any()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
insert(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
delete(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_id(State::any(), Id::any()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_pid(State::any(), Pid::pid()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'not_found'} |
{'soft_error', NewState::any(), Reason::'not_found'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
lookup_appended(State::any()) ->
{'ok', [Child::#?CHILD{}] | []} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
count(State::any()) ->
{'ok', Count::non_neg_integer()} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
tab2list(State::any()) ->
{'ok', [Child::#?CHILD{}] | []} | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
delete_table(State::any()) ->
'ok' | {'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
handle_message(State::any(), Msg::any()) ->
{'ok', Child::#?CHILD{}} |
{'soft_error', Reason::'unknown'} |
{'soft_error', NewState::any(), Reason::'unknown'} |
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
-callback
change_parent(State::any(), Child::#?CHILD{}) ->
{'ok', NewState::any()} |
{'soft_error', Reason::'not_parent'} |
{'soft_error', NewState::any(), Reason::'not_parent'}|
{'hard_error', {Reason::atom(), ErrorParams::list()}}.
count_children(Mod, State) ->
case tab2list(Mod, State) of
{ok, Children} ->
Fun =
fun(#?CHILD{pid = Pid, type = Type}, {Specs, Actives, Sups, Workers}) ->
Actives2 =
if
erlang:is_pid(Pid) ->
Actives+1;
true ->
Actives
end,
{Sups2, Workers2} =
if
Type =:= supervisor ->
{Sups+1, Workers};
Type =:= worker ->
{Sups, Workers+1}
end,
{Specs+1, Actives2, Sups2, Workers2}
end,
{Specs, Actives, Sups, Workers} = lists:foldl(Fun, {0, 0, 0, 0}, Children),
[{specs, Specs}, {active, Actives}, {supervisors, Sups}, {workers, Workers}];
{hard_error, Rsn} ->
{error, Rsn}
end.
which_children(Mod, State) ->
case director_table:tab2list(Mod, State) of
{ok, Children} ->
[{Id, Pid, Type, Mods} || #?CHILD{id = Id
,pid = Pid
,type = Type
,modules = Mods} <- Children];
{hard_error, Rsn} ->
{error, Rsn}
end.
get_childspec(Mod, State, PidOrId) ->
SearchFunc =
if
erlang:is_pid(PidOrId) ->
lookup_pid;
true ->
lookup_id
end,
case ?MODULE:SearchFunc(Mod, State, PidOrId) of
{ok, Child} ->
{ok, director_child:child_to_childspec(Child)};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_pid(Mod, State, Id) ->
case lookup_id(Mod, State, Id) of
{ok, #?CHILD{pid = Pid}} ->
{ok, Pid};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_pids(Mod, State) ->
case director_table:tab2list(Mod, State) of
{ok, Children} ->
{ok, [{Id, Pid} || #?CHILD{id = Id, pid = Pid} <- Children, erlang:is_pid(Pid)]};
{hard_error, Rsn} ->
{error, Rsn}
end.
get_restart_count(Mod, State, Id) ->
case lookup_id(Mod, State, Id) of
{ok, #?CHILD{restart_count = ResCount}} ->
{ok, ResCount};
{soft_error, _, Rsn} ->
{error, Rsn};
{hard_error, Rsn} ->
{error, Rsn}
end.
create(Mod, InitArg) ->
try Mod:create(InitArg) of
{ok, _}=Ok ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, create}
,{arguments, [InitArg]}]}}
end.
delete_table(Mod, State) ->
try Mod:delete_table(State) of
ok ->
ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, delete_table}
,{arguments, [State]}]}}
end.
lookup_id(Mod, State, Id) ->
try Mod:lookup_id(State, Id) of
{ok, #?CHILD{}}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_id}
,{arguments, [State, Id]}]}}
end.
count(Mod, State) ->
try Mod:count(State) of
{ok, Count}=Ok when erlang:is_integer(Count) andalso Count > -1 ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, count}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, count}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, count}
,{arguments, [State]}]}}
end.
lookup_pid(Mod, State, Pid) ->
try Mod:lookup_pid(State, Pid) of
{ok, #?CHILD{}}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_pid}
,{arguments, [State, Pid]}]}}
end.
lookup_appended(Mod, State) ->
try Mod:lookup_appended(State) of
{ok, List}=Ok when erlang:is_list(List) ->
case validate_children(List) of
true ->
Ok;
false ->
{hard_error, {table_return, [{value, Ok}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
end;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, lookup_appended}
,{arguments, [State]}]}}
end.
insert(Mod, State, Child) ->
try Mod:insert(State, Child) of
{ok, _}=Ok ->
Ok;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, insert}
,{arguments, [State, Child]}]}}
end.
delete(Mod, State, Child) ->
try Mod:delete(State, Child#?CHILD.id) of
{ok, _}=Ok ->
Ok;
{soft_error, not_found} ->
{soft_error, State, not_found};
{soft_error, _, not_found}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, delete}
,{arguments, [State, Child]}]}}
end.
tab2list(Mod, State) ->
try Mod:tab2list(State) of
{ok, List}=Ok when erlang:is_list(List) ->
case validate_children(List) of
true ->
Ok;
false ->
{hard_error, {table_return, [{value, Ok}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
end;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, tab2list}
,{arguments, [State]}]}}
end.
combine_children(Mod, State, DefChildSpec) ->
case lookup_appended(Mod, State) of
{ok, Appended} ->
case validate_parent(Appended) of
true ->
Combine =
fun(Child) ->
ChildSpec = director_child:child_to_childspec(Child),
Combined = director_child:combine_child(ChildSpec, DefChildSpec),
director_child:childspec_to_child(Combined)
end,
CombinedChildren = [Combine(Child) || Child <- Appended],
case insert_children(Mod, State, CombinedChildren) of
{ok, _}=Ok ->
Ok;
{hard_error, _}=HErr ->
HErr
end;
false ->
{soft_error, State, not_parent}
end;
{hard_error, {Rsn, ErrParams}} ->
{hard_error, {Rsn, lists:keyreplace(function
,1
,ErrParams
,{function, combine_children})}}
end.
separate_children(Mod, State, DefChildSpec) ->
case lookup_appended(Mod, State) of
{ok, Appended} ->
case validate_parent(Appended) of
true ->
Separate =
fun(Child) ->
ChildSpec = director_child:child_to_childspec(Child),
Separated = director_child:separate_child(ChildSpec, DefChildSpec),
director_child:childspec_to_child(Separated)
end,
SeparatedChildren = [Separate(Child) || Child <- Appended],
case insert_children(Mod, State, SeparatedChildren) of
{ok, _}=Ok ->
Ok;
{hard_error, _}=HErr ->
HErr
end;
false ->
{soft_error, State, not_parent}
end;
{hard_error, {Rsn, ErrParams}} ->
{hard_error, {Rsn, lists:keyreplace(function
,1
,ErrParams
,{function, separate_children})}}
end.
handle_message(Mod, State, Msg) ->
try Mod:handle_message(State, Msg) of
{ok, _}=Ok ->
Ok;
{soft_error, unknown} ->
{soft_error, State, unknown};
{soft_error, _, unknown}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, handle_message}
,{arguments, [State, Msg]}]}}
end.
change_parent(Mod, State, Child) ->
try Mod:change_parent(State, Child) of
{ok, _}=Ok ->
Ok;
{soft_error, not_parent} ->
{soft_error, State, not_parent};
{soft_error, _, not_parent}=SErr ->
SErr;
{hard_error, {Rsn, ErrParams}} when erlang:is_atom(Rsn) andalso erlang:is_list(ErrParams) ->
{hard_error, {Rsn, ErrParams ++ [{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}};
Other ->
{hard_error, {table_return, [{value, Other}
,{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}}
catch
_:Rsn:Stacktrace ->
{hard_error, {table_crash, [{reason, Rsn}
,{stacktrace, Stacktrace}
,{module, Mod}
,{function, change_parent}
,{arguments, [State, Child]}]}}
end.
Internal functions :
insert_children(Mod, State, [Child|Children]) ->
case insert(Mod, State, Child) of
{ok, State2} ->
insert_children(Mod, State2, Children);
{hard_error, _}=Err ->
Err
end;
insert_children(_, State, []) ->
{ok, State}.
validate_children(Children) ->
lists:all(fun(Child) -> erlang:is_record(Child, ?CHILD) end, Children).
validate_parent(Children) ->
lists:all(fun(#?CHILD{supervisor = Sup}) -> Sup =:= erlang:self() end, Children). |
f914497749597f8db24f238fc0524bf20db0acc8497d33683fe261748e106991 | yallop/ocaml-ctypes | cstubs_c_language.ml |
* Copyright ( c ) 2014 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2014 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
(* C code representation. *)
open Ctypes_static
let fresh_var =
let var_counter = ref 0 in
fun ?(prefix="x") () ->
incr var_counter;
Printf.sprintf "%s%d" prefix !var_counter
type ty = Ty : _ typ -> ty
type tfn = Fn : _ fn -> tfn
type fieldname = string
type cfunction = {
fname: string;
allocates: bool;
reads_ocaml_heap: bool;
fn: tfn;
}
type cglobal = {
name: string;
typ: ty;
references_ocaml_heap: bool;
}
type clocal = [ `Local of string * ty ]
type cvar = [ clocal | `Global of cglobal ]
type storage_class = [`Static | `Extern]
type cconst = [ `Int of Signed.sint ]
type cexp = [ cconst
| clocal
| `Cast of ty * cexp
| `Addr of cvar ]
type clvalue = [ cvar
| `Index of clvalue * cexp
| `Field of clvalue * fieldname
| `PointerField of clvalue * fieldname ]
type camlop = [ `CAMLparam0
| `CAMLlocalN of cexp * cexp
| `CAMLdrop ]
type ceff = [ cexp
| camlop
| `Global of cglobal
| `App of cfunction * cexp list
| `Index of ceff * cexp
| `Deref of cexp
| `DerefField of cexp * fieldname ]
type cbind = clocal * ceff
type ccomp = [ ceff
| `CAMLparam of string list * ccomp
| `LetConst of clocal * cconst * ccomp
| `LetAssign of clvalue * ceff * ccomp
| `CAMLreturnT of ty * cexp
| `Return of ty * cexp
| `Let of cbind * ccomp ]
type cfundec = [ `Fundec of string * (string * ty) list * ty ]
type cfundef = [ `Function of cfundec * ccomp * storage_class ]
let rec return_type : type a. a fn -> ty = function
| Function (_, f) -> return_type f
| Returns t -> Ty t
let args : type a. a fn -> (string * ty) list = fun fn ->
let rec loop : type a. a Ctypes.fn -> (string * ty) list = function
| Ctypes_static.Function (ty, fn) -> (fresh_var (), Ty ty) :: loop fn
| Ctypes_static.Returns _ -> []
in loop fn
module Type_C =
struct
let cexp : cexp -> ty = function
| `Int _ -> Ty sint
| `Local (_, ty) -> ty
| `Cast (Ty ty, _) -> Ty ty
| `Addr (`Global { typ = Ty ty }) -> Ty (Pointer ty)
| `Addr (`Local (_, Ty ty)) -> Ty (Pointer ty)
let camlop : camlop -> ty = function
| `CAMLparam0
| `CAMLlocalN _
| `CAMLdrop -> Ty Void
let rec ceff : ceff -> ty = function
| #cexp as e -> cexp e
| #camlop as o -> camlop o
| `Global { typ } -> typ
| `App ({ fn = Fn f }, _) -> return_type f
| `Index (e, _) -> reference_ceff e
| `Deref e -> reference_ceff (e :> ceff)
| `DerefField (e, f) -> field_ceff (e :> ceff) f
and reference_ceff : ceff -> ty =
fun e ->
begin match ceff e with
| Ty (Pointer ty) -> Ty ty
| Ty (Array (ty, _)) -> Ty ty
| Ty t -> Cstubs_errors.internal_error
"dereferencing expression of non-pointer type %s"
(Ctypes.string_of_typ t)
end
and field_ceff : ceff -> fieldname -> ty =
fun e f ->
begin match ceff e with
Ty (Pointer (Struct { fields } as s)) -> lookup_field f s fields
| Ty t -> Cstubs_errors.internal_error
"accessing a field %s in an expression of type %s, which is not a pointer-to-struct type"
f (Ctypes.string_of_typ t)
end
and lookup_field : type s a. string -> a typ -> s boxed_field list -> ty =
fun f ty fields -> match fields with
[] -> Cstubs_errors.internal_error
"field %s not found in struct %s" f
(Ctypes.string_of_typ ty)
| BoxedField { ftype; fname } :: _ when fname = f -> Ty ftype
| _ :: fields -> lookup_field f ty fields
let rec ccomp : ccomp -> ty = function
| #cexp as e -> cexp e
| #ceff as e -> ceff e
| `CAMLparam (_, c) -> ccomp c
| `Let (_, c)
| `LetConst (_, _, c) -> ccomp c
| `LetAssign (_, _, c) -> ccomp c
| `CAMLreturnT (ty, _) -> ty
| `Return (ty, _) -> ty
end
let value : [`value] abstract typ = abstract ~name:"value" ~size:0 ~alignment:0
let reader fname fn =
{ fname; allocates = false; reads_ocaml_heap = true; fn = Fn fn }
let conser fname fn =
{ fname; allocates = true; reads_ocaml_heap = false; fn = Fn fn }
let immediater fname fn =
{ fname; allocates = false; reads_ocaml_heap = false; fn = Fn fn }
module Unchecked_function_types =
struct
(* We're using an abstract type ([value]) as an argument and return type, so
we'll use the [Function] and [Return] constructors directly. The smart
constructors [@->] and [returning] would reject the abstract type. *)
let (@->) f t = Function (f, t)
let returning t = Returns t
end
let prim_prj : type a. a Ctypes_primitive_types.prim -> _ =
let open Unchecked_function_types in
let open Ctypes_primitive_types in function
| Char -> reader "Int_val" (value @-> returning int)
| Schar -> reader "Int_val" (value @-> returning int)
| Uchar -> reader "Uint8_val" (value @-> returning uint8_t)
| Bool -> reader "Bool_val" (value @-> returning bool)
| Short -> reader "Int_val" (value @-> returning int)
| Int -> reader "Long_val" (value @-> returning int)
| Long -> reader "ctypes_long_val" (value @-> returning long)
| Llong -> reader "ctypes_llong_val" (value @-> returning llong)
| Ushort -> reader "ctypes_ushort_val" (value @-> returning ushort)
| Sint -> reader "ctypes_sint_val" (value @-> returning sint)
| Uint -> reader "ctypes_uint_val" (value @-> returning uint)
| Ulong -> reader "ctypes_ulong_val" (value @-> returning ulong)
| Ullong -> reader "ctypes_ullong_val" (value @-> returning ullong)
| Size_t -> reader "ctypes_size_t_val" (value @-> returning size_t)
| Int8_t -> reader "Int_val" (value @-> returning int)
| Int16_t -> reader "Int_val" (value @-> returning int)
| Int32_t -> reader "Int32_val" (value @-> returning int32_t)
| Int64_t -> reader "Int64_val" (value @-> returning int64_t)
| Uint8_t -> reader "Uint8_val" (value @-> returning uint8_t)
| Uint16_t -> reader "Uint16_val" (value @-> returning uint16_t)
| Uint32_t -> reader "Uint32_val" (value @-> returning uint32_t)
| Uint64_t -> reader "Uint64_val" (value @-> returning uint64_t)
| Camlint -> reader "Long_val" (value @-> returning int)
| Nativeint -> reader "Nativeint_val" (value @-> returning nativeint)
| Float -> reader "Double_val" (value @-> returning double)
| Double -> reader "Double_val" (value @-> returning double)
| LDouble -> reader "ctypes_ldouble_val" (value @-> returning ldouble)
| Complex32 -> reader "ctypes_float_complex_val" (value @-> returning complex32)
| Complex64 -> reader "ctypes_double_complex_val" (value @-> returning complex64)
| Complexld -> reader "ctypes_ldouble_complex_val" (value @-> returning complexld)
let prim_inj : type a. a Ctypes_primitive_types.prim -> _ =
let open Unchecked_function_types in
let open Ctypes_primitive_types in function
| Char -> immediater "Ctypes_val_char" (int @-> returning value)
| Schar -> immediater "Val_int" (int @-> returning value)
| Uchar -> immediater "Integers_val_uint8" (uint8_t @-> returning value)
| Bool -> immediater "Val_bool" (bool @-> returning value)
| Short -> immediater "Val_int" (int @-> returning value)
| Int -> immediater "Val_long" (int @-> returning value)
| Long -> conser "ctypes_copy_long" (long @-> returning value)
| Llong -> conser "ctypes_copy_llong" (llong @-> returning value)
| Ushort -> conser "ctypes_copy_ushort" (ushort @-> returning value)
| Sint -> conser "ctypes_copy_sint" (sint @-> returning value)
| Uint -> conser "ctypes_copy_uint" (uint @-> returning value)
| Ulong -> conser "ctypes_copy_ulong" (ulong @-> returning value)
| Ullong -> conser "ctypes_copy_ullong" (ullong @-> returning value)
| Size_t -> conser "ctypes_copy_size_t" (size_t @-> returning value)
| Int8_t -> immediater "Val_int" (int @-> returning value)
| Int16_t -> immediater "Val_int" (int @-> returning value)
| Int32_t -> conser "caml_copy_int32" (int32_t @-> returning value)
| Int64_t -> conser "caml_copy_int64" (int64_t @-> returning value)
| Uint8_t -> immediater "Integers_val_uint8" (uint8_t @-> returning value)
| Uint16_t -> immediater "Integers_val_uint16" (uint16_t @-> returning value)
| Uint32_t -> conser "integers_copy_uint32" (uint32_t @-> returning value)
| Uint64_t -> conser "integers_copy_uint64" (uint64_t @-> returning value)
| Camlint -> immediater "Val_long" (int @-> returning value)
| Nativeint -> conser "caml_copy_nativeint" (nativeint @-> returning value)
| Float -> conser "caml_copy_double" (double @-> returning value)
| Double -> conser "caml_copy_double" (double @-> returning value)
| LDouble -> conser "ctypes_copy_ldouble" (ldouble @-> returning value)
| Complex32 -> conser "ctypes_copy_float_complex" (complex32 @-> returning value)
| Complex64 -> conser "ctypes_copy_double_complex" (complex64 @-> returning value)
| Complexld -> conser "ctypes_copy_ldouble_complex" (complexld @-> returning value)
| null | https://raw.githubusercontent.com/yallop/ocaml-ctypes/52ff621f47dbc1ee5a90c30af0ae0474549946b4/src/cstubs/cstubs_c_language.ml | ocaml | C code representation.
We're using an abstract type ([value]) as an argument and return type, so
we'll use the [Function] and [Return] constructors directly. The smart
constructors [@->] and [returning] would reject the abstract type. |
* Copyright ( c ) 2014 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2014 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
open Ctypes_static
let fresh_var =
let var_counter = ref 0 in
fun ?(prefix="x") () ->
incr var_counter;
Printf.sprintf "%s%d" prefix !var_counter
type ty = Ty : _ typ -> ty
type tfn = Fn : _ fn -> tfn
type fieldname = string
type cfunction = {
fname: string;
allocates: bool;
reads_ocaml_heap: bool;
fn: tfn;
}
type cglobal = {
name: string;
typ: ty;
references_ocaml_heap: bool;
}
type clocal = [ `Local of string * ty ]
type cvar = [ clocal | `Global of cglobal ]
type storage_class = [`Static | `Extern]
type cconst = [ `Int of Signed.sint ]
type cexp = [ cconst
| clocal
| `Cast of ty * cexp
| `Addr of cvar ]
type clvalue = [ cvar
| `Index of clvalue * cexp
| `Field of clvalue * fieldname
| `PointerField of clvalue * fieldname ]
type camlop = [ `CAMLparam0
| `CAMLlocalN of cexp * cexp
| `CAMLdrop ]
type ceff = [ cexp
| camlop
| `Global of cglobal
| `App of cfunction * cexp list
| `Index of ceff * cexp
| `Deref of cexp
| `DerefField of cexp * fieldname ]
type cbind = clocal * ceff
type ccomp = [ ceff
| `CAMLparam of string list * ccomp
| `LetConst of clocal * cconst * ccomp
| `LetAssign of clvalue * ceff * ccomp
| `CAMLreturnT of ty * cexp
| `Return of ty * cexp
| `Let of cbind * ccomp ]
type cfundec = [ `Fundec of string * (string * ty) list * ty ]
type cfundef = [ `Function of cfundec * ccomp * storage_class ]
let rec return_type : type a. a fn -> ty = function
| Function (_, f) -> return_type f
| Returns t -> Ty t
let args : type a. a fn -> (string * ty) list = fun fn ->
let rec loop : type a. a Ctypes.fn -> (string * ty) list = function
| Ctypes_static.Function (ty, fn) -> (fresh_var (), Ty ty) :: loop fn
| Ctypes_static.Returns _ -> []
in loop fn
module Type_C =
struct
let cexp : cexp -> ty = function
| `Int _ -> Ty sint
| `Local (_, ty) -> ty
| `Cast (Ty ty, _) -> Ty ty
| `Addr (`Global { typ = Ty ty }) -> Ty (Pointer ty)
| `Addr (`Local (_, Ty ty)) -> Ty (Pointer ty)
let camlop : camlop -> ty = function
| `CAMLparam0
| `CAMLlocalN _
| `CAMLdrop -> Ty Void
let rec ceff : ceff -> ty = function
| #cexp as e -> cexp e
| #camlop as o -> camlop o
| `Global { typ } -> typ
| `App ({ fn = Fn f }, _) -> return_type f
| `Index (e, _) -> reference_ceff e
| `Deref e -> reference_ceff (e :> ceff)
| `DerefField (e, f) -> field_ceff (e :> ceff) f
and reference_ceff : ceff -> ty =
fun e ->
begin match ceff e with
| Ty (Pointer ty) -> Ty ty
| Ty (Array (ty, _)) -> Ty ty
| Ty t -> Cstubs_errors.internal_error
"dereferencing expression of non-pointer type %s"
(Ctypes.string_of_typ t)
end
and field_ceff : ceff -> fieldname -> ty =
fun e f ->
begin match ceff e with
Ty (Pointer (Struct { fields } as s)) -> lookup_field f s fields
| Ty t -> Cstubs_errors.internal_error
"accessing a field %s in an expression of type %s, which is not a pointer-to-struct type"
f (Ctypes.string_of_typ t)
end
and lookup_field : type s a. string -> a typ -> s boxed_field list -> ty =
fun f ty fields -> match fields with
[] -> Cstubs_errors.internal_error
"field %s not found in struct %s" f
(Ctypes.string_of_typ ty)
| BoxedField { ftype; fname } :: _ when fname = f -> Ty ftype
| _ :: fields -> lookup_field f ty fields
let rec ccomp : ccomp -> ty = function
| #cexp as e -> cexp e
| #ceff as e -> ceff e
| `CAMLparam (_, c) -> ccomp c
| `Let (_, c)
| `LetConst (_, _, c) -> ccomp c
| `LetAssign (_, _, c) -> ccomp c
| `CAMLreturnT (ty, _) -> ty
| `Return (ty, _) -> ty
end
let value : [`value] abstract typ = abstract ~name:"value" ~size:0 ~alignment:0
let reader fname fn =
{ fname; allocates = false; reads_ocaml_heap = true; fn = Fn fn }
let conser fname fn =
{ fname; allocates = true; reads_ocaml_heap = false; fn = Fn fn }
let immediater fname fn =
{ fname; allocates = false; reads_ocaml_heap = false; fn = Fn fn }
module Unchecked_function_types =
struct
let (@->) f t = Function (f, t)
let returning t = Returns t
end
let prim_prj : type a. a Ctypes_primitive_types.prim -> _ =
let open Unchecked_function_types in
let open Ctypes_primitive_types in function
| Char -> reader "Int_val" (value @-> returning int)
| Schar -> reader "Int_val" (value @-> returning int)
| Uchar -> reader "Uint8_val" (value @-> returning uint8_t)
| Bool -> reader "Bool_val" (value @-> returning bool)
| Short -> reader "Int_val" (value @-> returning int)
| Int -> reader "Long_val" (value @-> returning int)
| Long -> reader "ctypes_long_val" (value @-> returning long)
| Llong -> reader "ctypes_llong_val" (value @-> returning llong)
| Ushort -> reader "ctypes_ushort_val" (value @-> returning ushort)
| Sint -> reader "ctypes_sint_val" (value @-> returning sint)
| Uint -> reader "ctypes_uint_val" (value @-> returning uint)
| Ulong -> reader "ctypes_ulong_val" (value @-> returning ulong)
| Ullong -> reader "ctypes_ullong_val" (value @-> returning ullong)
| Size_t -> reader "ctypes_size_t_val" (value @-> returning size_t)
| Int8_t -> reader "Int_val" (value @-> returning int)
| Int16_t -> reader "Int_val" (value @-> returning int)
| Int32_t -> reader "Int32_val" (value @-> returning int32_t)
| Int64_t -> reader "Int64_val" (value @-> returning int64_t)
| Uint8_t -> reader "Uint8_val" (value @-> returning uint8_t)
| Uint16_t -> reader "Uint16_val" (value @-> returning uint16_t)
| Uint32_t -> reader "Uint32_val" (value @-> returning uint32_t)
| Uint64_t -> reader "Uint64_val" (value @-> returning uint64_t)
| Camlint -> reader "Long_val" (value @-> returning int)
| Nativeint -> reader "Nativeint_val" (value @-> returning nativeint)
| Float -> reader "Double_val" (value @-> returning double)
| Double -> reader "Double_val" (value @-> returning double)
| LDouble -> reader "ctypes_ldouble_val" (value @-> returning ldouble)
| Complex32 -> reader "ctypes_float_complex_val" (value @-> returning complex32)
| Complex64 -> reader "ctypes_double_complex_val" (value @-> returning complex64)
| Complexld -> reader "ctypes_ldouble_complex_val" (value @-> returning complexld)
let prim_inj : type a. a Ctypes_primitive_types.prim -> _ =
let open Unchecked_function_types in
let open Ctypes_primitive_types in function
| Char -> immediater "Ctypes_val_char" (int @-> returning value)
| Schar -> immediater "Val_int" (int @-> returning value)
| Uchar -> immediater "Integers_val_uint8" (uint8_t @-> returning value)
| Bool -> immediater "Val_bool" (bool @-> returning value)
| Short -> immediater "Val_int" (int @-> returning value)
| Int -> immediater "Val_long" (int @-> returning value)
| Long -> conser "ctypes_copy_long" (long @-> returning value)
| Llong -> conser "ctypes_copy_llong" (llong @-> returning value)
| Ushort -> conser "ctypes_copy_ushort" (ushort @-> returning value)
| Sint -> conser "ctypes_copy_sint" (sint @-> returning value)
| Uint -> conser "ctypes_copy_uint" (uint @-> returning value)
| Ulong -> conser "ctypes_copy_ulong" (ulong @-> returning value)
| Ullong -> conser "ctypes_copy_ullong" (ullong @-> returning value)
| Size_t -> conser "ctypes_copy_size_t" (size_t @-> returning value)
| Int8_t -> immediater "Val_int" (int @-> returning value)
| Int16_t -> immediater "Val_int" (int @-> returning value)
| Int32_t -> conser "caml_copy_int32" (int32_t @-> returning value)
| Int64_t -> conser "caml_copy_int64" (int64_t @-> returning value)
| Uint8_t -> immediater "Integers_val_uint8" (uint8_t @-> returning value)
| Uint16_t -> immediater "Integers_val_uint16" (uint16_t @-> returning value)
| Uint32_t -> conser "integers_copy_uint32" (uint32_t @-> returning value)
| Uint64_t -> conser "integers_copy_uint64" (uint64_t @-> returning value)
| Camlint -> immediater "Val_long" (int @-> returning value)
| Nativeint -> conser "caml_copy_nativeint" (nativeint @-> returning value)
| Float -> conser "caml_copy_double" (double @-> returning value)
| Double -> conser "caml_copy_double" (double @-> returning value)
| LDouble -> conser "ctypes_copy_ldouble" (ldouble @-> returning value)
| Complex32 -> conser "ctypes_copy_float_complex" (complex32 @-> returning value)
| Complex64 -> conser "ctypes_copy_double_complex" (complex64 @-> returning value)
| Complexld -> conser "ctypes_copy_ldouble_complex" (complexld @-> returning value)
|
afc199257a4fdd655cde6e097f5f81c4e2d69cd045e0088a3a52b3073d4a3d54 | maxima-project-on-github/maxima-packages | meval.lisp | (setf (symbol-function 'prev-meval) (symbol-function 'meval)) ;; traceable
(defun meval (e)
(if (and (consp e) (eq (caar e) '$closure))
(with-lexical-environment (rest (second e)) (prev-meval (third e)))
(prev-meval e)))
| null | https://raw.githubusercontent.com/maxima-project-on-github/maxima-packages/3fbe9e89b091720207ddebda840fdc251744fb5d/robert-dodier/lexical_symbols/meval.lisp | lisp | traceable | (defun meval (e)
(if (and (consp e) (eq (caar e) '$closure))
(with-lexical-environment (rest (second e)) (prev-meval (third e)))
(prev-meval e)))
|
62a05ad716b61f9a914f67888cb01e22e675539d367c3bf7bb9f78f8de5f0e8b | semilin/layoup | fijiki.lisp |
(MAKE-LAYOUT :NAME "fijiki" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/fijiki.lisp | lisp |
(MAKE-LAYOUT :NAME "fijiki" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) | |
5b848f1e107d7eb86ceae127b0c2c0acc8fcb110ed2aba85c11c8ec817fde441 | larskuhtz/ghci-pretty | Colored.hs | -- |
-- Module: IPPrint.Colored
Copyright : Copyright © 2014 < >
License : MIT
Maintainer : < >
-- Stability: experimental
--
-- This module combines the ipprint package and
-- the hscolour package to provide colored pretty-printing
-- in ghci.
--
-- /Usage/
--
Add the following lines to your @ghci.conf@ file :
--
-- > -- Pretty printing of it
-- > :set -package ghci-pretty
-- > import Text.Show.Pretty.Colored
-- > :set -interactive-print=cpprint
-- > :def cp (\_ -> return ":set -interactive-print=cpprint"
-- > :def ncp (\_ -> return ":set -interactive-print=print")
--
-- Now you can enable colored pretty-printing in ghci with the commmand
--
-- > :cp
--
-- The following command turns colored pretty-printing off again
--
> : ncp
--
module Text.Show.Pretty.Colored
( cpprint
) where
import Text.Show.Pretty
import Language.Haskell.HsColour
import Language.Haskell.HsColour.Colourise
import Language.Haskell.HsColour.Output
cpprint :: Show a => a -> IO ()
cpprint x = do
putStrLn . hscolour (TTYg XTerm256Compatible) defaultColourPrefs False False "" False . ppShow $ x
| null | https://raw.githubusercontent.com/larskuhtz/ghci-pretty/ff3c974541abbbc26f0dfdc51e729b66caaf843e/src/Text/Show/Pretty/Colored.hs | haskell | |
Module: IPPrint.Colored
Stability: experimental
This module combines the ipprint package and
the hscolour package to provide colored pretty-printing
in ghci.
/Usage/
> -- Pretty printing of it
> :set -package ghci-pretty
> import Text.Show.Pretty.Colored
> :set -interactive-print=cpprint
> :def cp (\_ -> return ":set -interactive-print=cpprint"
> :def ncp (\_ -> return ":set -interactive-print=print")
Now you can enable colored pretty-printing in ghci with the commmand
> :cp
The following command turns colored pretty-printing off again
| Copyright : Copyright © 2014 < >
License : MIT
Maintainer : < >
Add the following lines to your @ghci.conf@ file :
> : ncp
module Text.Show.Pretty.Colored
( cpprint
) where
import Text.Show.Pretty
import Language.Haskell.HsColour
import Language.Haskell.HsColour.Colourise
import Language.Haskell.HsColour.Output
cpprint :: Show a => a -> IO ()
cpprint x = do
putStrLn . hscolour (TTYg XTerm256Compatible) defaultColourPrefs False False "" False . ppShow $ x
|
5f753f1e23778b14be86e6c66ce438a9cbe7c8e3df5b628258340e62c3106908 | ghc/packages-Cabal | cabal.test.hs | import Test.Cabal.Prelude
import System.Directory-- (getDirectoryContents, removeFile)
main = cabalTest $ do
cabal "v2-build" ["inplace-dep"]
env <- getTestEnv
liftIO $ removeEnvFiles $ testSourceDir env -- we don't want existing env files to interfere
-- Drop the compiled executable into the temporary directory, to avoid cluttering the tree. If compilation succeeds, we've tested what we need to!
tmpdir <- fmap testTmpDir getTestEnv
let dest = tmpdir </> "a.out"
cabal "v2-exec" ["ghc", "--", "Main.hs", "-o", dest]
TODO external ( store ) deps , once v2 - install is working
copy - pasted from D.C.CmdClean .
removeEnvFiles :: FilePath -> IO ()
removeEnvFiles dir =
(mapM_ (removeFile . (dir </>)) . filter ((".ghc.environment" ==) . take 16))
=<< getDirectoryContents dir
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/NewBuild/CmdExec/GhcInvocation/cabal.test.hs | haskell | (getDirectoryContents, removeFile)
we don't want existing env files to interfere
Drop the compiled executable into the temporary directory, to avoid cluttering the tree. If compilation succeeds, we've tested what we need to! | import Test.Cabal.Prelude
main = cabalTest $ do
cabal "v2-build" ["inplace-dep"]
env <- getTestEnv
tmpdir <- fmap testTmpDir getTestEnv
let dest = tmpdir </> "a.out"
cabal "v2-exec" ["ghc", "--", "Main.hs", "-o", dest]
TODO external ( store ) deps , once v2 - install is working
copy - pasted from D.C.CmdClean .
removeEnvFiles :: FilePath -> IO ()
removeEnvFiles dir =
(mapM_ (removeFile . (dir </>)) . filter ((".ghc.environment" ==) . take 16))
=<< getDirectoryContents dir
|
ab7fbd95fd1612ee04b2a60b6b3203bf1a13d617d58a6aa5ae36e05f1cc1227d | susisu/est-ocaml | term.mli | open Core
type 'a t = Lit of 'a * Value.t
| Var of 'a * string
| Vec of 'a * 'a t list
| App of 'a * 'a t * 'a t
| Let of 'a * string * 'a t * 'a t
val equal : 'a t -> 'a t -> bool
val get_info : 'a t -> 'a
val to_string : 'a t -> string
| null | https://raw.githubusercontent.com/susisu/est-ocaml/e610d07b166a51e5763aa4f7b12449ec0438071c/src/term.mli | ocaml | open Core
type 'a t = Lit of 'a * Value.t
| Var of 'a * string
| Vec of 'a * 'a t list
| App of 'a * 'a t * 'a t
| Let of 'a * string * 'a t * 'a t
val equal : 'a t -> 'a t -> bool
val get_info : 'a t -> 'a
val to_string : 'a t -> string
| |
e0616c960899e028fca8b995eda46a99f2bc3d33da54bb40d0f65cbc86553113 | dscarpetti/coll-pen | search.cljs | (ns coll-pen.search
(:require
[cljs.reader]
[clojure.string :as str]))
#_(defn build-search-fn [search-string]
(let [pattern (try
(re-pattern search-string)
(catch :default e
(println e)
(re-pattern " ")))]
(fn [el]
(when-not (coll? el)
(re-find pattern (pr-str el))))))
(defn search-seq [coll search-fn]
(filter search-fn coll))
(defn search-vec [coll search-fn]
(reduce-kv (fn [r i v]
(if (search-fn v)
(conj r [i v])
r))
[] coll))
(defn search-map [coll search-fn]
(reduce-kv (fn [m k v]
(if (search-fn k)
m
(dissoc m k)))
coll coll))
#_(defn get-search-coll [coll-type coll search-string limit]
(let [search-fn (build-search-fn search-string)
search-results (case coll-type
:map (search-map coll search-fn)
:vec (search-vec coll search-fn)
(search-seq coll search-fn))]
{:search/status :ok
:search/coll-count (count coll)
:search/result-count (count search-results)
:search/results (take limit search-results)}))
(def regex-search ^{:coll-pen/instructions "Regex Search"}
(fn [coll search-string]
(let [search-fn (try
(let [pattern (re-pattern search-string)]
(fn [el] (when-not (coll? el)
(re-find pattern (pr-str el)))))
(catch :default e
(js/console.log e)
(.-message e)))]
(if (string? search-fn)
search-fn #_(str "Bad Regex: " search-fn)
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn))))))
(def subs-search ^{:coll-pen/instructions "Substring Search"}
(fn [coll search-string]
(let [search-fn (fn [el]
(when-not (coll? el)
(str/includes? (pr-str el) search-string)))]
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn)))))
(def prefix-search ^{:coll-pen/instructions "Prefix Search"}
(fn [coll search-string]
(let [search-fn (fn [el]
(when-not (coll? el)
(str/starts-with? (pr-str el) search-string)))]
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn)))))
(def eq-search ^{:coll-pen/instructions "Equality Search"}
(fn [coll search-string]
(let [search-term (try
(cljs.reader/read-string search-string)
(catch :default e
(js/console.log e)
::error))
search-fn (fn [el] (= el search-term))]
(if (keyword-identical? search-term ::error)
(str "Unable to Parse Term")
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn))))))
(defn get-search-handler [fn-or-kw]
(if (fn? fn-or-kw)
fn-or-kw
(case fn-or-kw
:regex regex-search
:subs subs-search
:substr subs-search
:substring subs-search
:prefix prefix-search
:eq eq-search
:= eq-search
nil)))
(defn get-search-instructions [fn-or-string]
(cond
(string? fn-or-string) fn-or-string
(fn? fn-or-string) (:coll-pen/instructions (meta fn-or-string))))
| null | https://raw.githubusercontent.com/dscarpetti/coll-pen/8238a1a19ae79f08e94ec65fce51aa6ec449138f/src/coll_pen/search.cljs | clojure | (ns coll-pen.search
(:require
[cljs.reader]
[clojure.string :as str]))
#_(defn build-search-fn [search-string]
(let [pattern (try
(re-pattern search-string)
(catch :default e
(println e)
(re-pattern " ")))]
(fn [el]
(when-not (coll? el)
(re-find pattern (pr-str el))))))
(defn search-seq [coll search-fn]
(filter search-fn coll))
(defn search-vec [coll search-fn]
(reduce-kv (fn [r i v]
(if (search-fn v)
(conj r [i v])
r))
[] coll))
(defn search-map [coll search-fn]
(reduce-kv (fn [m k v]
(if (search-fn k)
m
(dissoc m k)))
coll coll))
#_(defn get-search-coll [coll-type coll search-string limit]
(let [search-fn (build-search-fn search-string)
search-results (case coll-type
:map (search-map coll search-fn)
:vec (search-vec coll search-fn)
(search-seq coll search-fn))]
{:search/status :ok
:search/coll-count (count coll)
:search/result-count (count search-results)
:search/results (take limit search-results)}))
(def regex-search ^{:coll-pen/instructions "Regex Search"}
(fn [coll search-string]
(let [search-fn (try
(let [pattern (re-pattern search-string)]
(fn [el] (when-not (coll? el)
(re-find pattern (pr-str el)))))
(catch :default e
(js/console.log e)
(.-message e)))]
(if (string? search-fn)
search-fn #_(str "Bad Regex: " search-fn)
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn))))))
(def subs-search ^{:coll-pen/instructions "Substring Search"}
(fn [coll search-string]
(let [search-fn (fn [el]
(when-not (coll? el)
(str/includes? (pr-str el) search-string)))]
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn)))))
(def prefix-search ^{:coll-pen/instructions "Prefix Search"}
(fn [coll search-string]
(let [search-fn (fn [el]
(when-not (coll? el)
(str/starts-with? (pr-str el) search-string)))]
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn)))))
(def eq-search ^{:coll-pen/instructions "Equality Search"}
(fn [coll search-string]
(let [search-term (try
(cljs.reader/read-string search-string)
(catch :default e
(js/console.log e)
::error))
search-fn (fn [el] (= el search-term))]
(if (keyword-identical? search-term ::error)
(str "Unable to Parse Term")
(cond
(map? coll) (search-map coll search-fn)
(vector? coll) (search-vec coll search-fn)
:else (search-seq coll search-fn))))))
(defn get-search-handler [fn-or-kw]
(if (fn? fn-or-kw)
fn-or-kw
(case fn-or-kw
:regex regex-search
:subs subs-search
:substr subs-search
:substring subs-search
:prefix prefix-search
:eq eq-search
:= eq-search
nil)))
(defn get-search-instructions [fn-or-string]
(cond
(string? fn-or-string) fn-or-string
(fn? fn-or-string) (:coll-pen/instructions (meta fn-or-string))))
| |
ee0334b33b26c9ce2f21d959531997f521308a82a0585bf809f2e015a66a95e9 | macourtney/Dark-Exchange | open_offer_panel.clj | (ns darkexchange.view.main.home.open-offer-panel
(:require [darkexchange.model.terms :as terms]
[darkexchange.view.offer.open-offer-table :as open-offer-table-view]
[seesaw.core :as seesaw-core]))
(defn create-table-header-text []
(terms/open-offers))
(defn create-table-header-buttons []
(seesaw-core/horizontal-panel :items
[ (seesaw-core/button :id :new-open-offer-button :text (terms/new))
[:fill-h 3]
(seesaw-core/button :id :delete-open-offer-button :text (terms/delete) :enabled? false)]))
(defn create-table-header []
(seesaw-core/border-panel
:border 5
:vgap 5
:west (create-table-header-text)
:east (create-table-header-buttons)))
(defn create []
(seesaw-core/border-panel
:border 5
:vgap 5
:north (create-table-header)
:center (open-offer-table-view/create { :id :open-offer-table }))) | null | https://raw.githubusercontent.com/macourtney/Dark-Exchange/1654d05cda0c81585da7b8e64f9ea3e2944b27f1/src/darkexchange/view/main/home/open_offer_panel.clj | clojure | (ns darkexchange.view.main.home.open-offer-panel
(:require [darkexchange.model.terms :as terms]
[darkexchange.view.offer.open-offer-table :as open-offer-table-view]
[seesaw.core :as seesaw-core]))
(defn create-table-header-text []
(terms/open-offers))
(defn create-table-header-buttons []
(seesaw-core/horizontal-panel :items
[ (seesaw-core/button :id :new-open-offer-button :text (terms/new))
[:fill-h 3]
(seesaw-core/button :id :delete-open-offer-button :text (terms/delete) :enabled? false)]))
(defn create-table-header []
(seesaw-core/border-panel
:border 5
:vgap 5
:west (create-table-header-text)
:east (create-table-header-buttons)))
(defn create []
(seesaw-core/border-panel
:border 5
:vgap 5
:north (create-table-header)
:center (open-offer-table-view/create { :id :open-offer-table }))) | |
d15eea121fbd9f50bb9081bb2b9e69b7901fc3358217d7dd80fb74625bb0bc6c | dyzsr/ocaml-selectml | t14bad.ml | TEST
flags = " -w -a "
ocamlc_byte_exit_status = " 2 "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
* * * check - ocamlc.byte - output
flags = " -w -a "
ocamlc_byte_exit_status = "2"
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
*)
Bad - PR 4261
module PR_4261 = struct
module type S =
sig
type t
end
module type T =
sig
module D : S
type t = D.t
end
module rec U : T with type D.t = U'.t = U
and U' : S with type t = U'.t = U
end;;
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/typing-recmod/t14bad.ml | ocaml | TEST
flags = " -w -a "
ocamlc_byte_exit_status = " 2 "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
* * * check - ocamlc.byte - output
flags = " -w -a "
ocamlc_byte_exit_status = "2"
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
*)
Bad - PR 4261
module PR_4261 = struct
module type S =
sig
type t
end
module type T =
sig
module D : S
type t = D.t
end
module rec U : T with type D.t = U'.t = U
and U' : S with type t = U'.t = U
end;;
| |
e3662d29414aadb1b02802afd057d8d4087a92609e93ffa02ee1ba1097c3e02d | wireapp/wire-server | App.hs | # LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
-- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Federator.App
( AppT,
runAppT,
embedApp,
)
where
import Bilge (MonadHttp (..), RequestId (unRequestId), withResponse)
import Bilge.RPC (HasRequestId (..))
import Control.Lens (view)
import Control.Monad.Catch
import Control.Monad.Except
import Federator.Env (Env, applog, httpManager, requestId)
import Imports
import Polysemy
import Polysemy.Input
import System.Logger.Class as LC
import qualified System.Logger.Extended as Log
FUTUREWORK(federation ): this code re - occurs in every service . introduce ' ' in types - common that
takes ' Env ' as one more argument .
newtype AppT m a = AppT
{ unAppT :: ReaderT Env m a
}
deriving newtype
( Functor,
Applicative,
Monad,
MonadIO,
MonadThrow,
MonadCatch,
MonadMask,
MonadReader Env
)
instance MonadIO m => LC.MonadLogger (AppT m) where
log l m = do
g <- view applog
r <- view requestId
Log.log g l $ field "request" (unRequestId r) ~~ m
instance MonadIO m => LC.MonadLogger (ExceptT err (AppT m)) where
log l m = lift (LC.log l m)
instance Monad m => HasRequestId (AppT m) where
getRequestId = view requestId
instance MonadUnliftIO m => MonadUnliftIO (AppT m) where
withRunInIO inner =
AppT . ReaderT $ \r ->
withRunInIO $ \runner ->
inner (runner . flip runReaderT r . unAppT)
instance MonadTrans AppT where
lift = AppT . lift
instance MonadIO m => MonadHttp (AppT m) where
handleRequestWithCont req handler = do
manager <- view httpManager <$> ask
liftIO $ withResponse req manager handler
runAppT :: forall m a. Env -> AppT m a -> m a
runAppT e (AppT ma) = runReaderT ma e
embedApp ::
( Member (Embed m) r,
Member (Input Env) r
) =>
AppT m a ->
Sem r a
embedApp (AppT action) = do
env <- input
embed $ runReaderT action env
| null | https://raw.githubusercontent.com/wireapp/wire-server/c27541d7456e3ea07ce5c8991585d0ffc4fac226/services/federator/src/Federator/App.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. | # LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Federator.App
( AppT,
runAppT,
embedApp,
)
where
import Bilge (MonadHttp (..), RequestId (unRequestId), withResponse)
import Bilge.RPC (HasRequestId (..))
import Control.Lens (view)
import Control.Monad.Catch
import Control.Monad.Except
import Federator.Env (Env, applog, httpManager, requestId)
import Imports
import Polysemy
import Polysemy.Input
import System.Logger.Class as LC
import qualified System.Logger.Extended as Log
FUTUREWORK(federation ): this code re - occurs in every service . introduce ' ' in types - common that
takes ' Env ' as one more argument .
newtype AppT m a = AppT
{ unAppT :: ReaderT Env m a
}
deriving newtype
( Functor,
Applicative,
Monad,
MonadIO,
MonadThrow,
MonadCatch,
MonadMask,
MonadReader Env
)
instance MonadIO m => LC.MonadLogger (AppT m) where
log l m = do
g <- view applog
r <- view requestId
Log.log g l $ field "request" (unRequestId r) ~~ m
instance MonadIO m => LC.MonadLogger (ExceptT err (AppT m)) where
log l m = lift (LC.log l m)
instance Monad m => HasRequestId (AppT m) where
getRequestId = view requestId
instance MonadUnliftIO m => MonadUnliftIO (AppT m) where
withRunInIO inner =
AppT . ReaderT $ \r ->
withRunInIO $ \runner ->
inner (runner . flip runReaderT r . unAppT)
instance MonadTrans AppT where
lift = AppT . lift
instance MonadIO m => MonadHttp (AppT m) where
handleRequestWithCont req handler = do
manager <- view httpManager <$> ask
liftIO $ withResponse req manager handler
runAppT :: forall m a. Env -> AppT m a -> m a
runAppT e (AppT ma) = runReaderT ma e
embedApp ::
( Member (Embed m) r,
Member (Input Env) r
) =>
AppT m a ->
Sem r a
embedApp (AppT action) = do
env <- input
embed $ runReaderT action env
|
5613e55e6e80053292b8ab8cb2dde32c59b30d4d4e2956e5b5354f50890ec570 | zotonic/zotonic | mod_ssl_letsencrypt.erl | @author < >
2016 ,
%%
%% @doc Certificate handling for Let's Encrypt
Copyright 2016 ,
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(mod_ssl_letsencrypt).
-mod_title("SSL - Let's Encrypt").
-mod_description("Use SSL Certificate from Let's Encrypt.").
-mod_provides([]).
-mod_depends([cron]).
-mod_prio(200).
-behaviour(gen_server).
-author('Marc Worrell <>').
-export([
observe_ssl_options/2,
observe_tick_24h/2,
event/2,
is_self_ping/2,
get_self_ping/1,
get_challenge/1,
status/1,
load_cert/1
]).
-export([
start_link/1,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
code_change/3,
terminate/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
-define(SNI_CACHE_TIME, 60).
-define(KEY_BITS, 2048).
-define(CA_CERT_URL, "-encrypt-x3-cross-signed.pem").
-ifdef(TEST).
-define(ACME_SRV_OPTS, [staging]).
-else.
-define(ACME_SRV_OPTS, []).
-endif.
-record(state, {
site :: atom(),
self_ping :: binary() | undefined,
State information when requesting a new cert
request_letsencrypt_pid = undefined :: undefined | pid(),
request_monitor = undefined :: undefined | reference(),
request_hostname = undefined :: binary() | undefined,
request_san = [] :: list(binary()),
request_start = undefined :: undefined | calendar:datetime(),
request_status = none :: none | requesting | ok | error,
% Information about the current certificate
cert_is_valid = false :: boolean(),
cert_hostname = undefined :: binary() | undefined,
cert_san = [] :: list( binary() ),
cert_valid_till = undefined :: undefined | calendar:datetime()
}).
%% @doc Return the certificates of this site.
observe_ssl_options(#ssl_options{server_name=_NormalizedHostnameBin}, Context) ->
z_depcache:memo(
fun() ->
case status(Context) of
{ok, Status} ->
case proplists:get_value(cert_is_valid, Status) of
true -> ssl_options(Context);
false -> undefined
end;
{error, _} ->
undefined
end
end,
sni_ssl_letsencrypt,
?SNI_CACHE_TIME,
Context).
%% @doc Period tick, used to check for cert upgrade
observe_tick_24h(tick_24h, Context) ->
load_cert(Context),
gen_server:cast(z_utils:name_for_site(?MODULE, Context), renewal_check).
%% @doc Handle UI events
%% @todo ACL check
event(#submit{message = {request_cert, Args}}, Context) ->
case z_acl:is_admin_editable(Context) of
true ->
{hostname, Hostname} = proplists:lookup(hostname, Args),
{wrapper, Wrapper} = proplists:lookup(wrapper, Args),
SANs = z_context:get_q_all(<<"san">>, Context),
SANs1 = [ San || San <- SANs, San /= <<>> ],
case gen_server:call(z_utils:name_for_site(?MODULE, Context), {cert_request, Hostname, SANs1}) of
ok ->
z_render:update(Wrapper,
#render{
template="_admin_ssl_letsencrypt_running.tpl",
vars=[
{hostname, Hostname},
{san, SANs1}
]},
Context);
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not start Letsencrypt cert request">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason,
hostname => Hostname,
san => SANs1
}),
z_render:wire({alert, [
{title, ?__(<<"SSL Let’s Encrypt Certificate"/utf8>>, Context)},
{text, ?__("Could not start fetching the SSL certificate. Try again later.", Context)},
{button, ?__("Cancel", Context)}
]},
Context)
end;
false ->
z_render:wire({alert, [
{title, ?__(<<"SSL Let’s Encrypt Certificate"/utf8>>, Context)},
{text, ?__("You need to be an administrator to request certificates.", Context)},
{button, ?__("Cancel", Context)}
]},
Context)
end;
event(_Event, Context) ->
Context.
%% @doc Generate a new self-ping value
-spec get_self_ping(#context{}) -> {ok, binary()}.
get_self_ping(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), get_self_ping).
%% @doc Check if the returned ping is the generated ping
-spec is_self_ping(binary()|string(), #context{}) -> boolean().
is_self_ping(Ping, Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), {is_self_ping, Ping}).
@doc Fetch the challenge requested by the ACME handshake
-spec get_challenge(#context{}) -> {ok, map()}.
get_challenge(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), get_challenge).
%% @doc Check if the returned ping is the generated ping
-spec status(#context{}) -> {ok, list()} | {error, term()}.
status(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), status).
%% @doc Load the current certificate metadata
-spec load_cert( z:context() ) -> ok.
load_cert(Context) ->
gen_server:cast(z_utils:name_for_site(?MODULE, Context), load_cert).
%%====================================================================
%% API
%%====================================================================
) - > { ok , Pid } | ignore | { error , Error }
%% @doc Starts the server
start_link(Args) when is_list(Args) ->
{context, Context} = proplists:lookup(context, Args),
gen_server:start_link({local, z_utils:name_for_site(?MODULE, Context)}, ?MODULE, Args, []).
%%====================================================================
%% gen_server callbacks
%%====================================================================
) - > { ok , State } |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% @doc Initiates the server.
init(Args) ->
process_flag(trap_exit, true),
{context, Context} = proplists:lookup(context, Args),
Site = z_context:site(Context),
logger:set_process_metadata(#{
site => Site,
module => ?MODULE
}),
gen_server:cast(self(), load_cert),
{ok, #state{site=Site, self_ping = undefined }}.
handle_call(get_self_ping, _From, State) ->
Ping = z_ids:id(),
{reply, {ok, Ping}, State#state{self_ping = Ping}};
handle_call({is_self_ping, SelfPing}, _From, #state{self_ping = Ping} = State) ->
{reply, z_convert:to_binary(SelfPing) =:= Ping, State};
handle_call({cert_request, _Hostname, _SANs}, _From, #state{request_letsencrypt_pid = Pid} = State) when is_pid(Pid) ->
?LOG_ERROR("Letsencrypt cert request whilst another request is running"),
{reply, {error, busy}, State};
handle_call({cert_request, Hostname, SANs}, _From, State) ->
case start_cert_request(Hostname, SANs, State) of
{ok, State1} ->
z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"started">>, z_acl:sudo(z_context:new(State#state.site))),
{reply, ok, State1};
{error, Reason, State1} ->
{reply, {error, Reason}, State1}
end;
handle_call(get_challenge, _From, #state{request_letsencrypt_pid = undefined} = State) ->
?LOG_ERROR("Fetching Letsencrypt challenge but no request running"),
{reply, {ok, #{}}, State};
handle_call(get_challenge, _From, #state{request_letsencrypt_pid = _Pid} = State) ->
case z_letsencrypt:get_challenge() of
error ->
?LOG_ERROR("Error fetching Letsencrypt challenge."),
{reply, {ok, #{}}, State};
Map when is_map(Map) ->
{reply, {ok, Map}, State}
end;
handle_call(status, _From, State) ->
Props = [
{request_status, State#state.request_status},
{request_start, State#state.request_start},
{request_hostname, State#state.request_hostname},
{request_san, State#state.request_san},
{cert_is_valid, State#state.cert_is_valid},
{cert_hostname, State#state.cert_hostname},
{cert_san, State#state.cert_san},
{cert_valid_till, State#state.cert_valid_till}
],
{reply, {ok, Props}, State};
handle_call(Message, _From, State) ->
{stop, {unknown_call, Message}, State}.
handle_cast(load_cert, State) ->
State1 = do_load_cert(State),
z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"reload">>, z_acl:sudo(z_context:new(State#state.site))),
{noreply, State1};
handle_cast({complete, Ret, LetsPid}, #state{request_letsencrypt_pid = LetsPid} = State) ->
State1 = handle_letsencrypt_result(Ret, State),
erlang:demonitor(State#state.request_monitor),
gen_server:cast(self(), load_cert),
{noreply, State1#state{
request_letsencrypt_pid = undefined,
request_monitor = undefined
}};
handle_cast(renewal_check, #state{cert_is_valid = false} = State) ->
{noreply, State};
handle_cast(renewal_check, #state{cert_is_valid = true, cert_hostname = Hostname, cert_san = SANs} = State) ->
We try renewal during the last month of validity
% After the last validity we stop trying, as there is clearly something wrong.
Now = calendar:universal_time(),
NextMonth = z_datetime:next_month(Now),
case NextMonth > State#state.cert_valid_till
andalso Now < State#state.cert_valid_till
of
true ->
SANs1 = lists:usort(SANs) -- [Hostname],
case start_cert_request(Hostname, SANs1, State) of
{ok, State1} ->
z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"started">>, z_acl:sudo(z_context:new(State#state.site))),
{noreply, State1};
{error, _Reason, State1} ->
{noreply, State1}
end;
false ->
{noreply, State}
end;
handle_cast(Message, State) ->
{stop, {unknown_cast, Message}, State}.
handle_info({'DOWN', MRef, process, _Pid, normal}, #state{request_monitor = MRef} = State) ->
gen_server:cast(self(), load_cert),
{noreply, State#state{
request_monitor = undefined,
request_letsencrypt_pid = undefined,
request_status = error
}};
handle_info({'DOWN', MRef, process, _Pid, Reason}, #state{request_monitor = MRef} = State) ->
?LOG_ERROR(#{
text => <<"Letsencrypt went down whilst requesting cert">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason,
hostname => State#state.request_hostname,
san => State#state.request_san
}),
gen_server:cast(self(), load_cert),
{noreply, State#state{
request_monitor = undefined,
request_letsencrypt_pid = undefined,
request_status = error
}};
handle_info({'DOWN', _MRef, process, _Pid, normal}, #state{request_monitor = undefined} = State) ->
% Late down message
{noreply, State};
handle_info({'EXIT', _Pid, _Reason}, State) ->
{noreply, State};
handle_info(Info, State) ->
?LOG_WARNING(#{
text => <<"Letsencrypt unknown info message">>,
in => zotonic_mod_ssl_letsencrypt,
message => Info
}),
{noreply, State}.
, State ) - > void ( )
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
terminate(_Reason, _State) ->
ok.
, State , Extra ) - > { ok , NewState }
%% @doc Convert process state when code is changed
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%====================================================================
Internal functions
%%====================================================================
@doc Load the Letsencrypt certificate and extract hostnames and validity .
do_load_cert(State) ->
Context = z_context:new(State#state.site),
{ok, Files} = cert_files(Context),
{certfile, CertFile} = proplists:lookup(certfile, Files),
case filelib:is_file(CertFile) of
true ->
case zotonic_ssl_certs:decode_cert(CertFile) of
{ok, CertMap} ->
State#state{
cert_is_valid = true,
cert_hostname = maps:get(common_name, CertMap),
cert_san = maps:get(subject_alt_names, CertMap, []),
cert_valid_till = maps:get(not_after, CertMap)
};
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not decode Letsencrypt crt file">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason
}),
invalid_cert_status(State)
end;
false ->
invalid_cert_status(State)
end.
invalid_cert_status(State) ->
State#state{
cert_is_valid = false,
cert_hostname = undefined,
cert_san = [],
cert_valid_till = undefined
}.
@doc Letsencrypt finished , perform housekeeping and logging
handle_letsencrypt_result({ok, LEFiles}, State) ->
?LOG_NOTICE(#{
text => <<"Letsencrypt successfully requested cert">>,
in => zotonic_mod_ssl_letsencrypt,
result => ok,
hostname => State#state.request_hostname,
san => State#state.request_san
}),
Context = z_context:new(State#state.site),
{ok, MyFiles} = cert_files_all(Context),
{certfile, CertFile} = proplists:lookup(certfile, MyFiles),
{cacertfile, CaCertFile} = proplists:lookup(cacertfile, MyFiles),
{keyfile, KeyFile} = proplists:lookup(keyfile, MyFiles),
{CertData, IntermediateData} = split_cert_chain_file(maps:get(cert, LEFiles)),
ok = file:write_file(CertFile, CertData),
case IntermediateData of
none ->
_ = file:delete(CaCertFile),
_ = download_cacert(Context);
_ ->
ok = file:write_file(CaCertFile, IntermediateData),
_ = file:change_mode(CaCertFile, 8#00644)
end,
{ok, _} = file:copy(maps:get(key, LEFiles), KeyFile),
_ = file:change_mode(CertFile, 8#00644),
_ = file:change_mode(KeyFile, 8#00600),
State#state{
request_status = ok
};
handle_letsencrypt_result({error, Reason}, State) ->
?LOG_ERROR(#{
text => <<"Letsencrypt error whilst requesting cert">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason,
hostname => State#state.request_hostname,
san => State#state.request_san
}),
State#state{
request_status = error
}.
start_cert_request(Hostname, SANs, #state{site = Site, request_letsencrypt_pid = undefined} = State) ->
Context = z_context:new(Site),
{ok, KeyFile} = ensure_key_file(Context),
TempDir = cert_temp_dir(Context),
ok = case file:make_dir(TempDir) of
{error, eexist} -> ok;
ok -> ok
end,
CertPath = cert_temp_dir(Context),
LetsOpts = [
{cert_path, CertPath},
{key_file, KeyFile}
| ?ACME_SRV_OPTS
],
{ok, Pid} = z_letsencrypt_job:request(self(), Hostname, SANs, LetsOpts),
{ok, State#state{
request_letsencrypt_pid = Pid,
request_monitor = erlang:monitor(process, Pid),
request_hostname = Hostname,
request_san = SANs,
request_start = calendar:universal_time(),
request_status = requesting
}};
start_cert_request(_Hostname, _SANs, #state{request_letsencrypt_pid = _Pid} = State) ->
{error, already_started, State}.
%% @doc Split the returned cert data in the certificate and the intermediate chain certs.
split_cert_chain_file(File) ->
{ok, Data} = file:read_file(File),
Parts = binary:split(Data, <<"-----END CERTIFICATE-----">>, [ global ]),
Parts1 = lists:filtermap(
fun(D) ->
case z_string:trim(D) of
<<>> -> false;
D1 -> {true, <<D1/binary, 10, "-----END CERTIFICATE-----", 10>>}
end
end,
Parts),
case Parts1 of
[ Cert ] ->
{Cert, none};
[ Cert | Chain ] ->
Chain1 = lists:join(<<10>>, Chain),
{Cert, iolist_to_binary(Chain1)}
end.
ssl_options(Context) ->
{ok, CertFiles} = cert_files(Context),
CertFile = proplists:get_value(certfile, CertFiles),
KeyFile = proplists:get_value(keyfile, CertFiles),
case {filelib:is_file(CertFile), filelib:is_file(KeyFile)} of
{false, false} ->
?LOG_NOTICE(#{
text => <<"mod_ssl_letsencrypt: no cert and key files, skipping.">>,
in => zotonic_mod_ssl_letsencrypt,
cert_filename => CertFile,
key_filename => KeyFile
}),
undefined;
{false, true} ->
?LOG_NOTICE(#{
text => <<"mod_ssl_letsencrypt: no cert file (though there is a key file), skipping.">>,
in => zotonic_mod_ssl_letsencrypt,
cert_filename => CertFile,
key_filename => KeyFile
}),
undefined;
{true, false} ->
?LOG_NOTICE(#{
text => <<"mod_ssl_letsencrypt: no key file (though there is a cert file), skipping.">>,
in => zotonic_mod_ssl_letsencrypt,
cert_filename => CertFile,
key_filename => KeyFile
}),
undefined;
{true, true} ->
case check_keyfile(KeyFile, Context) of
ok -> {ok, CertFiles};
{error, _} -> undefined
end
end.
cert_files(Context) ->
SSLDir = cert_dir(Context),
Hostname = z_context:hostname(Context),
Files = [
{certfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".crt">>))},
{keyfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".key">>))}
] ++ z_ssl_dhfile:dh_options(),
CaCertFile = filename:join(SSLDir, <<Hostname/binary, ".ca.crt">>),
case filelib:is_file(CaCertFile) of
false -> {ok, Files};
true -> {ok, [{cacertfile, CaCertFile} | Files]}
end.
cert_files_all(Context) ->
SSLDir = cert_dir(Context),
Hostname = z_context:hostname(Context),
{ok, [
{certfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".crt">>))},
{cacertfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".ca.crt">>))},
{keyfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".key">>))}
]}.
cert_dir(Context) ->
PrivSSLDir = filename:join([z_path:site_dir(Context), "priv", "security", "letsencrypt"]),
case filelib:is_dir(PrivSSLDir) of
true ->
PrivSSLDir;
false ->
{ok, SecurityDir} = z_config_files:security_dir(),
filename:join([ SecurityDir, z_context:site(Context), "letsencrypt" ])
end.
cert_temp_dir(Context) ->
filename:join([cert_dir(Context), "tmp"]).
-spec check_keyfile(string(), z:context()) -> ok | {error, no_private_keys_found|term()}.
check_keyfile(KeyFile, Context) ->
Hostname = z_context:hostname(Context),
case file:read_file(KeyFile) of
{ok, Bin} ->
case public_key:pem_decode(Bin) of
[] ->
?LOG_ERROR(#{
text => <<"No private keys for Letsencrypt found">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
hostname => Hostname,
reason => no_private_keys_found,
key_file => KeyFile
}),
{error, no_private_keys_found};
_ ->
ok
end;
{error, Reason} = Error ->
?LOG_ERROR(#{
text => <<"Cannot read Letsencrypt key file">>,
in => zotonic_mod_ssl_letsencrypt,
key_file => KeyFile,
result => error,
hostname => Hostname,
reason => Reason
}),
Error
end.
@doc Ensure that we have a RSA key for Letsencrypt .
-spec ensure_key_file(z:context()) -> {ok, string()} | {error, openssl|term()}.
ensure_key_file(Context) ->
SSLDir = cert_dir(Context),
KeyFile = filename:join(SSLDir, "letsencrypt_api.key"),
case filelib:is_file(KeyFile) of
true ->
{ok, KeyFile};
false ->
?LOG_NOTICE(#{
text => <<"Generating RSA key for LetsEncrypt">>,
in => zotonic_mod_ssl_letsencrypt,
key_file => KeyFile
}),
ok = z_filelib:ensure_dir(KeyFile),
_ = file:change_mode(filename:basename(KeyFile), 8#00700),
Escaped = z_filelib:os_filename(KeyFile),
Cmd = "openssl genrsa -out "
++ Escaped
++ " "
++ z_convert:to_list(?KEY_BITS),
Result = os:cmd(Cmd),
case filelib:is_file(KeyFile) of
true ->
_ = file:change_mode(KeyFile, 8#00600),
case check_keyfile(KeyFile, Context) of
ok ->
{ok, KeyFile};
{error, _} = Error ->
Error
end;
false ->
?LOG_ERROR(#{
text => <<"Error generating RSA key for LetsEncrypt">>,
in => zotonic_mod_ssl_letsencrypt,
key_file => KeyFile,
result => error,
reason => Result
}),
{error, openssl}
end
end.
% @doc Download the intermediate certificates
-spec download_cacert(z:context()) -> ok | {error, term()}.
download_cacert(Context) ->
case z_url_fetch:fetch(?CA_CERT_URL, []) of
{ok, {_Url, Hs, _Size, Cert}} ->
case proplists:get_value("content-type", Hs) of
"application/x-x509-ca-cert" ->
save_ca_cert(Cert, Context);
"application/x-pem-file" ->
save_ca_cert(Cert, Context);
CT ->
?LOG_ERROR(#{
text => <<"Download of cert file returned unexpected content-type">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => content_type,
url => ?CA_CERT_URL,
content_type => CT
}),
{error, content_type}
end;
{error, Reason} = Error ->
?LOG_ERROR(#{
text => <<"Download of cert file failed">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason,
url => ?CA_CERT_URL
}),
Error
end.
save_ca_cert(Cert, Context) ->
SSLDir = cert_dir(Context),
Hostname = z_context:hostname(Context),
CaCertFile = filename:join(SSLDir, <<Hostname/binary, ".ca.crt">>),
case file:write_file(CaCertFile, Cert) of
ok ->
_ = file:change_mode(CaCertFile, 8#00644),
ok;
{error, _} = Error ->
Error
end.
| null | https://raw.githubusercontent.com/zotonic/zotonic/eb2e61a83e11aea79f2cf20fcbd6fd998ddfd4b0/apps/zotonic_mod_ssl_letsencrypt/src/mod_ssl_letsencrypt.erl | erlang |
@doc Certificate handling for Let's Encrypt
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Information about the current certificate
@doc Return the certificates of this site.
@doc Period tick, used to check for cert upgrade
@doc Handle UI events
@todo ACL check
@doc Generate a new self-ping value
@doc Check if the returned ping is the generated ping
@doc Check if the returned ping is the generated ping
@doc Load the current certificate metadata
====================================================================
API
====================================================================
@doc Starts the server
====================================================================
gen_server callbacks
====================================================================
ignore |
{stop, Reason}
@doc Initiates the server.
After the last validity we stop trying, as there is clearly something wrong.
Late down message
@doc This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up. When it returns, the gen_server terminates with Reason.
The return value is ignored.
@doc Convert process state when code is changed
====================================================================
====================================================================
@doc Split the returned cert data in the certificate and the intermediate chain certs.
@doc Download the intermediate certificates | @author < >
2016 ,
Copyright 2016 ,
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_ssl_letsencrypt).
-mod_title("SSL - Let's Encrypt").
-mod_description("Use SSL Certificate from Let's Encrypt.").
-mod_provides([]).
-mod_depends([cron]).
-mod_prio(200).
-behaviour(gen_server).
-author('Marc Worrell <>').
-export([
observe_ssl_options/2,
observe_tick_24h/2,
event/2,
is_self_ping/2,
get_self_ping/1,
get_challenge/1,
status/1,
load_cert/1
]).
-export([
start_link/1,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
code_change/3,
terminate/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
-define(SNI_CACHE_TIME, 60).
-define(KEY_BITS, 2048).
-define(CA_CERT_URL, "-encrypt-x3-cross-signed.pem").
-ifdef(TEST).
-define(ACME_SRV_OPTS, [staging]).
-else.
-define(ACME_SRV_OPTS, []).
-endif.
-record(state, {
site :: atom(),
self_ping :: binary() | undefined,
State information when requesting a new cert
request_letsencrypt_pid = undefined :: undefined | pid(),
request_monitor = undefined :: undefined | reference(),
request_hostname = undefined :: binary() | undefined,
request_san = [] :: list(binary()),
request_start = undefined :: undefined | calendar:datetime(),
request_status = none :: none | requesting | ok | error,
cert_is_valid = false :: boolean(),
cert_hostname = undefined :: binary() | undefined,
cert_san = [] :: list( binary() ),
cert_valid_till = undefined :: undefined | calendar:datetime()
}).
observe_ssl_options(#ssl_options{server_name=_NormalizedHostnameBin}, Context) ->
z_depcache:memo(
fun() ->
case status(Context) of
{ok, Status} ->
case proplists:get_value(cert_is_valid, Status) of
true -> ssl_options(Context);
false -> undefined
end;
{error, _} ->
undefined
end
end,
sni_ssl_letsencrypt,
?SNI_CACHE_TIME,
Context).
observe_tick_24h(tick_24h, Context) ->
load_cert(Context),
gen_server:cast(z_utils:name_for_site(?MODULE, Context), renewal_check).
event(#submit{message = {request_cert, Args}}, Context) ->
case z_acl:is_admin_editable(Context) of
true ->
{hostname, Hostname} = proplists:lookup(hostname, Args),
{wrapper, Wrapper} = proplists:lookup(wrapper, Args),
SANs = z_context:get_q_all(<<"san">>, Context),
SANs1 = [ San || San <- SANs, San /= <<>> ],
case gen_server:call(z_utils:name_for_site(?MODULE, Context), {cert_request, Hostname, SANs1}) of
ok ->
z_render:update(Wrapper,
#render{
template="_admin_ssl_letsencrypt_running.tpl",
vars=[
{hostname, Hostname},
{san, SANs1}
]},
Context);
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not start Letsencrypt cert request">>,
in => zotonic_mod_ssl_letsencrypt,
result => error,
reason => Reason,
hostname => Hostname,
san => SANs1
}),
z_render:wire({alert, [
{title, ?__(<<"SSL Let’s Encrypt Certificate"/utf8>>, Context)},
{text, ?__("Could not start fetching the SSL certificate. Try again later.", Context)},
{button, ?__("Cancel", Context)}
]},
Context)
end;
false ->
z_render:wire({alert, [
{title, ?__(<<"SSL Let’s Encrypt Certificate"/utf8>>, Context)},
{text, ?__("You need to be an administrator to request certificates.", Context)},
{button, ?__("Cancel", Context)}
]},
Context)
end;
event(_Event, Context) ->
Context.
-spec get_self_ping(#context{}) -> {ok, binary()}.
get_self_ping(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), get_self_ping).
-spec is_self_ping(binary()|string(), #context{}) -> boolean().
is_self_ping(Ping, Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), {is_self_ping, Ping}).
@doc Fetch the challenge requested by the ACME handshake
-spec get_challenge(#context{}) -> {ok, map()}.
get_challenge(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), get_challenge).
-spec status(#context{}) -> {ok, list()} | {error, term()}.
status(Context) ->
gen_server:call(z_utils:name_for_site(?MODULE, Context), status).
-spec load_cert( z:context() ) -> ok.
load_cert(Context) ->
gen_server:cast(z_utils:name_for_site(?MODULE, Context), load_cert).
) - > { ok , Pid } | ignore | { error , Error }
start_link(Args) when is_list(Args) ->
{context, Context} = proplists:lookup(context, Args),
gen_server:start_link({local, z_utils:name_for_site(?MODULE, Context)}, ?MODULE, Args, []).
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout}
%% @doc Initialize the server. The certificate itself is loaded
%% asynchronously (cast to self), so init/1 returns quickly.
init(Args) ->
    process_flag(trap_exit, true),
    {context, Context} = proplists:lookup(context, Args),
    Site = z_context:site(Context),
    logger:set_process_metadata(#{
        site => Site,
        module => ?MODULE
    }),
    gen_server:cast(self(), load_cert),
    {ok, #state{site=Site, self_ping = undefined }}.
%% @doc Handle synchronous calls.
%%
%% get_self_ping: hand out a fresh token and remember it, so that a later
%% {is_self_ping, Token} call can confirm a ping originated here.
handle_call(get_self_ping, _From, State) ->
    Ping = z_ids:id(),
    {reply, {ok, Ping}, State#state{self_ping = Ping}};
handle_call({is_self_ping, SelfPing}, _From, #state{self_ping = Ping} = State) ->
    {reply, z_convert:to_binary(SelfPing) =:= Ping, State};
%% Refuse a new certificate request whilst a previous one is still running.
handle_call({cert_request, _Hostname, _SANs}, _From, #state{request_letsencrypt_pid = Pid} = State) when is_pid(Pid) ->
    ?LOG_ERROR("Letsencrypt cert request whilst another request is running"),
    {reply, {error, busy}, State};
handle_call({cert_request, Hostname, SANs}, _From, State) ->
    case start_cert_request(Hostname, SANs, State) of
        {ok, State1} ->
            %% Notify UI listeners that a request has started.
            z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"started">>, z_acl:sudo(z_context:new(State#state.site))),
            {reply, ok, State1};
        {error, Reason, State1} ->
            {reply, {error, Reason}, State1}
    end;
%% The ACME challenge only exists whilst a request is in progress.
handle_call(get_challenge, _From, #state{request_letsencrypt_pid = undefined} = State) ->
    ?LOG_ERROR("Fetching Letsencrypt challenge but no request running"),
    {reply, {ok, #{}}, State};
handle_call(get_challenge, _From, #state{request_letsencrypt_pid = _Pid} = State) ->
    case z_letsencrypt:get_challenge() of
        error ->
            ?LOG_ERROR("Error fetching Letsencrypt challenge."),
            {reply, {ok, #{}}, State};
        Map when is_map(Map) ->
            {reply, {ok, Map}, State}
    end;
%% Snapshot of both the running/last request and the installed cert.
handle_call(status, _From, State) ->
    Props = [
        {request_status, State#state.request_status},
        {request_start, State#state.request_start},
        {request_hostname, State#state.request_hostname},
        {request_san, State#state.request_san},
        {cert_is_valid, State#state.cert_is_valid},
        {cert_hostname, State#state.cert_hostname},
        {cert_san, State#state.cert_san},
        {cert_valid_till, State#state.cert_valid_till}
    ],
    {reply, {ok, Props}, State};
handle_call(Message, _From, State) ->
    {stop, {unknown_call, Message}, State}.
%% @doc Handle asynchronous messages.
handle_cast(load_cert, State) ->
    State1 = do_load_cert(State),
    z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"reload">>, z_acl:sudo(z_context:new(State#state.site))),
    {noreply, State1};
%% Completion report from the letsencrypt job; only accepted when it comes
%% from the pid we started. Afterwards reload the (possibly new) cert.
handle_cast({complete, Ret, LetsPid}, #state{request_letsencrypt_pid = LetsPid} = State) ->
    State1 = handle_letsencrypt_result(Ret, State),
    erlang:demonitor(State#state.request_monitor),
    gen_server:cast(self(), load_cert),
    {noreply, State1#state{
        request_letsencrypt_pid = undefined,
        request_monitor = undefined
    }};
%% Nothing to renew without a valid certificate.
handle_cast(renewal_check, #state{cert_is_valid = false} = State) ->
    {noreply, State};
handle_cast(renewal_check, #state{cert_is_valid = true, cert_hostname = Hostname, cert_san = SANs} = State) ->
    %% We try renewal during the last month of validity
    Now = calendar:universal_time(),
    NextMonth = z_datetime:next_month(Now),
    case NextMonth > State#state.cert_valid_till
        andalso Now < State#state.cert_valid_till
    of
        true ->
            %% Re-request with the same CN; SANs are deduplicated and the CN
            %% itself is removed from the SAN list.
            SANs1 = lists:usort(SANs) -- [Hostname],
            case start_cert_request(Hostname, SANs1, State) of
                {ok, State1} ->
                    z_mqtt:publish(<<"model/letsencrypt/event/status">>, <<"started">>, z_acl:sudo(z_context:new(State#state.site))),
                    {noreply, State1};
                {error, _Reason, State1} ->
                    {noreply, State1}
            end;
        false ->
            {noreply, State}
    end;
handle_cast(Message, State) ->
    {stop, {unknown_cast, Message}, State}.
%% @doc Handle monitor/exit messages of the letsencrypt request process.
%% A 'normal' DOWN for the monitored pid still sets request_status to
%% 'error': a successful run reports via the {complete, ...} cast first,
%% which clears request_monitor before the monitor fires.
handle_info({'DOWN', MRef, process, _Pid, normal}, #state{request_monitor = MRef} = State) ->
    gen_server:cast(self(), load_cert),
    {noreply, State#state{
        request_monitor = undefined,
        request_letsencrypt_pid = undefined,
        request_status = error
    }};
handle_info({'DOWN', MRef, process, _Pid, Reason}, #state{request_monitor = MRef} = State) ->
    ?LOG_ERROR(#{
        text => <<"Letsencrypt went down whilst requesting cert">>,
        in => zotonic_mod_ssl_letsencrypt,
        result => error,
        reason => Reason,
        hostname => State#state.request_hostname,
        san => State#state.request_san
    }),
    gen_server:cast(self(), load_cert),
    {noreply, State#state{
        request_monitor = undefined,
        request_letsencrypt_pid = undefined,
        request_status = error
    }};
%% Late DOWN after the monitor was already cleared: ignore.
handle_info({'DOWN', _MRef, process, _Pid, normal}, #state{request_monitor = undefined} = State) ->
    {noreply, State};
%% We trap exits (see init/1); linked-process exits are ignored here.
handle_info({'EXIT', _Pid, _Reason}, State) ->
    {noreply, State};
handle_info(Info, State) ->
    ?LOG_WARNING(#{
        text => <<"Letsencrypt unknown info message">>,
        in => zotonic_mod_ssl_letsencrypt,
        message => Info
    }),
    {noreply, State}.
%% @spec terminate(Reason, State) -> void()
%% @doc Nothing to clean up on termination.
terminate(_Reason, _State) ->
    ok.
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @doc Hot code upgrade: state is kept as-is.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
Internal functions
%% @doc Load the Letsencrypt certificate and extract hostnames and validity.
do_load_cert(State) ->
    Context = z_context:new(State#state.site),
    {ok, Files} = cert_files(Context),
    {certfile, CertFile} = proplists:lookup(certfile, Files),
    case filelib:is_file(CertFile) of
        true ->
            case zotonic_ssl_certs:decode_cert(CertFile) of
                {ok, CertMap} ->
                    %% Cache the cert's CN, SANs and expiry in the state.
                    State#state{
                        cert_is_valid = true,
                        cert_hostname = maps:get(common_name, CertMap),
                        cert_san = maps:get(subject_alt_names, CertMap, []),
                        cert_valid_till = maps:get(not_after, CertMap)
                    };
                {error, Reason} ->
                    ?LOG_ERROR(#{
                        text => <<"Could not decode Letsencrypt crt file">>,
                        in => zotonic_mod_ssl_letsencrypt,
                        result => error,
                        reason => Reason
                    }),
                    invalid_cert_status(State)
            end;
        false ->
            invalid_cert_status(State)
    end.
%% Reset every cached certificate field in the state; used when no valid
%% certificate file is present on disk (or it could not be decoded).
invalid_cert_status(State) ->
    State#state{
        cert_valid_till = undefined,
        cert_san = [],
        cert_hostname = undefined,
        cert_is_valid = false
    }.
%% @doc Letsencrypt finished, perform housekeeping and logging
%% On success the new cert chain and key are installed into the site's
%% cert dir; on failure only the request status is updated.
handle_letsencrypt_result({ok, LEFiles}, State) ->
    ?LOG_NOTICE(#{
        text => <<"Letsencrypt successfully requested cert">>,
        in => zotonic_mod_ssl_letsencrypt,
        result => ok,
        hostname => State#state.request_hostname,
        san => State#state.request_san
    }),
    Context = z_context:new(State#state.site),
    {ok, MyFiles} = cert_files_all(Context),
    {certfile, CertFile} = proplists:lookup(certfile, MyFiles),
    {cacertfile, CaCertFile} = proplists:lookup(cacertfile, MyFiles),
    {keyfile, KeyFile} = proplists:lookup(keyfile, MyFiles),
    %% Split the returned PEM into the leaf cert and the intermediates.
    {CertData, IntermediateData} = split_cert_chain_file(maps:get(cert, LEFiles)),
    ok = file:write_file(CertFile, CertData),
    case IntermediateData of
        none ->
            %% No intermediates in the chain: fetch the CA cert separately.
            _ = file:delete(CaCertFile),
            _ = download_cacert(Context);
        _ ->
            ok = file:write_file(CaCertFile, IntermediateData),
            _ = file:change_mode(CaCertFile, 8#00644)
    end,
    {ok, _} = file:copy(maps:get(key, LEFiles), KeyFile),
    %% Certs are world readable, the private key is owner-only.
    _ = file:change_mode(CertFile, 8#00644),
    _ = file:change_mode(KeyFile, 8#00600),
    State#state{
        request_status = ok
    };
handle_letsencrypt_result({error, Reason}, State) ->
    ?LOG_ERROR(#{
        text => <<"Letsencrypt error whilst requesting cert">>,
        in => zotonic_mod_ssl_letsencrypt,
        result => error,
        reason => Reason,
        hostname => State#state.request_hostname,
        san => State#state.request_san
    }),
    State#state{
        request_status = error
    }.
%% @doc Start an asynchronous letsencrypt certificate request. Only one
%% request may run at a time; the job is monitored so a crash is noticed.
start_cert_request(Hostname, SANs, #state{site = Site, request_letsencrypt_pid = undefined} = State) ->
    Context = z_context:new(Site),
    {ok, KeyFile} = ensure_key_file(Context),
    TempDir = cert_temp_dir(Context),
    %% Create the temp dir; an existing dir is fine. Any other error
    %% crashes here (case_clause), which is intentional.
    ok = case file:make_dir(TempDir) of
        {error, eexist} -> ok;
        ok -> ok
    end,
    CertPath = cert_temp_dir(Context),
    LetsOpts = [
        {cert_path, CertPath},
        {key_file, KeyFile}
        | ?ACME_SRV_OPTS
    ],
    {ok, Pid} = z_letsencrypt_job:request(self(), Hostname, SANs, LetsOpts),
    {ok, State#state{
        request_letsencrypt_pid = Pid,
        request_monitor = erlang:monitor(process, Pid),
        request_hostname = Hostname,
        request_san = SANs,
        request_start = calendar:universal_time(),
        request_status = requesting
    }};
start_cert_request(_Hostname, _SANs, #state{request_letsencrypt_pid = _Pid} = State) ->
    {error, already_started, State}.
%% Split a PEM chain file into the leaf certificate and, optionally, the
%% concatenated intermediate certificates. Returns {Leaf, none} for a
%% single-cert file, otherwise {Leaf, IntermediatesBinary}.
split_cert_chain_file(File) ->
    {ok, PemData} = file:read_file(File),
    Segments = binary:split(PemData, <<"-----END CERTIFICATE-----">>, [ global ]),
    %% Drop whitespace-only segments and re-attach the END marker to each cert.
    Certs = [ <<Trimmed/binary, 10, "-----END CERTIFICATE-----", 10>>
              || Segment <- Segments,
                 Trimmed <- [ z_string:trim(Segment) ],
                 Trimmed =/= <<>> ],
    case Certs of
        [ Leaf ] ->
            {Leaf, none};
        [ Leaf | Intermediates ] ->
            {Leaf, iolist_to_binary(lists:join(<<10>>, Intermediates))}
    end.
%% @doc Return {ok, SslOptions} when both the cert and key file exist and
%% the key file parses; 'undefined' (skip this cert) otherwise.
ssl_options(Context) ->
    {ok, CertFiles} = cert_files(Context),
    CertFile = proplists:get_value(certfile, CertFiles),
    KeyFile = proplists:get_value(keyfile, CertFiles),
    case {filelib:is_file(CertFile), filelib:is_file(KeyFile)} of
        {false, false} ->
            ?LOG_NOTICE(#{
                text => <<"mod_ssl_letsencrypt: no cert and key files, skipping.">>,
                in => zotonic_mod_ssl_letsencrypt,
                cert_filename => CertFile,
                key_filename => KeyFile
            }),
            undefined;
        {false, true} ->
            ?LOG_NOTICE(#{
                text => <<"mod_ssl_letsencrypt: no cert file (though there is a key file), skipping.">>,
                in => zotonic_mod_ssl_letsencrypt,
                cert_filename => CertFile,
                key_filename => KeyFile
            }),
            undefined;
        {true, false} ->
            ?LOG_NOTICE(#{
                text => <<"mod_ssl_letsencrypt: no key file (though there is a cert file), skipping.">>,
                in => zotonic_mod_ssl_letsencrypt,
                cert_filename => CertFile,
                key_filename => KeyFile
            }),
            undefined;
        {true, true} ->
            %% Both present: only use them when the key actually decodes.
            case check_keyfile(KeyFile, Context) of
                ok -> {ok, CertFiles};
                {error, _} -> undefined
            end
    end.
%% @doc Build the ssl option proplist for this site's letsencrypt cert; the
%% cacertfile entry is only included when the CA file exists on disk.
%% NOTE(review): certfile/keyfile are converted to lists, but cacertfile is
%% left as a binary — inconsistent with cert_files_all/1; confirm the ssl
%% consumer accepts a binary path here.
cert_files(Context) ->
    SSLDir = cert_dir(Context),
    Hostname = z_context:hostname(Context),
    Files = [
        {certfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".crt">>))},
        {keyfile, z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, ".key">>))}
    ] ++ z_ssl_dhfile:dh_options(),
    CaCertFile = filename:join(SSLDir, <<Hostname/binary, ".ca.crt">>),
    case filelib:is_file(CaCertFile) of
        false -> {ok, Files};
        true -> {ok, [{cacertfile, CaCertFile} | Files]}
    end.
%% All three candidate certificate file paths (leaf cert, CA cert, key) for
%% this site, regardless of whether the files currently exist.
cert_files_all(Context) ->
    SSLDir = cert_dir(Context),
    Hostname = z_context:hostname(Context),
    Path = fun(Ext) ->
        z_convert:to_list(filename:join(SSLDir, <<Hostname/binary, Ext/binary>>))
    end,
    {ok, [
        {certfile, Path(<<".crt">>)},
        {cacertfile, Path(<<".ca.crt">>)},
        {keyfile, Path(<<".key">>)}
    ]}.
%% @doc Directory where this site's letsencrypt files live. Prefers the
%% site's priv/security/letsencrypt dir when it exists, otherwise falls
%% back to the global security dir.
cert_dir(Context) ->
    PrivSSLDir = filename:join([z_path:site_dir(Context), "priv", "security", "letsencrypt"]),
    case filelib:is_dir(PrivSSLDir) of
        true ->
            PrivSSLDir;
        false ->
            {ok, SecurityDir} = z_config_files:security_dir(),
            filename:join([ SecurityDir, z_context:site(Context), "letsencrypt" ])
    end.
%% @doc Scratch directory used by the ACME client whilst requesting a cert.
cert_temp_dir(Context) ->
    filename:join([cert_dir(Context), "tmp"]).
%% @doc Sanity check that the key file is readable and PEM-decodes to at
%% least one entry; logs and returns an error tuple otherwise.
-spec check_keyfile(string(), z:context()) -> ok | {error, no_private_keys_found|term()}.
check_keyfile(KeyFile, Context) ->
    Hostname = z_context:hostname(Context),
    case file:read_file(KeyFile) of
        {ok, Bin} ->
            case public_key:pem_decode(Bin) of
                [] ->
                    ?LOG_ERROR(#{
                        text => <<"No private keys for Letsencrypt found">>,
                        in => zotonic_mod_ssl_letsencrypt,
                        result => error,
                        hostname => Hostname,
                        reason => no_private_keys_found,
                        key_file => KeyFile
                    }),
                    {error, no_private_keys_found};
                _ ->
                    ok
            end;
        {error, Reason} = Error ->
            ?LOG_ERROR(#{
                text => <<"Cannot read Letsencrypt key file">>,
                in => zotonic_mod_ssl_letsencrypt,
                key_file => KeyFile,
                result => error,
                hostname => Hostname,
                reason => Reason
            }),
            Error
    end.
%% @doc Ensure that we have a RSA key for Letsencrypt.
%% Generates a new key with openssl when none exists yet. The containing
%% directory is restricted to the owner (0700) and the key itself to 0600.
-spec ensure_key_file(z:context()) -> {ok, string()} | {error, openssl|term()}.
ensure_key_file(Context) ->
    SSLDir = cert_dir(Context),
    KeyFile = filename:join(SSLDir, "letsencrypt_api.key"),
    case filelib:is_file(KeyFile) of
        true ->
            {ok, KeyFile};
        false ->
            ?LOG_NOTICE(#{
                text => <<"Generating RSA key for LetsEncrypt">>,
                in => zotonic_mod_ssl_letsencrypt,
                key_file => KeyFile
            }),
            ok = z_filelib:ensure_dir(KeyFile),
            %% Restrict the key's directory to the owner. (Bug fix: this
            %% previously used filename:basename/1, which chmod'ed a
            %% relative filename in the cwd instead of the directory.)
            _ = file:change_mode(filename:dirname(KeyFile), 8#00700),
            Escaped = z_filelib:os_filename(KeyFile),
            Cmd = "openssl genrsa -out "
                ++ Escaped
                ++ " "
                ++ z_convert:to_list(?KEY_BITS),
            Result = os:cmd(Cmd),
            %% os:cmd/1 gives no exit status; success is checked by the
            %% presence (and decodability) of the generated file.
            case filelib:is_file(KeyFile) of
                true ->
                    _ = file:change_mode(KeyFile, 8#00600),
                    case check_keyfile(KeyFile, Context) of
                        ok ->
                            {ok, KeyFile};
                        {error, _} = Error ->
                            Error
                    end;
                false ->
                    ?LOG_ERROR(#{
                        text => <<"Error generating RSA key for LetsEncrypt">>,
                        in => zotonic_mod_ssl_letsencrypt,
                        key_file => KeyFile,
                        result => error,
                        reason => Result
                    }),
                    {error, openssl}
            end
    end.
%% @doc Download the CA certificate from ?CA_CERT_URL and store it next to
%% the site's cert. Only accepts the two known cert content-types.
-spec download_cacert(z:context()) -> ok | {error, term()}.
download_cacert(Context) ->
    case z_url_fetch:fetch(?CA_CERT_URL, []) of
        {ok, {_Url, Hs, _Size, Cert}} ->
            case proplists:get_value("content-type", Hs) of
                "application/x-x509-ca-cert" ->
                    save_ca_cert(Cert, Context);
                "application/x-pem-file" ->
                    save_ca_cert(Cert, Context);
                CT ->
                    ?LOG_ERROR(#{
                        text => <<"Download of cert file returned unexpected content-type">>,
                        in => zotonic_mod_ssl_letsencrypt,
                        result => error,
                        reason => content_type,
                        url => ?CA_CERT_URL,
                        content_type => CT
                    }),
                    {error, content_type}
            end;
        {error, Reason} = Error ->
            ?LOG_ERROR(#{
                text => <<"Download of cert file failed">>,
                in => zotonic_mod_ssl_letsencrypt,
                result => error,
                reason => Reason,
                url => ?CA_CERT_URL
            }),
            Error
    end.
%% Write the downloaded CA certificate as "<hostname>.ca.crt" into the
%% site's cert directory and make it world readable.
save_ca_cert(Cert, Context) ->
    Hostname = z_context:hostname(Context),
    TargetFile = filename:join(cert_dir(Context), <<Hostname/binary, ".ca.crt">>),
    case file:write_file(TargetFile, Cert) of
        {error, _} = Error ->
            Error;
        ok ->
            _ = file:change_mode(TargetFile, 8#00644),
            ok
    end.
|
fa34f8647ad0421c99cda3e4980ff7e0a6ed4cc0509a5ae7603c8bda971ee22f | armon/teles | teles_app.erl | -module(teles_app).
-behaviour(application).

%% Application callbacks
-export([start/2, stop/1]).

%% ===================================================================
%% Application callbacks
%% ===================================================================

%% @doc Application start callback: boot the top-level supervisor.
start(_StartType, _StartArgs) ->
    teles_sup:start_link().

%% @doc Application stop callback; nothing to clean up.
stop(_State) ->
    ok.
| null | https://raw.githubusercontent.com/armon/teles/d65db29e69e5bcb83ac0e7adae6c56ad42fd323c/src/teles_app.erl | erlang | Application callbacks
===================================================================
Application callbacks
=================================================================== | -module(teles_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
teles_sup:start_link().
stop(_State) ->
ok.
|
ab682575399211d5644ee81554db0b23792139bf8ca822928b96ad3e6b883855 | 1HaskellADay/1HAD | Solution.hs | module HAD.Y2014.M03.D10.Solution where
import Data.Maybe (listToMaybe)
import Numeric (readDec)
-- $setup
-- >>> import Test.QuickCheck
-- >>> import Control.Applicative
-- | maybeReadPositiveInt Try to parse a positive Int
-- Can be done point-free (and it's probably funnier this way).
--
--
-- Examples:
--
-- prop> (==) <$> Just <*> maybeReadPositiveInt . show $ getNonNegative x
--
-- prop> Nothing == (maybeReadPositiveInt . show . negate . getPositive $ x)
--
-- >>> maybeReadPositiveInt "foo"
-- Nothing
--
-- >>> maybeReadPositiveInt "12 "
-- Nothing
-- | Parse a non-negative decimal 'Int'. Any leftover input — including
-- leading/trailing whitespace or a sign — makes the parse fail.
maybeReadPositiveInt :: String -> Maybe Int
maybeReadPositiveInt str =
  listToMaybe [ n | (n, rest) <- readDec str, null rest ]
| null | https://raw.githubusercontent.com/1HaskellADay/1HAD/3b3f9b7448744f9b788034f3aca2d5050d1a5c73/exercises/HAD/Y2014/M03/D10/Solution.hs | haskell | $setup
>>> import Test.QuickCheck
| maybeReadPositiveInt Try to parse a positive Int
Can be done point-free (and it's probably funnier this way).
Examples:
prop> (==) <$> Just <*> maybeReadPositiveInt . show $ getNonNegative x
Nothing
Nothing | module HAD.Y2014.M03.D10.Solution where
import Data.Maybe (listToMaybe)
import Numeric (readDec)
> > > import Control . Applicative
prop > Nothing = = ( maybeReadPositiveInt . show . negate . getPositive $ x )
> > > maybeReadPositiveInt " foo "
> > > maybeReadPositiveInt " 12 "
maybeReadPositiveInt :: String -> Maybe Int
maybeReadPositiveInt =
fmap fst . listToMaybe . filter (null . snd) . readDec
|
faf84e083bf8b0394980c4105f29124d7d9f241742a50801e5daa8e7d4077d92 | Clojure2D/clojure2d | project.clj | (defproject palettes "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.1"]
[generateme/fastmath "2.0.0-alpha1"]
[scicloj/clojisr "1.0.0-BETA14"]
[org.clojure/data.json "2.4.0"]])
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d/5cb4dbfe6ed492c8c003f28aea4f1611470484c2/utils/palettes/project.clj | clojure | (defproject palettes "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.1"]
[generateme/fastmath "2.0.0-alpha1"]
[scicloj/clojisr "1.0.0-BETA14"]
[org.clojure/data.json "2.4.0"]])
| |
45bd49dd1fa83ac395d2036c99d954f7f2f7ede466785ada8706dd8e6e044174 | Verites/verigraph | GGXReader.hs | module XML.GGXReader
( readGrammar
, readGGName
, readName
, readNames
, readTypeGraph
, readRules
, readGraphs
, readSequences
, readSequencesWithObjectFlow
, instantiateRule
, instantiateSpan
, minimalSafetyNacsWithLog
, showMinimalSafetyNacsLog
) where
import Data.Function (on)
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe (fromJust, fromMaybe, mapMaybe)
import Text.XML.HXT.Core
import Abstract.Category
import Abstract.Constraint
import Abstract.Rewriting.DPO
import Base.Valid
import Category.TypedGraphRule
import qualified Data.Graphs as G
import Data.Graphs.Morphism as GM
import Data.TypedGraph
import Data.TypedGraph.Morphism
import Rewriting.DPO.TypedGraph as GR
import Rewriting.DPO.TypedGraphRule
import qualified XML.Formulas as F
import XML.GGXParseIn
import XML.GGXReader.SndOrder
import XML.GGXReader.Span
import XML.ParsedTypes
import XML.Utilities
import XML.XMLUtilities
-- | Reads the grammar in the XML, adds the needed minimal safety nacs
-- to second-order, and returns the grammar and a log
readGrammar :: String -> Bool -> MorphismsConfig (TypedGraphMorphism a b)
            -> IO (Grammar (TypedGraphMorphism a b), Grammar (RuleMorphism a b), [(String, Int)])
readGrammar fileName useConstraints morphismsConf = do
  parsedTypeGraphs <- readTypeGraph fileName
  let parsedTypeGraph = case parsedTypeGraphs of
                          [] -> error "error, type graph not found"
                          ptg:_ -> ptg
  -- Force the type graph now so a missing type graph errors out here.
  _ <- parsedTypeGraph `seq` return ()
  let typeGraph = instantiateTypeGraph parsedTypeGraph

  parsedGraphs <- readGraphs fileName
  parsedRules <- readRules fileName

  -- Second-order rules are recognized by the "2rule_" name prefix.
  let (sndOrdRules, fstOrdRules) = L.partition (\((x,_,_,_),_) -> L.isPrefixOf "2rule_" x) parsedRules
      rulesNames = map (\((x,_,_,_),_) -> x) fstOrdRules
      productions = map (instantiateRule typeGraph) fstOrdRules

  ensureValid $ validateNamed (\name -> "Rule '"++name++"'") (zip rulesNames productions)
  _ <- (L.null productions && error "No first-order productions were found, at least one is needed.") `seq` return ()

  parsedAtomicConstraints <- readAtomicConstraints fileName
  parsedGraphConstraints <- readGraphConstraints fileName

  -- Constraints are only instantiated when requested by the caller.
  let cons
        | useConstraints =
            let namedAtomic = L.groupBy ((==) `on` fst) $ map (instantiateAtomicConstraint typeGraph) parsedAtomicConstraints
                atomic = map (joinConstraints . map snd) namedAtomic
            in instantiateConstraints parsedGraphConstraints atomic
        | otherwise = []
        where
          -- We join positive constraints with "or", which is the behaviour of AGG.
          -- We join negative constraints with "and", which is the behaviour of NACs.
          joinConstraints [x] = Atomic x
          joinConstraints (x:xs)
            | positive x = Or (Atomic x) (joinConstraints xs)
            | otherwise = And (Atomic x) (joinConstraints xs)

  -- gets only the first graph as initial, because verigraph supports only
  -- one initial graph per grammar
  let initGraph = head (map snd parsedGraphs)
      fstOrderGrammar = grammar initGraph cons (zip rulesNames productions)

      sndOrderRules = instantiateSndOrderRules typeGraph sndOrdRules
      emptyRule = emptyGraphRule typeGraph
      sndOrderGrammar = grammar emptyRule [] sndOrderRules

      morphismsConf' = toSndOrderMorphismsConfig morphismsConf
      (sndOrderGrammarWithMinimalSafetyNacs, logNewNacs) =
        minimalSafetyNacsWithLog morphismsConf' sndOrderGrammar

  -- Validate second-order rules eagerly, reporting offending indices.
  _ <- (case L.elemIndices False (map (isValid . snd) sndOrderRules) of
          [] -> []
          [a] -> error $ "Second Order Rule " ++ show a ++ " is not valid (starting from 0)."
          l -> error $ "Second Order Rules " ++ show l ++ " are not valid (starting from 0)."
        ) `seq` return ()

  return (fstOrderGrammar, sndOrderGrammarWithMinimalSafetyNacs, logNewNacs)
-- | The grammar's name as stored in the GGX file; defaults to @\"GraGra\"@
-- when the file declares no name.
readGGName :: String -> IO String
readGGName filePath = do
  names <- readName filePath
  case names of
    (ggName : _) -> return ggName
    []           -> return "GraGra"
-- Minimal Safety Nacs Logs

-- FIX: find a better place for this two functions
-- | Adds minimal-safety NACs to every production of the second-order
-- grammar and reports, per rule name, how many NACs were added.
minimalSafetyNacsWithLog :: MorphismsConfig (RuleMorphism a b) -> Grammar (RuleMorphism a b)
                         -> (Grammar (RuleMorphism a b), [(String, Int)])
minimalSafetyNacsWithLog conf oldGG = (newGG, printNewNacs)
  where
    newNacs =
      map (\(n,r) ->
        let newRule = addMinimalSafetyNacs conf r
            tamNewNacs = length (nacs newRule)
            tamNacs = length (nacs r)
         in ((n, newRule), (n, tamNewNacs - tamNacs))
        ) (productions oldGG)
    newGG = oldGG {productions = map fst newNacs}
    printNewNacs = map snd newNacs
-- | Render the per-rule counts of added minimal-safety NACs as report lines.
showMinimalSafetyNacsLog :: [(String, Int)] -> [String]
showMinimalSafetyNacsLog = map describe
  where
    describe (ruleName, added) =
      "Rule " ++ ruleName ++ ", added " ++ show added ++ " nacs"
-- | Reads the names of node/edge types and NACs, which are necessary when
-- reexporting this grammar.
--
-- To lookup the name of a node type, use @"I" ++ show nodeId@ as key, where
-- @nodeId@ is the ID of the node in the type graph. Lookup of edge types is
-- analogous.
readNames :: String -> IO [(String,String)]
readNames fileName = do
  typeNames <- readTypeNames fileName
  nacNames  <- readNacNames fileName
  return (typeNames ++ nacNames)
-- | Grammar name(s) declared in the GGX file.
readName :: String -> IO [String]
readName fileName = runX (parseXML fileName >>> parseGGName)

-- | Parsed (not yet instantiated) type graphs of the GGX file.
readTypeGraph :: String -> IO[ParsedTypeGraph]
readTypeGraph fileName = runX (parseXML fileName >>> parseTypeGraph)

-- | NAC name table, flattened over all rules.
readNacNames :: String -> IO [(String,String)]
readNacNames fileName = concat <$> runX (parseXML fileName >>> parseNacNames)

-- | Node/edge type name table.
readTypeNames :: String -> IO [(String,String)]
readTypeNames fileName = concat <$> runX (parseXML fileName >>> parseNames)

-- | Atomic graph constraints, still in parsed form.
readAtomicConstraints :: String -> IO[ParsedAtomicConstraint]
readAtomicConstraints fileName = runX (parseXML fileName >>> parseAtomicConstraints)

-- | Named constraint formulas over the atomic constraints.
readGraphConstraints :: String -> IO[(String,F.Formula)]
readGraphConstraints fileName = runX (parseXML fileName >>> parseGraphConstraints)
-- readGraphs' :: String -> IO[[ParsedTypedGraph]]
-- readGraphs' fileName = runX (parseXML fileName >>> parseGraphs)
-- | Reads the named host graphs of the grammar, typed over the grammar's
-- type graph.
readGraphs :: String -> IO [(String, TypedGraph a b)]
readGraphs fileName =
  do
    -- NOTE(review): partial pattern — fails unless exactly one type graph
    -- (and one graph list) is present; confirm GGX guarantees this.
    [parsedTypeGraph] <- readTypeGraph fileName
    let typeGraph = instantiateTypeGraph parsedTypeGraph

    [parsedGraphs] <- runX (parseXML fileName >>> parseGraphs)
    let instantiate graph@(name, _, _) = (name, instantiateTypedGraph graph typeGraph)
    return $ map instantiate parsedGraphs
-- | Parsed rules of the grammar, each paired with its NACs.
readRules :: String -> IO[RuleWithNacs]
readRules fileName = runX (parseXML fileName >>> parseRule)
-- | Rule sequences from the GGX file, expanded against the given grammar
-- (repetition counts applied, names resolved to productions).
readSequences :: Grammar (TypedGraphMorphism a b) -> String -> IO [(String, [GR.TypedGraphRule a b])]
readSequences grammar fileName = map (expandSequence grammar) <$> runX (parseXML fileName >>> parseRuleSequence)
-- | Expands a parsed sequence (with repetition counts) into the flat list
-- of productions; rule names missing from the grammar are silently dropped.
expandSequence :: Grammar (TypedGraphMorphism a b) -> Sequence -> (String, [GR.TypedGraphRule a b])
expandSequence grammar (name,s,_) = (name, mapMaybe lookupRule . concat $ map expandSub s)
  where
    -- Repeat the whole subsequence i times (note: 's' shadows the outer 's').
    expandSub (i, s) = concat $ replicate i $ concatMap expandItens s
    -- Repeat a single rule name i times.
    expandItens (i, r) = replicate i r
    lookupRule name = L.lookup name (productions grammar)
-- | Like 'readSequences', but keeps rule names and also instantiates the
-- object flows declared between consecutive rules.
readSequencesWithObjectFlow :: Grammar (TypedGraphMorphism a b) -> String -> IO [(String, [(String, GR.TypedGraphRule a b)], [ObjectFlow (TypedGraphMorphism a b)])]
readSequencesWithObjectFlow grammar fileName = map (prepareFlows grammar) <$> runX (parseXML fileName >>> parseRuleSequence)
-- | Resolve the rule names of a sequence's first subsequence into named
-- productions and instantiate its object flows.
-- Partial: 'head'/'fromJust' crash on an empty sequence or unknown names.
prepareFlows :: Grammar (TypedGraphMorphism a b) -> Sequence -> (String, [(String, GR.TypedGraphRule a b)], [ObjectFlow (TypedGraphMorphism a b)])
prepareFlows grammar (name,s,flows) = (name, map fun getAll, objs)
  where
    fun name = (name, fromJust $ lookupRule name)
    -- gets only the first subsequence
    -- NOTE(review): this binding was lost in extraction and has been
    -- reconstructed from expandSequence's data shape — verify upstream.
    getAll = map snd (snd (head s))
    lookupRule name = L.lookup name (productions grammar)
    objs = instantiateObjectsFlow (productions grammar) flows
-- | Build one 'ObjectFlow' per parsed flow, resolving producer and consumer
-- rules by name. Lookups are partial ('fromJust'): unknown rule names crash.
-- Effectively a map over the flows, written as explicit recursion.
instantiateObjectsFlow :: [(String, Production (TypedGraphMorphism a b))] -> [ParsedObjectFlow] -> [ObjectFlow (TypedGraphMorphism a b)]
instantiateObjectsFlow _ [] = []
instantiateObjectsFlow [] _ = []
instantiateObjectsFlow productions (o:os) =
  let
    createObject (idx,cons,prod,maps) = ObjectFlow idx prod cons (createSpan prod cons maps)
    -- Span between the RHS of the producer and the LHS of the consumer.
    createSpan prod cons = instantiateSpan (rightObject (searchRight prod)) (leftObject (searchLeft cons))
    searchLeft ruleName = fromJust $ L.lookup ruleName productions
    searchRight ruleName = fromJust $ L.lookup ruleName productions
  in createObject o : instantiateObjectsFlow productions os
-- | Build the type graph from its parsed form: first all node types are
-- inserted, then all edge types (whose endpoints are looked up by name).
instantiateTypeGraph :: ParsedTypeGraph -> TypeGraph a b
instantiateTypeGraph (nodes, edges) = graphWithEdges
  where
    -- Resolve a parsed node name to its numeric NodeId.
    getNodeType = G.NodeId . toN . lookupNodes nodes
    trd (_,_,x) = x
    nodesId = map (G.NodeId . toN . trd) nodes
    edgesId = map (\(_, _, typ, src, tgt) -> ((G.EdgeId . toN) typ, getNodeType src, getNodeType tgt)) edges
    graphWithNodes = foldr G.insertNode G.empty nodesId
    graphWithEdges = foldr (\(ide,src,tgt) g -> G.insertEdge ide src tgt g) graphWithNodes edgesId
-- | Look a node name up in the parsed node list and return its type id;
-- errors out (with the offending name) when the node is unknown.
lookupNodes :: [ParsedTypedNode] -> String -> String
lookupNodes nodes n =
  case lookup n pairs of
    Just typeId -> typeId
    Nothing     -> error ("Error getting node type of: " ++ show n)
  where
    pairs = [ (nodeId, typeId) | (nodeId, _, typeId) <- nodes ]
-- | Build a named atomic constraint from its parsed premise/conclusion
-- graphs. A leading "-" in the name marks the constraint as negative.
instantiateAtomicConstraint :: TypeGraph a b -> ParsedAtomicConstraint -> (String, AtomicConstraint (TypedGraphMorphism a b))
instantiateAtomicConstraint tg (name, premise, conclusion, maps) = (name, buildNamedAtomicConstraint name (buildTypedGraphMorphism p c m) isPositive)
  where
    p = instantiateTypedGraph premise tg
    c = instantiateTypedGraph conclusion tg
    m = buildGraphMorphism (domain p) (domain c) (map mapToId mNodes) (map mapToId mEdges)
    isPositive = not $ L.isPrefixOf "-" name
    mapToId (a,_,b) = (toN b, toN a)
    pNodes = G.nodeIds (domain p)
    -- Split the mappings into node maps and edge maps: a mapping is a node
    -- map iff its source id occurs as a node of the premise graph.
    (mNodes,mEdges) = L.partition (\(_,_,x) -> G.NodeId (toN x) `elem` pNodes) maps
-- | Translate each parsed formula into a constraint tree, with the atomic
-- constraints addressed by their 1-based position in the given list.
instantiateConstraints :: [(String, F.Formula)] -> [Constraint (TypedGraphMorphism a b)] -> [Constraint (TypedGraphMorphism a b)]
instantiateConstraints formulas atomicConstraints = map (translateFormula mappings) f
  where
    f = map snd formulas
    mappings = M.fromAscList $ zip [1..] atomicConstraints
-- | Replace the integer leaves of a parsed formula by the constraints they
-- index. NOTE(review): 'M.!' is partial — an out-of-range index crashes;
-- confirm the GGX parser guarantees indices stay within the map.
translateFormula :: M.Map Int (Constraint (TypedGraphMorphism a b)) -> F.Formula -> Constraint (TypedGraphMorphism a b)
translateFormula m formula =
  let
    get = (m M.!) . fromIntegral
  in
    case formula of
      F.IntConst n -> get n
      F.Not formula' -> Not (translateFormula m formula')
      F.Or formula' formula'' -> Or (translateFormula m formula') (translateFormula m formula'')
      F.And formula' formula'' -> And (translateFormula m formula') (translateFormula m formula'')
| null | https://raw.githubusercontent.com/Verites/verigraph/754ec08bf4a55ea7402d8cd0705e58b1d2c9cd67/src/library/XML/GGXReader.hs | haskell | We join negative constraints with "and", which is the behaviour of NACs.
Minimal Safety Nacs Logs
| Reads the names of node/edge types and NACs, which are necessary when reexporting this grammar.
the node in the type graph. Lookup of edge types is analogous. | module XML.GGXReader
( readGrammar
, readGGName
, readName
, readNames
, readTypeGraph
, readRules
, readGraphs
, readSequences
, readSequencesWithObjectFlow
, instantiateRule
, instantiateSpan
, minimalSafetyNacsWithLog
, showMinimalSafetyNacsLog
) where
import Data.Function (on)
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe (fromJust, fromMaybe, mapMaybe)
import Text.XML.HXT.Core
import Abstract.Category
import Abstract.Constraint
import Abstract.Rewriting.DPO
import Base.Valid
import Category.TypedGraphRule
import qualified Data.Graphs as G
import Data.Graphs.Morphism as GM
import Data.TypedGraph
import Data.TypedGraph.Morphism
import Rewriting.DPO.TypedGraph as GR
import Rewriting.DPO.TypedGraphRule
import qualified XML.Formulas as F
import XML.GGXParseIn
import XML.GGXReader.SndOrder
import XML.GGXReader.Span
import XML.ParsedTypes
import XML.Utilities
import XML.XMLUtilities
| Reads the grammar in the XML , adds the needed minimal safety nacs
to second - order , and returns the grammar and a log
readGrammar :: String -> Bool -> MorphismsConfig (TypedGraphMorphism a b)
-> IO (Grammar (TypedGraphMorphism a b), Grammar (RuleMorphism a b), [(String, Int)])
readGrammar fileName useConstraints morphismsConf = do
parsedTypeGraphs <- readTypeGraph fileName
let parsedTypeGraph = case parsedTypeGraphs of
[] -> error "error, type graph not found"
ptg:_ -> ptg
_ <- parsedTypeGraph `seq` return ()
let typeGraph = instantiateTypeGraph parsedTypeGraph
parsedGraphs <- readGraphs fileName
parsedRules <- readRules fileName
let (sndOrdRules, fstOrdRules) = L.partition (\((x,_,_,_),_) -> L.isPrefixOf "2rule_" x) parsedRules
rulesNames = map (\((x,_,_,_),_) -> x) fstOrdRules
productions = map (instantiateRule typeGraph) fstOrdRules
ensureValid $ validateNamed (\name -> "Rule '"++name++"'") (zip rulesNames productions)
_ <- (L.null productions && error "No first-order productions were found, at least one is needed.") `seq` return ()
parsedAtomicConstraints <- readAtomicConstraints fileName
parsedGraphConstraints <- readGraphConstraints fileName
let cons
| useConstraints =
let namedAtomic = L.groupBy ((==) `on` fst) $ map (instantiateAtomicConstraint typeGraph) parsedAtomicConstraints
atomic = map (joinConstraints . map snd) namedAtomic
in instantiateConstraints parsedGraphConstraints atomic
| otherwise = []
where
We join positive constraints with " or " , which is the behaviour of AGG .
joinConstraints [x] = Atomic x
joinConstraints (x:xs)
| positive x = Or (Atomic x) (joinConstraints xs)
| otherwise = And (Atomic x) (joinConstraints xs)
gets only the first graph as initial , because verigraph supports only one initial graph per grammar .
let initGraph = head (map snd parsedGraphs)
fstOrderGrammar = grammar initGraph cons (zip rulesNames productions)
sndOrderRules = instantiateSndOrderRules typeGraph sndOrdRules
emptyRule = emptyGraphRule typeGraph
sndOrderGrammar = grammar emptyRule [] sndOrderRules
morphismsConf' = toSndOrderMorphismsConfig morphismsConf
(sndOrderGrammarWithMinimalSafetyNacs, logNewNacs) =
minimalSafetyNacsWithLog morphismsConf' sndOrderGrammar
_ <- (case L.elemIndices False (map (isValid . snd) sndOrderRules) of
[] -> []
[a] -> error $ "Second Order Rule " ++ show a ++ " is not valid (starting from 0)."
l -> error $ "Second Order Rules " ++ show l ++ " are not valid (starting from 0)."
) `seq` return ()
return (fstOrderGrammar, sndOrderGrammarWithMinimalSafetyNacs, logNewNacs)
readGGName :: String -> IO String
readGGName fileName = do
name <- readName fileName
let ret = case name of
n:_ -> n
_ -> "GraGra"
return ret
FIX : find a better place for this two functions
minimalSafetyNacsWithLog :: MorphismsConfig (RuleMorphism a b) -> Grammar (RuleMorphism a b)
-> (Grammar (RuleMorphism a b), [(String, Int)])
minimalSafetyNacsWithLog conf oldGG = (newGG, printNewNacs)
where
newNacs =
map (\(n,r) ->
let newRule = addMinimalSafetyNacs conf r
tamNewNacs = length (nacs newRule)
tamNacs = length (nacs r)
in ((n, newRule), (n, tamNewNacs - tamNacs))
) (productions oldGG)
newGG = oldGG {productions = map fst newNacs}
printNewNacs = map snd newNacs
showMinimalSafetyNacsLog :: [(String, Int)] -> [String]
showMinimalSafetyNacsLog printNewNacs =
[ "Rule " ++ r ++ ", added " ++ show n ++ " nacs" | (r,n) <- printNewNacs ]
To lookup the name of a node type , use @"I " + + show nodeId@ as key , where @nodeId@ is the ID of
readNames :: String -> IO [(String,String)]
readNames fileName = (++) <$> readTypeNames fileName <*> readNacNames fileName
readName :: String -> IO [String]
readName fileName = runX (parseXML fileName >>> parseGGName)
readTypeGraph :: String -> IO[ParsedTypeGraph]
readTypeGraph fileName = runX (parseXML fileName >>> parseTypeGraph)
readNacNames :: String -> IO [(String,String)]
readNacNames fileName = concat <$> runX (parseXML fileName >>> parseNacNames)
readTypeNames :: String -> IO [(String,String)]
readTypeNames fileName = concat <$> runX (parseXML fileName >>> parseNames)
readAtomicConstraints :: String -> IO[ParsedAtomicConstraint]
readAtomicConstraints fileName = runX (parseXML fileName >>> parseAtomicConstraints)
readGraphConstraints :: String -> IO[(String,F.Formula)]
readGraphConstraints fileName = runX (parseXML fileName >>> parseGraphConstraints)
readGraphs ' : : String - > IO[[ParsedTypedGraph ] ]
readGraphs ' fileName = runX ( parseXML fileName > > > parseGraphs )
readGraphs :: String -> IO [(String, TypedGraph a b)]
readGraphs fileName =
do
[parsedTypeGraph] <- readTypeGraph fileName
let typeGraph = instantiateTypeGraph parsedTypeGraph
[parsedGraphs] <- runX (parseXML fileName >>> parseGraphs)
let instantiate graph@(name, _, _) = (name, instantiateTypedGraph graph typeGraph)
return $ map instantiate parsedGraphs
readRules :: String -> IO[RuleWithNacs]
readRules fileName = runX (parseXML fileName >>> parseRule)
readSequences :: Grammar (TypedGraphMorphism a b) -> String -> IO [(String, [GR.TypedGraphRule a b])]
readSequences grammar fileName = map (expandSequence grammar) <$> runX (parseXML fileName >>> parseRuleSequence)
expandSequence :: Grammar (TypedGraphMorphism a b) -> Sequence -> (String, [GR.TypedGraphRule a b])
expandSequence grammar (name,s,_) = (name, mapMaybe lookupRule . concat $ map expandSub s)
where
expandSub (i, s) = concat $ replicate i $ concatMap expandItens s
expandItens (i, r) = replicate i r
lookupRule name = L.lookup name (productions grammar)
readSequencesWithObjectFlow :: Grammar (TypedGraphMorphism a b) -> String -> IO [(String, [(String, GR.TypedGraphRule a b)], [ObjectFlow (TypedGraphMorphism a b)])]
readSequencesWithObjectFlow grammar fileName = map (prepareFlows grammar) <$> runX (parseXML fileName >>> parseRuleSequence)
prepareFlows :: Grammar (TypedGraphMorphism a b) -> Sequence -> (String, [(String, GR.TypedGraphRule a b)], [ObjectFlow (TypedGraphMorphism a b)])
prepareFlows grammar (name,s,flows) = (name, map fun getAll, objs)
where
fun name = (name, fromJust $ lookupRule name)
gets only the first subsequence
lookupRule name = L.lookup name (productions grammar)
objs = instantiateObjectsFlow (productions grammar) flows
instantiateObjectsFlow :: [(String, Production (TypedGraphMorphism a b))] -> [ParsedObjectFlow] -> [ObjectFlow (TypedGraphMorphism a b)]
instantiateObjectsFlow _ [] = []
instantiateObjectsFlow [] _ = []
instantiateObjectsFlow productions (o:os) =
let
createObject (idx,cons,prod,maps) = ObjectFlow idx prod cons (createSpan prod cons maps)
createSpan prod cons = instantiateSpan (rightObject (searchRight prod)) (leftObject (searchLeft cons))
searchLeft ruleName = fromJust $ L.lookup ruleName productions
searchRight ruleName = fromJust $ L.lookup ruleName productions
in createObject o : instantiateObjectsFlow productions os
instantiateTypeGraph :: ParsedTypeGraph -> TypeGraph a b
instantiateTypeGraph (nodes, edges) = graphWithEdges
where
getNodeType = G.NodeId . toN . lookupNodes nodes
trd (_,_,x) = x
nodesId = map (G.NodeId . toN . trd) nodes
edgesId = map (\(_, _, typ, src, tgt) -> ((G.EdgeId . toN) typ, getNodeType src, getNodeType tgt)) edges
graphWithNodes = foldr G.insertNode G.empty nodesId
graphWithEdges = foldr (\(ide,src,tgt) g -> G.insertEdge ide src tgt g) graphWithNodes edgesId
lookupNodes :: [ParsedTypedNode] -> String -> String
lookupNodes nodes n = fromMaybe
(error ("Error getting node type of: " ++ show n))
(lookup n changeToListOfPairs)
where
changeToListOfPairs = map (\(x,_,y) -> (x,y)) nodes
instantiateAtomicConstraint :: TypeGraph a b -> ParsedAtomicConstraint -> (String, AtomicConstraint (TypedGraphMorphism a b))
instantiateAtomicConstraint tg (name, premise, conclusion, maps) = (name, buildNamedAtomicConstraint name (buildTypedGraphMorphism p c m) isPositive)
where
p = instantiateTypedGraph premise tg
c = instantiateTypedGraph conclusion tg
m = buildGraphMorphism (domain p) (domain c) (map mapToId mNodes) (map mapToId mEdges)
isPositive = not $ L.isPrefixOf "-" name
mapToId (a,_,b) = (toN b, toN a)
pNodes = G.nodeIds (domain p)
(mNodes,mEdges) = L.partition (\(_,_,x) -> G.NodeId (toN x) `elem` pNodes) maps
instantiateConstraints :: [(String, F.Formula)] -> [Constraint (TypedGraphMorphism a b)] -> [Constraint (TypedGraphMorphism a b)]
instantiateConstraints formulas atomicConstraints = map (translateFormula mappings) f
where
f = map snd formulas
mappings = M.fromAscList $ zip [1..] atomicConstraints
translateFormula :: M.Map Int (Constraint (TypedGraphMorphism a b)) -> F.Formula -> Constraint (TypedGraphMorphism a b)
translateFormula m formula =
let
get = (m M.!) . fromIntegral
in
case formula of
F.IntConst n -> get n
F.Not formula' -> Not (translateFormula m formula')
F.Or formula' formula'' -> Or (translateFormula m formula') (translateFormula m formula'')
F.And formula' formula'' -> And (translateFormula m formula') (translateFormula m formula'')
|
2e0fddca7f2ad86875a419786783d8a4e68afefcebe2f98d74a6a5240bf0558e | vert-x/mod-lang-clojure | eventbus_test.clj | Copyright 2013 the original author or authors .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns vertx.eventbus-test
(:require [vertx.testtools :as t]
[vertx.core :as core]
[vertx.eventbus :as eb]
[clojure.test :refer [deftest is use-fixtures]])
(:import org.vertx.java.core.eventbus.ReplyException))
(use-fixtures :each t/as-embedded)
(deftest eb-send
(let [msg {:ham "biscuit"}
addr "eb.test"
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/test-complete
(is (= msg m))
(is eb/*current-message*)
(is (= addr eb/*current-address*))
(eb/unregister-handler @id)))))
(is (not (nil? @id)))
(eb/send addr msg)))
(deftest reply
(let [msg {:ham "biscuit"}
addr "eb.test"
reply {:biscuit "gravy"}
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/test-complete
(is (= msg m))
(eb/reply reply)))))
(is (not (nil? @id)))
(eb/send addr msg
(fn [m]
(t/test-complete
(is (= reply m))
(eb/unregister-handler @id))))))
(deftest send-unregister-send
(let [msg {:ham "biscuit"}
addr "eb.test"
id (atom nil)
rcvd (atom false)]
(reset! id
(eb/on-message
addr
(fn [m]
(if @rcvd
(throw (IllegalStateException. "Handler already called")))
(is (= msg m))
(eb/unregister-handler @id)
(reset! rcvd true)
(core/timer 100 (t/test-complete)))))
(t/assert-not-nil @id)
(dotimes [_ 2]
(eb/send addr msg))))
(deftest publish-multiple-matching-handlers
(let [msg {:ham "biscuit"}
addr "eb.test"
total 10
count (atom 1)]
(dotimes [_ total]
(let [id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/assert= msg m)
(eb/unregister-handler @id)
(swap! count inc)
(if (= @count total)
(t/test-complete)))))))
(eb/publish addr msg)))
(deftest reply-of-reply-of-reply
(let [addr "eb.test"
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(is (= "message" m))
(eb/reply "reply"
(fn [m]
(is (= "reply-of-reply" m))
(eb/reply "reply-of-reply-of-reply"))))))
(eb/send addr "message"
(fn [m]
(is (= "reply" m))
(eb/reply "reply-of-reply"
(fn [m]
(t/test-complete
(is (= "reply-of-reply-of-reply" m))
(eb/unregister-handler @id))))))))
(deftest message-types-roundtrip
(let [addr "eb.test"
tfn
(fn [msg]
(let [id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(eb/unregister-handler @id)
(eb/reply m))))
(eb/send addr msg
(fn [m]
(t/test-complete
(is (= msg m)))))))]
(doseq [m ["ham"
nil
true
false
1
1.1
[1 2 3]
[{:a "b"} 2]
{:a "biscuit" :b nil :c true :d false :e 1 :f 1.1 :g [1 2 3]}]]
(tfn m))))
(deftest send-with-timeout-times-out
(let [addr "eb.timeout.test"]
(eb/on-message addr identity) ;; won't reply
(eb/send addr "ham" 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :TIMEOUT (:type err))))))))
(deftest reply-with-timeout-times-out
(let [addr "eb.timeout.reply.test"]
(eb/on-message
addr
(fn [m]
(eb/reply
m 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :TIMEOUT (:type err))))))))
(eb/send addr "ham" identity)))
(deftest send-with-timeout-and-no-handler-triggers-error
(eb/send "i-don't-exist" "ham" 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :NO_HANDLERS (:type err)))))))
(deftest reply-with-fail
(let [addr "eb.timeout.reply.fail.test"]
(eb/on-message
addr
(fn [m]
(eb/fail 1 "busted")))
(eb/send
addr "ham"
1000
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :RECIPIENT_FAILURE (:type err)))
(is (= 1 (:code err)))
(is (= "busted" (:message err))))))))
| null | https://raw.githubusercontent.com/vert-x/mod-lang-clojure/dcf713460b8f46c08d0db6e7bf8537f1dd91f297/api/src/test/clojure/vertx/eventbus_test.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
won't reply | Copyright 2013 the original author or authors .
distributed under the License is distributed on an " AS IS " BASIS ,
(ns vertx.eventbus-test
(:require [vertx.testtools :as t]
[vertx.core :as core]
[vertx.eventbus :as eb]
[clojure.test :refer [deftest is use-fixtures]])
(:import org.vertx.java.core.eventbus.ReplyException))
(use-fixtures :each t/as-embedded)
(deftest eb-send
(let [msg {:ham "biscuit"}
addr "eb.test"
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/test-complete
(is (= msg m))
(is eb/*current-message*)
(is (= addr eb/*current-address*))
(eb/unregister-handler @id)))))
(is (not (nil? @id)))
(eb/send addr msg)))
(deftest reply
(let [msg {:ham "biscuit"}
addr "eb.test"
reply {:biscuit "gravy"}
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/test-complete
(is (= msg m))
(eb/reply reply)))))
(is (not (nil? @id)))
(eb/send addr msg
(fn [m]
(t/test-complete
(is (= reply m))
(eb/unregister-handler @id))))))
(deftest send-unregister-send
(let [msg {:ham "biscuit"}
addr "eb.test"
id (atom nil)
rcvd (atom false)]
(reset! id
(eb/on-message
addr
(fn [m]
(if @rcvd
(throw (IllegalStateException. "Handler already called")))
(is (= msg m))
(eb/unregister-handler @id)
(reset! rcvd true)
(core/timer 100 (t/test-complete)))))
(t/assert-not-nil @id)
(dotimes [_ 2]
(eb/send addr msg))))
(deftest publish-multiple-matching-handlers
(let [msg {:ham "biscuit"}
addr "eb.test"
total 10
count (atom 1)]
(dotimes [_ total]
(let [id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(t/assert= msg m)
(eb/unregister-handler @id)
(swap! count inc)
(if (= @count total)
(t/test-complete)))))))
(eb/publish addr msg)))
(deftest reply-of-reply-of-reply
(let [addr "eb.test"
id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(is (= "message" m))
(eb/reply "reply"
(fn [m]
(is (= "reply-of-reply" m))
(eb/reply "reply-of-reply-of-reply"))))))
(eb/send addr "message"
(fn [m]
(is (= "reply" m))
(eb/reply "reply-of-reply"
(fn [m]
(t/test-complete
(is (= "reply-of-reply-of-reply" m))
(eb/unregister-handler @id))))))))
(deftest message-types-roundtrip
(let [addr "eb.test"
tfn
(fn [msg]
(let [id (atom nil)]
(reset! id
(eb/on-message
addr
(fn [m]
(eb/unregister-handler @id)
(eb/reply m))))
(eb/send addr msg
(fn [m]
(t/test-complete
(is (= msg m)))))))]
(doseq [m ["ham"
nil
true
false
1
1.1
[1 2 3]
[{:a "b"} 2]
{:a "biscuit" :b nil :c true :d false :e 1 :f 1.1 :g [1 2 3]}]]
(tfn m))))
(deftest send-with-timeout-times-out
(let [addr "eb.timeout.test"]
(eb/send addr "ham" 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :TIMEOUT (:type err))))))))
(deftest reply-with-timeout-times-out
(let [addr "eb.timeout.reply.test"]
(eb/on-message
addr
(fn [m]
(eb/reply
m 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :TIMEOUT (:type err))))))))
(eb/send addr "ham" identity)))
(deftest send-with-timeout-and-no-handler-triggers-error
(eb/send "i-don't-exist" "ham" 100
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :NO_HANDLERS (:type err)))))))
(deftest reply-with-fail
(let [addr "eb.timeout.reply.fail.test"]
(eb/on-message
addr
(fn [m]
(eb/fail 1 "busted")))
(eb/send
addr "ham"
1000
(fn [err _]
(t/test-complete
(is err)
(is (instance? ReplyException (:basis err)))
(is (= :RECIPIENT_FAILURE (:type err)))
(is (= 1 (:code err)))
(is (= "busted" (:message err))))))))
|
1a93af496d0d8aa882e3d5a1f2babc1a5b38a90460cee0d942ad3fba524518ab | mroman42/mikrokosmos | NamedLambda.hs | |
Module : NamedLambda
Description : Lambda expressions with named variables
License : GPL-3
This package deals with lambda expressions containing named variables
instead of DeBruijn indexes . It contains parsing and printing fuctions .
Module: NamedLambda
Description: Lambda expressions with named variables
License: GPL-3
This package deals with lambda expressions containing named variables
instead of DeBruijn indexes. It contains parsing and printing fuctions.
-}
module NamedLambda
( NamedLambda (LambdaVariable, LambdaAbstraction, LambdaApplication,
TypedPair, TypedPi1, TypedPi2,
TypedInl, TypedInr, TypedCase, TypedUnit, TypedAbort,
TypedAbsurd)
, lambdaexp
, toBruijn
, nameExp
, quicknameIndexes
, variableNames
)
where
import Text.ParserCombinators.Parsec
import Control.Applicative ((<$>), (<*>))
import qualified Data.Map.Strict as Map
import Lambda
import MultiBimap
import Data.Maybe
import Control.Monad
type Context = MultiBimap Exp String
-- Parsing of Lambda Expressions.
-- The user can input a lambda expression with named variables, of
-- the form of "\x.x" or "(\a.(\b.a b))". The interpreter will parse
-- it into an internal representation.
-- | A lambda expression with named variables.
data NamedLambda = LambdaVariable String -- ^ variable
| LambdaAbstraction String NamedLambda -- ^ lambda abstraction
| LambdaApplication NamedLambda NamedLambda -- ^ function application
| TypedPair NamedLambda NamedLambda -- ^ pair of expressions
^ first projection
^ second projection
| TypedInl NamedLambda -- ^ left injection
| TypedInr NamedLambda -- ^ right injection
| TypedCase NamedLambda NamedLambda NamedLambda -- ^ case of expressions
| TypedUnit -- ^ unit
| TypedAbort NamedLambda -- ^ abort
| TypedAbsurd NamedLambda -- ^ absurd
deriving (Eq)
-- | Parses a lambda expression with named variables.
A lambda expression is a sequence of one or more autonomous
-- lambda expressions. They are parsed assuming left-associativity.
--
> > > parse " " " \\f.\\x.f x "
-- Right λf.λx.(f x)
--
-- Note that double backslashes are neccessary only when we quote strings;
-- it will work only with a simple backslash in the interpreter.
lambdaexp :: Parser NamedLambda
lambdaexp = foldl1 LambdaApplication <$> (spaces >> sepBy1 simpleexp spaces)
-- | Parses a simple lambda expression, without function applications
-- at the top level. It can be a lambda abstraction, a variable or another
-- potentially complex lambda expression enclosed in parentheses.
simpleexp :: Parser NamedLambda
simpleexp = choice
[ try pairParser
, try pi1Parser
, try pi2Parser
, try inlParser
, try inrParser
, try caseParser
, try unitParser
, try abortParser
, try absurdParser
, try lambdaAbstractionParser
, try variableParser
, try (parens lambdaexp)
]
-- | The returned parser parenthesizes the given parser
parens :: Parser a -> Parser a
parens = between (char '(') (char ')')
-- | Parses a variable. Any name can form a lambda variable.
variableParser :: Parser NamedLambda
variableParser = LambdaVariable <$> nameParser
-- | Allowed variable names
nameParser :: Parser String
nameParser = many1 alphaNum
choicest :: [String] -> Parser String
choicest sl = choice (try . string <$> sl)
-- | Parses a lambda abstraction. The '\' is used as lambda.
lambdaAbstractionParser :: Parser NamedLambda
lambdaAbstractionParser = LambdaAbstraction <$>
(lambdaChar >> nameParser) <*> (char '.' >> lambdaexp)
| used to represent lambda in user 's input .
lambdaChar :: Parser Char
lambdaChar = choice [try $ char '\\', try $ char 'λ']
pairParser :: Parser NamedLambda
pairParser = parens (TypedPair <$> lambdaexp <*> (char ',' >> lambdaexp))
pi1Parser, pi2Parser :: Parser NamedLambda
pi1Parser = TypedPi1 <$> (choicest namesPi1 >> lambdaexp)
pi2Parser = TypedPi2 <$> (choicest namesPi2 >> lambdaexp)
inlParser, inrParser :: Parser NamedLambda
inlParser = TypedInl <$> (choicest namesInl >> lambdaexp)
inrParser = TypedInr <$> (choicest namesInr >> lambdaexp)
caseParser :: Parser NamedLambda
caseParser =
TypedCase <$>
(choicest namesCase >> simpleexp) <*>
(choicest namesOf >> simpleexp) <*>
(choicest namesCaseSep >> simpleexp)
unitParser :: Parser NamedLambda
unitParser = choicest namesUnit >> return TypedUnit
abortParser :: Parser NamedLambda
abortParser = TypedAbort <$> (choicest namesAbort >> lambdaexp)
absurdParser :: Parser NamedLambda
absurdParser = TypedAbsurd <$> (choicest namesAbsurd >> lambdaexp)
-- | Shows a lambda expression with named variables.
Parentheses are ignored ; they are written only around applications .
showNamedLambda :: NamedLambda -> String
showNamedLambda (LambdaVariable c) = c
showNamedLambda (LambdaAbstraction c e) = "λ" ++ c ++ "." ++ showNamedLambda e
showNamedLambda (LambdaApplication f g) =
showNamedLambdaPar f ++ " " ++ showNamedLambdaPar g
showNamedLambda (TypedPair a b) =
"(" ++ showNamedLambda a ++ "," ++ showNamedLambda b ++ ")"
showNamedLambda (TypedPi1 a) = head namesPi1 ++ showNamedLambdaPar a
showNamedLambda (TypedPi2 a) = head namesPi2 ++ showNamedLambdaPar a
showNamedLambda (TypedInl a) = head namesInl ++ showNamedLambdaPar a
showNamedLambda (TypedInr a) = head namesInr ++ showNamedLambdaPar a
showNamedLambda (TypedCase a b c) =
last namesCase ++
showNamedLambda a ++
last namesOf ++ showNamedLambda b ++ head namesCaseSep ++ showNamedLambda c
showNamedLambda TypedUnit = head namesUnit
showNamedLambda (TypedAbort a) = head namesAbort ++ showNamedLambdaPar a
showNamedLambda (TypedAbsurd a) = head namesAbsurd ++ showNamedLambdaPar a
showNamedLambdaPar :: NamedLambda -> String
showNamedLambdaPar l@(LambdaVariable _) = showNamedLambda l
showNamedLambdaPar l@TypedUnit = showNamedLambda l
showNamedLambdaPar l@(TypedPair _ _) = showNamedLambda l
showNamedLambdaPar l = "(" ++ showNamedLambda l ++ ")"
instance Show NamedLambda where
show = showNamedLambda
-- Name type constructors
namesPi1 :: [String]
namesPi1 = ["π₁ ", "FST "]
namesPi2 :: [String]
namesPi2 = ["π₂ ", "SND "]
namesInl :: [String]
namesInl = ["ιnl ", "INL "]
namesInr :: [String]
namesInr = ["ιnr ", "INR "]
namesCase :: [String]
namesCase = ["CASE ", "Case ", "ᴄᴀꜱᴇ "]
namesOf :: [String]
namesOf = [" OF ", " Of ", " ᴏꜰ "]
namesCaseSep :: [String]
namesCaseSep = ["; ", ";"]
namesUnit :: [String]
namesUnit = ["★", "UNIT"]
namesAbort :: [String]
namesAbort = ["□ ", "ABORT "]
namesAbsurd :: [String]
namesAbsurd = ["■ ", "ABSURD "]
| Translates a named variable expression into a one .
-- Uses a dictionary of already binded numbers and variables.
tobruijn :: Map.Map String Integer -- ^ dictionary of the names of the variables used
-> Context -- ^ dictionary of the names already binded on the scope
-> NamedLambda -- ^ initial expression
-> Exp
-- Every lambda abstraction is inserted in the variable dictionary,
-- and every number in the dictionary increases to reflect we are entering
-- into a deeper context.
tobruijn d context (LambdaAbstraction c e) = Lambda $ tobruijn newdict context e
where newdict = Map.insert c 1 (Map.map succ d)
-- Translation of applications is trivial.
tobruijn d context (LambdaApplication f g) = App (tobruijn d context f) (tobruijn d context g)
-- Every variable is checked on the variable dictionary and in the current scope.
tobruijn d context (LambdaVariable c) =
case Map.lookup c d of
Just n -> Var n
Nothing -> fromMaybe (Var 0) (MultiBimap.lookupR c context)
tobruijn d context (TypedPair a b) = Pair (tobruijn d context a) (tobruijn d context b)
tobruijn d context (TypedPi1 a) = Pi1 (tobruijn d context a)
tobruijn d context (TypedPi2 a) = Pi2 (tobruijn d context a)
tobruijn d context (TypedInl a) = Inl (tobruijn d context a)
tobruijn d context (TypedInr a) = Inr (tobruijn d context a)
tobruijn d context (TypedCase a b c) = Caseof (tobruijn d context a) (tobruijn d context b) (tobruijn d context c)
tobruijn _ _ TypedUnit = Unit
tobruijn d context (TypedAbort a) = Abort (tobruijn d context a)
tobruijn d context (TypedAbsurd a) = Absurd (tobruijn d context a)
-- | Transforms a lambda expression with named variables to a deBruijn index expression.
-- Uses only the dictionary of the variables in the current context.
toBruijn :: Context -- ^ Variable context
-> NamedLambda -- ^ Initial lambda expression with named variables
-> Exp
toBruijn = tobruijn Map.empty
-- | Translates a deBruijn expression into a lambda expression
-- with named variables, given a list of used and unused variable names.
nameIndexes :: [String] -> [String] -> Exp -> NamedLambda
nameIndexes _ _ (Var 0) = LambdaVariable "undefined"
nameIndexes used _ (Var n) = LambdaVariable (used !! pred (fromInteger n))
nameIndexes used new (Lambda e) = LambdaAbstraction (head new) (nameIndexes (head new:used) (tail new) e)
nameIndexes used new (App f g) = LambdaApplication (nameIndexes used new f) (nameIndexes used new g)
nameIndexes used new (Pair a b) = TypedPair (nameIndexes used new a) (nameIndexes used new b)
nameIndexes used new (Pi1 a) = TypedPi1 (nameIndexes used new a)
nameIndexes used new (Pi2 a) = TypedPi2 (nameIndexes used new a)
nameIndexes used new (Inl a) = TypedInl (nameIndexes used new a)
nameIndexes used new (Inr a) = TypedInr (nameIndexes used new a)
nameIndexes used new (Caseof a b c) = TypedCase (nameIndexes used new a) (nameIndexes used new b) (nameIndexes used new c)
nameIndexes _ _ Unit = TypedUnit
nameIndexes used new (Abort a) = TypedAbort (nameIndexes used new a)
nameIndexes used new (Absurd a) = TypedAbsurd (nameIndexes used new a)
quicknameIndexes :: Int -> [String] -> Exp -> NamedLambda
quicknameIndexes _ _ (Var 0) = LambdaVariable "undefined"
quicknameIndexes n vars (Var m) = LambdaVariable (vars !! (n - fromInteger m))
quicknameIndexes n vars (Lambda e) = LambdaAbstraction (vars !! n) (quicknameIndexes (succ n) vars e)
quicknameIndexes n vars (App f g) = LambdaApplication (quicknameIndexes n vars f) (quicknameIndexes n vars g)
quicknameIndexes n vars (Pair a b) = TypedPair (quicknameIndexes n vars a) (quicknameIndexes n vars b)
quicknameIndexes n vars (Pi1 a) = TypedPi1 (quicknameIndexes n vars a)
quicknameIndexes n vars (Pi2 a) = TypedPi2 (quicknameIndexes n vars a)
quicknameIndexes n vars (Inl a) = TypedInl (quicknameIndexes n vars a)
quicknameIndexes n vars (Inr a) = TypedInr (quicknameIndexes n vars a)
quicknameIndexes n vars (Caseof a b c) = TypedCase (quicknameIndexes n vars a) (quicknameIndexes n vars b) (quicknameIndexes n vars c)
quicknameIndexes _ _ Unit = TypedUnit
quicknameIndexes n vars (Abort a) = TypedAbort (quicknameIndexes n vars a)
quicknameIndexes n vars (Absurd a) = TypedAbsurd (quicknameIndexes n vars a)
-- | Gives names to every variable in a deBruijn expression using
-- alphabetic order.
nameExp :: Exp -> NamedLambda
nameExp = nameIndexes [] variableNames
-- | A list of all possible variable names in lexicographical order.
variableNames :: [String]
variableNames = concatMap (`replicateM` ['a'..'z']) [1..]
| null | https://raw.githubusercontent.com/mroman42/mikrokosmos/fa3f9d5ebd9d62a6be44731ae867d020aa082a69/source/NamedLambda.hs | haskell | Parsing of Lambda Expressions.
The user can input a lambda expression with named variables, of
the form of "\x.x" or "(\a.(\b.a b))". The interpreter will parse
it into an internal representation.
| A lambda expression with named variables.
^ variable
^ lambda abstraction
^ function application
^ pair of expressions
^ left injection
^ right injection
^ case of expressions
^ unit
^ abort
^ absurd
| Parses a lambda expression with named variables.
lambda expressions. They are parsed assuming left-associativity.
Right λf.λx.(f x)
Note that double backslashes are neccessary only when we quote strings;
it will work only with a simple backslash in the interpreter.
| Parses a simple lambda expression, without function applications
at the top level. It can be a lambda abstraction, a variable or another
potentially complex lambda expression enclosed in parentheses.
| The returned parser parenthesizes the given parser
| Parses a variable. Any name can form a lambda variable.
| Allowed variable names
| Parses a lambda abstraction. The '\' is used as lambda.
| Shows a lambda expression with named variables.
Name type constructors
Uses a dictionary of already binded numbers and variables.
^ dictionary of the names of the variables used
^ dictionary of the names already binded on the scope
^ initial expression
Every lambda abstraction is inserted in the variable dictionary,
and every number in the dictionary increases to reflect we are entering
into a deeper context.
Translation of applications is trivial.
Every variable is checked on the variable dictionary and in the current scope.
| Transforms a lambda expression with named variables to a deBruijn index expression.
Uses only the dictionary of the variables in the current context.
^ Variable context
^ Initial lambda expression with named variables
| Translates a deBruijn expression into a lambda expression
with named variables, given a list of used and unused variable names.
| Gives names to every variable in a deBruijn expression using
alphabetic order.
| A list of all possible variable names in lexicographical order. | |
Module : NamedLambda
Description : Lambda expressions with named variables
License : GPL-3
This package deals with lambda expressions containing named variables
instead of DeBruijn indexes . It contains parsing and printing fuctions .
Module: NamedLambda
Description: Lambda expressions with named variables
License: GPL-3
This package deals with lambda expressions containing named variables
instead of DeBruijn indexes. It contains parsing and printing fuctions.
-}
module NamedLambda
( NamedLambda (LambdaVariable, LambdaAbstraction, LambdaApplication,
TypedPair, TypedPi1, TypedPi2,
TypedInl, TypedInr, TypedCase, TypedUnit, TypedAbort,
TypedAbsurd)
, lambdaexp
, toBruijn
, nameExp
, quicknameIndexes
, variableNames
)
where
import Text.ParserCombinators.Parsec
import Control.Applicative ((<$>), (<*>))
import qualified Data.Map.Strict as Map
import Lambda
import MultiBimap
import Data.Maybe
import Control.Monad
type Context = MultiBimap Exp String
^ first projection
^ second projection
deriving (Eq)
A lambda expression is a sequence of one or more autonomous
> > > parse " " " \\f.\\x.f x "
lambdaexp :: Parser NamedLambda
lambdaexp = foldl1 LambdaApplication <$> (spaces >> sepBy1 simpleexp spaces)
simpleexp :: Parser NamedLambda
simpleexp = choice
[ try pairParser
, try pi1Parser
, try pi2Parser
, try inlParser
, try inrParser
, try caseParser
, try unitParser
, try abortParser
, try absurdParser
, try lambdaAbstractionParser
, try variableParser
, try (parens lambdaexp)
]
parens :: Parser a -> Parser a
parens = between (char '(') (char ')')
variableParser :: Parser NamedLambda
variableParser = LambdaVariable <$> nameParser
nameParser :: Parser String
nameParser = many1 alphaNum
choicest :: [String] -> Parser String
choicest sl = choice (try . string <$> sl)
lambdaAbstractionParser :: Parser NamedLambda
lambdaAbstractionParser = LambdaAbstraction <$>
(lambdaChar >> nameParser) <*> (char '.' >> lambdaexp)
| used to represent lambda in user 's input .
lambdaChar :: Parser Char
lambdaChar = choice [try $ char '\\', try $ char 'λ']
pairParser :: Parser NamedLambda
pairParser = parens (TypedPair <$> lambdaexp <*> (char ',' >> lambdaexp))
pi1Parser, pi2Parser :: Parser NamedLambda
pi1Parser = TypedPi1 <$> (choicest namesPi1 >> lambdaexp)
pi2Parser = TypedPi2 <$> (choicest namesPi2 >> lambdaexp)
inlParser, inrParser :: Parser NamedLambda
inlParser = TypedInl <$> (choicest namesInl >> lambdaexp)
inrParser = TypedInr <$> (choicest namesInr >> lambdaexp)
caseParser :: Parser NamedLambda
caseParser =
TypedCase <$>
(choicest namesCase >> simpleexp) <*>
(choicest namesOf >> simpleexp) <*>
(choicest namesCaseSep >> simpleexp)
unitParser :: Parser NamedLambda
unitParser = choicest namesUnit >> return TypedUnit
abortParser :: Parser NamedLambda
abortParser = TypedAbort <$> (choicest namesAbort >> lambdaexp)
absurdParser :: Parser NamedLambda
absurdParser = TypedAbsurd <$> (choicest namesAbsurd >> lambdaexp)
Parentheses are ignored ; they are written only around applications .
showNamedLambda :: NamedLambda -> String
showNamedLambda (LambdaVariable c) = c
showNamedLambda (LambdaAbstraction c e) = "λ" ++ c ++ "." ++ showNamedLambda e
showNamedLambda (LambdaApplication f g) =
showNamedLambdaPar f ++ " " ++ showNamedLambdaPar g
showNamedLambda (TypedPair a b) =
"(" ++ showNamedLambda a ++ "," ++ showNamedLambda b ++ ")"
showNamedLambda (TypedPi1 a) = head namesPi1 ++ showNamedLambdaPar a
showNamedLambda (TypedPi2 a) = head namesPi2 ++ showNamedLambdaPar a
showNamedLambda (TypedInl a) = head namesInl ++ showNamedLambdaPar a
showNamedLambda (TypedInr a) = head namesInr ++ showNamedLambdaPar a
showNamedLambda (TypedCase a b c) =
last namesCase ++
showNamedLambda a ++
last namesOf ++ showNamedLambda b ++ head namesCaseSep ++ showNamedLambda c
showNamedLambda TypedUnit = head namesUnit
showNamedLambda (TypedAbort a) = head namesAbort ++ showNamedLambdaPar a
showNamedLambda (TypedAbsurd a) = head namesAbsurd ++ showNamedLambdaPar a
showNamedLambdaPar :: NamedLambda -> String
showNamedLambdaPar l@(LambdaVariable _) = showNamedLambda l
showNamedLambdaPar l@TypedUnit = showNamedLambda l
showNamedLambdaPar l@(TypedPair _ _) = showNamedLambda l
showNamedLambdaPar l = "(" ++ showNamedLambda l ++ ")"
instance Show NamedLambda where
show = showNamedLambda
namesPi1 :: [String]
namesPi1 = ["π₁ ", "FST "]
namesPi2 :: [String]
namesPi2 = ["π₂ ", "SND "]
namesInl :: [String]
namesInl = ["ιnl ", "INL "]
namesInr :: [String]
namesInr = ["ιnr ", "INR "]
namesCase :: [String]
namesCase = ["CASE ", "Case ", "ᴄᴀꜱᴇ "]
namesOf :: [String]
namesOf = [" OF ", " Of ", " ᴏꜰ "]
namesCaseSep :: [String]
namesCaseSep = ["; ", ";"]
namesUnit :: [String]
namesUnit = ["★", "UNIT"]
namesAbort :: [String]
namesAbort = ["□ ", "ABORT "]
namesAbsurd :: [String]
namesAbsurd = ["■ ", "ABSURD "]
| Translates a named variable expression into a one .
-> Exp
tobruijn d context (LambdaAbstraction c e) = Lambda $ tobruijn newdict context e
where newdict = Map.insert c 1 (Map.map succ d)
tobruijn d context (LambdaApplication f g) = App (tobruijn d context f) (tobruijn d context g)
tobruijn d context (LambdaVariable c) =
case Map.lookup c d of
Just n -> Var n
Nothing -> fromMaybe (Var 0) (MultiBimap.lookupR c context)
tobruijn d context (TypedPair a b) = Pair (tobruijn d context a) (tobruijn d context b)
tobruijn d context (TypedPi1 a) = Pi1 (tobruijn d context a)
tobruijn d context (TypedPi2 a) = Pi2 (tobruijn d context a)
tobruijn d context (TypedInl a) = Inl (tobruijn d context a)
tobruijn d context (TypedInr a) = Inr (tobruijn d context a)
tobruijn d context (TypedCase a b c) = Caseof (tobruijn d context a) (tobruijn d context b) (tobruijn d context c)
tobruijn _ _ TypedUnit = Unit
tobruijn d context (TypedAbort a) = Abort (tobruijn d context a)
tobruijn d context (TypedAbsurd a) = Absurd (tobruijn d context a)
-> Exp
toBruijn = tobruijn Map.empty
nameIndexes :: [String] -> [String] -> Exp -> NamedLambda
nameIndexes _ _ (Var 0) = LambdaVariable "undefined"
nameIndexes used _ (Var n) = LambdaVariable (used !! pred (fromInteger n))
nameIndexes used new (Lambda e) = LambdaAbstraction (head new) (nameIndexes (head new:used) (tail new) e)
nameIndexes used new (App f g) = LambdaApplication (nameIndexes used new f) (nameIndexes used new g)
nameIndexes used new (Pair a b) = TypedPair (nameIndexes used new a) (nameIndexes used new b)
nameIndexes used new (Pi1 a) = TypedPi1 (nameIndexes used new a)
nameIndexes used new (Pi2 a) = TypedPi2 (nameIndexes used new a)
nameIndexes used new (Inl a) = TypedInl (nameIndexes used new a)
nameIndexes used new (Inr a) = TypedInr (nameIndexes used new a)
nameIndexes used new (Caseof a b c) = TypedCase (nameIndexes used new a) (nameIndexes used new b) (nameIndexes used new c)
nameIndexes _ _ Unit = TypedUnit
nameIndexes used new (Abort a) = TypedAbort (nameIndexes used new a)
nameIndexes used new (Absurd a) = TypedAbsurd (nameIndexes used new a)
quicknameIndexes :: Int -> [String] -> Exp -> NamedLambda
quicknameIndexes _ _ (Var 0) = LambdaVariable "undefined"
quicknameIndexes n vars (Var m) = LambdaVariable (vars !! (n - fromInteger m))
quicknameIndexes n vars (Lambda e) = LambdaAbstraction (vars !! n) (quicknameIndexes (succ n) vars e)
quicknameIndexes n vars (App f g) = LambdaApplication (quicknameIndexes n vars f) (quicknameIndexes n vars g)
quicknameIndexes n vars (Pair a b) = TypedPair (quicknameIndexes n vars a) (quicknameIndexes n vars b)
quicknameIndexes n vars (Pi1 a) = TypedPi1 (quicknameIndexes n vars a)
quicknameIndexes n vars (Pi2 a) = TypedPi2 (quicknameIndexes n vars a)
quicknameIndexes n vars (Inl a) = TypedInl (quicknameIndexes n vars a)
quicknameIndexes n vars (Inr a) = TypedInr (quicknameIndexes n vars a)
quicknameIndexes n vars (Caseof a b c) = TypedCase (quicknameIndexes n vars a) (quicknameIndexes n vars b) (quicknameIndexes n vars c)
quicknameIndexes _ _ Unit = TypedUnit
quicknameIndexes n vars (Abort a) = TypedAbort (quicknameIndexes n vars a)
quicknameIndexes n vars (Absurd a) = TypedAbsurd (quicknameIndexes n vars a)
nameExp :: Exp -> NamedLambda
nameExp = nameIndexes [] variableNames
variableNames :: [String]
variableNames = concatMap (`replicateM` ['a'..'z']) [1..]
|
e5c340156ee01910dc4767bb49f301df7e8ec802279de64926ff8592b5766cbd | bakul/s9fes | stat-tools.scm | Scheme 9 from Empty Space , Function Library
By , 2015
; Placed in the Public Domain
;
; Convenience file to load statistics procedures.
(load-from-library "cdf.scm")
(load-from-library "erf.scm")
(load-from-library "mean.scm")
(load-from-library "median.scm")
(load-from-library "mode.scm")
(load-from-library "ndf.scm")
(load-from-library "quartile.scm")
(load-from-library "range.scm")
(load-from-library "stddev.scm")
(load-from-library "variance.scm")
| null | https://raw.githubusercontent.com/bakul/s9fes/74c14c0db5f07f5bc6d94131e9e4ee15a29275aa/lib/stat-tools.scm | scheme | Placed in the Public Domain
Convenience file to load statistics procedures. | Scheme 9 from Empty Space , Function Library
By , 2015
(load-from-library "cdf.scm")
(load-from-library "erf.scm")
(load-from-library "mean.scm")
(load-from-library "median.scm")
(load-from-library "mode.scm")
(load-from-library "ndf.scm")
(load-from-library "quartile.scm")
(load-from-library "range.scm")
(load-from-library "stddev.scm")
(load-from-library "variance.scm")
|
d197d76da212f1aa6e7f3f7dd8c9f463d89de08bf93683ca67b35e06c4b5f82c | ucsd-progsys/liquidhaskell | Multi_pred_app_00.hs | {-@ LIQUID "--expect-any-error" @-}
module Multi_pred_app_00 () where

-- Regression test for LiquidHaskell abstract-refinement application: a
-- value refined by a single abstract predicate p is returned at a type
-- carrying two abstract predicates <p, q>.  The file is expected to be
-- rejected (it is marked with --expect-any-error).
{-@ bar :: forall < p :: Int -> Bool, q :: Int -> Bool>. Int<p> -> Int<p, q> @-}
bar :: Int -> Int
bar x = x
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/f46dbafd6ce1f61af5b56f31924c21639c982a8a/tests/neg/Multi_pred_app_00.hs | haskell | @ LIQUID "--expect-any-error" @
@ bar :: forall < p :: Int -> Bool, q :: Int -> Bool>. Int<p> -> Int<p, q> @ | module Multi_pred_app_00 () where
bar :: Int -> Int
bar x = x
|
7a811aaff9015a216f90629a238a6d586a6d76d522005852b03283b765039cbe | ijvcms/chuanqi_dev | team_lib.erl | %%%-------------------------------------------------------------------
%%% @author tuhujia
( C ) 2015 , < COMPANY >
%%% @doc
%%%
%%% @end
Created : 07 . 十二月 2015 10:25
%%%-------------------------------------------------------------------
-module(team_lib).
-include("common.hrl").
-include("record.hrl").
-include("proto.hrl").
-include("cache.hrl").
-include("db_record.hrl").
-include("config.hrl").
-include("uid.hrl").
-include("language_config.hrl").
-include("log_type_config.hrl").
%% API
-export([
player_login/1,
player_logout/1,
create_team/1,
create_and_join_team/4,
get_team_info/1,
set_team_switch/3,
invite_team/2,
agree_team_invite/4,
apply_join_team/2,
agree_apply_team/3,
change_team_leader/2,
remove_team/2,
clear_team/1,
leave_team/1,
pack_near_by_team_info/1,
pack_near_by_player_info/1,
kill_monster/5,
delete_player_team_from_ets/1,
update_player_team_to_ets/2,
send_team_info/2,
transfer_to_leader/1,
get_team_info_from_ets/1,
update_team_info/1,
update_team_info/2
]).
%% ====================================================================
%% API functions
%% ====================================================================
%% Player login: restore the player's team membership.
%%
%% On a cross-server scene the whole call is forwarded to the cross node.
%% Otherwise the ETS player->team index is consulted; when the team record
%% exists and the team process is still alive, the team is notified that
%% this member came online and the player's team_id is restored.  Stale
%% index entries (team record gone, or team process dead) are cleaned up
%% and the player comes up without a team (team_id = 0, team_pid = 0).
player_login(PlayerState) ->
    Base = PlayerState#player_state.db_player_base,
    case scene_config:get(Base#db_player_base.scene_id) of
        #scene_conf{is_cross = IsCross} when IsCross =:= 1 ->
            case cross_lib:send_cross_mfc(?MODULE, player_login, [PlayerState]) of
                #player_state{} = TempState ->
                    TempState;
                _ ->
                    %% Cross call failed: fall back to "no team".
                    PlayerState#player_state{team_id = 0, team_pid = 0}
            end;
        _ ->
            PlayerId = PlayerState#player_state.player_id,
            case ets:lookup(?ETS_PlAYER_TEAM, PlayerId) of
                [R | _] ->
                    TeamId = R#ets_player_team.team_id,
                    case get_team_info_from_ets(TeamId) of
                        [] ->
                            %% Index points at a team that no longer exists.
                            delete_player_team_from_ets(PlayerId),
                            PlayerState#player_state{team_id = 0, team_pid = 0};
                        EtsTeam ->
                            TeamPid = EtsTeam#ets_team.team_pid,
                            case is_process_alive(TeamPid) of
                                true ->
                                    Socket = PlayerState#player_state.socket,
                                    DPB = PlayerState#player_state.db_player_base,
                                    Lv = DPB#db_player_base.lv,
                                    Fight = DPB#db_player_base.fight,
                                    gen_server:cast(TeamPid, {'TEAM_BY_PLAYER_LOGIN', PlayerId, PlayerState#player_state.pid, Socket, Lv, Fight}),
                                    PlayerState#player_state{team_id = TeamId};
                                false ->
                                    %% Team process died: drop both ETS records.
                                    delete_player_team_from_ets(PlayerId),
                                    ets:delete(?ETS_TEAM, TeamId),
                                    PlayerState#player_state{team_id = 0, team_pid = 0}
                            end
                    end;
                _ ->
                    PlayerState#player_state{team_id = 0, team_pid = 0}
            end
    end.
%% Player logout.  The player keeps their team membership (the ETS
%% player->team index is NOT removed, so membership is restored on the next
%% login); if the team process is alive it is told that this member went
%% offline, together with the member's level and fight value.
player_logout(PlayerState) ->
    case PlayerState#player_state.team_id > 0 of
        true ->
            TeamId = PlayerState#player_state.team_id,
            case get_team_info_from_ets(TeamId) of
                [] ->
                    {fail, ?ERR_TEAM_NOT_EXIST};
                EtsTeam ->
                    TeamPid = EtsTeam#ets_team.team_pid,
                    case is_pid(TeamPid) of
                        true ->
                            PlayerId = PlayerState#player_state.player_id,
                            DPB = PlayerState#player_state.db_player_base,
                            PlayerLv = DPB#db_player_base.lv,
                            PlayerFight = DPB#db_player_base.fight,
                            gen_server:cast(TeamPid, {'TEAM_BY_PLAYER_LOGOUT', PlayerId, PlayerLv, PlayerFight});
                        false ->
                            skip
                    end
            end;
        false ->
            skip
    end.
%% Create a new team with the calling player as its leader.
%% Fails with ?ERR_PLAYERB_HAVE_BEEN_TEAM when the player is already in a
%% team.  On success the player state gets the new team id/pid and the
%% leader flag, and the updated state is persisted via
%% player_lib:update_player_state/2.
create_team(PlayerState) ->
    TeamId = PlayerState#player_state.team_id,
    case TeamId == 0 of
        true ->
            NewTeamId = get_team_uid(),
            case team_mod:create_team(PlayerState, NewTeamId, PlayerState#player_state.pid) of
                {ok, TeamPid} ->
                    PlayerState1 = PlayerState#player_state{team_id = NewTeamId, leader = 1, team_pid = TeamPid},
                    player_lib:update_player_state(PlayerState, PlayerState1),
                    {ok, PlayerState1};
                _ ->
                    {fail, ?ERR_COMMON_FAIL}
            end;
        false ->
            {fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
    end.
%% Runs in the inviter's player process: create a team and immediately add
%% the accepting player (JoinDBP / JoinSocket / JoinPid) as its first
%% member.  A no-op (the unchanged state is returned) when the inviter
%% already has a team or when the joiner is already indexed in some team.
create_and_join_team(PlayerState, JoinDBP, JoinSocket, JoinPid) ->
    TeamId = PlayerState#player_state.team_id,
    case TeamId == 0 of
        true ->
            JoinId = JoinDBP#db_player_base.player_id,
            case ets:lookup(?ETS_PlAYER_TEAM, JoinId) of
                [_R | _] ->
                    %% Joiner already belongs to a team: do nothing.
                    {ok, PlayerState};
                _ ->
                    NewTeamId = get_team_uid(),
                    case team_mod:create_and_join_team(PlayerState, NewTeamId, PlayerState#player_state.pid, JoinDBP, JoinSocket, JoinPid) of
                        {ok, TeamPid} ->
                            PlayerState1 = PlayerState#player_state{team_id = NewTeamId, leader = 1, team_pid = TeamPid},
                            player_lib:update_player_state(PlayerState, PlayerState1),
                            {ok, PlayerState1};
                        _ ->
                            {ok, PlayerState}
                    end
            end;
        false ->
            {ok, PlayerState}
    end.
%% Ask the team process to push the full team info to this player.
%% Fails when the player has no team, the team record is missing, or the
%% stored team pid is invalid.
get_team_info(PlayerState) ->
    TeamId = PlayerState#player_state.team_id,
    if
        TeamId > 0 ->
            case get_team_info_from_ets(TeamId) of
                #ets_team{team_pid = TeamPid} when is_pid(TeamPid) ->
                    gen_server:cast(TeamPid, {'SEND_TEAM_INFO', PlayerState});
                #ets_team{} ->
                    {fail, ?ERR_COMMON_FAIL};
                _ ->
                    {fail, ?ERR_COMMON_FAIL}
            end;
        true ->
            {fail, ?ERR_PLAYERB_TEAM_NOT_EXIST}
    end.
%% Flip one of the two team-related switches on the player state.
%% Status must be 0 (off) or 1 (on); SwitchNo selects the switch field.
set_team_switch(State, SwitchNo, Status) when Status == 0 orelse Status == 1 ->
    case SwitchNo of
        1 ->
            State#player_state{team_switch_1 = Status};
        2 ->
            State#player_state{team_switch_2 = Status}
    end.
%% Invite another player to the caller's team.  When the caller has a team
%% it must exist and have a free slot; when the caller has no team yet the
%% invite is still sent (with TeamId = 0) and a team is created once the
%% invitee agrees (see agree_team_invite/4).  The invite popup is pushed to
%% the invitee's player process if they are online.
%%
%% Refactor: the two branches previously duplicated the "send popup" code;
%% it is now factored into do_invite_team/3 (behavior unchanged).
invite_team(PlayerState, InviteId) when InviteId =/= PlayerState#player_state.player_id ->
    TeamId = PlayerState#player_state.team_id,
    case TeamId > 0 of
        true ->
            case get_team_info_from_ets(TeamId) of
                [] ->
                    {fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
                R ->
                    case is_pid(R#ets_team.team_pid) andalso R#ets_team.mb_num < ?MAX_TEAM_MB of
                        true ->
                            do_invite_team(PlayerState, TeamId, InviteId);
                        false ->
                            {fail, ?ERR_TEAM_FULL}
                    end
            end;
        false ->
            %% No team yet: TeamId is 0 here; the team is created when the
            %% invitee accepts.
            do_invite_team(PlayerState, TeamId, InviteId)
    end.

%% Internal: push the invite popup to the invitee's player process.
do_invite_team(PlayerState, TeamId, InviteId) ->
    case player_lib:get_player_pid(InviteId) of
        null ->
            {fail, ?ERR_PLAYER_LOGOUT};
        PlayerPid ->
            PlayerId = PlayerState#player_state.player_id,
            DbBase = PlayerState#player_state.db_player_base,
            Name = DbBase#db_player_base.name,
            Socket = PlayerState#player_state.socket,
            gen_server2:cast(PlayerPid, {invite_team_sw, TeamId, Name, Socket, PlayerId}),
            {ok, ?ERR_COMMON_SUCCESS}
    end.
%% The invitee answers an invitation.  Type 1 = accept, 0 = decline (no-op).
%% On accept: if the inviter's team does not exist yet (no ETS record for
%% TeamId, which is 0 when the inviter had no team) the acceptance is
%% forwarded to the inviter's process, which creates a team and adds the
%% invitee in one step; otherwise the invitee joins the existing team via a
%% synchronous call to the team process.
agree_team_invite(PlayerState, TeamId, Type, InviteId) ->
    case Type of
        1 -> %% accept
            case PlayerState#player_state.team_id == 0 of
                true ->
                    %% The inviter may have no team yet: create it on demand.
                    case get_team_info_from_ets(TeamId) of
                        [] ->
                            %% Forward to the (online) inviter: create + join
                            %% inside its own player process.
                            case player_lib:get_player_pid(InviteId) of
                                null ->
                                    {fail, ?ERR_PLAYER_LOGOUT};
                                PlayerPid ->
                                    DbBase = PlayerState#player_state.db_player_base,
                                    Socket = PlayerState#player_state.socket,
                                    gen_server2:cast(PlayerPid, {create_and_join_team, DbBase, Socket, PlayerState#player_state.pid}),
                                    %% NOTE(review): success is reported under the
                                    %% fail tag here -- confirm callers only look at
                                    %% the error code.
                                    {fail, ?ERR_COMMON_SUCCESS}
                            end;
                        EtsTeam ->
                            TeamPid = EtsTeam#ets_team.team_pid,
                            case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
                                true ->
                                    gen_server:call(TeamPid, {'JOIN_TEAM', PlayerState, PlayerState#player_state.pid});
                                false ->
                                    {fail, ?ERR_COMMON_FAIL}
                            end
                    end;
                false ->
                    {fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
            end;
        0 -> %% decline: nothing to do
            skip
    end.
%% Apply to join an existing team.  The applicant must not already be in a
%% team (and must not target their own team id); the target team must
%% exist and still have a free slot.  The request popup is then pushed via
%% gen_server2:cast.
%% NOTE(review): the cast target is R#ets_team.pid, while every other
%% function in this module uses R#ets_team.team_pid -- presumably `pid`
%% holds the leader's player process; verify the record definition.
apply_join_team(PlayerState, TeamId) ->
    case TeamId =/= PlayerState#player_state.team_id of
        true ->
            case PlayerState#player_state.team_id > 0 of
                false ->
                    case get_team_info_from_ets(TeamId) of
                        [] ->
                            {fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
                        R ->
                            case R#ets_team.mb_num < ?MAX_TEAM_MB of
                                true ->
                                    DbBase = PlayerState#player_state.db_player_base,
                                    Name = DbBase#db_player_base.name,
                                    PlayerId = PlayerState#player_state.player_id,
                                    gen_server2:cast(R#ets_team.pid, {apply_join_team_sw, PlayerId, Name}),
                                    {ok, ?ERR_COMMON_SUCCESS};
                                false ->
                                    {fail, ?ERR_TEAM_FULL}
                            end
                    end;
                true ->
                    {fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
            end;
        false ->
            {fail, ?ERR_COMMON_FAIL}
    end.
%% The leader answers a player's join request.  Type 1 = approve,
%% 0 = reject (nothing happens).  On approval the applicant's player
%% process (must be online) is asked to join this team.
agree_apply_team(PlayerState, PlayerId, Type) when PlayerId =/= PlayerState#player_state.player_id ->
    case Type of
        1 -> %% approve
            case PlayerState#player_state.leader == 1 of
                true ->
                    TeamId = PlayerState#player_state.team_id,
                    case get_team_info_from_ets(TeamId) of
                        [] ->
                            {fail, ?ERR_TEAM_NOT_EXIST};
                        EtsTeam ->
                            case player_lib:get_player_pid(PlayerId) of
                                null ->
                                    {fail, ?ERR_PLAYER_LOGOUT};
                                PlayerPid ->
                                    gen_server2:cast(PlayerPid, {join_team, TeamId, EtsTeam#ets_team.team_pid})
                            end
                    end;
                false ->
                    %% NOTE(review): uses the guild-leader error code, unlike
                    %% remove_team/2 which uses ?ERR_PLAYERB_NOT_DUIZHANG --
                    %% confirm which one the client expects.
                    {fail, ?ERR_PLAYER_NOT_HUIZHANG}
            end;
        0 -> %% reject: nothing to do
            skip
    end.
%% Transfer team leadership to another member.  The caller must currently
%% be the leader and may not target themselves (enforced by the guard).
change_team_leader(PlayerState, PlayerId) when PlayerId =/= PlayerState#player_state.player_id ->
    case PlayerState#player_state.leader of
        1 ->
            case get_team_info_from_ets(PlayerState#player_state.team_id) of
                [] ->
                    {fail, ?ERR_TEAM_NOT_EXIST};
                #ets_team{team_pid = TeamPid} ->
                    case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
                        true ->
                            gen_server:call(TeamPid, {'CHANGE_TEAM_LEADER', PlayerId});
                        false ->
                            {fail, ?ERR_COMMON_FAIL}
                    end
            end;
        _ ->
            {fail, ?ERR_PLAYER_NOT_HUIZHANG}
    end.
%% Kick a member out of the team.  The caller must be the leader and may
%% not kick themselves (enforced by the guard).
remove_team(PlayerState, PlayerId) when PlayerId =/= PlayerState#player_state.player_id ->
    case PlayerState#player_state.leader of
        1 ->
            case get_team_info_from_ets(PlayerState#player_state.team_id) of
                [] ->
                    {fail, ?ERR_TEAM_NOT_EXIST};
                #ets_team{team_pid = TeamPid} when is_pid(TeamPid) ->
                    gen_server:cast(TeamPid, {'REMOVE_TEAM', PlayerId}),
                    {ok, ?ERR_COMMON_SUCCESS};
                #ets_team{} ->
                    {fail, ?ERR_COMMON_FAIL}
            end;
        _ ->
            {fail, ?ERR_PLAYERB_NOT_DUIZHANG}
    end.
%% Disband the whole team.  The caller must be the leader.
%% NOTE(review): the "not leader" branch reuses ?ERR_PLAYER_NOT_HUIZHANG
%% (guild leader) while remove_team/2 uses ?ERR_PLAYERB_NOT_DUIZHANG --
%% confirm which code the client expects.
clear_team(PlayerState) ->
    case PlayerState#player_state.leader of
        1 ->
            case get_team_info_from_ets(PlayerState#player_state.team_id) of
                [] ->
                    {fail, ?ERR_TEAM_NOT_EXIST};
                #ets_team{team_pid = TeamPid} when is_pid(TeamPid) ->
                    gen_server:cast(TeamPid, {'CLEAR_TEAM'}),
                    {ok, ?ERR_COMMON_SUCCESS};
                #ets_team{} ->
                    {fail, ?ERR_COMMON_FAIL}
            end;
        _ ->
            {fail, ?ERR_PLAYER_NOT_HUIZHANG}
    end.
%% Leave the current team (synchronous call to the team process).
%%
%% Fix: when the player has NO team the old code returned
%% ?ERR_PLAYERB_HAVE_BEEN_TEAM ("already in a team"), which is the exact
%% opposite of the situation; report the missing team instead, matching
%% the error used by get_team_info/1.
leave_team(PlayerState) ->
    case PlayerState#player_state.team_id > 0 of
        true ->
            TeamId = PlayerState#player_state.team_id,
            case get_team_info_from_ets(TeamId) of
                [] ->
                    {fail, ?ERR_TEAM_NOT_EXIST};
                EtsTeam ->
                    TeamPid = EtsTeam#ets_team.team_pid,
                    case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
                        true ->
                            gen_server:call(TeamPid, {'LEAVE_TEAM', PlayerState#player_state.player_id});
                        false ->
                            {fail, ?ERR_COMMON_FAIL}
                    end
            end;
        false ->
            {fail, ?ERR_PLAYERB_TEAM_NOT_EXIST}
    end.
%% Forward a monster-kill event to the team process (used for shared drop
%% ownership / kill credit).  Silently ignored when the team record or a
%% live team pid is missing.
kill_monster(TeamId, SceneId, MonsterId, DropOwnerId, KillPlayerId) ->
    case get_team_info_from_ets(TeamId) of
        #ets_team{team_pid = TeamPid} when is_pid(TeamPid) ->
            gen_server:cast(TeamPid, {'KILL_MONSTER', SceneId, MonsterId, DropOwnerId, KillPlayerId});
        _ ->
            skip
    end.
%% Teleport the player to the team leader's position, consuming one
%% "flying shoes" item (?ITEM_FLYING_SHOES).  Checked preconditions, in
%% order: the team exists; the current scene allows leader-teleport; the
%% player owns the item; the leader is online and their position can be
%% read; the leader's scene is neither an instance nor one of the
%% ShaCheng/Palace activity scenes.
transfer_to_leader(PlayerState) ->
    TeamId = PlayerState#player_state.team_id,
    case get_team_info_from_ets(TeamId) of
        [] ->
            {fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
        EtsTeam ->
            #scene_conf{is_leader_transfer = IsLeaderTransfer} = scene_config:get(PlayerState#player_state.scene_id),
            case IsLeaderTransfer =:= 1 of
                true ->
                    case goods_lib:is_goods_enough(?ITEM_FLYING_SHOES, 1) of
                        true ->
                            LeaderId = EtsTeam#ets_team.leader_id,
                            case player_lib:get_player_pid(LeaderId) of
                                null ->
                                    %% NOTE(review): "leader offline" reuses the
                                    %% "flying shoes not enough" error -- confirm.
                                    {fail, ?ERR_GOODS_XIAOFEIXIE_NOT_ENOUGH};
                                PlayerPid ->
                                    case gen_server2:apply_sync(PlayerPid, {player_lib, get_scene_xy, []}) of
                                        {ok, #db_player_base{} = LeaderBase} ->
                                            %% Teleport next to the leader; the item
                                            %% is charged by change_scene/7.
                                            X = LeaderBase#db_player_base.x,
                                            Y = LeaderBase#db_player_base.y,
                                            SceneId = LeaderBase#db_player_base.scene_id,
                                            SceneConf = scene_config:get(SceneId),
                                            case SceneConf#scene_conf.type =/= ?SCENE_TYPE_INSTANCE andalso
                                                SceneConf#scene_conf.activity_id =/= ?SCENE_ACTIVITY_SHACHENG andalso
                                                SceneConf#scene_conf.activity_id =/= ?SCENE_ACTIVITY_PALACE of
                                                true ->
                                                    case scene_mgr_lib:change_scene(PlayerState, PlayerState#player_state.pid, SceneId, ?CHANGE_SCENE_TYPE_CHANGE, {X, Y}, {?ITEM_FLYING_SHOES, 1, ?LOG_TYPE_TRANSFER}, false) of
                                                        {ok, PlayerState1} ->
                                                            {ok, PlayerState1};
                                                        _ ->
                                                            {fail, ?ERR_COMMON_FAIL}
                                                    end;
                                                false ->
                                                    {fail, ?ERR_COMMON_FAIL}
                                            end;
                                        _ ->
                                            %% Leader's base record could not be read.
                                            {fail, ?ERR_COMMON_FAIL}
                                    end
                            end;
                        false ->
                            {fail, ?ERR_GOODS_XIAOFEIXIE_NOT_ENOUGH}
                    end;
                false ->
                    {fail, ?ERR_COMMON_FAIL}
            end
    end.
%% Push the player's current lv/fight/scene to their team, if any.
update_team_info(PlayerState) ->
    case PlayerState#player_state.team_id of
        TeamId when TeamId > 0 ->
            case get_team_info_from_ets(TeamId) of
                #ets_team{team_pid = TeamPid} ->
                    update_team_info(PlayerState, TeamPid);
                [] ->
                    skip
            end;
        _ ->
            skip
    end.
%% Push the player's current lv/fight/scene to the given team process.
%% A no-op when TeamPid is not a pid.
update_team_info(PlayerState, TeamPid) when is_pid(TeamPid) ->
    DPB = PlayerState#player_state.db_player_base,
    Update = #team_mb{lv = DPB#db_player_base.lv,
                      fight = DPB#db_player_base.fight,
                      scene_id = PlayerState#player_state.scene_id},
    gen_server:cast(TeamPid, {'UPDATE_TEAM_MEMBER_INFO', PlayerState#player_state.player_id, Update});
update_team_info(_PlayerState, _TeamPid) ->
    skip.
%% ====================================================================
%% Server functions
%% ====================================================================
%% Fetch the #ets_team{} record for TeamId; returns [] when no such team
%% exists.
get_team_info_from_ets(TeamId) ->
    case ets:lookup(?ETS_TEAM, TeamId) of
        [Team | _] ->
            Team;
        _ ->
            []
    end.
%% Allocate a new globally unique team id.
get_team_uid() ->
    uid_lib:get_uid(?UID_TYPE_TEAM).
%% Build the protocol records for a list of nearby team ids; teams that no
%% longer exist are skipped.  Note: lists:foldl with head-prepend means the
%% result order is the reverse of the input order (as before).
pack_near_by_team_info(TeamIds) ->
    Build =
        fun(TeamId, Acc) ->
            case get_team_info_from_ets(TeamId) of
                #ets_team{name = Name, lv = Lv, career = Career,
                          mb_num = MbNum, guild_name = GuildName} ->
                    [#proto_near_by_team{
                        team_id = TeamId,
                        name = Name,
                        lv = Lv,
                        career = Career,
                        memeber_num = MbNum,
                        guild_name = GuildName} | Acc];
                _ ->
                    Acc
            end
        end,
    lists:foldl(Build, [], TeamIds).
%% Build the protocol records for nearby players that have no team
%% (team_id =:= 0); input order is preserved.
pack_near_by_player_info(ObjList) ->
    [#proto_near_by_player{
         player_id = Obj#scene_obj_state.obj_id,
         name = Obj#scene_obj_state.name,
         lv = Obj#scene_obj_state.lv,
         career = Obj#scene_obj_state.career,
         guild_name = guild_lib:get_guild_name(Obj#scene_obj_state.guild_id)}
     || Obj <- ObjList, Obj#scene_obj_state.team_id =:= 0].
%% Remove the PlayerId -> TeamId entry from the player/team index table.
delete_player_team_from_ets(PlayerId) ->
    ets:delete(?ETS_PlAYER_TEAM, PlayerId).
%% Record (or overwrite) the PlayerId -> TeamId mapping in the player/team
%% index table.
update_player_team_to_ets(PlayerId, TeamId) ->
    ets:insert(?ETS_PlAYER_TEAM,
               #ets_player_team{player_id = PlayerId, team_id = TeamId}).
%% Send the packed member list to one client (protocol id 21016).
send_team_info(Socket, Proto) ->
    net_send:send_to_client(Socket, 21016, #rep_update_team_info{member_list = Proto}).
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/business/team/team_lib.erl | erlang | -------------------------------------------------------------------
@author tuhujia
@doc
@end
-------------------------------------------------------------------
API
====================================================================
API functions
====================================================================
玩家上线 初始化玩家队伍信息
玩家下线处理(如果玩家有队伍保留玩家队伍id不离开队伍)
创建队伍
玩家进程调用
获取队伍开关与队伍信息
设置开关
玩家同意加入队伍
同意
添加检测(邀请者如果没有队伍自动创建队伍)
如果对方玩家在线发给对方进程处理(创建 and 加入)
拒绝
玩家申请加入队伍
队长同意玩家加入队伍
同意
拒绝
转移队长
解散队伍
离开队伍
队伍击杀怪物
传送到队长身边
传送
没有该角色
更新玩家队伍信息
更新玩家队伍信息
====================================================================
Server functions
====================================================================
获取队伍记录
组包 | ( C ) 2015 , < COMPANY >
Created : 07 . 十二月 2015 10:25
-module(team_lib).
-include("common.hrl").
-include("record.hrl").
-include("proto.hrl").
-include("cache.hrl").
-include("db_record.hrl").
-include("config.hrl").
-include("uid.hrl").
-include("language_config.hrl").
-include("log_type_config.hrl").
-export([
player_login/1,
player_logout/1,
create_team/1,
create_and_join_team/4,
get_team_info/1,
set_team_switch/3,
invite_team/2,
agree_team_invite/4,
apply_join_team/2,
agree_apply_team/3,
change_team_leader/2,
remove_team/2,
clear_team/1,
leave_team/1,
pack_near_by_team_info/1,
pack_near_by_player_info/1,
kill_monster/5,
delete_player_team_from_ets/1,
update_player_team_to_ets/2,
send_team_info/2,
transfer_to_leader/1,
get_team_info_from_ets/1,
update_team_info/1,
update_team_info/2
]).
player_login(PlayerState) ->
Base = PlayerState#player_state.db_player_base,
case scene_config:get(Base#db_player_base.scene_id) of
#scene_conf{is_cross = IsCross} when IsCross =:= 1 ->
case cross_lib:send_cross_mfc(?MODULE, player_login, [PlayerState]) of
#player_state{} = TempState ->
TempState;
_ ->
end;
_ ->
PlayerId = PlayerState#player_state.player_id,
case ets:lookup(?ETS_PlAYER_TEAM, PlayerId) of
[R | _] ->
TeamId = R#ets_player_team.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
delete_player_team_from_ets(PlayerId),
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_process_alive(TeamPid) of
true ->
Socket = PlayerState#player_state.socket,
DPB = PlayerState#player_state.db_player_base,
Lv = DPB#db_player_base.lv,
Fight = DPB#db_player_base.fight,
gen_server:cast(TeamPid, {'TEAM_BY_PLAYER_LOGIN', PlayerId, PlayerState#player_state.pid, Socket, Lv, Fight}),
PlayerState#player_state{team_id = TeamId};
false ->
delete_player_team_from_ets(PlayerId),
ets:delete(?ETS_TEAM, TeamId),
end
end;
_ ->
end
end.
player_logout(PlayerState) ->
case PlayerState#player_state.team_id > 0 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) of
true ->
PlayerId = PlayerState#player_state.player_id,
DPB = PlayerState#player_state.db_player_base,
PlayerLv = DPB#db_player_base.lv,
PlayerFight = DPB#db_player_base.fight,
gen_server:cast(TeamPid, {'TEAM_BY_PLAYER_LOGOUT', PlayerId, PlayerLv, PlayerFight});
false ->
skip
end
end;
false ->
skip
end.
create_team(PlayerState) ->
TeamId = PlayerState#player_state.team_id,
case TeamId == 0 of
true ->
NewTeamId = get_team_uid(),
case team_mod:create_team(PlayerState, NewTeamId, PlayerState#player_state.pid) of
{ok, TeamPid} ->
PlayerState1 = PlayerState#player_state{team_id = NewTeamId, leader = 1, team_pid = TeamPid},
player_lib:update_player_state(PlayerState, PlayerState1),
{ok, PlayerState1};
_ ->
{fail, ?ERR_COMMON_FAIL}
end;
false ->
{fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
end.
create_and_join_team(PlayerState, JoinDBP, JoinSocket, JoinPid) ->
TeamId = PlayerState#player_state.team_id,
case TeamId == 0 of
true ->
JoinId = JoinDBP#db_player_base.player_id,
case ets:lookup(?ETS_PlAYER_TEAM, JoinId) of
[_R | _] ->
{ok, PlayerState};
_ ->
NewTeamId = get_team_uid(),
case team_mod:create_and_join_team(PlayerState, NewTeamId, PlayerState#player_state.pid, JoinDBP, JoinSocket, JoinPid) of
{ok, TeamPid} ->
PlayerState1 = PlayerState#player_state{team_id = NewTeamId, leader = 1, team_pid = TeamPid},
player_lib:update_player_state(PlayerState, PlayerState1),
{ok, PlayerState1};
_ ->
{ok, PlayerState}
end
end;
false ->
{ok, PlayerState}
end.
get_team_info(PlayerState) ->
TeamId = PlayerState#player_state.team_id,
case TeamId > 0 of
true ->
case get_team_info_from_ets(TeamId) of
#ets_team{} = EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) of
true ->
gen_server:cast(TeamPid, {'SEND_TEAM_INFO', PlayerState});
false ->
{fail, ?ERR_COMMON_FAIL}
end;
_ ->
{fail, ?ERR_COMMON_FAIL}
end;
false ->
{fail, ?ERR_PLAYERB_TEAM_NOT_EXIST}
end.
set_team_switch(PlayerState, Type, Status) when Status == 0 orelse Status == 1 ->
case Type of
1 ->
PlayerState#player_state{team_switch_1 = Status};
2 ->
PlayerState#player_state{team_switch_2 = Status}
end.
invite_team(PlayerState, InviteId) when InviteId =/= PlayerState#player_state.player_id ->
TeamId = PlayerState#player_state.team_id,
case TeamId > 0 of
true ->
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
R ->
TeamPid = R#ets_team.team_pid,
case is_pid(TeamPid) andalso R#ets_team.mb_num < ?MAX_TEAM_MB of
true ->
case player_lib:get_player_pid(InviteId) of
null ->
{fail, ?ERR_PLAYER_LOGOUT};
PlayerPid ->
PlayerId = PlayerState#player_state.player_id,
DbBase = PlayerState#player_state.db_player_base,
Name = DbBase#db_player_base.name,
Socket = PlayerState#player_state.socket,
gen_server2:cast(PlayerPid, {invite_team_sw, TeamId, Name, Socket, PlayerId}),
{ok, ?ERR_COMMON_SUCCESS}
end;
false ->
{fail, ?ERR_TEAM_FULL}
end
end;
false ->
case player_lib:get_player_pid(InviteId) of
null ->
{fail, ?ERR_PLAYER_LOGOUT};
PlayerPid ->
PlayerId = PlayerState#player_state.player_id,
DbBase = PlayerState#player_state.db_player_base,
Name = DbBase#db_player_base.name,
Socket = PlayerState#player_state.socket,
gen_server2:cast(PlayerPid, {invite_team_sw, TeamId, Name, Socket, PlayerId}),
{ok, ?ERR_COMMON_SUCCESS}
end
end.
agree_team_invite(PlayerState, TeamId, Type, InviteId) ->
case Type of
case PlayerState#player_state.team_id == 0 of
true ->
case get_team_info_from_ets(TeamId) of
[] ->
case player_lib:get_player_pid(InviteId) of
null ->
{fail, ?ERR_PLAYER_LOGOUT};
PlayerPid ->
DbBase = PlayerState#player_state.db_player_base,
Socket = PlayerState#player_state.socket,
gen_server2:cast(PlayerPid, {create_and_join_team, DbBase, Socket, PlayerState#player_state.pid}),
{fail, ?ERR_COMMON_SUCCESS}
end;
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
true ->
gen_server:call(TeamPid, {'JOIN_TEAM', PlayerState, PlayerState#player_state.pid});
false ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
end;
skip
end.
apply_join_team(PlayerState, TeamId) ->
case TeamId =/= PlayerState#player_state.team_id of
true ->
case PlayerState#player_state.team_id > 0 of
false ->
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
R ->
case R#ets_team.mb_num < ?MAX_TEAM_MB of
true ->
DbBase = PlayerState#player_state.db_player_base,
Name = DbBase#db_player_base.name,
PlayerId = PlayerState#player_state.player_id,
gen_server2:cast(R#ets_team.pid, {apply_join_team_sw, PlayerId, Name}),
{ok, ?ERR_COMMON_SUCCESS};
false ->
{fail, ?ERR_TEAM_FULL}
end
end;
true ->
{fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
end;
false ->
{fail, ?ERR_COMMON_FAIL}
end.
agree_apply_team(PlayerState, PlayerId, Type) when PlayerId =/= PlayerState#player_state.player_id ->
case Type of
case PlayerState#player_state.leader == 1 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
case player_lib:get_player_pid(PlayerId) of
null ->
{fail, ?ERR_PLAYER_LOGOUT};
PlayerPid ->
gen_server2:cast(PlayerPid, {join_team, TeamId, EtsTeam#ets_team.team_pid})
end
end;
false ->
{fail, ?ERR_PLAYER_NOT_HUIZHANG}
end;
skip
end.
change_team_leader(PlayerState, PlayerId) when PlayerId =/= PlayerState#player_state.player_id ->
case PlayerState#player_state.leader == 1 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
true ->
gen_server:call(TeamPid, {'CHANGE_TEAM_LEADER', PlayerId});
false ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_PLAYER_NOT_HUIZHANG}
end.
踢出队伍
remove_team(PlayerState, PlayerId) when PlayerId =/= PlayerState#player_state.player_id ->
case PlayerState#player_state.leader == 1 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) of
true ->
gen_server:cast(TeamPid, {'REMOVE_TEAM', PlayerId}),
{ok, ?ERR_COMMON_SUCCESS};
false ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_PLAYERB_NOT_DUIZHANG}
end.
clear_team(PlayerState) ->
case PlayerState#player_state.leader == 1 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) of
true ->
gen_server:cast(TeamPid, {'CLEAR_TEAM'}),
{ok, ?ERR_COMMON_SUCCESS};
false ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_PLAYER_NOT_HUIZHANG}
end.
leave_team(PlayerState) ->
case PlayerState#player_state.team_id > 0 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_TEAM_NOT_EXIST};
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) andalso is_process_alive(TeamPid) of
true ->
gen_server:call(TeamPid, {'LEAVE_TEAM', PlayerState#player_state.player_id});
false ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_PLAYERB_HAVE_BEEN_TEAM}
end.
kill_monster(TeamId, SceneId, MonsterId, DropOwnerId, KillPlayerId) ->
case get_team_info_from_ets(TeamId) of
[] ->
skip;
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
case is_pid(TeamPid) of
true ->
gen_server:cast(TeamPid, {'KILL_MONSTER', SceneId, MonsterId, DropOwnerId, KillPlayerId});
false ->
skip
end
end.
transfer_to_leader(PlayerState) ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
{fail, ?ERR_PLAYERB_TEAM_NOT_EXIST};
EtsTeam ->
#scene_conf{is_leader_transfer = IsLeaderTransfer} = scene_config:get(PlayerState#player_state.scene_id),
case IsLeaderTransfer =:= 1 of
true ->
case goods_lib:is_goods_enough(?ITEM_FLYING_SHOES, 1) of
true ->
LeaderId = EtsTeam#ets_team.leader_id,
case player_lib:get_player_pid(LeaderId) of
null ->
{fail, ?ERR_GOODS_XIAOFEIXIE_NOT_ENOUGH};
PlayerPid ->
case gen_server2:apply_sync(PlayerPid, {player_lib, get_scene_xy, []}) of
{ok, #db_player_base{} = LeaderBase} ->
X = LeaderBase#db_player_base.x,
Y = LeaderBase#db_player_base.y,
SceneId = LeaderBase#db_player_base.scene_id,
SceneConf = scene_config:get(SceneId),
case SceneConf#scene_conf.type =/= ?SCENE_TYPE_INSTANCE andalso
SceneConf#scene_conf.activity_id =/= ?SCENE_ACTIVITY_SHACHENG andalso
SceneConf#scene_conf.activity_id =/= ?SCENE_ACTIVITY_PALACE of
true ->
{ok, PlayerState1} ->
{ok, PlayerState1};
_ ->
{fail, ?ERR_COMMON_FAIL}
end;
false ->
{fail, ?ERR_COMMON_FAIL}
end;
_ ->
{fail, ?ERR_COMMON_FAIL}
end
end;
false ->
{fail, ?ERR_GOODS_XIAOFEIXIE_NOT_ENOUGH}
end;
false ->
{fail, ?ERR_COMMON_FAIL}
end
end.
update_team_info(PlayerState) ->
case PlayerState#player_state.team_id > 0 of
true ->
TeamId = PlayerState#player_state.team_id,
case get_team_info_from_ets(TeamId) of
[] ->
skip;
EtsTeam ->
TeamPid = EtsTeam#ets_team.team_pid,
update_team_info(PlayerState, TeamPid)
end;
false ->
skip
end.
update_team_info(PlayerState, TeamPid) ->
case is_pid(TeamPid) of
true ->
PlayerId = PlayerState#player_state.player_id,
DPB = PlayerState#player_state.db_player_base,
PlayerLv = DPB#db_player_base.lv,
PlayerFight = DPB#db_player_base.fight,
SceneId = PlayerState#player_state.scene_id,
Update = #team_mb{lv = PlayerLv, fight = PlayerFight, scene_id = SceneId},
gen_server:cast(TeamPid, {'UPDATE_TEAM_MEMBER_INFO', PlayerId, Update});
false ->
skip
end.
get_team_info_from_ets(TeamId) ->
case ets:lookup(?ETS_TEAM, TeamId) of
[R | _] ->
R;
_ ->
[]
end.
获取唯一id
get_team_uid() ->
uid_lib:get_uid(?UID_TYPE_TEAM).
pack_near_by_team_info(TeamList) ->
Fun = fun(TeamId, Acc) ->
case get_team_info_from_ets(TeamId) of
[] ->
Acc;
R ->
[#proto_near_by_team{
team_id = TeamId,
name = R#ets_team.name,
lv = R#ets_team.lv,
career = R#ets_team.career,
memeber_num = R#ets_team.mb_num,
guild_name = R#ets_team.guild_name}]
++ Acc
end
end,
lists:foldl(Fun, [], TeamList).
pack_near_by_player_info(ObjList) ->
Fun = fun(Obj) ->
#proto_near_by_player{
player_id = Obj#scene_obj_state.obj_id,
name = Obj#scene_obj_state.name,
lv = Obj#scene_obj_state.lv,
career = Obj#scene_obj_state.career,
guild_name = guild_lib:get_guild_name(Obj#scene_obj_state.guild_id)
}
end,
[Fun(X) || X <- ObjList, X#scene_obj_state.team_id =:= 0].
delete_player_team_from_ets(PlayerId) ->
ets:delete(?ETS_PlAYER_TEAM, PlayerId).
update_player_team_to_ets(PlayerId, TeamId) ->
R = #ets_player_team{player_id = PlayerId, team_id = TeamId},
ets:insert(?ETS_PlAYER_TEAM, R).
send_team_info(Socket, Proto) ->
net_send:send_to_client(Socket, 21016, #rep_update_team_info{member_list = Proto}).
|
dfce5540b52d2754369daa7c4860beac2e903e080fba035adc0103731601234e | andorp/bead | Page.hs | {-# LANGUAGE OverloadedStrings #-}
module Bead.View.Content.Evaluation.Page (
evaluation
, modifyEvaluation
) where
import Control.Monad.IO.Class
import Control.Monad
import Control.Arrow ((&&&))
import Data.Maybe (fromMaybe)
import Data.Monoid
import Text.Printf
import Data.String (fromString)
import Data.Time (getCurrentTime)
import qualified Bead.Controller.Pages as Pages
import Bead.Controller.UserStories (submissionDescription)
import Bead.Domain.Entity.Assignment as Assignment
import Bead.Domain.Evaluation
import Bead.View.Content as C
import Bead.View.Content.Bootstrap as Bootstrap
import Bead.View.Content.Comments
import Bead.View.Content.SeeMore
import Bead.View.Content.SubmissionTable (formatSubmissionInfo)
import Bead.View.Content.VisualConstants
import Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
-- | View/modify handler pair for creating a brand new evaluation of a
-- submission.
evaluation :: ViewModifyHandler
evaluation = ViewModifyHandler evaluationPage evaluationPostHandler
-- | View/modify handler pair for changing an already existing evaluation.
modifyEvaluation :: ViewModifyHandler
modifyEvaluation = ViewModifyHandler modifyEvaluationPage modifyEvaluationPost
-- Page data consists of a description for a submission key (which contains
-- the assignment key as well), the submission key, an optional evaluation
-- key, and the time converter for the actual user.
-- A 'Nothing' evaluation key means a new evaluation is being created;
-- 'Just' means an existing evaluation is being modified.
data PageData = PageData {
    sbmDesc :: SubmissionDesc             -- ^ Description of the submission (incl. assignment)
  , sbmSubmissionKey :: SubmissionKey     -- ^ Key of the evaluated submission
  , sbmEvaluationKey :: Maybe EvaluationKey -- ^ Existing evaluation to modify, if any
  , userTime :: UserTimeConverter         -- ^ Converts times to the user's local time zone
  }
-- | GET handler: render the evaluation page for creating a new evaluation
-- of the submission named in the request.
evaluationPage :: GETContentHandler
evaluationPage = do
  submissionKey <- getParameter submissionKeyPrm
  desc          <- userStory (submissionDescription submissionKey)
  timeConverter <- userTimeZoneToLocalTimeConverter
  return $ evaluationContent PageData
    { sbmDesc = desc
    , sbmSubmissionKey = submissionKey
    , sbmEvaluationKey = Nothing
    , userTime = timeConverter
    }
-- | GET handler: render the evaluation page for modifying an existing
-- evaluation (both submission and evaluation keys come from the request).
modifyEvaluationPage :: GETContentHandler
modifyEvaluationPage = do
  submissionKey <- getParameter submissionKeyPrm
  evaluationKey <- getParameter evaluationKeyPrm
  desc          <- userStory (submissionDescription submissionKey)
  timeConverter <- userTimeZoneToLocalTimeConverter
  return $ evaluationContent PageData
    { sbmDesc = desc
    , sbmSubmissionKey = submissionKey
    , sbmEvaluationKey = Just evaluationKey
    , userTime = timeConverter
    }
-- | Request parameter carrying the evaluation configuration of the assignment.
evalConfigParam = evalConfigParameter (fieldName evaluationConfigField)

-- | Request parameter carrying the free-form textual evaluation.
freeFormEvaluationParam = stringParameter (fieldName evaluationFreeFormField) "Free format evaluation"
-- Reads the evaluation result from the parameters and determines whether
-- the content of the text area becomes a plain comment or the textual
-- evaluation of the given submission.
-- The result of the computation is a UserAction: either a
-- 'SubmissionComment', or the action built by 'evCommand' from the
-- evaluation key and the evaluation itself.
abstractEvaluationPostHandler
  :: ContentHandler key                   -- ^ How to read the evaluation key from the request
  -> (key -> C.Evaluation -> UserAction)  -- ^ Action constructor (new or modify)
  -> POSTContentHandler
abstractEvaluationPostHandler getEvKeyParameter evCommand = do
  sk <- getParameter submissionKeyPrm
  commentText <- getParameter evaluationValuePrm
  config <- getParameter evalConfigParam
  -- Decide comment-vs-result depending on the evaluation config:
  -- binary: the choice is encoded in evaluationResultField;
  -- percentage: a comment-only flag plus a spinner value;
  -- free form: an empty text field means plain comment.
  commentOrResult <-
    evConfigCata
      (getJSONParam (fieldName evaluationResultField) "No evaluation can be found.")
      (\_ -> do
        percentage <- getParameterWithDefault 0 evaluationPercentagePrm
        commentOnly <- getParameter evaluationCommentOnlyPrm
        return $
          if commentOnly
            then EvCmtComment
            else EvCmtResult $ percentageResult (fromIntegral percentage / 100))
      (do freeForm <- getParameter freeFormEvaluationParam
          return $ if (null freeForm)
            then EvCmtComment
            else EvCmtResult $ freeFormResult freeForm)
      config
  withEvalOrComment commentOrResult
    -- Comment branch: reject empty text, otherwise attach a comment whose
    -- type is derived from the commenting user's role.
    (case null commentText of
       True -> return $
         ErrorMessage (msg_Evaluation_EmptyCommentAndFreeFormResult "Neither comment nor evaluation was given!")
       False -> do
         (mrole,mname) <- (getRole &&& getName) <$> userState
         let uname = fromMaybe "???" mname
         case mrole of
           Nothing -> return $ LogMessage "The user is not logged in" -- Impossible
           Just role -> do
             now <- liftIO $ getCurrentTime
             return $ SubmissionComment sk Comment {
                 comment = commentText
               , commentAuthor = uname
               , commentDate = now
               , commentType = roleToCommentType role
               })
    -- Evaluation branch: build the Evaluation value and hand it to
    -- evCommand together with the key read by getEvKeyParameter.
    (\result -> do
       key <- getEvKeyParameter
       let e = C.Evaluation {
           evaluationResult = result
         , writtenEvaluation = commentText
         }
       return $ evCommand key e)
  where
    roleToCommentType = roleCata
      CT_Student
      CT_GroupAdmin
      CT_CourseAdmin
      CT_Admin
    -- Projections from the UserState; both yield Nothing when not logged in.
    getRole = userStateCata
      Nothing
      Nothing
      Nothing
      (\_username _uid _page _name role _token _timezone _status -> Just role)
    getName = userStateCata
      Nothing
      Nothing
      Nothing
      (\_username _uid _page name _role _token _timezone _status -> Just name)
evaluationPostHandler :: POSTContentHandler
evaluationPostHandler = abstractEvaluationPostHandler (getParameter submissionKeyPrm) NewEvaluation
modifyEvaluationPost :: POSTContentHandler
modifyEvaluationPost = abstractEvaluationPostHandler (getParameter evaluationKeyPrm) ModifyEvaluation
evaluationFrame :: EvConfig -> I18N -> Html -> Html
evaluationFrame evConfig msg content = do
hiddenInput (fieldName evalConfigParam) (encodeToFay' "inputEvalType" evConfig)
withEvConfig evConfig
(do content
Bootstrap.formGroup $ evaluationDiv $
Bootstrap.radioButtonGroup (fieldName evaluationResultField) $
[ (True, encodeToFay' "inputEvalResult" EvCmtComment , msg $ msg_Evaluation_New_Comment "New Comment")
, (False, encodeToFay' "inputEvalResult" $ binary Passed, msg $ msg_Evaluation_Accepted "Accepted")
, (False, encodeToFay' "inputEvalResult" $ binary Failed, msg $ msg_Evaluation_Rejected "Rejected")
])
-- When the page is dynamic the percentage spinner is hooked on the field
(\_ ->
do content
Bootstrap.formGroup . evaluationDiv $ do
Bootstrap.colMd4 $
Bootstrap.radioButtonGroup (fieldName evaluationCommentOnlyPrm) $
[ (True, show True, msg $ msg_Evaluation_New_Comment "New Comment")
, (False, show False, msg $ msg_Evaluation_Percentage "Percentage: ")
]
Bootstrap.colMd4 $
H.input ! A.name (fieldName evaluationPercentagePrm) ! A.type_ "number"
! A.min (fromString $ show 0) ! A.max (fromString $ show 100)
! A.value (fromString $ show 0))
(do Bootstrap.optionalTextInput (fieldName freeFormEvaluationParam) (msg $ msg_Evaluation_FreeFormEvaluation "Evaluation") ""
H.p . fromString $ printf (msg $ msg_Evaluation_FreeForm_Information $ unwords
[ "Note that this text will be used everywhere as the evaluation itself. Hence it is recommended to keep"
, "the length of the text under size %d, otherwise it may not be directly shown." ]) displayableFreeFormResultLength
content)
where
binary = EvCmtResult . binaryResult
evaluationDiv = withEvConfig
evConfig
(H.div)
(const $ H.div ! A.id (fieldName evaluationPercentageDiv))
(H.div)
-- * View
evaluationContent :: PageData -> IHtml
evaluationContent pd = do
let sd = sbmDesc pd
tc = userTime pd
msg <- getI18N
let freeFormCommentTitle = evConfigCata
(return ())
(const $ return ())
(Bootstrap.labelFor (fieldName evaluationValueField) (msg $ msg_Evaluation_FreeFormComment "Comment"))
return $ do
Bootstrap.row $ Bootstrap.colMd12 $
H.p $ fromString . msg $ msg_Evaluation_Info $ concat
[ "It is not mandatory to evaluate the submission, it is allowed to comment on it only. "
, "The student may answer the comments by further comments. The submission may be "
, "evaluated many times."
]
Bootstrap.row $ Bootstrap.colMd12 $ Bootstrap.table $
H.tbody $ do
let aName = assignmentCata (\name _ _ _ _ _ -> name)
(msg $ msg_Evaluation_Course "Course: ") .|. (fromString . eCourse $ sd)
(msg $ msg_Evaluation_Assignment "Assignment: ") .|. (fromString . aName . eAssignment $ sd)
maybe
mempty
(\group -> (msg $ msg_Evaluation_Group "Group: ") .|. (fromString group))
(eGroup sd)
(msg $ msg_Evaluation_Student "Student: ") .|. (fromString . eStudent $ sd)
(msg $ msg_Evaluation_Username "Username: ") .|. (fromString . uid Prelude.id $ eUid sd)
(msg $ msg_Evaluation_SubmissionDate "Date of submission: ") .|. (fromString . showDate . tc $ eSubmissionDate sd)
(msg $ msg_Evaluation_SubmissionInfo "State: ") .|. submissionIcon msg (eSubmissionInfo sd)
Bootstrap.row $ Bootstrap.colMd12 $ do
let downloadSubmissionButton =
Bootstrap.buttonLink
(routeOf $ Pages.getSubmission submissionKey ())
(msg $ msg_Evaluation_Submitted_Solution_Zip_Link "Download")
h2 $ fromString $ msg $ msg_Evaluation_Submitted_Solution "Submission"
if (Assignment.isZippedSubmissions . Assignment.aspects . eAssignment $ sd)
then do
H.p $ fromString . msg $ msg_Evaluation_Submitted_Solution_Zip_Info $ mconcat
[ "The submission was uploaded as a compressed file so it could not be displayed verbatim. "
, "But it may be downloaded as a file by clicking on the link."
]
downloadSubmissionButton
else do
H.p $ fromString . msg $ msg_Evaluation_Submitted_Solution_Text_Info $
"The submission may be downloaded as a plain text file by clicking on the link."
downloadSubmissionButton
H.br
H.div # submissionTextDiv $ seeMoreSubmission "submission-text-" msg maxLength maxLines (eSolution sd)
Bootstrap.row $ Bootstrap.colMd12 $
postForm (routeOf . evPage $ maybeEvalKey) $ do
let evType = Assignment.evType $ eAssignment sd
evaluationFrame evType msg $ do
freeFormCommentTitle evType
Bootstrap.optionalTextArea (fieldName evaluationValueField) "" $ mempty
hiddenInput (fieldName assignmentKeyField) (paramValue $ eAssignmentKey sd)
hiddenInput (fieldName evCommentOnlyText) (msg $ msg_Evaluation_New_Comment "New Comment")
Bootstrap.submitButton
(fieldName saveEvalBtn) (fromString . msg $ msg_Evaluation_SaveButton "Submit")
let comments = submissionDescToCFs sd
when (not $ null comments) $ do
Bootstrap.row $ Bootstrap.colMd12 $ hr
Bootstrap.row $ Bootstrap.colMd12 $
H.h2 (fromString . msg $ msg_Comments_Title "Comments")
-- Renders the comment area where the user can place a comment
i18n msg $ commentsDiv "evaluation-comments-" tc comments
where
submissionKey = sbmSubmissionKey pd
maybeEvalKey = sbmEvaluationKey pd
evPage (Just ek) = Pages.modifyEvaluation submissionKey ek ()
evPage Nothing = Pages.evaluation submissionKey ()
maxLength = 2048
maxLines = 100
submissionIcon :: I18N -> SubmissionInfo -> H.Html
submissionIcon msg =
formatSubmissionInfo
id
mempty -- not found
(H.i ! A.class_ "glyphicon glyphicon-stop" ! A.style "color:#AAAAAA; font-size: large"
! tooltip (msg_Home_SubmissionCell_NonEvaluated "Non evaluated") $ mempty) -- non-evaluated
(bool (H.i ! A.class_ "glyphicon glyphicon-ok-circle" ! A.style "color:#AAAAAA; font-size: large"
! tooltip (msg_Home_SubmissionCell_Tests_Passed "Tests are passed") $ mempty) -- tested accepted
(H.i ! A.class_ "glyphicon glyphicon-remove-circle" ! A.style "color:#AAAAAA; font-size: large"
! tooltip (msg_Home_SubmissionCell_Tests_Failed "Tests are failed") $ mempty)) -- tested rejected
(H.i ! A.class_ "glyphicon glyphicon-thumbs-up" ! A.style "color:#00FF00; font-size: large"
! tooltip (msg_Home_SubmissionCell_Accepted "Accepted") $ mempty) -- accepted
(H.i ! A.class_ "glyphicon glyphicon-thumbs-down" ! A.style "color:#FF0000; font-size: large"
! tooltip (msg_Home_SubmissionCell_Rejected "Rejected") $ mempty) -- rejected
where
tooltip m = A.title (fromString $ msg m)
| null | https://raw.githubusercontent.com/andorp/bead/280dc9c3d5cfe1b9aac0f2f802c705ae65f02ac2/src/Bead/View/Content/Evaluation/Page.hs | haskell | # LANGUAGE OverloadedStrings #
the assignment key as well, the submission key, an evautation key, and
the time converter for the actual user.
If the evaluation key is Nothing means a new evaulation, otherwise
the modification of a given evaulation is done
Reads the evaluation result, from the parameters and determine if the content
of the text area would be a comment of the textual evaluation of the given submission.
something that depends on the key end the evaluation itself.
Impossible
When the page is dynamic the percentage spinner is hooked on the field
* View
Renders the comment area where the user can place a comment
not found
non-evaluated
tested accepted
tested rejected
accepted
rejected | module Bead.View.Content.Evaluation.Page (
evaluation
, modifyEvaluation
) where
import Control.Monad.IO.Class
import Control.Monad
import Control.Arrow ((&&&))
import Data.Maybe (fromMaybe)
import Data.Monoid
import Text.Printf
import Data.String (fromString)
import Data.Time (getCurrentTime)
import qualified Bead.Controller.Pages as Pages
import Bead.Controller.UserStories (submissionDescription)
import Bead.Domain.Entity.Assignment as Assignment
import Bead.Domain.Evaluation
import Bead.View.Content as C
import Bead.View.Content.Bootstrap as Bootstrap
import Bead.View.Content.Comments
import Bead.View.Content.SeeMore
import Bead.View.Content.SubmissionTable (formatSubmissionInfo)
import Bead.View.Content.VisualConstants
import Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
evaluation :: ViewModifyHandler
evaluation = ViewModifyHandler evaluationPage evaluationPostHandler
modifyEvaluation :: ViewModifyHandler
modifyEvaluation = ViewModifyHandler modifyEvaluationPage modifyEvaluationPost
Page Data consitits of a description for a submission key , which contains
data PageData = PageData {
sbmDesc :: SubmissionDesc
, sbmSubmissionKey :: SubmissionKey
, sbmEvaluationKey :: Maybe EvaluationKey
, userTime :: UserTimeConverter
}
evaluationPage :: GETContentHandler
evaluationPage = do
sk <- getParameter submissionKeyPrm
sd <- userStory (submissionDescription sk)
tc <- userTimeZoneToLocalTimeConverter
let pageData = PageData {
sbmDesc = sd
, sbmSubmissionKey = sk
, sbmEvaluationKey = Nothing
, userTime = tc
}
return $ evaluationContent pageData
modifyEvaluationPage :: GETContentHandler
modifyEvaluationPage = do
sk <- getParameter submissionKeyPrm
ek <- getParameter evaluationKeyPrm
sd <- userStory (submissionDescription sk)
tc <- userTimeZoneToLocalTimeConverter
let pageData = PageData {
sbmDesc = sd
, sbmSubmissionKey = sk
, sbmEvaluationKey = Just ek
, userTime = tc
}
return $ evaluationContent pageData
evalConfigParam = evalConfigParameter (fieldName evaluationConfigField)
freeFormEvaluationParam = stringParameter (fieldName evaluationFreeFormField) "Free format evaluation"
The result of the computation is a UserActon which is a CreateComment or
abstractEvaluationPostHandler
:: ContentHandler key
-> (key -> C.Evaluation -> UserAction)
-> POSTContentHandler
abstractEvaluationPostHandler getEvKeyParameter evCommand = do
sk <- getParameter submissionKeyPrm
commentText <- getParameter evaluationValuePrm
config <- getParameter evalConfigParam
commentOrResult <-
evConfigCata
(getJSONParam (fieldName evaluationResultField) "No evaluation can be found.")
(\_ -> do
percentage <- getParameterWithDefault 0 evaluationPercentagePrm
commentOnly <- getParameter evaluationCommentOnlyPrm
return $
if commentOnly
then EvCmtComment
else EvCmtResult $ percentageResult (fromIntegral percentage / 100))
(do freeForm <- getParameter freeFormEvaluationParam
return $ if (null freeForm)
then EvCmtComment
else EvCmtResult $ freeFormResult freeForm)
config
withEvalOrComment commentOrResult
(case null commentText of
True -> return $
ErrorMessage (msg_Evaluation_EmptyCommentAndFreeFormResult "Neither comment nor evaluation was given!")
False -> do
(mrole,mname) <- (getRole &&& getName) <$> userState
let uname = fromMaybe "???" mname
case mrole of
Just role -> do
now <- liftIO $ getCurrentTime
return $ SubmissionComment sk Comment {
comment = commentText
, commentAuthor = uname
, commentDate = now
, commentType = roleToCommentType role
})
(\result -> do
key <- getEvKeyParameter
let e = C.Evaluation {
evaluationResult = result
, writtenEvaluation = commentText
}
return $ evCommand key e)
where
roleToCommentType = roleCata
CT_Student
CT_GroupAdmin
CT_CourseAdmin
CT_Admin
getRole = userStateCata
Nothing
Nothing
Nothing
(\_username _uid _page _name role _token _timezone _status -> Just role)
getName = userStateCata
Nothing
Nothing
Nothing
(\_username _uid _page name _role _token _timezone _status -> Just name)
evaluationPostHandler :: POSTContentHandler
evaluationPostHandler = abstractEvaluationPostHandler (getParameter submissionKeyPrm) NewEvaluation
modifyEvaluationPost :: POSTContentHandler
modifyEvaluationPost = abstractEvaluationPostHandler (getParameter evaluationKeyPrm) ModifyEvaluation
evaluationFrame :: EvConfig -> I18N -> Html -> Html
evaluationFrame evConfig msg content = do
hiddenInput (fieldName evalConfigParam) (encodeToFay' "inputEvalType" evConfig)
withEvConfig evConfig
(do content
Bootstrap.formGroup $ evaluationDiv $
Bootstrap.radioButtonGroup (fieldName evaluationResultField) $
[ (True, encodeToFay' "inputEvalResult" EvCmtComment , msg $ msg_Evaluation_New_Comment "New Comment")
, (False, encodeToFay' "inputEvalResult" $ binary Passed, msg $ msg_Evaluation_Accepted "Accepted")
, (False, encodeToFay' "inputEvalResult" $ binary Failed, msg $ msg_Evaluation_Rejected "Rejected")
])
(\_ ->
do content
Bootstrap.formGroup . evaluationDiv $ do
Bootstrap.colMd4 $
Bootstrap.radioButtonGroup (fieldName evaluationCommentOnlyPrm) $
[ (True, show True, msg $ msg_Evaluation_New_Comment "New Comment")
, (False, show False, msg $ msg_Evaluation_Percentage "Percentage: ")
]
Bootstrap.colMd4 $
H.input ! A.name (fieldName evaluationPercentagePrm) ! A.type_ "number"
! A.min (fromString $ show 0) ! A.max (fromString $ show 100)
! A.value (fromString $ show 0))
(do Bootstrap.optionalTextInput (fieldName freeFormEvaluationParam) (msg $ msg_Evaluation_FreeFormEvaluation "Evaluation") ""
H.p . fromString $ printf (msg $ msg_Evaluation_FreeForm_Information $ unwords
[ "Note that this text will be used everywhere as the evaluation itself. Hence it is recommended to keep"
, "the length of the text under size %d, otherwise it may not be directly shown." ]) displayableFreeFormResultLength
content)
where
binary = EvCmtResult . binaryResult
evaluationDiv = withEvConfig
evConfig
(H.div)
(const $ H.div ! A.id (fieldName evaluationPercentageDiv))
(H.div)
evaluationContent :: PageData -> IHtml
evaluationContent pd = do
let sd = sbmDesc pd
tc = userTime pd
msg <- getI18N
let freeFormCommentTitle = evConfigCata
(return ())
(const $ return ())
(Bootstrap.labelFor (fieldName evaluationValueField) (msg $ msg_Evaluation_FreeFormComment "Comment"))
return $ do
Bootstrap.row $ Bootstrap.colMd12 $
H.p $ fromString . msg $ msg_Evaluation_Info $ concat
[ "It is not mandatory to evaluate the submission, it is allowed to comment on it only. "
, "The student may answer the comments by further comments. The submission may be "
, "evaluated many times."
]
Bootstrap.row $ Bootstrap.colMd12 $ Bootstrap.table $
H.tbody $ do
let aName = assignmentCata (\name _ _ _ _ _ -> name)
(msg $ msg_Evaluation_Course "Course: ") .|. (fromString . eCourse $ sd)
(msg $ msg_Evaluation_Assignment "Assignment: ") .|. (fromString . aName . eAssignment $ sd)
maybe
mempty
(\group -> (msg $ msg_Evaluation_Group "Group: ") .|. (fromString group))
(eGroup sd)
(msg $ msg_Evaluation_Student "Student: ") .|. (fromString . eStudent $ sd)
(msg $ msg_Evaluation_Username "Username: ") .|. (fromString . uid Prelude.id $ eUid sd)
(msg $ msg_Evaluation_SubmissionDate "Date of submission: ") .|. (fromString . showDate . tc $ eSubmissionDate sd)
(msg $ msg_Evaluation_SubmissionInfo "State: ") .|. submissionIcon msg (eSubmissionInfo sd)
Bootstrap.row $ Bootstrap.colMd12 $ do
let downloadSubmissionButton =
Bootstrap.buttonLink
(routeOf $ Pages.getSubmission submissionKey ())
(msg $ msg_Evaluation_Submitted_Solution_Zip_Link "Download")
h2 $ fromString $ msg $ msg_Evaluation_Submitted_Solution "Submission"
if (Assignment.isZippedSubmissions . Assignment.aspects . eAssignment $ sd)
then do
H.p $ fromString . msg $ msg_Evaluation_Submitted_Solution_Zip_Info $ mconcat
[ "The submission was uploaded as a compressed file so it could not be displayed verbatim. "
, "But it may be downloaded as a file by clicking on the link."
]
downloadSubmissionButton
else do
H.p $ fromString . msg $ msg_Evaluation_Submitted_Solution_Text_Info $
"The submission may be downloaded as a plain text file by clicking on the link."
downloadSubmissionButton
H.br
H.div # submissionTextDiv $ seeMoreSubmission "submission-text-" msg maxLength maxLines (eSolution sd)
Bootstrap.row $ Bootstrap.colMd12 $
postForm (routeOf . evPage $ maybeEvalKey) $ do
let evType = Assignment.evType $ eAssignment sd
evaluationFrame evType msg $ do
freeFormCommentTitle evType
Bootstrap.optionalTextArea (fieldName evaluationValueField) "" $ mempty
hiddenInput (fieldName assignmentKeyField) (paramValue $ eAssignmentKey sd)
hiddenInput (fieldName evCommentOnlyText) (msg $ msg_Evaluation_New_Comment "New Comment")
Bootstrap.submitButton
(fieldName saveEvalBtn) (fromString . msg $ msg_Evaluation_SaveButton "Submit")
let comments = submissionDescToCFs sd
when (not $ null comments) $ do
Bootstrap.row $ Bootstrap.colMd12 $ hr
Bootstrap.row $ Bootstrap.colMd12 $
H.h2 (fromString . msg $ msg_Comments_Title "Comments")
i18n msg $ commentsDiv "evaluation-comments-" tc comments
where
submissionKey = sbmSubmissionKey pd
maybeEvalKey = sbmEvaluationKey pd
evPage (Just ek) = Pages.modifyEvaluation submissionKey ek ()
evPage Nothing = Pages.evaluation submissionKey ()
maxLength = 2048
maxLines = 100
submissionIcon :: I18N -> SubmissionInfo -> H.Html
submissionIcon msg =
formatSubmissionInfo
id
(H.i ! A.class_ "glyphicon glyphicon-stop" ! A.style "color:#AAAAAA; font-size: large"
(bool (H.i ! A.class_ "glyphicon glyphicon-ok-circle" ! A.style "color:#AAAAAA; font-size: large"
(H.i ! A.class_ "glyphicon glyphicon-remove-circle" ! A.style "color:#AAAAAA; font-size: large"
(H.i ! A.class_ "glyphicon glyphicon-thumbs-up" ! A.style "color:#00FF00; font-size: large"
(H.i ! A.class_ "glyphicon glyphicon-thumbs-down" ! A.style "color:#FF0000; font-size: large"
where
tooltip m = A.title (fromString $ msg m)
|
1cbf408cce94bbd36b37eb9f5b7f5fb6e361bdfb42998e463d49820a5ed0ad82 | JacquesCarette/Drasil | Choices.hs | | Defines the design language for SCS .
module Language.Drasil.Choices (
Choices(..), Architecture (..), makeArchit, DataInfo(..), makeData, Maps(..),
makeMaps, spaceToCodeType, Constraints(..), makeConstraints, ODE(..), makeODE,
DocConfig(..), makeDocConfig, LogConfig(..), makeLogConfig, OptionalFeatures(..),
makeOptFeats, ExtLib(..), Modularity(..), InputModule(..), inputModule, Structure(..),
ConstantStructure(..), ConstantRepr(..), ConceptMatchMap, MatchedConceptMap,
CodeConcept(..), matchConcepts, SpaceMatch, matchSpaces, ImplementationType(..),
ConstraintBehaviour(..), Comments(..), Verbosity(..), Visibility(..),
Logging(..), AuxFile(..), getSampleData, hasSampleInput, defaultChoices,
choicesSent, showChs) where
import Language.Drasil hiding (None)
import Language.Drasil.Code.Code (spaceToCodeType)
import Language.Drasil.Code.Lang (Lang(..))
import Language.Drasil.Data.ODEInfo (ODEInfo)
import Language.Drasil.Data.ODELibPckg (ODELibPckg)
import GOOL.Drasil (CodeType)
import Control.Lens ((^.))
import Data.Map (Map, fromList)
-- | The instruction indicates how the generated program should be written down.
-- Full details of Choices documentation -Code-Generator
data Choices = Choices {
-- | Target languages.
-- Choosing multiple means program will be generated in multiple languages.
lang :: [Lang],
-- | Architecture of the program, include modularity and implementation type
architecture :: Architecture,
-- | Data structure and represent
dataInfo :: DataInfo,
| Maps for ' concepts ' to ' code concepts ' or ' Space ' to a ' CodeType
maps :: Maps,
| Setting for that can be added to the program or left it out
optFeats :: OptionalFeatures,
-- | Constraint violation behaviour. Exception or Warning.
srsConstraints :: Constraints,
-- | List of external libraries what to utilize
extLibs :: [ExtLib]
}
-- | Renders program choices as a 'Sentence'.
class RenderChoices a where
showChs :: a -> Sentence
showChsList :: [a] -> Sentence
showChsList lst = foldlSent_ (map showChs lst)
-- | Architecture of a program
data Architecture = Archt {
-- | How the program should be modularized.
modularity :: Modularity,
-- | Implementation type, program or library.
impType :: ImplementationType
}
-- | Constructor to create a Architecture
makeArchit :: Modularity -> ImplementationType -> Architecture
makeArchit = Archt
-- | Modularity of a program.
data Modularity = Modular InputModule -- ^ Different modules. For controller,
-- input, calculations, output.
^ All generated code is in one module / file .
-- | Renders the modularity of a program.
instance RenderChoices Modularity where
showChs Unmodular = S "Unmodular"
showChs (Modular Combined) = S "Modular Combined"
showChs (Modular Separated)= S "Modular Separated"
-- | Options for input modules.
^ Input - related functions combined in one module .
| Separated -- ^ Input-related functions each in own module.
-- | Determines whether there is a 'Combined' input module or many 'Separated' input
-- modules, based on a 'Choices' structure. An 'Unmodular' design implicitly means
-- that input modules are 'Combined'.
inputModule :: Choices -> InputModule
inputModule c = inputModule' $ modularity $ architecture c
where inputModule' Unmodular = Combined
inputModule' (Modular im) = im
-- | Program implementation options.
^ Generated code does not include Controller .
^ Generated code includes Controller .
-- | Renders options for program implementation.
instance RenderChoices ImplementationType where
showChs Library = S "Library"
showChs Program = S "Program"
-- | Data of a program - how information should be encoded.
data DataInfo = DataInfo {
-- | Structure of inputs (bundled or not).
inputStructure :: Structure,
-- | Structure of constants (inlined or bundled or not, or stored with inputs).
constStructure :: ConstantStructure,
-- | Representation of constants (as variables or as constants).
constRepr :: ConstantRepr
}
| Constructor to create a DataInfo
makeData :: Structure -> ConstantStructure -> ConstantRepr -> DataInfo
makeData = DataInfo
-- | Variable structure options.
data Structure = Unbundled -- ^ Individual variables
| Bundled -- ^ Variables bundled in a class
-- | Renders the structure of variables in a program.
instance RenderChoices Structure where
showChs Unbundled = S "Unbundled"
showChs Bundled = S "Bundled"
-- | Constants options.
data ConstantStructure = Inline -- ^ Inline values for constants.
| WithInputs -- ^ Store constants with inputs.
| Store Structure -- ^ Store constants separately from
-- inputs, whether bundled or unbundled.
-- | Renders the structure of constants in a program.
instance RenderChoices ConstantStructure where
showChs Inline = S "Inline"
showChs WithInputs = S "WithInputs"
showChs (Store Unbundled) = S "Store Unbundled"
showChs (Store Bundled) = S "Store Bundled"
-- | Options for representing constants in a program.
data ConstantRepr = Var -- ^ Constants represented as regular variables.
| Const -- ^ Use target language's mechanism for defining constants.
-- | Renders the representation of constants in a program.
instance RenderChoices ConstantRepr where
showChs Var = S "Var"
showChs Const = S "Const"
-- | Maps for Concepts and Space
data Maps = Maps {
| Map of ' UID 's for concepts to code concepts .
-- Matching a 'UID' to a code concept means the code concept should be used
-- instead of the chunk associated with the 'UID'.
conceptMatch :: ConceptMatchMap,
-- | Map of 'Space's to 'CodeType's
-- Matching a 'Space' to a 'CodeType' means values of the 'Space' should have that
-- 'CodeType' in the generated code.
spaceMatch :: SpaceMatch
}
-- | Constructor to create a Maps
makeMaps :: ConceptMatchMap -> SpaceMatch -> Maps
makeMaps = Maps
| Specifies matches between chunks and ' CodeConcept 's , meaning the target
-- language's pre-existing definition of the concept should be used instead of
-- defining a new variable for the concept in the generated code.
[ ' CodeConcept ' ] is preferentially - ordered , generator concretizes a
-- 'ConceptMatchMap' to a 'MatchedConceptMap' by checking user's other choices.
type ConceptMatchMap = Map UID [CodeConcept]
| Concrete version of ConceptMatchMap dependent on user choices .
type MatchedConceptMap = Map UID CodeConcept
Currently we only support one code concept , more will be added later
-- | Code concepts. For now, just pi.
data CodeConcept = Pi deriving Eq
| Renders ' CodeConcept 's .
instance RenderChoices CodeConcept where
showChs Pi = S "Pi"
| Builds a ' ConceptMatchMap ' from an association list of chunks and ' CodeConcepts ' .
matchConcepts :: (HasUID c) => [(c, [CodeConcept])] -> ConceptMatchMap
matchConcepts = fromList . map (\(cnc,cdc) -> (cnc ^. uid, cdc))
-- | Specifies which 'CodeType' should be used to represent each mathematical
-- 'Space'. ['CodeType'] is preferentially-ordered, first 'CodeType' that does not
-- conflict with other choices will be selected.
type SpaceMatch = Space -> [CodeType]
-- | Updates a 'SpaceMatch' by matching the given 'Space' with the given ['CodeType'].
matchSpace :: Space -> [CodeType] -> SpaceMatch -> SpaceMatch
matchSpace _ [] _ = error "Must match each Space to at least one CodeType"
matchSpace s ts sm = \sp -> if sp == s then ts else sm sp
| Builds a ' SpaceMatch ' from an association list of ' Spaces ' and ' ' .
matchSpaces :: [(Space, [CodeType])] -> SpaceMatch
matchSpaces spMtchs = matchSpaces' spMtchs spaceToCodeType
where matchSpaces' ((s,ct):sms) sm = matchSpaces' sms $ matchSpace s ct sm
matchSpaces' [] sm = sm
-- Optional Features can be added to the program or left it out
data OptionalFeatures = OptFeats{
docConfig :: DocConfig,
logConfig :: LogConfig,
-- | Turns generation of different auxiliary (non-source-code) files on or off.
auxFiles :: [AuxFile]
}
-- | Constructor to create a OptionalFeatures
makeOptFeats :: DocConfig -> LogConfig -> [AuxFile] -> OptionalFeatures
makeOptFeats = OptFeats
| Configuration for Doxygen documentation
data DocConfig = DocConfig {
| Turns comments for different code structures on or off .
comments :: [Comments],
| Standard output from running Doxygen : verbose or quiet ?
doxVerbosity :: Verbosity,
| Turns date field on or off in the generated module - level Doxygen comments .
dates :: Visibility
}
| Constructor to create a
makeDocConfig :: [Comments] -> Verbosity -> Visibility -> DocConfig
makeDocConfig = DocConfig
-- | Comment implementation options.
data Comments = CommentFunc -- ^ Function/method-level comments.
| CommentClass -- ^ Class-level comments.
| CommentMod -- ^ File/Module-level comments.
deriving Eq
-- | Renders options for implementation of comments.
instance RenderChoices Comments where
showChs CommentFunc = S "CommentFunc"
showChs CommentClass = S "CommentClass"
showChs CommentMod = S "CommentMod"
| Doxygen file verbosity options .
data Verbosity = Verbose | Quiet
| Renders options for doxygen verbosity .
instance RenderChoices Verbosity where
showChs Verbose = S "Verbose"
showChs Quiet = S "Quiet"
| Doxygen date - field visibility options .
data Visibility = Show
| Hide
| Renders options for doxygen date - field visibility .
instance RenderChoices Visibility where
showChs Show = S "Show"
showChs Hide = S "Hide"
-- | Log Configuration
data LogConfig = LogConfig {
-- | Turns different forms of logging on or off.
logging :: [Logging],
-- | Name of log file.
logFile :: FilePath
}
| Constructor to create a LogConfig
makeLogConfig :: [Logging] -> FilePath -> LogConfig
makeLogConfig = LogConfig
-- | Logging options for function calls and variable assignments.
Eq instances required for Logging and Comments because generator needs to
-- check membership of these elements in lists
data Logging = LogFunc -- ^ Log messages generated for function calls.
| LogVar -- ^ Log messages generated for variable assignments.
deriving Eq
-- | Renders options for program logging.
instance RenderChoices Logging where
showChs LogFunc = S "LogFunc"
showChs LogVar = S "LogVar"
| Currently we only support two kind of auxiliary files : sample input file , readme .
-- To generate a sample input file compatible with the generated program,
' FilePath ' is the path to the user - provided file containing a sample set of input data .
data AuxFile = SampleInput FilePath
| ReadME
deriving Eq
-- | Renders options for auxiliary file generation.
instance RenderChoices AuxFile where
showChs (SampleInput fp) = S "SampleInput" +:+ S fp
showChs ReadME = S "ReadME"
-- | Gets the file path to a sample input data set from a 'Choices' structure, if
-- the user chose to generate a sample input file.
getSampleData :: Choices -> Maybe FilePath
getSampleData chs = getSampleData' (auxFiles $ optFeats chs)
where getSampleData' [] = Nothing
getSampleData' (SampleInput fp:_) = Just fp
getSampleData' (_:xs) = getSampleData' xs
| Predicate that returns true if the list of ' AuxFile 's includes a ' SampleInput ' .
hasSampleInput :: [AuxFile] -> Bool
hasSampleInput [] = False
hasSampleInput (SampleInput _:_) = True
hasSampleInput (_:xs) = hasSampleInput xs
-- | SRS Constraints
data Constraints = Constraints{
onSfwrConstraint :: ConstraintBehaviour,
onPhysConstraint :: ConstraintBehaviour
}
-- | Constructor to create a Constraints
makeConstraints :: ConstraintBehaviour -> ConstraintBehaviour -> Constraints
makeConstraints = Constraints
-- | Constraint behaviour options within program.
data ConstraintBehaviour = Warning -- ^ Print warning when constraint violated.
| Exception -- ^ Throw exception when constraint violated.
-- | Renders options for program implementation.
instance RenderChoices ConstraintBehaviour where
showChs Warning = S "Warning"
showChs Exception = S "Exception"
-- | External Library Options
newtype ExtLib = Math ODE
-- | All Information needed to solve an ODE
data ODE = ODE{
FIXME : ODEInfos should be automatically built from Instance models when
-- needed, but we can't do that yet so I'm passing it through Choices instead.
This choice should really just be for an ODEMethod
-- | ODE information.
odeInfo :: [ODEInfo],
-- | Preferentially-ordered list ODE libraries to try.
odeLib :: [ODELibPckg]
}
-- | Constructor to create an ODE
makeODE :: [ODEInfo] -> [ODELibPckg] -> ODE
makeODE = ODE
-- | Default choices to be used as the base from which design specifications
-- can be built.
defaultChoices :: Choices
defaultChoices = Choices {
lang = [Python],
architecture = makeArchit (Modular Combined) Program,
dataInfo = makeData Bundled Inline Const,
maps = makeMaps
(matchConcepts ([] :: [(SimpleQDef, [CodeConcept])]))
spaceToCodeType,
optFeats = makeOptFeats
(makeDocConfig [] Verbose Hide)
(makeLogConfig [] "log.txt")
[ReadME],
srsConstraints = makeConstraints Exception Warning,
extLibs = []
}
-- | Renders 'Choices' as 'Sentence's.
choicesSent :: Choices -> [Sentence]
choicesSent chs = map chsFieldSent [
(S "Languages", foldlSent_ $ map (S . show) $ lang chs),
(S "Modularity", showChs $ modularity $ architecture chs),
(S "Input Structure", showChs $ inputStructure $ dataInfo chs),
(S "Constant Structure", showChs $ constStructure $ dataInfo chs),
(S "Constant Representation", showChs $ constRepr $ dataInfo chs),
(S "Implementation Type", showChs $ impType $ architecture chs),
(S "Software Constraint Behaviour", showChs $ onSfwrConstraint $ srsConstraints chs),
(S "Physical Constraint Behaviour", showChs $ onPhysConstraint $ srsConstraints chs),
(S "Comments", showChsList $ comments $ docConfig $ optFeats chs),
(S "Dox Verbosity", showChs $ doxVerbosity $ docConfig $ optFeats chs),
(S "Dates", showChs $ dates $ docConfig $ optFeats chs),
(S "Log File Name", S $ logFile $ logConfig $ optFeats chs),
(S "Logging", showChsList $ logging $ logConfig $ optFeats chs),
(S "Auxiliary Files", showChsList $ auxFiles $ optFeats chs)
]
-- | Helper to combine pairs of 'Sentence's for rendering 'Choices'.
chsFieldSent :: (Sentence, Sentence) -> Sentence
chsFieldSent (rec, chc) = rec +:+ S "selected as" +:+. chc | null | https://raw.githubusercontent.com/JacquesCarette/Drasil/ec388a0e1690c19f45dd5c166a2796870147edf0/code/drasil-code/lib/Language/Drasil/Choices.hs | haskell | | The instruction indicates how the generated program should be written down.
Full details of Choices documentation -Code-Generator
| Target languages.
Choosing multiple means program will be generated in multiple languages.
| Architecture of the program, include modularity and implementation type
| Data structure and represent
| Constraint violation behaviour. Exception or Warning.
| List of external libraries what to utilize
| Renders program choices as a 'Sentence'.
| Architecture of a program
| How the program should be modularized.
| Implementation type, program or library.
| Constructor to create a Architecture
| Modularity of a program.
^ Different modules. For controller,
input, calculations, output.
| Renders the modularity of a program.
| Options for input modules.
^ Input-related functions each in own module.
| Determines whether there is a 'Combined' input module or many 'Separated' input
modules, based on a 'Choices' structure. An 'Unmodular' design implicitly means
that input modules are 'Combined'.
| Program implementation options.
| Renders options for program implementation.
| Data of a program - how information should be encoded.
| Structure of inputs (bundled or not).
| Structure of constants (inlined or bundled or not, or stored with inputs).
| Representation of constants (as variables or as constants).
| Variable structure options.
^ Individual variables
^ Variables bundled in a class
| Renders the structure of variables in a program.
| Constants options.
^ Inline values for constants.
^ Store constants with inputs.
^ Store constants separately from
inputs, whether bundled or unbundled.
| Renders the structure of constants in a program.
| Options for representing constants in a program.
^ Constants represented as regular variables.
^ Use target language's mechanism for defining constants.
| Renders the representation of constants in a program.
| Maps for Concepts and Space
Matching a 'UID' to a code concept means the code concept should be used
instead of the chunk associated with the 'UID'.
| Map of 'Space's to 'CodeType's
Matching a 'Space' to a 'CodeType' means values of the 'Space' should have that
'CodeType' in the generated code.
| Constructor to create a Maps
language's pre-existing definition of the concept should be used instead of
defining a new variable for the concept in the generated code.
'ConceptMatchMap' to a 'MatchedConceptMap' by checking user's other choices.
| Code concepts. For now, just pi.
| Specifies which 'CodeType' should be used to represent each mathematical
'Space'. ['CodeType'] is preferentially-ordered, first 'CodeType' that does not
conflict with other choices will be selected.
| Updates a 'SpaceMatch' by matching the given 'Space' with the given ['CodeType'].
Optional Features can be added to the program or left it out
| Turns generation of different auxiliary (non-source-code) files on or off.
| Constructor to create a OptionalFeatures
| Comment implementation options.
^ Function/method-level comments.
^ Class-level comments.
^ File/Module-level comments.
| Renders options for implementation of comments.
| Log Configuration
| Turns different forms of logging on or off.
| Name of log file.
| Logging options for function calls and variable assignments.
check membership of these elements in lists
^ Log messages generated for function calls.
^ Log messages generated for variable assignments.
| Renders options for program logging.
To generate a sample input file compatible with the generated program,
| Renders options for auxiliary file generation.
| Gets the file path to a sample input data set from a 'Choices' structure, if
the user chose to generate a sample input file.
| SRS Constraints
| Constructor to create a Constraints
| Constraint behaviour options within program.
^ Print warning when constraint violated.
^ Throw exception when constraint violated.
| Renders options for program implementation.
| External Library Options
| All Information needed to solve an ODE
needed, but we can't do that yet so I'm passing it through Choices instead.
| ODE information.
| Preferentially-ordered list ODE libraries to try.
| Constructor to create an ODE
| Default choices to be used as the base from which design specifications
can be built.
| Renders 'Choices' as 'Sentence's.
| Helper to combine pairs of 'Sentence's for rendering 'Choices'. | | Defines the design language for SCS .
module Language.Drasil.Choices (
Choices(..), Architecture (..), makeArchit, DataInfo(..), makeData, Maps(..),
makeMaps, spaceToCodeType, Constraints(..), makeConstraints, ODE(..), makeODE,
DocConfig(..), makeDocConfig, LogConfig(..), makeLogConfig, OptionalFeatures(..),
makeOptFeats, ExtLib(..), Modularity(..), InputModule(..), inputModule, Structure(..),
ConstantStructure(..), ConstantRepr(..), ConceptMatchMap, MatchedConceptMap,
CodeConcept(..), matchConcepts, SpaceMatch, matchSpaces, ImplementationType(..),
ConstraintBehaviour(..), Comments(..), Verbosity(..), Visibility(..),
Logging(..), AuxFile(..), getSampleData, hasSampleInput, defaultChoices,
choicesSent, showChs) where
import Language.Drasil hiding (None)
import Language.Drasil.Code.Code (spaceToCodeType)
import Language.Drasil.Code.Lang (Lang(..))
import Language.Drasil.Data.ODEInfo (ODEInfo)
import Language.Drasil.Data.ODELibPckg (ODELibPckg)
import GOOL.Drasil (CodeType)
import Control.Lens ((^.))
import Data.Map (Map, fromList)
data Choices = Choices {
lang :: [Lang],
architecture :: Architecture,
dataInfo :: DataInfo,
| Maps for ' concepts ' to ' code concepts ' or ' Space ' to a ' CodeType
maps :: Maps,
| Setting for that can be added to the program or left it out
optFeats :: OptionalFeatures,
srsConstraints :: Constraints,
extLibs :: [ExtLib]
}
class RenderChoices a where
showChs :: a -> Sentence
showChsList :: [a] -> Sentence
showChsList lst = foldlSent_ (map showChs lst)
data Architecture = Archt {
modularity :: Modularity,
impType :: ImplementationType
}
makeArchit :: Modularity -> ImplementationType -> Architecture
makeArchit = Archt
^ All generated code is in one module / file .
instance RenderChoices Modularity where
showChs Unmodular = S "Unmodular"
showChs (Modular Combined) = S "Modular Combined"
showChs (Modular Separated)= S "Modular Separated"
^ Input - related functions combined in one module .
inputModule :: Choices -> InputModule
inputModule c = inputModule' $ modularity $ architecture c
where inputModule' Unmodular = Combined
inputModule' (Modular im) = im
^ Generated code does not include Controller .
^ Generated code includes Controller .
instance RenderChoices ImplementationType where
showChs Library = S "Library"
showChs Program = S "Program"
data DataInfo = DataInfo {
inputStructure :: Structure,
constStructure :: ConstantStructure,
constRepr :: ConstantRepr
}
| Constructor to create a DataInfo
makeData :: Structure -> ConstantStructure -> ConstantRepr -> DataInfo
makeData = DataInfo
instance RenderChoices Structure where
showChs Unbundled = S "Unbundled"
showChs Bundled = S "Bundled"
instance RenderChoices ConstantStructure where
showChs Inline = S "Inline"
showChs WithInputs = S "WithInputs"
showChs (Store Unbundled) = S "Store Unbundled"
showChs (Store Bundled) = S "Store Bundled"
instance RenderChoices ConstantRepr where
showChs Var = S "Var"
showChs Const = S "Const"
data Maps = Maps {
| Map of ' UID 's for concepts to code concepts .
conceptMatch :: ConceptMatchMap,
spaceMatch :: SpaceMatch
}
makeMaps :: ConceptMatchMap -> SpaceMatch -> Maps
makeMaps = Maps
| Specifies matches between chunks and ' CodeConcept 's , meaning the target
[ ' CodeConcept ' ] is preferentially - ordered , generator concretizes a
type ConceptMatchMap = Map UID [CodeConcept]
| Concrete version of ConceptMatchMap dependent on user choices .
type MatchedConceptMap = Map UID CodeConcept
Currently we only support one code concept , more will be added later
data CodeConcept = Pi deriving Eq
| Renders ' CodeConcept 's .
instance RenderChoices CodeConcept where
showChs Pi = S "Pi"
| Builds a ' ConceptMatchMap ' from an association list of chunks and ' CodeConcepts ' .
matchConcepts :: (HasUID c) => [(c, [CodeConcept])] -> ConceptMatchMap
matchConcepts = fromList . map (\(cnc,cdc) -> (cnc ^. uid, cdc))
type SpaceMatch = Space -> [CodeType]
matchSpace :: Space -> [CodeType] -> SpaceMatch -> SpaceMatch
matchSpace _ [] _ = error "Must match each Space to at least one CodeType"
matchSpace s ts sm = \sp -> if sp == s then ts else sm sp
| Builds a ' SpaceMatch ' from an association list of ' Spaces ' and ' ' .
matchSpaces :: [(Space, [CodeType])] -> SpaceMatch
matchSpaces spMtchs = matchSpaces' spMtchs spaceToCodeType
where matchSpaces' ((s,ct):sms) sm = matchSpaces' sms $ matchSpace s ct sm
matchSpaces' [] sm = sm
data OptionalFeatures = OptFeats{
docConfig :: DocConfig,
logConfig :: LogConfig,
auxFiles :: [AuxFile]
}
makeOptFeats :: DocConfig -> LogConfig -> [AuxFile] -> OptionalFeatures
makeOptFeats = OptFeats
| Configuration for Doxygen documentation
data DocConfig = DocConfig {
| Turns comments for different code structures on or off .
comments :: [Comments],
| Standard output from running Doxygen : verbose or quiet ?
doxVerbosity :: Verbosity,
| Turns date field on or off in the generated module - level Doxygen comments .
dates :: Visibility
}
| Constructor to create a
makeDocConfig :: [Comments] -> Verbosity -> Visibility -> DocConfig
makeDocConfig = DocConfig
deriving Eq
instance RenderChoices Comments where
showChs CommentFunc = S "CommentFunc"
showChs CommentClass = S "CommentClass"
showChs CommentMod = S "CommentMod"
| Doxygen file verbosity options .
data Verbosity = Verbose | Quiet
| Renders options for doxygen verbosity .
instance RenderChoices Verbosity where
showChs Verbose = S "Verbose"
showChs Quiet = S "Quiet"
| Doxygen date - field visibility options .
data Visibility = Show
| Hide
| Renders options for doxygen date - field visibility .
instance RenderChoices Visibility where
showChs Show = S "Show"
showChs Hide = S "Hide"
data LogConfig = LogConfig {
logging :: [Logging],
logFile :: FilePath
}
| Constructor to create a LogConfig
makeLogConfig :: [Logging] -> FilePath -> LogConfig
makeLogConfig = LogConfig
Eq instances required for Logging and Comments because generator needs to
deriving Eq
instance RenderChoices Logging where
showChs LogFunc = S "LogFunc"
showChs LogVar = S "LogVar"
| Currently we only support two kind of auxiliary files : sample input file , readme .
' FilePath ' is the path to the user - provided file containing a sample set of input data .
data AuxFile = SampleInput FilePath
| ReadME
deriving Eq
instance RenderChoices AuxFile where
showChs (SampleInput fp) = S "SampleInput" +:+ S fp
showChs ReadME = S "ReadME"
getSampleData :: Choices -> Maybe FilePath
getSampleData chs = getSampleData' (auxFiles $ optFeats chs)
where getSampleData' [] = Nothing
getSampleData' (SampleInput fp:_) = Just fp
getSampleData' (_:xs) = getSampleData' xs
| Predicate that returns true if the list of ' AuxFile 's includes a ' SampleInput ' .
hasSampleInput :: [AuxFile] -> Bool
hasSampleInput [] = False
hasSampleInput (SampleInput _:_) = True
hasSampleInput (_:xs) = hasSampleInput xs
data Constraints = Constraints{
onSfwrConstraint :: ConstraintBehaviour,
onPhysConstraint :: ConstraintBehaviour
}
makeConstraints :: ConstraintBehaviour -> ConstraintBehaviour -> Constraints
makeConstraints = Constraints
instance RenderChoices ConstraintBehaviour where
showChs Warning = S "Warning"
showChs Exception = S "Exception"
newtype ExtLib = Math ODE
data ODE = ODE{
FIXME : ODEInfos should be automatically built from Instance models when
This choice should really just be for an ODEMethod
odeInfo :: [ODEInfo],
odeLib :: [ODELibPckg]
}
makeODE :: [ODEInfo] -> [ODELibPckg] -> ODE
makeODE = ODE
defaultChoices :: Choices
defaultChoices = Choices {
lang = [Python],
architecture = makeArchit (Modular Combined) Program,
dataInfo = makeData Bundled Inline Const,
maps = makeMaps
(matchConcepts ([] :: [(SimpleQDef, [CodeConcept])]))
spaceToCodeType,
optFeats = makeOptFeats
(makeDocConfig [] Verbose Hide)
(makeLogConfig [] "log.txt")
[ReadME],
srsConstraints = makeConstraints Exception Warning,
extLibs = []
}
choicesSent :: Choices -> [Sentence]
choicesSent chs = map chsFieldSent [
(S "Languages", foldlSent_ $ map (S . show) $ lang chs),
(S "Modularity", showChs $ modularity $ architecture chs),
(S "Input Structure", showChs $ inputStructure $ dataInfo chs),
(S "Constant Structure", showChs $ constStructure $ dataInfo chs),
(S "Constant Representation", showChs $ constRepr $ dataInfo chs),
(S "Implementation Type", showChs $ impType $ architecture chs),
(S "Software Constraint Behaviour", showChs $ onSfwrConstraint $ srsConstraints chs),
(S "Physical Constraint Behaviour", showChs $ onPhysConstraint $ srsConstraints chs),
(S "Comments", showChsList $ comments $ docConfig $ optFeats chs),
(S "Dox Verbosity", showChs $ doxVerbosity $ docConfig $ optFeats chs),
(S "Dates", showChs $ dates $ docConfig $ optFeats chs),
(S "Log File Name", S $ logFile $ logConfig $ optFeats chs),
(S "Logging", showChsList $ logging $ logConfig $ optFeats chs),
(S "Auxiliary Files", showChsList $ auxFiles $ optFeats chs)
]
chsFieldSent :: (Sentence, Sentence) -> Sentence
chsFieldSent (rec, chc) = rec +:+ S "selected as" +:+. chc |
238f0473119b98cc5c848ef4370756c92667b26215dbcfae760077094eedd5cb | haskell-lisp/yale-haskell | command-interface.scm | ;;; csys.scm -- compilation unit definition for the compilation system
(define-compilation-unit command-interface
(source-filename "$Y2/command-interface/")
(require global)
(unit command
(source-filename "command.scm"))
(unit command-utils
(source-filename "command-utils.scm"))
(unit incremental-compiler
(source-filename "incremental-compiler.scm")))
| null | https://raw.githubusercontent.com/haskell-lisp/yale-haskell/4e987026148fe65c323afbc93cd560c07bf06b3f/command-interface/command-interface.scm | scheme | csys.scm -- compilation unit definition for the compilation system |
(define-compilation-unit command-interface
(source-filename "$Y2/command-interface/")
(require global)
(unit command
(source-filename "command.scm"))
(unit command-utils
(source-filename "command-utils.scm"))
(unit incremental-compiler
(source-filename "incremental-compiler.scm")))
|
9bdbd823e63aa4c2abb6670b4d6d354196489b6f39a79098d03843e40842975c | facebook/duckling | Rules.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
# LANGUAGE NoRebindableSyntax #
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.DE.Rules
( rules
) where
import Prelude
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Duration.Helpers (isGrain)
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Types (OrdinalData(..))
import Duckling.Regex.Types (GroupMatch(..))
import Duckling.Time.Computed
import Duckling.Time.Helpers
import Duckling.Time.HolidayHelpers
import Duckling.Time.Types (TimeData(..))
import Duckling.Types
import qualified Duckling.Ordinal.Types as TOrdinal
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
-- | Single-instant expressions, defined as a fixed offset from the reference
-- time.  Each tuple is (rule name, grain, offset in that grain, regex); e.g.
-- "morgen" (tomorrow) is the reference day + 1 day.  Non-ASCII letters are
-- wrapped in regex groups, matching the convention used throughout this file.
ruleInstants :: [Rule]
ruleInstants = mkRuleInstants
  [ ( "now" , TG.Second, 0,
      "(genau)? ?jetzt|diesen moment|in diesem moment|gerade eben" )
  , ( "today" , TG.Day , 0,
      "heute|(um diese zeit|zu dieser zeit|um diesen zeitpunkt|zu diesem zeitpunkt)" )
  , ( "tomorrow" , TG.Day , 1, "morgen" )
  , ( "yesterday" , TG.Day , -1, "gestern" )
  , ( "after tomorrow" , TG.Day , 2, "(ü)bermorgen" )
  , ( "before yesterday", TG.Day , -2, "vorgestern" )
  , ( "3 days ago" , TG.Day , -3, "vorvorgestern" )
  -- End of month / end of year resolve to the next month/year boundary.
  , ( "EOM|End of month", TG.Month , 1, "(das )?ende des monats?" )
  , ( "EOY|End of year" , TG.Year , 1,
      "(das )?(EOY|jahr(es)? ?ende|ende (des )?jahr(es)?)" )
  ]
-- | Weekday names.  Each regex accepts the full name, its adverbial plural
-- ("montags" = on Mondays) and the two-letter abbreviation with an optional
-- trailing dot.
--
-- NOTE(review): "so" without a dot is not accepted for Sonntag — presumably
-- deliberate, since bare "so" is an extremely common German word and would
-- cause false positives; confirm before "fixing".
ruleDaysOfWeek :: [Rule]
ruleDaysOfWeek = mkRuleDaysOfWeek
  [ ( "Montag" , "montags?|mo\\.?" )
  , ( "Dienstag" , "die?nstags?|di\\.?" )
  , ( "Mittwoch" , "mittwochs?|mi\\.?" )
  -- Fixed: accept the adverbial plural "donnerstags", consistent with every
  -- other weekday in this table (previously only "donnerstag" matched).
  , ( "Donnerstag", "donn?erstags?|do\\.?" )
  , ( "Freitag" , "freitags?|fr\\.?" )
  , ( "Samstag" , "samstags?|sonnabends?|sa\\.?" )
  , ( "Sonntag" , "sonntags?|so\\." )
  ]
-- | Month names: full German name plus the common three-letter abbreviation
-- with an optional trailing dot.  Non-ASCII letters ("ä") are wrapped in
-- regex groups, matching the convention used throughout this file.
ruleMonths :: [Rule]
ruleMonths = mkRuleMonths
  [ ( "Januar" , "januar|jan\\.?" )
  , ( "Februar" , "februar|feb\\.?" )
  , ( "Marz" , "m(ä)rz|m(ä)r\\.?" )
  , ( "April" , "april|apr\\.?" )
  , ( "Mai" , "mai\\.?" )
  , ( "Juni" , "juni|jun\\.?" )
  , ( "Juli" , "juli|jul\\.?" )
  , ( "August" , "august|aug\\.?" )
  , ( "September", "september|sept?\\.?" )
  , ( "Oktober" , "oktober|okt\\.?" )
  , ( "November" , "november|nov\\.?" )
  , ( "Dezember" , "dezember|dez\\.?" )
  ]
-- | Seasons, as (name, regex, start day, end day) using fixed calendar
-- boundaries (approximate astronomical season starts).  Winter wraps the
-- year boundary (Dec 21 .. Mar 20).
ruleSeasons :: [Rule]
ruleSeasons = mkRuleSeasons
  [ ( "sommer" , "sommer" , monthDay 6 21, monthDay 9 23 )
  , ( "herbst" , "herbst" , monthDay 9 23, monthDay 12 21 )
  , ( "winter" , "winter" , monthDay 12 21, monthDay 3 20 )
  , ( "fruhling", "fr(ü)h(ling|jahr)", monthDay 3 20, monthDay 6 21 )
  ]
-- | Fixed-date holidays (and two weekday-based ones), as
-- (name, regex, day definition).  Dates are month/day of the current year.
ruleHolidays :: [Rule]
ruleHolidays = mkRuleHolidays
  [ ( "Neujahr" , "neujahr(s?tag)?"
    , monthDay 1 1 )
  , ( "Valentinstag" , "valentin'?stag"
    , monthDay 2 14 )
  , ( "Schweizer Bundesfeiertag"
    , "schweiz(er)? (bundes)?feiertag|bundes feiertag"
    , monthDay 8 1 )
  , ( "Tag der Deutschen Einheit" , "tag (der)? deutsc?hen? einheit"
    , monthDay 10 3 )
  , ( "Oesterreichischer Nationalfeiertag"
    , "((ö)sterreichischer?)? nationalfeiertag|national feiertag"
    , monthDay 10 26 )
  , ( "Halloween" , "hall?owe?en?"
    , monthDay 10 31 )
  , ( "Allerheiligen" , "allerheiligen?|aller heiligen?"
    , monthDay 11 1 )
  , ( "Nikolaus" , "nikolaus(tag)?|nikolaus tag|nikolo"
    , monthDay 12 6 )
  , ( "Heiligabend" , "heilig(er)? abend"
    , monthDay 12 24 )
  , ( "Weihnachten" , "weih?nacht(en|stag)?"
    , monthDay 12 25 )
  , ( "Silvester" , "silvester"
    , monthDay 12 31 )
  -- nthDOWOfMonth args are presumably (nth, day-of-week, month):
  -- Muttertag = 2nd Sunday of May, Vatertag = 3rd Sunday of June —
  -- TODO confirm against the nthDOWOfMonth helper.
  , ( "Muttertag" , "mutt?ertag|mutt?er (tag)?"
    , nthDOWOfMonth 2 7 5 )
  , ( "Vatertag" , "vatt?er( ?tag)?"
    , nthDOWOfMonth 3 7 6 )
  ]
-- | Movable (computed) holidays that resolve to a single day.  Most are
-- defined as a day offset ('cycleNthAfter') from a precomputed anchor in
-- "Duckling.Time.Computed" (easterSunday, orthodoxEaster, roshHashana,
-- ramadan, dhanteras, navaratri, ...), or directly as a precomputed day.
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
  [ ( "Christi Himmelfahrt", "(christi\\s+)?himmelfahrt(stag)?"
    , cycleNthAfter False TG.Day 39 easterSunday )
  , ( "Aschermittwoch", "ascher?(tag|mittwoch)"
    , cycleNthAfter False TG.Day (-46) easterSunday )
  , ( "Aschura", "asc?hura(\\-?tag)?"
    , cycleNthAfter False TG.Day 9 muharram )
  , ( "Bhai Dooj", "bhai(ya)?\\s+d(u|oo)j|bhau\\-beej|bhai\\s+(tika|phonta)"
    , cycleNthAfter False TG.Day 4 dhanteras )
  , ( "Chhath", "chhathi?|chhath (parv|puja)|dala (chhath|puja)|surya shashthi"
    , cycleNthAfter False TG.Day 8 dhanteras )
  , ( "Boghi", "boghi|bogi\\s+pandigai"
    , cycleNthAfter False TG.Day (-1) thaiPongal )
  , ( "Chinesisches Neujahr", "chinesische(s|r)\\s+(neujahr(s(tag|fest))?|frühlingsfest)"
    , chineseNewYear )
  , ( "Aschermontag"
    , "(orthodoxer?\\s+)?(ascher|reiner?\\s+|sauberer?\\s+)montag"
    , cycleNthAfter False TG.Day (-48) orthodoxEaster )
  , ( "Corpus Christi", "corpus\\s+christi|fronleichnam"
    , cycleNthAfter False TG.Day 60 easterSunday )
  , ( "Dhanteras", "dhanatrayodashi|dhanteras|dhanvantari\\s+trayodashi"
    , dhanteras )
  , ( "Diwali", "deepavali|diwali|lakshmi\\s+puja"
    , cycleNthAfter False TG.Day 2 dhanteras )
  , ( "Durga Ashtami", "(durga|maha)(\\s+a)?shtami"
    , cycleNthAfter False TG.Day 7 navaratri )
  , ( "Ostermontag", "ostermontag"
    , cycleNthAfter False TG.Day 1 easterSunday )
  , ( "Ostersonntag", "ostersonntag", easterSunday )
  , ( "Eid al-Adha", "bakr[\\-\\s]e?id|e?id [au]l\\-adha|opferfest"
    , eidalAdha )
  , ( "Eid al-Fitr", "eid al\\-fitr", eidalFitr )
  , ( "Govardhan Puja", "govardhan\\s+puja|annak(u|oo)t"
    , cycleNthAfter False TG.Day 3 dhanteras )
  , ( "Karfreitag", "(kar|stiller\\s+|hoher\\s+)freitag"
    , cycleNthAfter False TG.Day (-2) easterSunday )
  , ( "Guru Gobind Singh Jayanti"
    , "guru\\s+(gobind|govind)\\s+singh\\s+(Geburtstag|jayanti)"
    , guruGobindSinghJayanti )
  , ( "Holi", "(rangwali )?holi|dhuleti|dhulandi|phagwah"
    , cycleNthAfter False TG.Day 39 vasantPanchami )
  , ( "Holika Dahan", "holika dahan|kamudu pyre|chhoti holi"
    , cycleNthAfter False TG.Day 38 vasantPanchami )
  , ( "Karsamstag"
    , "(kar|stiller\\s+)samstag|karsonnabend"
    , cycleNthAfter False TG.Day (-1) easterSunday )
  , ( "Islamisches Neujahr", "(arabisches|hijri|islamisches) neujahr|amun jadid|muharram"
    , muharram )
  , ( "Isra and Mi'raj"
    , "isra and mi'raj|aufstieg des propheten|(die\\s+)?nachtreise|aufstieg\\s+in\\s+den\\s+himmel"
    , cycleNthAfter False TG.Day 26 rajab
    )
  -- Last Friday of Ramadan: the Friday immediately before Eid al-Fitr.
  , ( "Jumu'atul-Wida", "jumu'atul\\-widaa?'?|jamat[\\-\\s]ul[\\-\\s]vida"
    , predNthAfter (-1) (dayOfWeek 5) eidalFitr )
  , ( "Kaanum Pongal", "(kaanum|kanni)\\s+pongal"
    , cycleNthAfter False TG.Day 2 thaiPongal )
  , ( "Lag BaOmer", "lag (b|l)[a']omer", lagBaOmer )
  , ( "Vaisakhi", "mesadi|[bv]aisakhi|vaisakhadi|vasakhi|vaishakhi", vaisakhi)
  , ( "Lailat al-Qadr"
    , "la[iy]lat al[\\-\\s][qk]adr|(die)? nacht der (bestimmung|allmacht)"
    , cycleNthAfter False TG.Day 26 ramadan )
  , ( "Lazarus-Samstag", "lazarus(\\-|\\s+)samstag"
    , cycleNthAfter False TG.Day (-8) orthodoxEaster )
  , ( "Maha Navami", "maha\\s+navami", cycleNthAfter False TG.Day 8 navaratri )
  , ( "Maha Saptami", "maha\\s+saptami", cycleNthAfter False TG.Day 6 navaratri )
  , ( "Mattu Pongal", "maa?ttu\\s+pongal"
    , cycleNthAfter False TG.Day 1 thaiPongal )
  , ( "Gründonnerstag"
    , "(grün|hoher\\s+|heiliger\\s+|weißer\\s+|palm)donnerstag"
    , cycleNthAfter False TG.Day (-3) easterSunday )
  , ( "Maulid an-Nabī"
    , "Maulid\\s+an\\-Nabī|mawlid(\\s+al\\-nab(awi|i\\s+al\\-sharif))?|mevli[dt]|mulud|geburtstag des propheten( muhammad)?"
    , mawlid )
  , ( "Naraka Chaturdashi"
    , "naraka? (nivaran )?chaturdashi|(kali|roop) chaudas|choti diwali"
    , cycleNthAfter False TG.Day 1 dhanteras )
  , ( "Orthodoxer Ostermontag", "orthodoxer\\s+ostermontag"
    , cycleNthAfter False TG.Day 1 orthodoxEaster )
  , ( "Orthodoxer Ostersonntag", "orthodoxer\\s+ostersonntag"
    , orthodoxEaster )
  , ( "Orthodoxer Karsamstag", "orthodoxer\\s+karsamstag"
    , cycleNthAfter False TG.Day (-1) orthodoxEaster )
  , ( "Orthodoxer Karfreitag", "orthodoxer\\s+karfreitag"
    , cycleNthAfter False TG.Day (-2) orthodoxEaster )
  , ( "Orthodoxer Palmsonntag", "orthodoxer\\s+palmsonntag"
    , cycleNthAfter False TG.Day (-7) orthodoxEaster )
  , ( "Palmsonntag", "palmsonntag"
    , cycleNthAfter False TG.Day (-7) easterSunday )
  , ( "Pfingsten", "pfingsten|pentecost"
    , cycleNthAfter False TG.Day 49 easterSunday )
  , ( "Purim", "purim", purim )
  , ( "Raksha Bandhan", "raksha(\\s+)?bandhan|rakhi", rakshaBandhan )
  , ( "Pargat Diwas", "pargat diwas|(maharishi )?valmiki jayanti", pargatDiwas )
  , ( "Mahavir Jayanti", "(mahavir|mahaveer) (jayanti|janma kalyanak)"
    , mahavirJayanti )
  , ( "Maha Shivaratri", "maha(\\s+)?shivaratri", mahaShivaRatri)
  , ( "Dayananda Saraswati Jayanti","((maharishi|swami) )?(dayananda )?saraswati jayanti", saraswatiJayanti )
  , ( "Karva Chauth", "karva\\s+chauth|karaka\\s+chaturthi", karvaChauth)
  , ( "Krishna Janmashtami", "(krishna )?janmashtami|gokulashtami", krishnaJanmashtami )
  , ( "Schmini Azeret", "sc?he?mini\\s+at?zeret"
    , cycleNthAfter False TG.Day 21 roshHashana )
  , ( "Fastnacht", "fastnacht(sdienstag)?|mardi gras"
    , cycleNthAfter False TG.Day (-47) easterSunday )
  , ( "Shushan Purim", "shushan\\s+purim", cycleNthAfter False TG.Day 1 purim )
  , ( "Simchat Torah", "simc?hat\\s+torah"
    , cycleNthAfter False TG.Day 22 roshHashana )
  , ( "Thai Pongal"
    , "(thai )?pongal|pongal pandigai|(makara? |magha )?sankranth?i|maghi"
    , thaiPongal )
  , ( "Thiru Onam", "(thiru(v|\\s+))?onam", thiruOnam )
  , ( "Tisha B'Av", "tisha b'av", tishaBAv )
  , ( "Dreifaltigkeitssonntag",
      "trinitatis(fest)?|(dreifaltigkeits|goldener\\s+)sonntag|drei(faltigkeit|einigkeit)(sfest)?"
    , cycleNthAfter False TG.Day 56 easterSunday )
  , ( "Vasant Panchami", "[bv]asant\\s+panchami", vasantPanchami )
  , ( "Vijayadashami", "dasara|duss(eh|he)ra|vijayadashami"
    , cycleNthAfter False TG.Day 9 navaratri )
  , ( "Tu biSchevat", "tu b[i']sc?he?vat", tuBishvat )
  , ( "Vesak", "v(e|ai)sak(ha)?|buddha(\\-?tag|\\s+purnima)|wesakfest", vesak )
  , ( "Jom Ha'atzmaut", "[yj]om ha'?atzmaut", yomHaatzmaut )
  , ( "Jom HaShoah"
    , "[yj]om hashoah|[yj]om hazikaron lashoah ve-lag'vurah|holocaust\\-?gedenktag"
    , cycleNthAfter False TG.Day 12 passover )
  , ( "Jom Kippur", "[yj]om\\s+kippur", cycleNthAfter False TG.Day 9 roshHashana )
  , ( "Pfingstmontag", "pfingstmontag|(pentecost|whit)\\s+montag"
    , cycleNthAfter False TG.Day 50 easterSunday )
  , ( "Rabindra Jayanti", "rabindra(nath)?\\s+jayanti", rabindraJayanti )
  , ("Guru Ravidass Jayanti", "guru\\s+ravidass?\\s+(geburtstag|jayanti)"
    , ravidassJayanti )
  ]
-- | Computed holidays that resolve to a multi-day period rather than a
-- single day.  Each entry builds an open interval between a start anchor and
-- an end anchor (a day offset from a precomputed date in
-- "Duckling.Time.Computed").
ruleComputedHolidays' :: [Rule]
ruleComputedHolidays' = mkRuleHolidays'
  [ ( "Global Youth Service-Tag", "global youth service[\\-\\s]?tag|gysd"
    , let start = globalYouthServiceDay
          end = cycleNthAfter False TG.Day 2 globalYouthServiceDay
      in interval TTime.Open start end )
  , ( "Große Fastenzeit", "große\\s+fastenzeit"
    , let start = cycleNthAfter False TG.Day (-48) orthodoxEaster
          end = cycleNthAfter False TG.Day (-9) orthodoxEaster
      in interval TTime.Open start end )
  , ( "Chanukka", "c?hann?ukk?ah?"
    , let start = chanukah
          end = cycleNthAfter False TG.Day 7 chanukah
      in interval TTime.Open start end )
  , ( "Fastenzeit", "fastenzeit"
    , let start = cycleNthAfter False TG.Day (-46) easterSunday
          end = cycleNthAfter False TG.Day (-1) easterSunday
      in interval TTime.Open start end )
  , ( "Navaratri", "durga\\s+puja|durgotsava|nava?rath?ri"
    , let start = navaratri
          end = cycleNthAfter False TG.Day 9 navaratri
      in interval TTime.Open start end )
  , ( "Pessach", "passover|pess?a[ck]?h|pascha|Passah?"
    , let start = passover
          end = cycleNthAfter False TG.Day 8 passover
      in interval TTime.Open start end )
  , ( "Ramadan", "rama[dt]h?an|ramzaa?n"
    , let start = ramadan
          end = cycleNthAfter False TG.Day (-1) eidalFitr
      in interval TTime.Open start end )
  , ( "Rosch haSchana", "rosch ha\\-?schanah?"
    , let start = roshHashana
          end = cycleNthAfter False TG.Day 2 roshHashana
      in interval TTime.Open start end )
  , ( "Schawuot", "sc?ha[vw]u'?oth?|shovuos"
    , let start = cycleNthAfter False TG.Day 50 passover
          end = cycleNthAfter False TG.Day 52 passover
      in interval TTime.Open start end )
  , ( "Sukkot", "Laubhüttenfest|su[ck]{2}o[st]"
    , let start = cycleNthAfter False TG.Day 14 roshHashana
          end = cycleNthAfter False TG.Day 22 roshHashana
      in interval TTime.Open start end )
  -- Does not account for leap years, so recurs every 365 days from the
  -- fixed anchor date below.
  , ( "Parsi Neujahr", "parsi neujahr|jamshedi navroz"
    , predEveryNDaysFrom 365 (2020, 8, 16)
    )
  , ( "Earth Hour", "earth hour|stunde der erde"
    , computeEarthHour )
  , ( "Königstag", "königstag|koningsdag"
    , computeKingsDay )
  ]
-- | "<n> vor <hour>": n minutes before the given hour of day
-- (e.g. "10 vor acht" = 7:50).
ruleRelativeMinutesTotillbeforeIntegerHourofday :: Rule
ruleRelativeMinutesTotillbeforeIntegerHourofday = Rule
  { name = "relative minutes to|till|before <integer> (hour-of-day)"
  , pattern =
    [ Predicate (isIntegerBetween 1 59)
    , regex "vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (minsToken:_:Token Time hourTd:_) -> do
        mins <- getIntValue minsToken
        Token Time <$> minutesBefore mins hourTd
      _ -> Nothing
  }
-- | "viertel vor <hour>": a quarter to the given hour, i.e. the hour minus
-- 15 minutes.
ruleQuarterTotillbeforeIntegerHourofday :: Rule
ruleQuarterTotillbeforeIntegerHourofday = Rule
  { name = "quarter to|till|before <integer> (hour-of-day)"
  , pattern =
    [ regex "vie?rtel vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time hourTd:_) -> Token Time <$> minutesBefore 15 hourTd
      _ -> Nothing
  }
-- | "halb vor <hour>": half an hour before the given hour, i.e. the hour
-- minus 30 minutes.
ruleHalfTotillbeforeIntegerHourofday :: Rule
ruleHalfTotillbeforeIntegerHourofday = Rule
  { name = "half to|till|before <integer> (hour-of-day)"
  , pattern =
    [ regex "halbe? vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time hourTd:_) -> Token Time <$> minutesBefore 30 hourTd
      _ -> Nothing
  }
-- | "der <ordinal> <cycle> im <time>": the nth grain inside a time span,
-- e.g. "die zweite Woche im Mai".  The ordinal is 1-based, so we subtract 1
-- for the 0-based predicate index.
ruleTheOrdinalCycleOfTime :: Rule
ruleTheOrdinalCycleOfTime = Rule
  { name = "the <ordinal> <cycle> of <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension TimeGrain
    , regex "im|in|von"
    , dimension Time
    ]
  , prod = \case
      (_:Token Ordinal ordData:Token TimeGrain grain:_:Token Time anchor:_) ->
        tt $ cycleNthAfter True grain (TOrdinal.value ordData - 1) anchor
      _ -> Nothing
  }
-- | "der <ordinal> <time> im <time>": the nth occurrence of a time inside
-- another, e.g. "der zweite Montag im Oktober".
ruleNthTimeOfTime2 :: Rule
ruleNthTimeOfTime2 = Rule
  { name = "nth <time> of <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension Time
    , regex "im"
    , dimension Time
    ]
  , prod = \case
      (_:
       Token Ordinal OrdinalData{TOrdinal.value = n}:
       Token Time inner:
       _:
       Token Time outer:
       _) -> Token Time . predNth (n - 1) False <$> intersect outer inner
      _ -> Nothing
  }
-- | "letzter <time>": the most recent past occurrence of a repeating time,
-- e.g. "letzten Montag".
ruleLastTime :: Rule
ruleLastTime = Rule
  { name = "last <time>"
  , pattern =
    [ regex "letzten?|letztes"
    , Predicate isOkWithThisNext
    ]
  , prod = \case
      (_:Token Time td:_) -> tt $ predNth (-1) False td
      _ -> Nothing
  }
-- | "<datetime> bis <datetime>": a closed interval between two resolved
-- (non-latent) datetimes.
ruleDatetimeDatetimeInterval :: Rule
ruleDatetimeDatetimeInterval = Rule
  { name = "<datetime> - <datetime> (interval)"
  , pattern =
    [ Predicate isNotLatent
    , regex "\\-|bis( zum)?|auf( den)?"
    , Predicate isNotLatent
    ]
  , prod = \case
      (Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | Numeric date ranges like "1. - 5.10.", "1.10. - 5.11.2020".
-- The first date may omit the month (then the second date's month is used)
-- and the year may only appear on the second date (then it is applied to
-- both endpoints).  The four prod cases cover the combinations of
-- (month present on first date?) x (year present on second date?); empty
-- regex capture groups ("") signal an absent part.
ruleDateDateInterval :: Rule
ruleDateDateInterval = Rule
  { name = "dd.(mm.)? - dd.mm.(yy[yy]?)? (interval)"
  , pattern =
    [ regex "(?:vo[nm]\\s+)?(10|20|30|31|[012]?[1-9])\\.?((?<=\\.)(?:10|11|12|0?[1-9])(?:\\.?))?"
    , regex "\\-|/|bis( zum)?|auf( den)?"
    , regex "(10|20|30|31|[012]?[1-9])\\.(10|11|12|0?[1-9])\\.?((?<=\\.)\\d{2,4})?"
    ]
  , prod = \tokens -> case tokens of
      -- "d1 - d2.m2": no month on the first date, no year -> both days in m2.
      (Token RegexMatch (GroupMatch (d1:"":_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:"":_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m2 <- parseInt m2
        Token Time <$> interval TTime.Closed (monthDay m2 d1) (monthDay m2 d2)
      -- "d1 - d2.m2.y": no month on the first date, year given -> both
      -- endpoints get month m2 and year y.
      (Token RegexMatch (GroupMatch (d1:"":_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:y:_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m2 <- parseInt m2
        y <- parseInt y
        Token Time <$> interval TTime.Closed (yearMonthDay y m2 d1) (yearMonthDay y m2 d2)
      -- "d1.m1 - d2.m2": each endpoint carries its own month, no year.
      (Token RegexMatch (GroupMatch (d1:m1:_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:"":_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m1 <- parseInt m1
        m2 <- parseInt m2
        Token Time <$> interval TTime.Closed (monthDay m1 d1) (monthDay m2 d2)
      -- "d1.m1 - d2.m2.y": own months, and the trailing year y is applied
      -- to both endpoints.
      (Token RegexMatch (GroupMatch (d1:m1:_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:y:_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m1 <- parseInt m1
        m2 <- parseInt m2
        y <- parseInt y
        Token Time <$> interval TTime.Closed (yearMonthDay y m1 d1) (yearMonthDay y m2 d2)
      _ -> Nothing
  }
-- | "abends": the evening part of day, an open interval from 18:00 until
-- midnight, produced as a latent part-of-day.
ruleEvening :: Rule
ruleEvening = Rule
  { name = "evening"
  , pattern =
    [ regex "abends?"
    ]
  , prod = \_ -> Token Time . mkLatent . partOfDay <$>
      interval TTime.Open (hour False 18) (hour False 0)
  }
-- | "der <n>": a bare day-of-month number (1-31) preceded by the article,
-- e.g. "der 15".
ruleTheDayofmonthNonOrdinal :: Rule
ruleTheDayofmonthNonOrdinal = Rule
  { name = "the <day-of-month> (non ordinal)"
  , pattern =
    [ regex "der"
    , Predicate (isIntegerBetween 1 31)
    ]
  , prod = \case
      (_:dayToken:_) -> getIntValue dayToken >>= tt . dayOfMonth
      _ -> Nothing
  }
-- | "in <duration>": a point in the future offset from now by the duration,
-- e.g. "in zwei Stunden".
ruleInDuration :: Rule
ruleInDuration = Rule
  { name = "in <duration>"
  , pattern =
    [ regex "in"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) -> tt $ inDuration dur
      _ -> Nothing
  }
-- | "letzte <cycle> im <time>": the last grain inside a time span,
-- e.g. "letzte Woche im Oktober".
ruleLastCycleOfTime :: Rule
ruleLastCycleOfTime = Rule
  { name = "last <cycle> of <time>"
  , pattern =
    [ regex "letzte(r|n|s)?"
    , dimension TimeGrain
    , regex "um|im"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time anchor:_) ->
        tt $ cycleLastOf grain anchor
      _ -> Nothing
  }
-- | "von <datetime> bis <datetime>": a closed interval with an explicit
-- "von"/"vom" prefix.
ruleFromDatetimeDatetimeInterval :: Rule
ruleFromDatetimeDatetimeInterval = Rule
  { name = "from <datetime> - <datetime> (interval)"
  , pattern =
    [ regex "vo[nm]"
    , dimension Time
    , regex "\\-|bis( zum)?|auf( den)?"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "<n> nach <hour>": n minutes past the given hour of day
-- (e.g. "10 nach acht" = 8:10).
ruleRelativeMinutesAfterpastIntegerHourofday :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday = Rule
  { name = "relative minutes after|past <integer> (hour-of-day)"
  , pattern =
    [ Predicate (isIntegerBetween 1 59)
    , regex "nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (minsToken:
       _:
       Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12H)}:
       _) -> do
        mins <- getIntValue minsToken
        tt $ hourMinute is12H h mins
      _ -> Nothing
  }
-- | "viertel nach <hour>": a quarter past the given hour
-- (hour plus 15 minutes).
ruleQuarterAfterpastIntegerHourofday :: Rule
ruleQuarterAfterpastIntegerHourofday = Rule
  { name = "quarter after|past <integer> (hour-of-day)"
  , pattern =
    [ regex "vie?rtel nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12H)}:_) ->
        tt $ hourMinute is12H h 15
      _ -> Nothing
  }
-- | "halb nach <hour>": half past the given hour (hour plus 30 minutes).
ruleHalfAfterpastIntegerHourofday :: Rule
ruleHalfAfterpastIntegerHourofday = Rule
  { name = "half after|past <integer> (hour-of-day)"
  , pattern =
    [ regex "halbe? nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12H)}:_) ->
        tt $ hourMinute is12H h 30
      _ -> Nothing
  }
-- | "<dd> - <dd> <month>": a closed day range inside one month,
-- e.g. "3. - 7. Oktober".  Both day numbers are intersected with the month
-- before building the interval.
ruleMonthDdddInterval :: Rule
ruleMonthDdddInterval = Rule
  { name = "<month> dd-dd (interval)"
  , pattern =
    [ regex "([012]?\\d|30|31)(ter|\\.)?"
    , regex "\\-|bis( zum)?|auf( den)?"
    , regex "([012]?\\d|30|31)(ter|\\.)?"
    , Predicate isAMonth
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (startStr:_)):
       _:
       Token RegexMatch (GroupMatch (endStr:_)):
       Token Time monthTd:
       _) -> do
        startDay <- parseInt startStr
        endDay <- parseInt endStr
        startTd <- intersect (dayOfMonth startDay) monthTd
        endTd <- intersect (dayOfMonth endDay) monthTd
        Token Time <$> interval TTime.Closed startTd endTd
      _ -> Nothing
  }
-- | "der <cycle> nach <time>", e.g. "der Tag nach Weihnachten":
-- the first grain-cycle following the anchor time.
ruleTheCycleAfterTime :: Rule
ruleTheCycleAfterTime = Rule
  { name = "the <cycle> after <time>"
  , pattern =
    [ regex "der"
    , dimension TimeGrain
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time anchor:_) ->
        tt $ cycleNthAfter False grain 1 anchor
      _ -> Nothing
  }
-- | "der <cycle> vor <time>", e.g. "der Tag vor Weihnachten":
-- the grain-cycle immediately preceding the anchor time.
ruleTheCycleBeforeTime :: Rule
ruleTheCycleBeforeTime = Rule
  { name = "the <cycle> before <time>"
  , pattern =
    [ regex "der"
    , dimension TimeGrain
    , regex "vor"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time anchor:_) ->
        tt $ cycleNthAfter False grain (-1) anchor
      _ -> Nothing
  }
-- | Bare integers 2101..10000 are interpreted as years, but only
-- latently (they need supporting context to resolve as a time).
ruleYearLatent2 :: Rule
ruleYearLatent2 = Rule
  { name = "year (latent)"
  , pattern =
    [ Predicate $ isIntegerBetween 2101 10000
    ]
  , prod = \case
      (yearToken:_) -> do
        y <- getIntValue yearToken
        tt . mkLatent $ year y
      _ -> Nothing
  }
ruleTimeAfterNext :: Rule
ruleTimeAfterNext = Rule
{ name = "<time> after next"
, pattern =
[ dimension Time
, regex "nach dem n(ä)chsten"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ predNth 1 True td
_ -> Nothing
}
ruleTheIdesOfNamedmonth :: Rule
ruleTheIdesOfNamedmonth = Rule
{ name = "the ides of <named-month>"
, pattern =
[ regex "die iden (des?)"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(_:Token Time td@TimeData {TTime.form = Just (TTime.Month m)}:_) ->
Token Time <$>
intersect (dayOfMonth $ if elem m [3, 5, 7, 10] then 15 else 13) td
_ -> Nothing
}
ruleNoon :: Rule
ruleNoon = Rule
{ name = "noon"
, pattern =
[ regex "mittags?|zw(ö)lf (uhr)?"
]
, prod = \_ -> tt $ hour False 12
}
ruleThisnextDayofweek :: Rule
ruleThisnextDayofweek = Rule
{ name = "this|next <day-of-week>"
, pattern =
[ regex "diese(n|r)|kommenden|n(ä)chsten"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleBetweenTimeofdayAndTimeofdayInterval :: Rule
ruleBetweenTimeofdayAndTimeofdayInterval = Rule
{ name = "between <time-of-day> and <time-of-day> (interval)"
, pattern =
[ regex "zwischen"
, Predicate isATimeOfDay
, regex "und"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleNextCycle :: Rule
ruleNextCycle = Rule
{ name = "next <cycle>"
, pattern =
[ regex "n(ä)chste(r|n|s)?|kommende(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 1
_ -> Nothing
}
ruleAfterNextCycle :: Rule
ruleAfterNextCycle = Rule
{ name = "after next <cycle>"
, pattern =
[ regex "(ü)ber ?n(ä)chste[ns]?"
, dimension TimeGrain
]
, prod = \case
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 2
_ -> Nothing
}
ruleTimeofdayApproximately :: Rule
ruleTimeofdayApproximately = Rule
{ name = "<time-of-day> approximately"
, pattern =
[ Predicate isATimeOfDay
, regex "ca\\.?|circa|zirka|ungef(ä)hr|(in )?etwa"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleOnDate :: Rule
ruleOnDate = Rule
{ name = "on <date>"
, pattern =
[ regex "am"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleDurationFromNow :: Rule
ruleDurationFromNow = Rule
{ name = "<duration> from now"
, pattern =
[ dimension Duration
, regex "ab (heute|jetzt)"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleLunch :: Rule
ruleLunch = Rule
{ name = "lunch"
, pattern =
[ regex "(am |zu )?mittags?"
]
, prod = \_ ->
let from = hour False 12
to = hour False 14
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleLastCycle :: Rule
ruleLastCycle = Rule
{ name = "last <cycle>"
, pattern =
[ regex "letzte(r|n|s)?|vergangene(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleAfternoon :: Rule
ruleAfternoon = Rule
{ name = "afternoon"
, pattern =
[ regex "nach ?mittags?"
]
, prod = \_ ->
let from = hour False 12
to = hour False 19
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleTimeBeforeLast :: Rule
ruleTimeBeforeLast = Rule
{ name = "<time> before last"
, pattern =
[ regex "vorletzten?|vor ?letztes?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth (-2) False td
_ -> Nothing
}
ruleNamedmonthDayofmonthOrdinal :: Rule
ruleNamedmonthDayofmonthOrdinal = Rule
{ name = "<named-month> <day-of-month> (ordinal)"
, pattern =
[ Predicate isAMonth
, Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleInduringThePartofday :: Rule
ruleInduringThePartofday = Rule
{ name = "in|during the <part-of-day>"
, pattern =
[ regex "(in|an|am|w(ä)h?rend)( der| dem| des)?"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleHourofdayIntegerAsRelativeMinutes :: Rule
ruleHourofdayIntegerAsRelativeMinutes = Rule
{ name = "<hour-of-day> <integer> (as relative minutes)"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isAnHourOfDay]
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayQuarter :: Rule
ruleHourofdayQuarter = Rule
{ name = "<hour-of-day> <quarter> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "vie?rtel"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHourofdayHalf :: Rule
ruleHourofdayHalf = Rule
{ name = "<hour-of-day> <half> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "halbe?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleDayofmonthordinalNamedmonth :: Rule
ruleDayofmonthordinalNamedmonth = Rule
{ name = "<day-of-month>(ordinal) <named-month>"
, pattern =
[ Predicate isDOMOrdinal
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleIntersectBy :: Rule
ruleIntersectBy = Rule
{ name = "intersect by ','"
, pattern =
[ Predicate isNotLatent
, regex ",( den|r)?"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNthTimeAfterTime :: Rule
ruleNthTimeAfterTime = Rule
{ name = "nth <time> after <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> tt $ predNthAfter (v - 1) td1 td2
_ -> Nothing
}
-- | German dot-separated day.month dates, e.g. "am 15.03." — note the
-- day comes first in the input, so capture 1 is the day and capture 2
-- the month.
ruleMmdd :: Rule
ruleMmdd = Rule
  { name = "mm/dd"
  , pattern =
    [ regex "(?:am\\s+)?([012]?[1-9]|10|20|30|31)\\.(10|11|12|0?[1-9])\\.?"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (dayText:monthText:_)):_) -> do
        dayN <- parseInt dayText
        monthN <- parseInt monthText
        tt $ monthDay monthN dayN
      _ -> Nothing
  }
ruleAfterDuration :: Rule
ruleAfterDuration = Rule
{ name = "after <duration>"
, pattern =
[ regex "nach"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleTimeofdayLatent :: Rule
ruleTimeofdayLatent = Rule
{ name = "time-of-day (latent)"
, pattern =
[ Predicate $ isIntegerBetween 0 23
]
, prod = \tokens -> case tokens of
(token:_) -> do
n <- getIntValue token
tt . mkLatent $ hour (n < 12) n
_ -> Nothing
}
ruleFromTimeofdayTimeofdayInterval :: Rule
ruleFromTimeofdayTimeofdayInterval = Rule
{ name = "from <time-of-day> - <time-of-day> (interval)"
, pattern =
[ regex "(von|nach|ab|fr(ü)hestens (um)?)"
, Predicate isATimeOfDay
, regex "((noch|aber|jedoch)? vor)|\\-|bis"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleExactlyTimeofday :: Rule
ruleExactlyTimeofday = Rule
{ name = "exactly <time-of-day>"
, pattern =
[ regex "genau|exakt|p(ü)nktlich|punkt( um)?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleBetweenDatetimeAndDatetimeInterval :: Rule
ruleBetweenDatetimeAndDatetimeInterval = Rule
{ name = "between <datetime> and <datetime> (interval)"
, pattern =
[ regex "zwischen"
, dimension Time
, regex "und"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleDurationAgo :: Rule
ruleDurationAgo = Rule
{ name = "<duration> ago"
, pattern =
[ regex "vor"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ durationAgo dd
_ -> Nothing
}
ruleByTheEndOfTime :: Rule
ruleByTheEndOfTime = Rule
{ name = "by the end of <time>"
, pattern =
[ regex "bis (zum)? ende (von)?|(noch)? vor"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> interval TTime.Closed td now
_ -> Nothing
}
ruleAfterWork :: Rule
ruleAfterWork = Rule
{ name = "after work"
, pattern =
[ regex "nach (der)? arbeit|(am)? feier ?abend"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 17) (hour False 21)
Token Time . partOfDay <$> intersect today td2
}
ruleLastNCycle :: Rule
ruleLastNCycle = Rule
{ name = "last n <cycle>"
, pattern =
[ regex "letzten?|vergangenen?"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
n <- getIntValue token
tt $ cycleN True grain (- n)
_ -> Nothing
}
ruleTimeofdaySharp :: Rule
ruleTimeofdaySharp = Rule
{ name = "<time-of-day> sharp"
, pattern =
[ Predicate isATimeOfDay
, regex "genau|exakt|p(ü)nktlich|punkt( um)?"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleWithinDuration :: Rule
ruleWithinDuration = Rule
{ name = "within <duration>"
, pattern =
[ regex "binnen|innerhalb( von)?"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) -> Token Time <$>
interval TTime.Open now (inDuration dd)
_ -> Nothing
}
ruleMidnighteodendOfDay :: Rule
ruleMidnighteodendOfDay = Rule
{ name = "midnight|EOD|end of day"
, pattern =
[ regex "mitternacht|EOD|tagesende|ende (des)? tag(es)?"
]
, prod = \_ -> tt $ hour False 0
}
ruleDayofmonthNonOrdinalNamedmonth :: Rule
ruleDayofmonthNonOrdinalNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) <named-month>"
, pattern =
[ Predicate isDOMInteger
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleIntersect :: Rule
ruleIntersect = Rule
{ name = "intersect"
, pattern =
[ Predicate isNotLatent
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleDayOfWeekIntersectDuration :: Rule
ruleDayOfWeekIntersectDuration = Rule
{ name = "<day-of-week> in <duration>"
, pattern =
[ Predicate isADayOfWeek
, regex "(in|vor)"
, dimension Duration
]
, prod = \case
(Token Time td:Token RegexMatch (GroupMatch (match:_)):Token Duration dd:_) ->
case Text.toLower match of
"vor" -> Token Time <$> intersect td (durationIntervalAgo dd)
_ -> Token Time <$> intersect td (inDurationInterval dd)
_ -> Nothing
}
ruleAboutTimeofday :: Rule
ruleAboutTimeofday = Rule
{ name = "about <time-of-day>"
, pattern =
[ regex "so( um)?|(so |um |so um )?circa|zirka|ca\\.?|ungef(ä)hr|(etwa|gegen)( so| um| so um)?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleUntilTimeofday :: Rule
ruleUntilTimeofday = Rule
{ name = "until <time-of-day>"
, pattern =
[ regex "vor|bis( zu[rm]?)?|sp(ä)testens?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.Before td
_ -> Nothing
}
ruleUntilTimeofdayPostfix :: Rule
ruleUntilTimeofdayPostfix = Rule
{ name = "<time-of-day> until"
, pattern =
[ dimension Time
, regex "sp(ä)testens"
]
, prod = \tokens -> case tokens of
(Token Time td:_:_) -> tt $ withDirection TTime.Before td
_ -> Nothing
}
ruleAtTimeofday :: Rule
ruleAtTimeofday = Rule
{ name = "at <time-of-day>"
, pattern =
[ regex "um|@"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleNthTimeOfTime :: Rule
ruleNthTimeOfTime = Rule
{ name = "nth <time> of <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "im"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleTimePartofday :: Rule
ruleTimePartofday = Rule
{ name = "<time> <part-of-day>"
, pattern =
[ dimension Time
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleWeekend :: Rule
ruleWeekend = Rule
{ name = "week-end"
, pattern =
[ regex "wochen ?ende?"
]
, prod = \_ -> tt $ mkOkForThisNext weekend
}
ruleNthTimeAfterTime2 :: Rule
ruleNthTimeAfterTime2 = Rule
{ name = "nth <time> after <time>"
, pattern =
[ regex "der|das"
, dimension Ordinal
, dimension Time
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:
Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> tt $ predNthAfter (v - 1) td1 td2
_ -> Nothing
}
ruleNextTime :: Rule
ruleNextTime = Rule
{ name = "next <time>"
, pattern =
[ regex "(n(ä)chste|kommende)[ns]?"
, Predicate $ and . sequence [isNotLatent, isOkWithThisNext]
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleOrdinalQuarterYear :: Rule
ruleOrdinalQuarterYear = Rule
{ name = "<ordinal> quarter <year>"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:_:Token Time td:_) ->
tt $ cycleNthAfter False TG.Quarter (TOrdinal.value od - 1) td
_ -> Nothing
}
-- | ISO-style dash-separated dates "yyyy-mm-dd".
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
  { name = "yyyy-mm-dd"
  , pattern =
    [ regex "(\\d{2,4})-(1[0-2]|0?[1-9])-(3[01]|[12]\\d|0?[1-9])"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (yText:mText:dText:_)):_) -> do
        yN <- parseInt yText
        mN <- parseInt mText
        dN <- parseInt dText
        tt $ yearMonthDay yN mN dN
      _ -> Nothing
  }
-- | "der/die/das <ordinal> <cycle> nach <time>", e.g.
-- "das zweite Wochenende nach Weihnachten".
--
-- Bug fix: the leading article was previously matched with the English
-- word "the", so this rule could never fire on German input. It now
-- matches the German definite articles, mirroring the sibling rule
-- 'ruleNthTimeAfterTime2' which already uses German articles.
ruleTheOrdinalCycleAfterTime :: Rule
ruleTheOrdinalCycleAfterTime = Rule
  { name = "the <ordinal> <cycle> after <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension TimeGrain
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (_:Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
        -- Ordinals are 1-based; cycleNthAfter expects a 0-based index.
        tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
      _ -> Nothing
  }
ruleIntersectByOfFromS :: Rule
ruleIntersectByOfFromS = Rule
{ name = "intersect by 'of', 'from', 's"
, pattern =
[ Predicate isNotLatent
, regex "von|der|im"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNextNCycle :: Rule
ruleNextNCycle = Rule
{ name = "next n <cycle>"
, pattern =
[ regex "n(ä)chsten?|kommenden?"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleADuration :: Rule
ruleADuration = Rule
{ name = "a <duration>"
, pattern =
[ regex "(in )?eine?(r|n)?"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleMorning :: Rule
ruleMorning = Rule
{ name = "morning"
, pattern =
[ regex "morgens|(in der )?fr(ü)h|vor ?mittags?|am morgen"
]
, prod = \_ ->
let from = hour False 3
to = hour False 12
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleThisPartofday :: Rule
ruleThisPartofday = Rule
{ name = "this <part-of-day>"
, pattern =
[ regex "diesen?|dieses|heute"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time . partOfDay <$> intersect today td
_ -> Nothing
}
ruleThisCycle :: Rule
ruleThisCycle = Rule
{ name = "this <cycle>"
, pattern =
[ regex "diese(r|n|s)?|kommende(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 0
_ -> Nothing
}
ruleThisTime :: Rule
ruleThisTime = Rule
{ name = "this <time>"
, pattern =
[ regex "diese(n|r|s)?|(im )?laufenden"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 False td
_ -> Nothing
}
ruleDurationHence :: Rule
ruleDurationHence = Rule
{ name = "<duration> hence"
, pattern =
[ dimension Duration
, regex "hence"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleDayofmonthNonOrdinalOfNamedmonth :: Rule
ruleDayofmonthNonOrdinalOfNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) of <named-month>"
, pattern =
[ Predicate isDOMInteger
, regex "vom|von"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleAfterLunch :: Rule
ruleAfterLunch = Rule
{ name = "after lunch"
, pattern =
[ regex "nach dem mittagessen|nachmittags?"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 13) (hour False 17)
Token Time . partOfDay <$> intersect today td2
}
ruleOnANamedday :: Rule
ruleOnANamedday = Rule
{ name = "on a named-day"
, pattern =
[ regex "an einem"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleYearLatent :: Rule
ruleYearLatent = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween 25 999
]
, prod = \tokens -> case tokens of
(token:_) -> do
y <- getIntValue token
tt . mkLatent $ year y
_ -> Nothing
}
ruleAfterTimeofday :: Rule
ruleAfterTimeofday = Rule
{ name = "after <time-of-day>"
, pattern =
[ regex "nach|ab|fr(ü)he?stens"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleAfterTimeofdayPostfix :: Rule
ruleAfterTimeofdayPostfix = Rule
{ name = "<time-of-day> after"
, pattern =
[ dimension Time
, regex "fr(ü)he?stens"
]
, prod = \tokens -> case tokens of
(Token Time td:_:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleNight :: Rule
ruleNight = Rule
{ name = "night"
, pattern =
[ regex "nachts?"
]
, prod = \_ ->
let from = hour False 0
to = hour False 4
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleDayofmonthOrdinal :: Rule
ruleDayofmonthOrdinal = Rule
{ name = "<day-of-month> (ordinal)"
, pattern =
[ Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
tt $ dayOfMonth v
_ -> Nothing
}
ruleHalfIntegerGermanStyleHourofday :: Rule
ruleHalfIntegerGermanStyleHourofday = Rule
{ name = "half <integer> (german style hour-of-day)"
, pattern =
[ regex "halb"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> minutesBefore 30 td
_ -> Nothing
}
ruleOrdinalCycleAfterTime :: Rule
ruleOrdinalCycleAfterTime = Rule
{ name = "<ordinal> <cycle> after <time>"
, pattern =
[ dimension Ordinal
, dimension TimeGrain
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleOrdinalCycleOfTime :: Rule
ruleOrdinalCycleOfTime = Rule
{ name = "<ordinal> <cycle> of <time>"
, pattern =
[ dimension Ordinal
, dimension TimeGrain
, regex "im|in|von"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleAfterNextTime :: Rule
ruleAfterNextTime = Rule
{ name = "after next <time>"
, pattern =
[ regex "(ü)ber ?n(ä)chste[ns]?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 1 True td
_ -> Nothing
}
-- | Clock times such as "14:30", "14.30" or "14h30", optionally
-- followed by "uhr"/"h".
ruleHhmm :: Rule
ruleHhmm = Rule
  { name = "hh:mm"
  , pattern =
    [ regex "((?:[01]?\\d)|(?:2[0-3]))[:.h]([0-5]\\d)(?:uhr|h)?"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (hText:mText:_)):_) -> do
        hN <- parseInt hText
        mN <- parseInt mText
        tt $ hourMinute False hN mN
      _ -> Nothing
  }
ruleTonight :: Rule
ruleTonight = Rule
{ name = "tonight"
, pattern =
[ regex "heute? (am)? abends?"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect today td2
}
ruleYear :: Rule
ruleYear = Rule
{ name = "year"
, pattern =
[ Predicate $ isIntegerBetween 1000 2100
]
, prod = \tokens -> case tokens of
(token:_) -> do
y <- getIntValue token
tt $ year y
_ -> Nothing
}
ruleNamedmonthDayofmonthNonOrdinal :: Rule
ruleNamedmonthDayofmonthNonOrdinal = Rule
{ name = "<named-month> <day-of-month> (non ordinal)"
, pattern =
[ Predicate isAMonth
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleHhmmMilitary :: Rule
ruleHhmmMilitary = Rule
{ name = "hhmm (military)"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (h:m:_)):_) -> do
hh <- parseInt h
mm <- parseInt m
tt . mkLatent $ hourMinute False hh mm
_ -> Nothing
}
ruleAbsorptionOfAfterNamedDay :: Rule
ruleAbsorptionOfAfterNamedDay = Rule
{ name = "absorption of , after named day"
, pattern =
[ Predicate isADayOfWeek
, regex ","
]
, prod = \tokens -> case tokens of
(x:_) -> Just x
_ -> Nothing
}
ruleLastDayofweekOfTime :: Rule
ruleLastDayofweekOfTime = Rule
{ name = "last <day-of-week> of <time>"
, pattern =
[ regex "letzte(r|n|s)?"
, Predicate isADayOfWeek
, regex "[ui]m"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
tt $ predLastOf td1 td2
_ -> Nothing
}
ruleHhmmMilitaryAmpm :: Rule
ruleHhmmMilitaryAmpm = Rule
{ name = "hhmm (military) am|pm"
, pattern =
[ regex "((?:1[012]|0?\\d))([0-5]\\d)"
, regex "([ap])\\.?m\\.?(?:[\\s'\"-_{}\\[\\]()]|$)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (hh:mm:_)):Token RegexMatch (GroupMatch (ap:_)):_) -> do
h <- parseInt hh
m <- parseInt mm
tt . timeOfDayAMPM (Text.toLower ap == "a") $ hourMinute True h m
_ -> Nothing
}
ruleTimeofdayTimeofdayInterval :: Rule
ruleTimeofdayTimeofdayInterval = Rule
{ name = "<time-of-day> - <time-of-day> (interval)"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
, regex "\\-|bis"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleTimeofdayTimeofdayInterval2 :: Rule
ruleTimeofdayTimeofdayInterval2 = Rule
{ name = "<time-of-day> - <time-of-day> (interval)"
, pattern =
[ Predicate isATimeOfDay
, regex "\\-|/|bis"
, Predicate $ and . sequence [isNotLatent, isATimeOfDay]
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleDurationAfterTime :: Rule
ruleDurationAfterTime = Rule
{ name = "<duration> after <time>"
, pattern =
[ dimension Duration
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Duration dd:_:Token Time td:_) ->
tt $ durationAfter dd td
_ -> Nothing
}
ruleOrdinalQuarter :: Rule
ruleOrdinalQuarter = Rule
{ name = "<ordinal> quarter"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:_) -> tt .
cycleNthAfter False TG.Quarter (v - 1) $ cycleNth TG.Year 0
_ -> Nothing
}
ruleTheDayofmonthOrdinal :: Rule
ruleTheDayofmonthOrdinal = Rule
{ name = "the <day-of-month> (ordinal)"
, pattern =
[ regex "der"
, Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(_:Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
tt $ dayOfMonth v
_ -> Nothing
}
ruleDurationBeforeTime :: Rule
ruleDurationBeforeTime = Rule
{ name = "<duration> before <time>"
, pattern =
[ dimension Duration
, regex "vor"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Duration dd:_:Token Time td:_) ->
tt $ durationBefore dd td
_ -> Nothing
}
rulePartofdayOfTime :: Rule
rulePartofdayOfTime = Rule
{ name = "<part-of-day> of <time>"
, pattern =
[ Predicate isAPartOfDay
, regex "des|von|vom|am"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
-- | Full German dates "dd.mm.yyyy" (capture order is day, month, year).
-- NOTE(review): the rule name "mm/dd/yyyy" is a legacy identifier kept
-- for compatibility; the pattern itself is day-first.
ruleMmddyyyy :: Rule
ruleMmddyyyy = Rule
  { name = "mm/dd/yyyy"
  , pattern =
    [ regex "([012]?[1-9]|10|20|30|31)\\.(0?[1-9]|10|11|12)\\.(\\d{2,4})"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (dText:mText:yText:_)):_) -> do
        yN <- parseInt yText
        mN <- parseInt mText
        dN <- parseInt dText
        tt $ yearMonthDay yN mN dN
      _ -> Nothing
  }
ruleTimeofdayOclock :: Rule
ruleTimeofdayOclock = Rule
{ name = "<time-of-day> o'clock"
, pattern =
[ Predicate isATimeOfDay
, regex "uhr|h(?:[\\s'\"-_{}\\[\\]()]|$)"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleDayofmonthordinalNamedmonthYear :: Rule
ruleDayofmonthordinalNamedmonthYear = Rule
{ name = "<day-of-month>(ordinal) <named-month> year"
, pattern =
[ Predicate isDOMOrdinal
, Predicate isAMonth
, regex "(\\d{2,4})"
]
, prod = \tokens -> case tokens of
(token:
Token Time td:
Token RegexMatch (GroupMatch (match:_)):
_) -> do
n <- parseInt match
dom <- intersectDOM td token
Token Time <$> intersect dom (year n)
_ -> Nothing
}
ruleTimezone :: Rule
ruleTimezone = Rule
{ name = "<time> timezone"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
, regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (tz:_)):
_) -> Token Time <$> inTimezone (Text.toUpper tz) td
_ -> Nothing
}
-- | Every rule exported by this module, in registration order: the
-- individually defined rules above, followed by the generated rule
-- groups (instants, weekdays, months, seasons, holidays).
rules :: [Rule]
rules =
[ ruleADuration
, ruleAboutTimeofday
, ruleAbsorptionOfAfterNamedDay
, ruleAfterDuration
, ruleAfterLunch
, ruleAfterNextTime
, ruleAfterTimeofday
, ruleAfterTimeofdayPostfix
, ruleAfterWork
, ruleAfternoon
, ruleAtTimeofday
, ruleBetweenDatetimeAndDatetimeInterval
, ruleBetweenTimeofdayAndTimeofdayInterval
, ruleByTheEndOfTime
, ruleDatetimeDatetimeInterval
, ruleDateDateInterval
, ruleDayofmonthNonOrdinalNamedmonth
, ruleDayofmonthNonOrdinalOfNamedmonth
, ruleDayofmonthOrdinal
, ruleDayofmonthordinalNamedmonth
, ruleDayofmonthordinalNamedmonthYear
, ruleDurationAfterTime
, ruleDurationAgo
, ruleDurationBeforeTime
, ruleDurationFromNow
, ruleDurationHence
, ruleEvening
, ruleExactlyTimeofday
, ruleFromDatetimeDatetimeInterval
, ruleFromTimeofdayTimeofdayInterval
, ruleHalfIntegerGermanStyleHourofday
, ruleHhmm
, ruleHhmmMilitary
, ruleHhmmMilitaryAmpm
, ruleHourofdayIntegerAsRelativeMinutes
, ruleInDuration
, ruleInduringThePartofday
, ruleIntersect
, ruleIntersectBy
, ruleIntersectByOfFromS
, ruleDayOfWeekIntersectDuration
, ruleLastCycle
, ruleLastCycleOfTime
, ruleLastDayofweekOfTime
, ruleLastNCycle
, ruleLastTime
, ruleLunch
, ruleMidnighteodendOfDay
, ruleMmdd
, ruleMmddyyyy
, ruleMonthDdddInterval
, ruleMorning
, ruleNamedmonthDayofmonthNonOrdinal
, ruleNamedmonthDayofmonthOrdinal
, ruleNextCycle
, ruleAfterNextCycle
, ruleNextNCycle
, ruleNextTime
, ruleNight
, ruleNoon
, ruleNthTimeAfterTime
, ruleNthTimeAfterTime2
, ruleNthTimeOfTime
, ruleNthTimeOfTime2
, ruleOnANamedday
, ruleOnDate
, ruleOrdinalCycleAfterTime
, ruleOrdinalCycleOfTime
, ruleOrdinalQuarter
, ruleOrdinalQuarterYear
, rulePartofdayOfTime
, ruleRelativeMinutesAfterpastIntegerHourofday
, ruleRelativeMinutesTotillbeforeIntegerHourofday
, ruleTheCycleAfterTime
, ruleTheCycleBeforeTime
, ruleTheDayofmonthNonOrdinal
, ruleTheDayofmonthOrdinal
, ruleTheIdesOfNamedmonth
, ruleTheOrdinalCycleAfterTime
, ruleTheOrdinalCycleOfTime
, ruleThisCycle
, ruleThisPartofday
, ruleThisTime
, ruleThisnextDayofweek
, ruleTimeAfterNext
, ruleTimeBeforeLast
, ruleTimePartofday
, ruleTimeofdayApproximately
, ruleTimeofdayLatent
, ruleTimeofdayOclock
, ruleTimeofdaySharp
, ruleTimeofdayTimeofdayInterval
, ruleTimeofdayTimeofdayInterval2
, ruleTonight
, ruleUntilTimeofday
, ruleUntilTimeofdayPostfix
, ruleWeekend
, ruleWithinDuration
, ruleYear
, ruleYearLatent
, ruleYearLatent2
, ruleYyyymmdd
, ruleQuarterTotillbeforeIntegerHourofday
, ruleHalfTotillbeforeIntegerHourofday
, ruleQuarterAfterpastIntegerHourofday
, ruleHalfAfterpastIntegerHourofday
, ruleHourofdayQuarter
, ruleHourofdayHalf
, ruleTimezone
]
-- Generated rule groups; the primed computed holidays come from
-- "Duckling.Time.Computed".
++ ruleInstants
++ ruleDaysOfWeek
++ ruleMonths
++ ruleSeasons
++ ruleHolidays
++ ruleComputedHolidays
++ ruleComputedHolidays'
| null | https://raw.githubusercontent.com/facebook/duckling/03c6197283943c595608bb977a88a07c9e997006/Duckling/Time/DE/Rules.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
# LANGUAGE LambdaCase #
# LANGUAGE NoRebindableSyntax #
module Duckling.Time.DE.Rules
( rules
) where
import Prelude
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Duration.Helpers (isGrain)
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Types (OrdinalData(..))
import Duckling.Regex.Types (GroupMatch(..))
import Duckling.Time.Computed
import Duckling.Time.Helpers
import Duckling.Time.HolidayHelpers
import Duckling.Time.Types (TimeData(..))
import Duckling.Types
import qualified Duckling.Ordinal.Types as TOrdinal
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
-- | German deictic instants ("jetzt", "heute", "morgen", ...).
-- Each tuple is (rule name, grain, offset from now in that grain, regex).
ruleInstants :: [Rule]
ruleInstants = mkRuleInstants
  [ ( "now" , TG.Second, 0,
      "(genau)? ?jetzt|diesen moment|in diesem moment|gerade eben" )
  , ( "today" , TG.Day , 0,
      "heute|(um diese zeit|zu dieser zeit|um diesen zeitpunkt|zu diesem zeitpunkt)" )
  , ( "tomorrow" , TG.Day , 1, "morgen" )
  , ( "yesterday" , TG.Day , -1, "gestern" )
  , ( "after tomorrow" , TG.Day , 2, "(ü)bermorgen" )
  , ( "before yesterday", TG.Day , -2, "vorgestern" )
  , ( "3 days ago" , TG.Day , -3, "vorvorgestern" )
  , ( "EOM|End of month", TG.Month , 1, "(das )?ende des monats?" )
  , ( "EOY|End of year" , TG.Year , 1,
      "(das )?(EOY|jahr(es)? ?ende|ende (des )?jahr(es)?)" )
  ]
-- | German weekday names and abbreviations (Mo..So).
-- NOTE(review): "Sonntag" requires the dot on the abbreviation ("so\\.",
-- not "so\\.?") — presumably to avoid matching the common German word
-- "so"; confirm before normalizing.
ruleDaysOfWeek :: [Rule]
ruleDaysOfWeek = mkRuleDaysOfWeek
  [ ( "Montag" , "montags?|mo\\.?" )
  , ( "Dienstag" , "die?nstags?|di\\.?" )
  , ( "Mittwoch" , "mittwochs?|mi\\.?" )
  , ( "Donnerstag", "donn?erstag|do\\.?" )
  , ( "Freitag" , "freitags?|fr\\.?" )
  , ( "Samstag" , "samstags?|sonnabends?|sa\\.?" )
  , ( "Sonntag" , "sonntags?|so\\." )
  ]
-- | German month names with their usual abbreviations
-- ("jan.", "feb.", ... "sept." also accepted for September).
ruleMonths :: [Rule]
ruleMonths = mkRuleMonths
  [ ( "Januar" , "januar|jan\\.?" )
  , ( "Februar" , "februar|feb\\.?" )
  , ( "Marz" , "m(ä)rz|m(ä)r\\.?" )
  , ( "April" , "april|apr\\.?" )
  , ( "Mai" , "mai\\.?" )
  , ( "Juni" , "juni|jun\\.?" )
  , ( "Juli" , "juli|jul\\.?" )
  , ( "August" , "august|aug\\.?" )
  , ( "September", "september|sept?\\.?" )
  , ( "Oktober" , "oktober|okt\\.?" )
  , ( "November" , "november|nov\\.?" )
  , ( "Dezember" , "dezember|dez\\.?" )
  ]
-- | Astronomical seasons: (name, regex, start day, end day).
-- Winter wraps around the year boundary (Dec 21 -> Mar 20).
ruleSeasons :: [Rule]
ruleSeasons = mkRuleSeasons
  [ ( "sommer" , "sommer" , monthDay 6 21, monthDay 9 23 )
  , ( "herbst" , "herbst" , monthDay 9 23, monthDay 12 21 )
  , ( "winter" , "winter" , monthDay 12 21, monthDay 3 20 )
  , ( "fruhling", "fr(ü)h(ling|jahr)", monthDay 3 20, monthDay 6 21 )
  ]
-- | Fixed-date (and fixed nth-weekday) German-speaking holidays:
-- (name, regex, resolved day).  Muttertag is the 2nd Sunday of May,
-- Vatertag the 3rd Sunday of June (via 'nthDOWOfMonth').
ruleHolidays :: [Rule]
ruleHolidays = mkRuleHolidays
  [ ( "Neujahr" , "neujahr(s?tag)?"
    , monthDay 1 1 )
  , ( "Valentinstag" , "valentin'?stag"
    , monthDay 2 14 )
  , ( "Schweizer Bundesfeiertag"
    , "schweiz(er)? (bundes)?feiertag|bundes feiertag"
    , monthDay 8 1 )
  , ( "Tag der Deutschen Einheit" , "tag (der)? deutsc?hen? einheit"
    , monthDay 10 3 )
  , ( "Oesterreichischer Nationalfeiertag"
    , "((ö)sterreichischer?)? nationalfeiertag|national feiertag"
    , monthDay 10 26 )
  , ( "Halloween" , "hall?owe?en?"
    , monthDay 10 31 )
  , ( "Allerheiligen" , "allerheiligen?|aller heiligen?"
    , monthDay 11 1 )
  , ( "Nikolaus" , "nikolaus(tag)?|nikolaus tag|nikolo"
    , monthDay 12 6 )
  , ( "Heiligabend" , "heilig(er)? abend"
    , monthDay 12 24 )
  , ( "Weihnachten" , "weih?nacht(en|stag)?"
    , monthDay 12 25 )
  , ( "Silvester" , "silvester"
    , monthDay 12 31 )
  , ( "Muttertag" , "mutt?ertag|mutt?er (tag)?"
    , nthDOWOfMonth 2 7 5 )
  , ( "Vatertag" , "vatt?er( ?tag)?"
    , nthDOWOfMonth 3 7 6 )
  ]
-- | Movable holidays whose date is computed relative to an anchor
-- (Easter, Orthodox Easter, Rosh Hashana, Ramadan, Diwali anchors, ...)
-- via 'cycleNthAfter' day offsets, or provided directly by
-- "Duckling.Time.Computed".
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
  [ ( "Christi Himmelfahrt", "(christi\\s+)?himmelfahrt(stag)?"
    , cycleNthAfter False TG.Day 39 easterSunday )
  , ( "Aschermittwoch", "ascher?(tag|mittwoch)"
    , cycleNthAfter False TG.Day (-46) easterSunday )
  , ( "Aschura", "asc?hura(\\-?tag)?"
    , cycleNthAfter False TG.Day 9 muharram )
  , ( "Bhai Dooj", "bhai(ya)?\\s+d(u|oo)j|bhau\\-beej|bhai\\s+(tika|phonta)"
    , cycleNthAfter False TG.Day 4 dhanteras )
  , ( "Chhath", "chhathi?|chhath (parv|puja)|dala (chhath|puja)|surya shashthi"
    , cycleNthAfter False TG.Day 8 dhanteras )
  , ( "Boghi", "boghi|bogi\\s+pandigai"
    , cycleNthAfter False TG.Day (-1) thaiPongal )
  , ( "Chinesisches Neujahr", "chinesische(s|r)\\s+(neujahr(s(tag|fest))?|frühlingsfest)"
    , chineseNewYear )
  , ( "Aschermontag"
    , "(orthodoxer?\\s+)?(ascher|reiner?\\s+|sauberer?\\s+)montag"
    , cycleNthAfter False TG.Day (-48) orthodoxEaster )
  , ( "Corpus Christi", "corpus\\s+christi|fronleichnam"
    , cycleNthAfter False TG.Day 60 easterSunday )
  , ( "Dhanteras", "dhanatrayodashi|dhanteras|dhanvantari\\s+trayodashi"
    , dhanteras )
  , ( "Diwali", "deepavali|diwali|lakshmi\\s+puja"
    , cycleNthAfter False TG.Day 2 dhanteras )
  , ( "Durga Ashtami", "(durga|maha)(\\s+a)?shtami"
    , cycleNthAfter False TG.Day 7 navaratri )
  , ( "Ostermontag", "ostermontag"
    , cycleNthAfter False TG.Day 1 easterSunday )
  , ( "Ostersonntag", "ostersonntag", easterSunday )
  , ( "Eid al-Adha", "bakr[\\-\\s]e?id|e?id [au]l\\-adha|opferfest"
    , eidalAdha )
  , ( "Eid al-Fitr", "eid al\\-fitr", eidalFitr )
  , ( "Govardhan Puja", "govardhan\\s+puja|annak(u|oo)t"
    , cycleNthAfter False TG.Day 3 dhanteras )
  , ( "Karfreitag", "(kar|stiller\\s+|hoher\\s+)freitag"
    , cycleNthAfter False TG.Day (-2) easterSunday )
  , ( "Guru Gobind Singh Jayanti"
    , "guru\\s+(gobind|govind)\\s+singh\\s+(Geburtstag|jayanti)"
    , guruGobindSinghJayanti )
  , ( "Holi", "(rangwali )?holi|dhuleti|dhulandi|phagwah"
    , cycleNthAfter False TG.Day 39 vasantPanchami )
  , ( "Holika Dahan", "holika dahan|kamudu pyre|chhoti holi"
    , cycleNthAfter False TG.Day 38 vasantPanchami )
  , ( "Karsamstag"
    , "(kar|stiller\\s+)samstag|karsonnabend"
    , cycleNthAfter False TG.Day (-1) easterSunday )
  , ( "Islamisches Neujahr", "(arabisches|hijri|islamisches) neujahr|amun jadid|muharram"
    , muharram )
  , ( "Isra and Mi'raj"
    , "isra and mi'raj|aufstieg des propheten|(die\\s+)?nachtreise|aufstieg\\s+in\\s+den\\s+himmel"
    , cycleNthAfter False TG.Day 26 rajab
    )
  , ( "Jumu'atul-Wida", "jumu'atul\\-widaa?'?|jamat[\\-\\s]ul[\\-\\s]vida"
    , predNthAfter (-1) (dayOfWeek 5) eidalFitr )
  , ( "Kaanum Pongal", "(kaanum|kanni)\\s+pongal"
    , cycleNthAfter False TG.Day 2 thaiPongal )
  , ( "Lag BaOmer", "lag (b|l)[a']omer", lagBaOmer )
  , ( "Vaisakhi", "mesadi|[bv]aisakhi|vaisakhadi|vasakhi|vaishakhi", vaisakhi)
  , ( "Lailat al-Qadr"
    , "la[iy]lat al[\\-\\s][qk]adr|(die)? nacht der (bestimmung|allmacht)"
    , cycleNthAfter False TG.Day 26 ramadan )
  , ( "Lazarus-Samstag", "lazarus(\\-|\\s+)samstag"
    , cycleNthAfter False TG.Day (-8) orthodoxEaster )
  , ( "Maha Navami", "maha\\s+navami", cycleNthAfter False TG.Day 8 navaratri )
  , ( "Maha Saptami", "maha\\s+saptami", cycleNthAfter False TG.Day 6 navaratri )
  , ( "Mattu Pongal", "maa?ttu\\s+pongal"
    , cycleNthAfter False TG.Day 1 thaiPongal )
  , ( "Gründonnerstag"
    , "(grün|hoher\\s+|heiliger\\s+|weißer\\s+|palm)donnerstag"
    , cycleNthAfter False TG.Day (-3) easterSunday )
  , ( "Maulid an-Nabī"
    , "Maulid\\s+an\\-Nabī|mawlid(\\s+al\\-nab(awi|i\\s+al\\-sharif))?|mevli[dt]|mulud|geburtstag des propheten( muhammad)?"
    , mawlid )
  , ( "Naraka Chaturdashi"
    , "naraka? (nivaran )?chaturdashi|(kali|roop) chaudas|choti diwali"
    , cycleNthAfter False TG.Day 1 dhanteras )
  , ( "Orthodoxer Ostermontag", "orthodoxer\\s+ostermontag"
    , cycleNthAfter False TG.Day 1 orthodoxEaster )
  , ( "Orthodoxer Ostersonntag", "orthodoxer\\s+ostersonntag"
    , orthodoxEaster )
  , ( "Orthodoxer Karsamstag", "orthodoxer\\s+karsamstag"
    , cycleNthAfter False TG.Day (-1) orthodoxEaster )
  , ( "Orthodoxer Karfreitag", "orthodoxer\\s+karfreitag"
    , cycleNthAfter False TG.Day (-2) orthodoxEaster )
  , ( "Orthodoxer Palmsonntag", "orthodoxer\\s+palmsonntag"
    , cycleNthAfter False TG.Day (-7) orthodoxEaster )
  , ( "Palmsonntag", "palmsonntag"
    , cycleNthAfter False TG.Day (-7) easterSunday )
  , ( "Pfingsten", "pfingsten|pentecost"
    , cycleNthAfter False TG.Day 49 easterSunday )
  , ( "Purim", "purim", purim )
  , ( "Raksha Bandhan", "raksha(\\s+)?bandhan|rakhi", rakshaBandhan )
  , ( "Pargat Diwas", "pargat diwas|(maharishi )?valmiki jayanti", pargatDiwas )
  , ( "Mahavir Jayanti", "(mahavir|mahaveer) (jayanti|janma kalyanak)"
    , mahavirJayanti )
  , ( "Maha Shivaratri", "maha(\\s+)?shivaratri", mahaShivaRatri)
  , ( "Dayananda Saraswati Jayanti","((maharishi|swami) )?(dayananda )?saraswati jayanti", saraswatiJayanti )
  , ( "Karva Chauth", "karva\\s+chauth|karaka\\s+chaturthi", karvaChauth)
  , ( "Krishna Janmashtami", "(krishna )?janmashtami|gokulashtami", krishnaJanmashtami )
  , ( "Schmini Azeret", "sc?he?mini\\s+at?zeret"
    , cycleNthAfter False TG.Day 21 roshHashana )
  , ( "Fastnacht", "fastnacht(sdienstag)?|mardi gras"
    , cycleNthAfter False TG.Day (-47) easterSunday )
  , ( "Shushan Purim", "shushan\\s+purim", cycleNthAfter False TG.Day 1 purim )
  , ( "Simchat Torah", "simc?hat\\s+torah"
    , cycleNthAfter False TG.Day 22 roshHashana )
  , ( "Thai Pongal"
    , "(thai )?pongal|pongal pandigai|(makara? |magha )?sankranth?i|maghi"
    , thaiPongal )
  , ( "Thiru Onam", "(thiru(v|\\s+))?onam", thiruOnam )
  , ( "Tisha B'Av", "tisha b'av", tishaBAv )
  , ( "Dreifaltigkeitssonntag",
      "trinitatis(fest)?|(dreifaltigkeits|goldener\\s+)sonntag|drei(faltigkeit|einigkeit)(sfest)?"
    , cycleNthAfter False TG.Day 56 easterSunday )
  , ( "Vasant Panchami", "[bv]asant\\s+panchami", vasantPanchami )
  , ( "Vijayadashami", "dasara|duss(eh|he)ra|vijayadashami"
    , cycleNthAfter False TG.Day 9 navaratri )
  , ( "Tu biSchevat", "tu b[i']sc?he?vat", tuBishvat )
  , ( "Vesak", "v(e|ai)sak(ha)?|buddha(\\-?tag|\\s+purnima)|wesakfest", vesak )
  , ( "Jom Ha'atzmaut", "[yj]om ha'?atzmaut", yomHaatzmaut )
  , ( "Jom HaShoah"
    , "[yj]om hashoah|[yj]om hazikaron lashoah ve-lag'vurah|holocaust\\-?gedenktag"
    , cycleNthAfter False TG.Day 12 passover )
  , ( "Jom Kippur", "[yj]om\\s+kippur", cycleNthAfter False TG.Day 9 roshHashana )
  , ( "Pfingstmontag", "pfingstmontag|(pentecost|whit)\\s+montag"
    , cycleNthAfter False TG.Day 50 easterSunday )
  , ( "Rabindra Jayanti", "rabindra(nath)?\\s+jayanti", rabindraJayanti )
  , ("Guru Ravidass Jayanti", "guru\\s+ravidass?\\s+(geburtstag|jayanti)"
    , ravidassJayanti )
  ]
-- | Multi-day observances resolved as open intervals (start, end) around
-- a computed anchor, plus a few fully computed dates (Earth Hour,
-- Koningsdag, Parsi New Year).
ruleComputedHolidays' :: [Rule]
ruleComputedHolidays' = mkRuleHolidays'
  [ ( "Global Youth Service-Tag", "global youth service[\\-\\s]?tag|gysd"
    , let start = globalYouthServiceDay
          end = cycleNthAfter False TG.Day 2 globalYouthServiceDay
      in interval TTime.Open start end )
  , ( "Große Fastenzeit", "große\\s+fastenzeit"
    , let start = cycleNthAfter False TG.Day (-48) orthodoxEaster
          end = cycleNthAfter False TG.Day (-9) orthodoxEaster
      in interval TTime.Open start end )
  , ( "Chanukka", "c?hann?ukk?ah?"
    , let start = chanukah
          end = cycleNthAfter False TG.Day 7 chanukah
      in interval TTime.Open start end )
  , ( "Fastenzeit", "fastenzeit"
    , let start = cycleNthAfter False TG.Day (-46) easterSunday
          end = cycleNthAfter False TG.Day (-1) easterSunday
      in interval TTime.Open start end )
  , ( "Navaratri", "durga\\s+puja|durgotsava|nava?rath?ri"
    , let start = navaratri
          end = cycleNthAfter False TG.Day 9 navaratri
      in interval TTime.Open start end )
  , ( "Pessach", "passover|pess?a[ck]?h|pascha|Passah?"
    , let start = passover
          end = cycleNthAfter False TG.Day 8 passover
      in interval TTime.Open start end )
  , ( "Ramadan", "rama[dt]h?an|ramzaa?n"
    , let start = ramadan
          end = cycleNthAfter False TG.Day (-1) eidalFitr
      in interval TTime.Open start end )
  , ( "Rosch haSchana", "rosch ha\\-?schanah?"
    , let start = roshHashana
          end = cycleNthAfter False TG.Day 2 roshHashana
      in interval TTime.Open start end )
  , ( "Schawuot", "sc?ha[vw]u'?oth?|shovuos"
    , let start = cycleNthAfter False TG.Day 50 passover
          end = cycleNthAfter False TG.Day 52 passover
      in interval TTime.Open start end )
  , ( "Sukkot", "Laubhüttenfest|su[ck]{2}o[st]"
    , let start = cycleNthAfter False TG.Day 14 roshHashana
          end = cycleNthAfter False TG.Day 22 roshHashana
      in interval TTime.Open start end )
  -- Does not account for leap years, so every 365 days.
  , ( "Parsi Neujahr", "parsi neujahr|jamshedi navroz"
    , predEveryNDaysFrom 365 (2020, 8, 16)
    )
  , ( "Earth Hour", "earth hour|stunde der erde"
    , computeEarthHour )
  , ( "Königstag", "königstag|koningsdag"
    , computeKingsDay )
  ]
-- | "zehn vor drei" — N minutes before an hour of day.
ruleRelativeMinutesTotillbeforeIntegerHourofday :: Rule
ruleRelativeMinutesTotillbeforeIntegerHourofday = Rule
  { name = "relative minutes to|till|before <integer> (hour-of-day)"
  , pattern =
    [ Predicate $ isIntegerBetween 1 59
    , regex "vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (tok:_:Token Time td:_) -> do
        mins <- getIntValue tok
        Token Time <$> minutesBefore mins td
      _ -> Nothing
  }
-- | "viertel vor drei" — 15 minutes before an hour of day.
ruleQuarterTotillbeforeIntegerHourofday :: Rule
ruleQuarterTotillbeforeIntegerHourofday = Rule
  { name = "quarter to|till|before <integer> (hour-of-day)"
  , pattern =
    [ regex "vie?rtel vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> Token Time <$> minutesBefore 15 td
      _ -> Nothing
  }
-- | "halb vor drei" — 30 minutes before an hour of day.
ruleHalfTotillbeforeIntegerHourofday :: Rule
ruleHalfTotillbeforeIntegerHourofday = Rule
  { name = "half to|till|before <integer> (hour-of-day)"
  , pattern =
    [ regex "halbe? vor"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> Token Time <$> minutesBefore 30 td
      _ -> Nothing
  }
-- | "der dritte Tag im Mai" — nth grain inside a time span.
ruleTheOrdinalCycleOfTime :: Rule
ruleTheOrdinalCycleOfTime = Rule
  { name = "the <ordinal> <cycle> of <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension TimeGrain
    , regex "im|in|von"
    , dimension Time
    ]
  , prod = \case
      (_:Token Ordinal ord:Token TimeGrain grain:_:Token Time td:_) ->
        tt $ cycleNthAfter True grain (TOrdinal.value ord - 1) td
      _ -> Nothing
  }
-- | "der zweite Montag im Oktober" — article variant of
-- 'ruleNthTimeOfTime'.
ruleNthTimeOfTime2 :: Rule
ruleNthTimeOfTime2 = Rule
  { name = "nth <time> of <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension Time
    , regex "im"
    , dimension Time
    ]
  , prod = \case
      (_:
       Token Ordinal OrdinalData{TOrdinal.value = n}:
       Token Time inner:
       _:
       Token Time outer:
       _) -> Token Time . predNth (n - 1) False <$> intersect outer inner
      _ -> Nothing
  }
-- | "letzten Montag" — the most recent past occurrence of a time.
ruleLastTime :: Rule
ruleLastTime = Rule
  { name = "last <time>"
  , pattern =
    [ regex "letzten?|letztes"
    , Predicate isOkWithThisNext
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (predNth (-1) False td)
      _ -> Nothing
  }
-- | "<datetime> bis <datetime>" — closed interval between two datetimes.
ruleDatetimeDatetimeInterval :: Rule
ruleDatetimeDatetimeInterval = Rule
  { name = "<datetime> - <datetime> (interval)"
  , pattern =
    [ Predicate isNotLatent
    , regex "\\-|bis( zum)?|auf( den)?"
    , Predicate isNotLatent
    ]
  , prod = \case
      (Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | Date ranges like "vom 1. - 5.3.", "1.2. bis 3.4.2017".
-- Four cases, by whether the first date carries its own month and
-- whether the second date carries a year; when the first date has no
-- month it inherits the second date's month (and year, if given).
ruleDateDateInterval :: Rule
ruleDateDateInterval = Rule
  { name = "dd.(mm.)? - dd.mm.(yy[yy]?)? (interval)"
  , pattern =
    [ regex "(?:vo[nm]\\s+)?(10|20|30|31|[012]?[1-9])\\.?((?<=\\.)(?:10|11|12|0?[1-9])(?:\\.?))?"
    , regex "\\-|/|bis( zum)?|auf( den)?"
    , regex "(10|20|30|31|[012]?[1-9])\\.(10|11|12|0?[1-9])\\.?((?<=\\.)\\d{2,4})?"
    ]
  , prod = \tokens -> case tokens of
      -- "d1 - d2.m2": first day has no month; borrow m2, no year
      (Token RegexMatch (GroupMatch (d1:"":_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:"":_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m2 <- parseInt m2
        Token Time <$> interval TTime.Closed (monthDay m2 d1) (monthDay m2 d2)
      -- "d1 - d2.m2.y": first day has no month; borrow m2 and the year
      (Token RegexMatch (GroupMatch (d1:"":_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:y:_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m2 <- parseInt m2
        y <- parseInt y
        Token Time <$> interval TTime.Closed (yearMonthDay y m2 d1) (yearMonthDay y m2 d2)
      -- "d1.m1 - d2.m2": both dates fully specified, no year
      (Token RegexMatch (GroupMatch (d1:m1:_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:"":_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m1 <- parseInt m1
        m2 <- parseInt m2
        Token Time <$> interval TTime.Closed (monthDay m1 d1) (monthDay m2 d2)
      -- "d1.m1 - d2.m2.y": both dates fully specified, shared year
      (Token RegexMatch (GroupMatch (d1:m1:_)):
       _:
       Token RegexMatch (GroupMatch (d2:m2:y:_)):
       _) -> do
        d1 <- parseInt d1
        d2 <- parseInt d2
        m1 <- parseInt m1
        m2 <- parseInt m2
        y <- parseInt y
        Token Time <$> interval TTime.Closed (yearMonthDay y m1 d1) (yearMonthDay y m2 d2)
      _ -> Nothing
  }
-- | "abends" — latent part of day, 18:00 until midnight.
ruleEvening :: Rule
ruleEvening = Rule
  { name = "evening"
  , pattern =
    [ regex "abends?"
    ]
  , prod = \_ ->
      Token Time . mkLatent . partOfDay <$>
        interval TTime.Open (hour False 18) (hour False 0)
  }
-- | "der 15" — bare day of month after the article "der".
ruleTheDayofmonthNonOrdinal :: Rule
ruleTheDayofmonthNonOrdinal = Rule
  { name = "the <day-of-month> (non ordinal)"
  , pattern =
    [ regex "der"
    , Predicate $ isIntegerBetween 1 31
    ]
  , prod = \case
      (_:tok:_) -> tt . dayOfMonth =<< getIntValue tok
      _ -> Nothing
  }
-- | "in zwei Stunden" — a time that far in the future.
ruleInDuration :: Rule
ruleInDuration = Rule
  { name = "in <duration>"
  , pattern =
    [ regex "in"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) -> tt (inDuration dur)
      _ -> Nothing
  }
-- | "letzter Tag im Monat" — final grain-sized unit inside a time span.
ruleLastCycleOfTime :: Rule
ruleLastCycleOfTime = Rule
  { name = "last <cycle> of <time>"
  , pattern =
    [ regex "letzte(r|n|s)?"
    , dimension TimeGrain
    , regex "um|im"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time td:_) ->
        tt (cycleLastOf grain td)
      _ -> Nothing
  }
-- | "von <datetime> bis <datetime>" — explicit closed interval.
ruleFromDatetimeDatetimeInterval :: Rule
ruleFromDatetimeDatetimeInterval = Rule
  { name = "from <datetime> - <datetime> (interval)"
  , pattern =
    [ regex "vo[nm]"
    , dimension Time
    , regex "\\-|bis( zum)?|auf( den)?"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "zehn nach drei" — N minutes past an hour of day.
ruleRelativeMinutesAfterpastIntegerHourofday :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday = Rule
  { name = "relative minutes after|past <integer> (hour-of-day)"
  , pattern =
    [ Predicate $ isIntegerBetween 1 59
    , regex "nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (tok:
       _:
       Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12):
       _) -> do
        mins <- getIntValue tok
        tt $ hourMinute is12 h mins
      _ -> Nothing
  }
-- | "viertel nach drei" — 15 minutes past an hour of day.
ruleQuarterAfterpastIntegerHourofday :: Rule
ruleQuarterAfterpastIntegerHourofday = Rule
  { name = "quarter after|past <integer> (hour-of-day)"
  , pattern =
    [ regex "vie?rtel nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:
       Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12)}:
       _) -> tt $ hourMinute is12 h 15
      _ -> Nothing
  }
-- | "halb nach drei" — 30 minutes past an hour of day.
ruleHalfAfterpastIntegerHourofday :: Rule
ruleHalfAfterpastIntegerHourofday = Rule
  { name = "half after|past <integer> (hour-of-day)"
  , pattern =
    [ regex "halbe? nach"
    , Predicate isAnHourOfDay
    ]
  , prod = \case
      (_:
       Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12)}:
       _) -> tt $ hourMinute is12 h 30
      _ -> Nothing
  }
-- | "10. - 14. Oktober" — day range inside a named month.
ruleMonthDdddInterval :: Rule
ruleMonthDdddInterval = Rule
  { name = "<month> dd-dd (interval)"
  , pattern =
    [ regex "([012]?\\d|30|31)(ter|\\.)?"
    , regex "\\-|bis( zum)?|auf( den)?"
    , regex "([012]?\\d|30|31)(ter|\\.)?"
    , Predicate isAMonth
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (g1:_)):
       _:
       Token RegexMatch (GroupMatch (g2:_)):
       Token Time month:
       _) -> do
        dayA <- parseInt g1
        dayB <- parseInt g2
        start <- intersect (dayOfMonth dayA) month
        end <- intersect (dayOfMonth dayB) month
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "der Tag nach Weihnachten" — the grain immediately after a time.
ruleTheCycleAfterTime :: Rule
ruleTheCycleAfterTime = Rule
  { name = "the <cycle> after <time>"
  , pattern =
    [ regex "der"
    , dimension TimeGrain
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time td:_) ->
        tt (cycleNthAfter False grain 1 td)
      _ -> Nothing
  }
-- | "der Tag vor Weihnachten" — the grain immediately before a time.
ruleTheCycleBeforeTime :: Rule
ruleTheCycleBeforeTime = Rule
  { name = "the <cycle> before <time>"
  , pattern =
    [ regex "der"
    , dimension TimeGrain
    , regex "vor"
    , dimension Time
    ]
  , prod = \case
      (_:Token TimeGrain grain:_:Token Time td:_) ->
        tt (cycleNthAfter False grain (-1) td)
      _ -> Nothing
  }
-- | Far-future year (2101-9999), kept latent until context confirms it.
ruleYearLatent2 :: Rule
ruleYearLatent2 = Rule
  { name = "year (latent)"
  , pattern =
    [ Predicate $ isIntegerBetween 2101 10000
    ]
  , prod = \case
      (tok:_) -> tt . mkLatent . year =<< getIntValue tok
      _ -> Nothing
  }
-- | "<time> nach dem nächsten" — the occurrence after the next one.
ruleTimeAfterNext :: Rule
ruleTimeAfterNext = Rule
  { name = "<time> after next"
  , pattern =
    [ dimension Time
    , regex "nach dem n(ä)chsten"
    ]
  , prod = \case
      (Token Time td:_) -> tt (predNth 1 True td)
      _ -> Nothing
  }
-- | "die Iden des März" — the Ides fall on the 15th in March, May,
-- July and October, and on the 13th in every other month.
ruleTheIdesOfNamedmonth :: Rule
ruleTheIdesOfNamedmonth = Rule
  { name = "the ides of <named-month>"
  , pattern =
    [ regex "die iden (des?)"
    , Predicate isAMonth
    ]
  , prod = \case
      (_:Token Time td@TimeData {TTime.form = Just (TTime.Month m)}:_) ->
        let idesDay = if m `elem` [3, 5, 7, 10] then 15 else 13
        in Token Time <$> intersect (dayOfMonth idesDay) td
      _ -> Nothing
  }
-- | "mittags", "zwölf Uhr" — 12:00.
ruleNoon :: Rule
ruleNoon = Rule
  { name = "noon"
  , pattern =
    [ regex "mittags?|zw(ö)lf (uhr)?"
    ]
  , prod = const . tt $ hour False 12
  }
-- | "diesen/kommenden/nächsten Montag" — the upcoming weekday.
ruleThisnextDayofweek :: Rule
ruleThisnextDayofweek = Rule
  { name = "this|next <day-of-week>"
  , pattern =
    [ regex "diese(n|r)|kommenden|n(ä)chsten"
    , Predicate isADayOfWeek
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (predNth 0 True td)
      _ -> Nothing
  }
-- | "zwischen 9 und 11 Uhr" — closed interval of two times of day.
ruleBetweenTimeofdayAndTimeofdayInterval :: Rule
ruleBetweenTimeofdayAndTimeofdayInterval = Rule
  { name = "between <time-of-day> and <time-of-day> (interval)"
  , pattern =
    [ regex "zwischen"
    , Predicate isATimeOfDay
    , regex "und"
    , Predicate isATimeOfDay
    ]
  , prod = \case
      (_:Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "nächste Woche" — the grain-sized unit following the current one.
ruleNextCycle :: Rule
ruleNextCycle = Rule
  { name = "next <cycle>"
  , pattern =
    [ regex "n(ä)chste(r|n|s)?|kommende(r|n|s)?"
    , dimension TimeGrain
    ]
  , prod = \case
      (_:Token TimeGrain grain:_) -> tt (cycleNth grain 1)
      _ -> Nothing
  }
-- | "übernächste Woche" — two grain-sized units ahead.
ruleAfterNextCycle :: Rule
ruleAfterNextCycle = Rule
  { name = "after next <cycle>"
  , pattern =
    [ regex "(ü)ber ?n(ä)chste[ns]?"
    , dimension TimeGrain
    ]
  , prod = \tokens -> case tokens of
      (_:Token TimeGrain grain:_) -> tt (cycleNth grain 2)
      _ -> Nothing
  }
-- | "3 Uhr circa" — approximation suffix promotes a latent time of day.
ruleTimeofdayApproximately :: Rule
ruleTimeofdayApproximately = Rule
  { name = "<time-of-day> approximately"
  , pattern =
    [ Predicate isATimeOfDay
    , regex "ca\\.?|circa|zirka|ungef(ä)hr|(in )?etwa"
    ]
  , prod = \case
      (Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "am <date>" — the preposition is dropped; the date passes through.
ruleOnDate :: Rule
ruleOnDate = Rule
  { name = "on <date>"
  , pattern =
    [ regex "am"
    , dimension Time
    ]
  , prod = \case
      (_:timeToken:_) -> Just timeToken
      _ -> Nothing
  }
-- | "zwei Wochen ab jetzt" — a duration counted forward from now.
ruleDurationFromNow :: Rule
ruleDurationFromNow = Rule
  { name = "<duration> from now"
  , pattern =
    [ dimension Duration
    , regex "ab (heute|jetzt)"
    ]
  , prod = \case
      (Token Duration dur:_) -> tt (inDuration dur)
      _ -> Nothing
  }
-- | "zu mittags" — latent lunch window, 12:00-14:00.
ruleLunch :: Rule
ruleLunch = Rule
  { name = "lunch"
  , pattern =
    [ regex "(am |zu )?mittags?"
    ]
  , prod = \_ ->
      Token Time . mkLatent . partOfDay <$>
        interval TTime.Open (hour False 12) (hour False 14)
  }
-- | "letzte Woche" — the grain-sized unit before the current one.
ruleLastCycle :: Rule
ruleLastCycle = Rule
  { name = "last <cycle>"
  , pattern =
    [ regex "letzte(r|n|s)?|vergangene(r|n|s)?"
    , dimension TimeGrain
    ]
  , prod = \case
      (_:Token TimeGrain grain:_) -> tt (cycleNth grain (-1))
      _ -> Nothing
  }
-- | "nachmittags" — latent afternoon window, 12:00-19:00.
ruleAfternoon :: Rule
ruleAfternoon = Rule
  { name = "afternoon"
  , pattern =
    [ regex "nach ?mittags?"
    ]
  , prod = \_ ->
      Token Time . mkLatent . partOfDay <$>
        interval TTime.Open (hour False 12) (hour False 19)
  }
-- | "vorletzten Montag" — two occurrences in the past.
ruleTimeBeforeLast :: Rule
ruleTimeBeforeLast = Rule
  { name = "<time> before last"
  , pattern =
    [ regex "vorletzten?|vor ?letztes?"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (predNth (-2) False td)
      _ -> Nothing
  }
-- | "Oktober der dritte" — month followed by an ordinal day.
ruleNamedmonthDayofmonthOrdinal :: Rule
ruleNamedmonthDayofmonthOrdinal = Rule
  { name = "<named-month> <day-of-month> (ordinal)"
  , pattern =
    [ Predicate isAMonth
    , Predicate isDOMOrdinal
    ]
  , prod = \case
      (Token Time month:domToken:_) ->
        Token Time <$> intersectDOM month domToken
      _ -> Nothing
  }
-- | "am Abend", "während des Nachmittags" — a preposition makes a
-- latent part of day explicit.
ruleInduringThePartofday :: Rule
ruleInduringThePartofday = Rule
  { name = "in|during the <part-of-day>"
  , pattern =
    [ regex "(in|an|am|w(ä)h?rend)( der| dem| des)?"
    , Predicate isAPartOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "drei zwanzig" — hour followed by a bare minute count.
ruleHourofdayIntegerAsRelativeMinutes :: Rule
ruleHourofdayIntegerAsRelativeMinutes = Rule
  { name = "<hour-of-day> <integer> (as relative minutes)"
  , pattern =
    [ Predicate $ \t -> isNotLatent t && isAnHourOfDay t
    , Predicate $ isIntegerBetween 1 59
    ]
  , prod = \case
      (Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12)}:
       tok:
       _) -> do
        mins <- getIntValue tok
        tt $ hourMinute is12 h mins
      _ -> Nothing
  }
-- | "<hour> viertel" — hour plus a quarter (15 minutes).
ruleHourofdayQuarter :: Rule
ruleHourofdayQuarter = Rule
  { name = "<hour-of-day> <quarter> (as relative minutes)"
  , pattern =
    [ Predicate isAnHourOfDay
    , regex "vie?rtel"
    ]
  , prod = \case
      (Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12)}:_) ->
        tt $ hourMinute is12 h 15
      _ -> Nothing
  }
-- | "<hour> halb" — hour plus a half (30 minutes).
ruleHourofdayHalf :: Rule
ruleHourofdayHalf = Rule
  { name = "<hour-of-day> <half> (as relative minutes)"
  , pattern =
    [ Predicate isAnHourOfDay
    , regex "halbe?"
    ]
  , prod = \case
      (Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just h) is12)}:_) ->
        tt $ hourMinute is12 h 30
      _ -> Nothing
  }
-- | "dritter Oktober" — ordinal day followed by the month.
ruleDayofmonthordinalNamedmonth :: Rule
ruleDayofmonthordinalNamedmonth = Rule
  { name = "<day-of-month>(ordinal) <named-month>"
  , pattern =
    [ Predicate isDOMOrdinal
    , Predicate isAMonth
    ]
  , prod = \case
      (domToken:Token Time month:_) ->
        Token Time <$> intersectDOM month domToken
      _ -> Nothing
  }
-- | "Montag, den 3." — combine two time tokens separated by a comma.
ruleIntersectBy :: Rule
ruleIntersectBy = Rule
  { name = "intersect by ','"
  , pattern =
    [ Predicate isNotLatent
    , regex ",( den|r)?"
    , Predicate isNotLatent
    ]
  , prod = \case
      (Token Time lhs:_:Token Time rhs:_) ->
        Token Time <$> intersect lhs rhs
      _ -> Nothing
  }
-- | "zweiter Montag nach Ostern" — nth occurrence after an anchor.
ruleNthTimeAfterTime :: Rule
ruleNthTimeAfterTime = Rule
  { name = "nth <time> after <time>"
  , pattern =
    [ dimension Ordinal
    , dimension Time
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (Token Ordinal OrdinalData{TOrdinal.value = n}:
       Token Time target:
       _:
       Token Time anchor:
       _) -> tt $ predNthAfter (n - 1) target anchor
      _ -> Nothing
  }
-- | Numeric date "dd.mm." (day first, German order).
ruleMmdd :: Rule
ruleMmdd = Rule
  { name = "mm/dd"
  , pattern =
    [ regex "(?:am\\s+)?([012]?[1-9]|10|20|30|31)\\.(10|11|12|0?[1-9])\\.?"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (dayStr:monthStr:_)):_) -> do
        d <- parseInt dayStr
        m <- parseInt monthStr
        tt $ monthDay m d
      _ -> Nothing
  }
-- | "nach zwei Stunden" — a time that far in the future.
ruleAfterDuration :: Rule
ruleAfterDuration = Rule
  { name = "after <duration>"
  , pattern =
    [ regex "nach"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) -> tt (inDuration dur)
      _ -> Nothing
  }
-- | Bare hour 0-23 as a latent time of day; values below 12 keep the
-- AM/PM ambiguity flag.
ruleTimeofdayLatent :: Rule
ruleTimeofdayLatent = Rule
  { name = "time-of-day (latent)"
  , pattern =
    [ Predicate $ isIntegerBetween 0 23
    ]
  , prod = \case
      (tok:_) -> do
        h <- getIntValue tok
        tt . mkLatent $ hour (h < 12) h
      _ -> Nothing
  }
-- | "von 9 bis 11" — closed interval between two times of day.
ruleFromTimeofdayTimeofdayInterval :: Rule
ruleFromTimeofdayTimeofdayInterval = Rule
  { name = "from <time-of-day> - <time-of-day> (interval)"
  , pattern =
    [ regex "(von|nach|ab|fr(ü)hestens (um)?)"
    , Predicate isATimeOfDay
    , regex "((noch|aber|jedoch)? vor)|\\-|bis"
    , Predicate isATimeOfDay
    ]
  , prod = \case
      (_:Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "punkt um 3" — exactness prefix promotes a latent time of day.
ruleExactlyTimeofday :: Rule
ruleExactlyTimeofday = Rule
  { name = "exactly <time-of-day>"
  , pattern =
    [ regex "genau|exakt|p(ü)nktlich|punkt( um)?"
    , Predicate isATimeOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "zwischen <datetime> und <datetime>" — closed interval.
ruleBetweenDatetimeAndDatetimeInterval :: Rule
ruleBetweenDatetimeAndDatetimeInterval = Rule
  { name = "between <datetime> and <datetime> (interval)"
  , pattern =
    [ regex "zwischen"
    , dimension Time
    , regex "und"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time start:_:Token Time end:_) ->
        Token Time <$> interval TTime.Closed start end
      _ -> Nothing
  }
-- | "vor zwei Wochen" — a duration counted back from now.
ruleDurationAgo :: Rule
ruleDurationAgo = Rule
  { name = "<duration> ago"
  , pattern =
    [ regex "vor"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) -> tt (durationAgo dur)
      _ -> Nothing
  }
-- | "bis Ende <time>" — interval from the time back to now.
ruleByTheEndOfTime :: Rule
ruleByTheEndOfTime = Rule
  { name = "by the end of <time>"
  , pattern =
    [ regex "bis (zum)? ende (von)?|(noch)? vor"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time td:_) -> Token Time <$> interval TTime.Closed td now
      _ -> Nothing
  }
-- | "nach der Arbeit", "Feierabend" — today, 17:00-21:00.
ruleAfterWork :: Rule
ruleAfterWork = Rule
  { name = "after work"
  , pattern =
    [ regex "nach (der)? arbeit|(am)? feier ?abend"
    ]
  , prod = \_ -> do
      pod <- interval TTime.Open (hour False 17) (hour False 21)
      Token Time . partOfDay <$> intersect today pod
  }
-- | "letzten drei Wochen" — the previous N grain-sized units.
ruleLastNCycle :: Rule
ruleLastNCycle = Rule
  { name = "last n <cycle>"
  , pattern =
    [ regex "letzten?|vergangenen?"
    , Predicate $ isIntegerBetween 1 9999
    , dimension TimeGrain
    ]
  , prod = \case
      (_:tok:Token TimeGrain grain:_) -> do
        count <- getIntValue tok
        tt $ cycleN True grain (negate count)
      _ -> Nothing
  }
-- | "3 Uhr genau" — exactness suffix promotes a latent time of day.
ruleTimeofdaySharp :: Rule
ruleTimeofdaySharp = Rule
  { name = "<time-of-day> sharp"
  , pattern =
    [ Predicate isATimeOfDay
    , regex "genau|exakt|p(ü)nktlich|punkt( um)?"
    ]
  , prod = \case
      (Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "innerhalb von zwei Stunden" — open interval from now until then.
ruleWithinDuration :: Rule
ruleWithinDuration = Rule
  { name = "within <duration>"
  , pattern =
    [ regex "binnen|innerhalb( von)?"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) ->
        Token Time <$> interval TTime.Open now (inDuration dur)
      _ -> Nothing
  }
-- | "Mitternacht", "Tagesende", "EOD" — 0:00.
ruleMidnighteodendOfDay :: Rule
ruleMidnighteodendOfDay = Rule
  { name = "midnight|EOD|end of day"
  , pattern =
    [ regex "mitternacht|EOD|tagesende|ende (des)? tag(es)?"
    ]
  , prod = const . tt $ hour False 0
  }
-- | "3 Oktober" — plain-integer day followed by the month.
ruleDayofmonthNonOrdinalNamedmonth :: Rule
ruleDayofmonthNonOrdinalNamedmonth = Rule
  { name = "<day-of-month> (non ordinal) <named-month>"
  , pattern =
    [ Predicate isDOMInteger
    , Predicate isAMonth
    ]
  , prod = \case
      (domToken:Token Time month:_) ->
        Token Time <$> intersectDOM month domToken
      _ -> Nothing
  }
-- | Two adjacent non-latent time tokens combine by intersection
-- ("Montag 3 Uhr").
ruleIntersect :: Rule
ruleIntersect = Rule
  { name = "intersect"
  , pattern =
    [ Predicate isNotLatent
    , Predicate isNotLatent
    ]
  , prod = \case
      (Token Time lhs:Token Time rhs:_) -> Token Time <$> intersect lhs rhs
      _ -> Nothing
  }
-- | "Montag in zwei Wochen" / "Montag vor zwei Wochen" — a weekday
-- inside the duration-interval in the future ("in") or past ("vor").
ruleDayOfWeekIntersectDuration :: Rule
ruleDayOfWeekIntersectDuration = Rule
  { name = "<day-of-week> in <duration>"
  , pattern =
    [ Predicate isADayOfWeek
    , regex "(in|vor)"
    , dimension Duration
    ]
  , prod = \tokens -> case tokens of
      (Token Time dow:Token RegexMatch (GroupMatch (word:_)):Token Duration dur:_) ->
        if Text.toLower word == "vor"
          then Token Time <$> intersect dow (durationIntervalAgo dur)
          else Token Time <$> intersect dow (inDurationInterval dur)
      _ -> Nothing
  }
-- | "so um 3", "circa 3 Uhr" — approximation prefix promotes a latent
-- time of day.
ruleAboutTimeofday :: Rule
ruleAboutTimeofday = Rule
  { name = "about <time-of-day>"
  , pattern =
    [ regex "so( um)?|(so |um |so um )?circa|zirka|ca\\.?|ungef(ä)hr|(etwa|gegen)( so| um| so um)?"
    , Predicate isATimeOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "bis 3 Uhr", "spätestens Montag" — upper bound on a time.
ruleUntilTimeofday :: Rule
ruleUntilTimeofday = Rule
  { name = "until <time-of-day>"
  , pattern =
    [ regex "vor|bis( zu[rm]?)?|sp(ä)testens?"
    , dimension Time
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (withDirection TTime.Before td)
      _ -> Nothing
  }
-- | "<time> spätestens" — postfix upper bound on a time.
ruleUntilTimeofdayPostfix :: Rule
ruleUntilTimeofdayPostfix = Rule
  { name = "<time-of-day> until"
  , pattern =
    [ dimension Time
    , regex "sp(ä)testens"
    ]
  , prod = \case
      (Token Time td:_:_) -> tt (withDirection TTime.Before td)
      _ -> Nothing
  }
-- | "um 3" — "um"/"@" prefix promotes a latent time of day.
ruleAtTimeofday :: Rule
ruleAtTimeofday = Rule
  { name = "at <time-of-day>"
  , pattern =
    [ regex "um|@"
    , Predicate isATimeOfDay
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (notLatent td)
      _ -> Nothing
  }
-- | "zweiter Montag im Oktober" — nth occurrence within a span.
ruleNthTimeOfTime :: Rule
ruleNthTimeOfTime = Rule
  { name = "nth <time> of <time>"
  , pattern =
    [ dimension Ordinal
    , dimension Time
    , regex "im"
    , dimension Time
    ]
  , prod = \case
      (Token Ordinal OrdinalData{TOrdinal.value = n}:
       Token Time inner:
       _:
       Token Time outer:
       _) -> Token Time . predNth (n - 1) False <$> intersect outer inner
      _ -> Nothing
  }
-- | "Montag Abend" — a time combined with a part of day.
ruleTimePartofday :: Rule
ruleTimePartofday = Rule
  { name = "<time> <part-of-day>"
  , pattern =
    [ dimension Time
    , Predicate isAPartOfDay
    ]
  , prod = \case
      (Token Time base:Token Time pod:_) -> Token Time <$> intersect base pod
      _ -> Nothing
  }
-- | "Wochenende" — the weekend, usable with "dieses"/"nächstes".
ruleWeekend :: Rule
ruleWeekend = Rule
  { name = "week-end"
  , pattern =
    [ regex "wochen ?ende?"
    ]
  , prod = const . tt $ mkOkForThisNext weekend
  }
-- | "der zweite Montag nach Ostern" — article variant of
-- 'ruleNthTimeAfterTime'.
ruleNthTimeAfterTime2 :: Rule
ruleNthTimeAfterTime2 = Rule
  { name = "nth <time> after <time>"
  , pattern =
    [ regex "der|das"
    , dimension Ordinal
    , dimension Time
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (_:
       Token Ordinal OrdinalData{TOrdinal.value = n}:
       Token Time target:
       _:
       Token Time anchor:
       _) -> tt $ predNthAfter (n - 1) target anchor
      _ -> Nothing
  }
-- | "nächsten <time>" — the upcoming occurrence of a time.
ruleNextTime :: Rule
ruleNextTime = Rule
  { name = "next <time>"
  , pattern =
    [ regex "(n(ä)chste|kommende)[ns]?"
    , Predicate $ \t -> isNotLatent t && isOkWithThisNext t
    ]
  , prod = \case
      (_:Token Time td:_) -> tt (predNth 0 True td)
      _ -> Nothing
  }
-- | "drittes Quartal 2017" — nth quarter of a year.
ruleOrdinalQuarterYear :: Rule
ruleOrdinalQuarterYear = Rule
  { name = "<ordinal> quarter <year>"
  , pattern =
    [ dimension Ordinal
    , Predicate $ isGrain TG.Quarter
    , dimension Time
    ]
  , prod = \case
      (Token Ordinal ord:_:Token Time yr:_) ->
        tt $ cycleNthAfter False TG.Quarter (TOrdinal.value ord - 1) yr
      _ -> Nothing
  }
-- | ISO-style numeric date "yyyy-mm-dd".
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
  { name = "yyyy-mm-dd"
  , pattern =
    [ regex "(\\d{2,4})-(1[0-2]|0?[1-9])-(3[01]|[12]\\d|0?[1-9])"
    ]
  , prod = \case
      (Token RegexMatch (GroupMatch (yStr:mStr:dStr:_)):_) -> do
        y <- parseInt yStr
        m <- parseInt mStr
        d <- parseInt dStr
        tt $ yearMonthDay y m d
      _ -> Nothing
  }
-- | "der zweite Tag nach Weihnachten" — nth grain after an anchor time.
--
-- Bug fix: the article pattern was the English literal "the" (evidently
-- copied from the EN rules), so the rule could never match German text.
-- It now accepts the German articles, mirroring the article pattern of
-- 'ruleTheOrdinalCycleOfTime'.
ruleTheOrdinalCycleAfterTime :: Rule
ruleTheOrdinalCycleAfterTime = Rule
  { name = "the <ordinal> <cycle> after <time>"
  , pattern =
    [ regex "der|die|das"
    , dimension Ordinal
    , dimension TimeGrain
    , regex "nach"
    , dimension Time
    ]
  , prod = \case
      (_:Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
        tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
      _ -> Nothing
  }
-- | "3. von Oktober" — combine two time tokens linked by
-- "von"/"der"/"im".
ruleIntersectByOfFromS :: Rule
ruleIntersectByOfFromS = Rule
  { name = "intersect by 'of', 'from', 's"
  , pattern =
    [ Predicate isNotLatent
    , regex "von|der|im"
    , Predicate isNotLatent
    ]
  , prod = \case
      (Token Time lhs:_:Token Time rhs:_) ->
        Token Time <$> intersect lhs rhs
      _ -> Nothing
  }
-- | "nächsten drei Wochen" — the following N grain-sized units.
ruleNextNCycle :: Rule
ruleNextNCycle = Rule
  { name = "next n <cycle>"
  , pattern =
    [ regex "n(ä)chsten?|kommenden?"
    , Predicate $ isIntegerBetween 1 9999
    , dimension TimeGrain
    ]
  , prod = \case
      (_:tok:Token TimeGrain grain:_) -> do
        count <- getIntValue tok
        tt $ cycleN True grain count
      _ -> Nothing
  }
-- | "(in) einer Stunde" — an indefinite-article duration from now.
ruleADuration :: Rule
ruleADuration = Rule
  { name = "a <duration>"
  , pattern =
    [ regex "(in )?eine?(r|n)?"
    , dimension Duration
    ]
  , prod = \case
      (_:Token Duration dur:_) -> tt (inDuration dur)
      _ -> Nothing
  }
-- | "morgens", "in der Früh" — latent morning window, 3:00-12:00.
ruleMorning :: Rule
ruleMorning = Rule
  { name = "morning"
  , pattern =
    [ regex "morgens|(in der )?fr(ü)h|vor ?mittags?|am morgen"
    ]
  , prod = \_ ->
      Token Time . mkLatent . partOfDay <$>
        interval TTime.Open (hour False 3) (hour False 12)
  }
-- | "diesen/heute <part-of-day>", e.g. "heute Morgen" — the part of day
-- intersected with today.
ruleThisPartofday :: Rule
ruleThisPartofday = Rule
  { name = "this <part-of-day>"
  , pattern =
    [ regex "diesen?|dieses|heute"
    , Predicate isAPartOfDay
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td:_) -> Token Time . partOfDay <$> intersect today td
      _ -> Nothing
  }
-- | "diese/kommende <grain>", e.g. "diese Woche" — the current cycle
-- (cycleNth 0) of the grain.
ruleThisCycle :: Rule
ruleThisCycle = Rule
  { name = "this <cycle>"
  , pattern =
    [ regex "diese(r|n|s)?|kommende(r|n|s)?"
    , dimension TimeGrain
    ]
  , prod = \tokens -> case tokens of
      (_:Token TimeGrain grain:_) ->
        tt $ cycleNth grain 0
      _ -> Nothing
  }
-- | "diese(r/n/s) <time>" / "(im) laufenden <time>" — the nearest/current
-- occurrence of the time (predNth 0).
ruleThisTime :: Rule
ruleThisTime = Rule
  { name = "this <time>"
  , pattern =
    [ regex "diese(n|r|s)?|(im )?laufenden"
    , Predicate isOkWithThisNext
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td:_) ->
        tt $ predNth 0 False td
      _ -> Nothing
  }
-- | "<duration> hence" — the time that duration from now.
-- NOTE(review): "hence" is English; this regex looks like a leftover from the
-- EN rule set and presumably never matches German input — confirm upstream.
ruleDurationHence :: Rule
ruleDurationHence = Rule
  { name = "<duration> hence"
  , pattern =
    [ dimension Duration
    , regex "hence"
    ]
  , prod = \tokens -> case tokens of
      (Token Duration dd:_) ->
        tt $ inDuration dd
      _ -> Nothing
  }
-- | "<integer day> vom/von <month>", e.g. "5 vom Juni".
ruleDayofmonthNonOrdinalOfNamedmonth :: Rule
ruleDayofmonthNonOrdinalOfNamedmonth = Rule
  { name = "<day-of-month> (non ordinal) of <named-month>"
  , pattern =
    [ Predicate isDOMInteger
    , regex "vom|von"
    , Predicate isAMonth
    ]
  , prod = \tokens -> case tokens of
      (token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
      _ -> Nothing
  }
-- | "nach dem Mittagessen" / "nachmittags" — this afternoon, 13:00 to 17:00
-- intersected with today.
ruleAfterLunch :: Rule
ruleAfterLunch = Rule
  { name = "after lunch"
  , pattern =
    [ regex "nach dem mittagessen|nachmittags?"
    ]
  , prod = \_ -> do
      td2 <- interval TTime.Open (hour False 13) (hour False 17)
      Token Time . partOfDay <$> intersect today td2
  }
-- | "an einem <weekday>" — absorbs the preposition, passing the day through.
ruleOnANamedday :: Rule
ruleOnANamedday = Rule
  { name = "on a named-day"
  , pattern =
    [ regex "an einem"
    , Predicate isADayOfWeek
    ]
  , prod = \tokens -> case tokens of
      (_:x:_) -> Just x
      _ -> Nothing
  }
-- | Bare integers 25..999 interpreted (latently) as a year.
ruleYearLatent :: Rule
ruleYearLatent = Rule
  { name = "year (latent)"
  , pattern =
    [ Predicate $ isIntegerBetween 25 999
    ]
  , prod = \tokens -> case tokens of
      (token:_) -> do
        y <- getIntValue token
        tt . mkLatent $ year y
      _ -> Nothing
  }
-- | "nach/ab/frühestens <time>" — open-ended interval starting at the time.
ruleAfterTimeofday :: Rule
ruleAfterTimeofday = Rule
  { name = "after <time-of-day>"
  , pattern =
    [ regex "nach|ab|fr(ü)he?stens"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td:_) -> tt $ withDirection TTime.After td
      _ -> Nothing
  }
-- | Postfix variant: "<time> frühestens", e.g. "10 Uhr frühestens".
ruleAfterTimeofdayPostfix :: Rule
ruleAfterTimeofdayPostfix = Rule
  { name = "<time-of-day> after"
  , pattern =
    [ dimension Time
    , regex "fr(ü)he?stens"
    ]
  , prod = \tokens -> case tokens of
      (Token Time td:_:_) -> tt $ withDirection TTime.After td
      _ -> Nothing
  }
-- | Night as a latent part of day: 00:00 to 04:00 (open interval).
ruleNight :: Rule
ruleNight = Rule
  { name = "night"
  , pattern =
    [ regex "nachts?"
    ]
  , prod = \_ ->
      let from = hour False 0
          to = hour False 4
      in Token Time . mkLatent . partOfDay <$>
         interval TTime.Open from to
  }
-- | Bare ordinal read as a day of month, e.g. "dritter" -> the 3rd.
ruleDayofmonthOrdinal :: Rule
ruleDayofmonthOrdinal = Rule
  { name = "<day-of-month> (ordinal)"
  , pattern =
    [ Predicate isDOMOrdinal
    ]
  , prod = \tokens -> case tokens of
      (Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
        tt $ dayOfMonth v
      _ -> Nothing
  }
-- | German "halb <hour>" means half an hour BEFORE the hour:
-- "halb acht" = 7:30, hence minutesBefore 30.
ruleHalfIntegerGermanStyleHourofday :: Rule
ruleHalfIntegerGermanStyleHourofday = Rule
  { name = "half <integer> (german style hour-of-day)"
  , pattern =
    [ regex "halb"
    , Predicate isAnHourOfDay
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td:_) -> Token Time <$> minutesBefore 30 td
      _ -> Nothing
  }
-- | "<ordinal> <grain> nach <time>", e.g. "dritte Woche nach Weihnachten".
ruleOrdinalCycleAfterTime :: Rule
ruleOrdinalCycleAfterTime = Rule
  { name = "<ordinal> <cycle> after <time>"
  , pattern =
    [ dimension Ordinal
    , dimension TimeGrain
    , regex "nach"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
        -- ordinals are 1-based, cycleNthAfter is 0-based, hence the (- 1)
        tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
      _ -> Nothing
  }
-- | "<ordinal> <grain> im/in/von <time>", e.g. "dritte Woche im Juni".
ruleOrdinalCycleOfTime :: Rule
ruleOrdinalCycleOfTime = Rule
  { name = "<ordinal> <cycle> of <time>"
  , pattern =
    [ dimension Ordinal
    , dimension TimeGrain
    , regex "im|in|von"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
        tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
      _ -> Nothing
  }
-- | "übernächste <time>" — the occurrence after the next one (predNth 1).
ruleAfterNextTime :: Rule
ruleAfterNextTime = Rule
  { name = "after next <time>"
  , pattern =
    [ regex "(ü)ber ?n(ä)chste[ns]?"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td:_) ->
        tt $ predNth 1 True td
      _ -> Nothing
  }
-- | Times like "15:30", "15.30", "15h30", optionally suffixed "uhr"/"h".
ruleHhmm :: Rule
ruleHhmm = Rule
  { name = "hh:mm"
  , pattern =
    [ regex "((?:[01]?\\d)|(?:2[0-3]))[:.h]([0-5]\\d)(?:uhr|h)?"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
        h <- parseInt m1
        m <- parseInt m2
        tt $ hourMinute False h m
      _ -> Nothing
  }
-- | "heute abend" / "heute am abend" — this evening, 18:00 onwards,
-- intersected with today.
ruleTonight :: Rule
ruleTonight = Rule
  { name = "tonight"
  , pattern =
    -- Fixed regex: the previous pattern "heute? (am)? abends?" kept a literal
    -- space on each side of the optional "(am)?" group, so the common form
    -- "heute abend" (single space, no "am") could never match.  Folding the
    -- trailing space into the optional group accepts both "heute abend" and
    -- "heute am abend".
    [ regex "heute? (am )?abends?"
    ]
  , prod = \_ -> do
      td2 <- interval TTime.Open (hour False 18) (hour False 0)
      Token Time . partOfDay <$> intersect today td2
  }
-- | Bare integers 1000..2100 read as a (non-latent) year.
ruleYear :: Rule
ruleYear = Rule
  { name = "year"
  , pattern =
    [ Predicate $ isIntegerBetween 1000 2100
    ]
  , prod = \tokens -> case tokens of
      (token:_) -> do
        y <- getIntValue token
        tt $ year y
      _ -> Nothing
  }
-- | "<month> <integer day>", e.g. "Juni 5".
ruleNamedmonthDayofmonthNonOrdinal :: Rule
ruleNamedmonthDayofmonthNonOrdinal = Rule
  { name = "<named-month> <day-of-month> (non ordinal)"
  , pattern =
    [ Predicate isAMonth
    , Predicate isDOMInteger
    ]
  , prod = \tokens -> case tokens of
      (Token Time td:token:_) -> Token Time <$> intersectDOM td token
      _ -> Nothing
  }
-- | Four-digit military times like "1530"; latent because such numbers are
-- ambiguous with plain integers.
ruleHhmmMilitary :: Rule
ruleHhmmMilitary = Rule
  { name = "hhmm (military)"
  , pattern =
    [ regex "((?:[01]?\\d)|(?:2[0-3]))([0-5]\\d)"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (h:m:_)):_) -> do
        hh <- parseInt h
        mm <- parseInt m
        tt . mkLatent $ hourMinute False hh mm
      _ -> Nothing
  }
-- | Swallow a comma after a weekday ("Montag, ..."), passing the day through.
ruleAbsorptionOfAfterNamedDay :: Rule
ruleAbsorptionOfAfterNamedDay = Rule
  { name = "absorption of , after named day"
  , pattern =
    [ Predicate isADayOfWeek
    , regex ","
    ]
  , prod = \tokens -> case tokens of
      (x:_) -> Just x
      _ -> Nothing
  }
-- | "letzter <weekday> im/um <time>", e.g. "letzter Montag im Juni" —
-- the last occurrence of the weekday inside the time.
ruleLastDayofweekOfTime :: Rule
ruleLastDayofweekOfTime = Rule
  { name = "last <day-of-week> of <time>"
  , pattern =
    [ regex "letzte(r|n|s)?"
    , Predicate isADayOfWeek
    , regex "[ui]m"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (_:Token Time td1:_:Token Time td2:_) ->
        tt $ predLastOf td1 td2
      _ -> Nothing
  }
-- | Military-style time with am/pm suffix, e.g. "1130 am".
ruleHhmmMilitaryAmpm :: Rule
ruleHhmmMilitaryAmpm = Rule
  { name = "hhmm (military) am|pm"
  , pattern =
    [ regex "((?:1[012]|0?\\d))([0-5]\\d)"
    , regex "([ap])\\.?m\\.?(?:[\\s'\"-_{}\\[\\]()]|$)"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (hh:mm:_)):Token RegexMatch (GroupMatch (ap:_)):_) -> do
        h <- parseInt hh
        m <- parseInt mm
        -- first arg of timeOfDayAMPM: True = AM, False = PM
        tt . timeOfDayAMPM (Text.toLower ap == "a") $ hourMinute True h m
      _ -> Nothing
  }
-- | Closed interval "<tod> - <tod>" / "<tod> bis <tod>"; the left side must
-- be non-latent to anchor the match.
ruleTimeofdayTimeofdayInterval :: Rule
ruleTimeofdayTimeofdayInterval = Rule
  { name = "<time-of-day> - <time-of-day> (interval)"
  , pattern =
    [ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
    , regex "\\-|bis"
    , Predicate isATimeOfDay
    ]
  , prod = \tokens -> case tokens of
      (Token Time td1:_:Token Time td2:_) ->
        Token Time <$> interval TTime.Closed td1 td2
      _ -> Nothing
  }
-- | Mirror of the interval rule above: here the RIGHT side must be
-- non-latent; also accepts "/" as a separator.
ruleTimeofdayTimeofdayInterval2 :: Rule
ruleTimeofdayTimeofdayInterval2 = Rule
  { name = "<time-of-day> - <time-of-day> (interval)"
  , pattern =
    [ Predicate isATimeOfDay
    , regex "\\-|/|bis"
    , Predicate $ and . sequence [isNotLatent, isATimeOfDay]
    ]
  , prod = \tokens -> case tokens of
      (Token Time td1:_:Token Time td2:_) ->
        Token Time <$> interval TTime.Closed td1 td2
      _ -> Nothing
  }
-- | "<duration> nach <time>", e.g. "zwei Wochen nach Ostern".
ruleDurationAfterTime :: Rule
ruleDurationAfterTime = Rule
  { name = "<duration> after <time>"
  , pattern =
    [ dimension Duration
    , regex "nach"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (Token Duration dd:_:Token Time td:_) ->
        tt $ durationAfter dd td
      _ -> Nothing
  }
-- | "<ordinal> Quartal" without a year — resolved against the current year.
ruleOrdinalQuarter :: Rule
ruleOrdinalQuarter = Rule
  { name = "<ordinal> quarter"
  , pattern =
    [ dimension Ordinal
    , Predicate $ isGrain TG.Quarter
    ]
  , prod = \tokens -> case tokens of
      (Token Ordinal OrdinalData{TOrdinal.value = v}:_) -> tt .
        cycleNthAfter False TG.Quarter (v - 1) $ cycleNth TG.Year 0
      _ -> Nothing
  }
-- | "der <ordinal>", e.g. "der dritte" — a day of month.
ruleTheDayofmonthOrdinal :: Rule
ruleTheDayofmonthOrdinal = Rule
  { name = "the <day-of-month> (ordinal)"
  , pattern =
    [ regex "der"
    , Predicate isDOMOrdinal
    ]
  , prod = \tokens -> case tokens of
      (_:Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
        tt $ dayOfMonth v
      _ -> Nothing
  }
-- | "<duration> vor <time>", e.g. "zwei Wochen vor Ostern".
ruleDurationBeforeTime :: Rule
ruleDurationBeforeTime = Rule
  { name = "<duration> before <time>"
  , pattern =
    [ dimension Duration
    , regex "vor"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (Token Duration dd:_:Token Time td:_) ->
        tt $ durationBefore dd td
      _ -> Nothing
  }
-- | "<part-of-day> des/von/vom/am <time>", e.g. "Abend des 5. Juni".
rulePartofdayOfTime :: Rule
rulePartofdayOfTime = Rule
  { name = "<part-of-day> of <time>"
  , pattern =
    [ Predicate isAPartOfDay
    , regex "des|von|vom|am"
    , dimension Time
    ]
  , prod = \tokens -> case tokens of
      (Token Time td1:_:Token Time td2:_) ->
        Token Time <$> intersect td1 td2
      _ -> Nothing
  }
-- | German dotted numeric dates, e.g. "18.02.2013" (day.month.year: m1=day,
-- m2=month, m3=year).
-- NOTE(review): the rule name says "mm/dd/yyyy" but the regex and prod parse
-- dd.mm.yyyy — the name appears to be a copy-over from the EN rules; confirm
-- whether renaming it would break anything that matches on rule names.
ruleMmddyyyy :: Rule
ruleMmddyyyy = Rule
  { name = "mm/dd/yyyy"
  , pattern =
    [ regex "([012]?[1-9]|10|20|30|31)\\.(0?[1-9]|10|11|12)\\.(\\d{2,4})"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (m1:m2:m3:_)):_) -> do
        y <- parseInt m3
        m <- parseInt m2
        d <- parseInt m1
        tt $ yearMonthDay y m d
      _ -> Nothing
  }
-- | "<hour> Uhr" / "<hour>h" — the suffix confirms a latent hour as a real
-- time of day (notLatent).
ruleTimeofdayOclock :: Rule
ruleTimeofdayOclock = Rule
  { name = "<time-of-day> o'clock"
  , pattern =
    [ Predicate isATimeOfDay
    , regex "uhr|h(?:[\\s'\"-_{}\\[\\]()]|$)"
    ]
  , prod = \tokens -> case tokens of
      (Token Time td:_) ->
        tt $ notLatent td
      _ -> Nothing
  }
-- | "<ordinal day> <month> <year>", e.g. "3. Juni 2013".
ruleDayofmonthordinalNamedmonthYear :: Rule
ruleDayofmonthordinalNamedmonthYear = Rule
  { name = "<day-of-month>(ordinal) <named-month> year"
  , pattern =
    [ Predicate isDOMOrdinal
    , Predicate isAMonth
    , regex "(\\d{2,4})"
    ]
  , prod = \tokens -> case tokens of
      (token:
       Token Time td:
       Token RegexMatch (GroupMatch (match:_)):
       _) -> do
        n <- parseInt match
        dom <- intersectDOM td token
        Token Time <$> intersect dom (year n)
      _ -> Nothing
  }
-- | Attach a named timezone abbreviation (CET, UTC, PST, ...) to a
-- non-latent time of day.
ruleTimezone :: Rule
ruleTimezone = Rule
  { name = "<time> timezone"
  , pattern =
    [ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
    , regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
    ]
  , prod = \tokens -> case tokens of
      (Token Time td:
       Token RegexMatch (GroupMatch (tz:_)):
       _) -> Token Time <$> inTimezone (Text.toUpper tz) td
      _ -> Nothing
  }
-- | The complete exported rule set for this locale: the explicit rules above
-- plus the generated instants, weekdays, months, seasons and holidays.
rules :: [Rule]
rules =
  [ ruleADuration
  , ruleAboutTimeofday
  , ruleAbsorptionOfAfterNamedDay
  , ruleAfterDuration
  , ruleAfterLunch
  , ruleAfterNextTime
  , ruleAfterTimeofday
  , ruleAfterTimeofdayPostfix
  , ruleAfterWork
  , ruleAfternoon
  , ruleAtTimeofday
  , ruleBetweenDatetimeAndDatetimeInterval
  , ruleBetweenTimeofdayAndTimeofdayInterval
  , ruleByTheEndOfTime
  , ruleDatetimeDatetimeInterval
  , ruleDateDateInterval
  , ruleDayofmonthNonOrdinalNamedmonth
  , ruleDayofmonthNonOrdinalOfNamedmonth
  , ruleDayofmonthOrdinal
  , ruleDayofmonthordinalNamedmonth
  , ruleDayofmonthordinalNamedmonthYear
  , ruleDurationAfterTime
  , ruleDurationAgo
  , ruleDurationBeforeTime
  , ruleDurationFromNow
  , ruleDurationHence
  , ruleEvening
  , ruleExactlyTimeofday
  , ruleFromDatetimeDatetimeInterval
  , ruleFromTimeofdayTimeofdayInterval
  , ruleHalfIntegerGermanStyleHourofday
  , ruleHhmm
  , ruleHhmmMilitary
  , ruleHhmmMilitaryAmpm
  , ruleHourofdayIntegerAsRelativeMinutes
  , ruleInDuration
  , ruleInduringThePartofday
  , ruleIntersect
  , ruleIntersectBy
  , ruleIntersectByOfFromS
  , ruleDayOfWeekIntersectDuration
  , ruleLastCycle
  , ruleLastCycleOfTime
  , ruleLastDayofweekOfTime
  , ruleLastNCycle
  , ruleLastTime
  , ruleLunch
  , ruleMidnighteodendOfDay
  , ruleMmdd
  , ruleMmddyyyy
  , ruleMonthDdddInterval
  , ruleMorning
  , ruleNamedmonthDayofmonthNonOrdinal
  , ruleNamedmonthDayofmonthOrdinal
  , ruleNextCycle
  , ruleAfterNextCycle
  , ruleNextNCycle
  , ruleNextTime
  , ruleNight
  , ruleNoon
  , ruleNthTimeAfterTime
  , ruleNthTimeAfterTime2
  , ruleNthTimeOfTime
  , ruleNthTimeOfTime2
  , ruleOnANamedday
  , ruleOnDate
  , ruleOrdinalCycleAfterTime
  , ruleOrdinalCycleOfTime
  , ruleOrdinalQuarter
  , ruleOrdinalQuarterYear
  , rulePartofdayOfTime
  , ruleRelativeMinutesAfterpastIntegerHourofday
  , ruleRelativeMinutesTotillbeforeIntegerHourofday
  , ruleTheCycleAfterTime
  , ruleTheCycleBeforeTime
  , ruleTheDayofmonthNonOrdinal
  , ruleTheDayofmonthOrdinal
  , ruleTheIdesOfNamedmonth
  , ruleTheOrdinalCycleAfterTime
  , ruleTheOrdinalCycleOfTime
  , ruleThisCycle
  , ruleThisPartofday
  , ruleThisTime
  , ruleThisnextDayofweek
  , ruleTimeAfterNext
  , ruleTimeBeforeLast
  , ruleTimePartofday
  , ruleTimeofdayApproximately
  , ruleTimeofdayLatent
  , ruleTimeofdayOclock
  , ruleTimeofdaySharp
  , ruleTimeofdayTimeofdayInterval
  , ruleTimeofdayTimeofdayInterval2
  , ruleTonight
  , ruleUntilTimeofday
  , ruleUntilTimeofdayPostfix
  , ruleWeekend
  , ruleWithinDuration
  , ruleYear
  , ruleYearLatent
  , ruleYearLatent2
  , ruleYyyymmdd
  , ruleQuarterTotillbeforeIntegerHourofday
  , ruleHalfTotillbeforeIntegerHourofday
  , ruleQuarterAfterpastIntegerHourofday
  , ruleHalfAfterpastIntegerHourofday
  , ruleHourofdayQuarter
  , ruleHourofdayHalf
  , ruleTimezone
  ]
  ++ ruleInstants
  ++ ruleDaysOfWeek
  ++ ruleMonths
  ++ ruleSeasons
  ++ ruleHolidays
  ++ ruleComputedHolidays
  ++ ruleComputedHolidays'
|
690e23dfc06af7893012707ea8a34e0b8b417d0138f478f13ba595e538086278 | sealchain-project/sealchain | UnitsOfMeasure.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
module Pos.Util.UnitsOfMeasure
( UnitOfMeasure (..)
, MeasuredIn(..)
) where
import Control.Lens (at, (?~))
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..), object,
withObject, (.:), (.=))
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Builder as B
import Formatting ((%))
import qualified Formatting as F
import Formatting.Buildable (Buildable (..))
import Pos.Core.Util.LogSafe (BuildableSafeGen (..))
import Universum
import Data.Swagger (NamedSchema (..), Referenced (..),
SwaggerType (..), ToSchema (..), enum_, properties,
required, type_)
-- | A finite sum type representing time units we might want to show to
-- clients. The idea is that whenever we have a quantity represeting some
-- form of time, we should render it together with the relevant unit, to
-- not leave anything to guessing.
data UnitOfMeasure =
    Seconds
  | Milliseconds
  | Microseconds
  -- | % ranging from 0 to 100.
  | Percentage100
  -- | Number of blocks.
  | Blocks
  -- | Number of blocks per second.
  | BlocksPerSecond
  | Bytes
  | Lovelace
  | LovelacePerByte
  deriving (Show, Eq, Ord)
-- | Lowercase, human-readable unit names; these exact strings appear in JSON
-- payloads (via the 'ToJSON' instance) and in log output.
instance Buildable UnitOfMeasure where
    build = \case
        Bytes -> "bytes"
        LovelacePerByte -> "lovelace/byte"
        Lovelace -> "lovelace"
        Seconds -> "seconds"
        Milliseconds -> "milliseconds"
        Microseconds -> "microseconds"
        Percentage100 -> "percent"
        Blocks -> "blocks"
        BlocksPerSecond -> "blocks/second"
-- | Serialise a unit as its 'Buildable' rendering, e.g. @"bytes"@.
instance ToJSON UnitOfMeasure where
    toJSON = String . T.toStrict . B.toLazyText . build
-- | A quantity tagged at the type level with its unit of measure, so that
-- e.g. @MeasuredIn 'Bytes Word64@ cannot be confused with a count of blocks.
data MeasuredIn (u :: UnitOfMeasure) a
    = MeasuredIn a
    deriving (Show, Eq, Ord)
-- | Safe-logging rendering is identical to the plain 'Buildable' one:
-- a quantity plus its unit is not considered sensitive.
instance (Demote u, Buildable a) => BuildableSafeGen (MeasuredIn u a) where
    buildSafeGen _ = build
-- | Renders as "<quantity> <unit>", e.g. "42 bytes"; the unit is recovered
-- from the type via 'demote'.
instance (Demote u, Buildable a) => Buildable (MeasuredIn u a) where
    build (MeasuredIn a) = F.bprint
        (F.build % " " % F.build)
        a
        (demote $ Proxy @u)
-- | Serialises as @{"unit": "<unit>", "quantity": <a>}@, mirrored by the
-- 'FromJSON' instance below.
instance (Demote u, ToJSON a) => ToJSON (MeasuredIn u a) where
    toJSON (MeasuredIn a) = object
        [ "unit" .= demote (Proxy @u)
        , "quantity" .= toJSON a
        ]
-- | Parses @{"unit": ..., "quantity": ...}@, rejecting the object when the
-- "unit" field does not match the unit demanded by the type @u@.
instance (Demote u, FromJSON a) => FromJSON (MeasuredIn u a) where
    parseJSON = withObject "MeasuredIn" $ \o -> do
        verifyUnit =<< o .: "unit"
        MeasuredIn <$> o .: "quantity"
      where
        -- expected unit name, rendered for error messages
        unitS = toString $ T.toStrict $ B.toLazyText $ build $ demote $ Proxy @u
        verifyUnit = \case
            u@(String _) | u == toJSON (demote $ Proxy @u) ->
                pure ()
            _ ->
                fail
                    $ "failed to parse quantified value. Expected value in '"
                    <> unitS <> "' but got something else. e.g.: "
                    <> "{ \"unit\": \"" <> unitS <> "\", \"quantity\": ...}"
-- | Swagger schema: an object with a "quantity" field (schema of @a@) and a
-- "unit" field constrained by an enum to the single unit named by @u@.
instance (Demote u, ToSchema a) => ToSchema (MeasuredIn u a) where
    declareNamedSchema _ = do
        NamedSchema _ schema <- declareNamedSchema (Proxy @a)
        pure $ NamedSchema (Just "MeasuredIn") $ mempty
            & type_ .~ SwaggerObject
            & required .~ ["quantity", "unit"]
            & properties .~ (mempty
                & at "quantity" ?~ Inline schema
                & at "unit" ?~ (Inline $ mempty
                    & type_ .~ SwaggerString
                    & enum_ ?~ [toJSON $ demote $ Proxy @u]
                  )
              )
--
-- Internal
--
-- | Bring a type back to the world of value (invert of promote)
class Demote (u :: UnitOfMeasure) where
    demote :: Proxy u -> UnitOfMeasure  -- ^ value-level witness of @u@
-- One instance per constructor: each maps the promoted unit back to its
-- value-level twin.
instance Demote 'Bytes where demote _ = Bytes
instance Demote 'LovelacePerByte where demote _ = LovelacePerByte
instance Demote 'Lovelace where demote _ = Lovelace
instance Demote 'Seconds where demote _ = Seconds
instance Demote 'Milliseconds where demote _ = Milliseconds
instance Demote 'Microseconds where demote _ = Microseconds
instance Demote 'Percentage100 where demote _ = Percentage100
instance Demote 'Blocks where demote _ = Blocks
instance Demote 'BlocksPerSecond where demote _ = BlocksPerSecond
| null | https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/lib/src/Pos/Util/UnitsOfMeasure.hs | haskell | # LANGUAGE DataKinds #
| A finite sum type representing time units we might want to show to
clients. The idea is that whenever we have a quantity represeting some
form of time, we should render it together with the relevant unit, to
not leave anything to guessing.
| Number of blocks.
| Represent data with a given unit of measure
| Bring a type back to the world of value (invert of promote) | # LANGUAGE KindSignatures #
module Pos.Util.UnitsOfMeasure
( UnitOfMeasure (..)
, MeasuredIn(..)
) where
import Control.Lens (at, (?~))
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..), object,
withObject, (.:), (.=))
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Builder as B
import Formatting ((%))
import qualified Formatting as F
import Formatting.Buildable (Buildable (..))
import Pos.Core.Util.LogSafe (BuildableSafeGen (..))
import Universum
import Data.Swagger (NamedSchema (..), Referenced (..),
SwaggerType (..), ToSchema (..), enum_, properties,
required, type_)
data UnitOfMeasure =
Seconds
| Milliseconds
| Microseconds
| % ranging from 0 to 100 .
| Percentage100
| Blocks
| Number of blocks per second .
| BlocksPerSecond
| Bytes
| Lovelace
| LovelacePerByte
deriving (Show, Eq, Ord)
instance Buildable UnitOfMeasure where
build = \case
Bytes -> "bytes"
LovelacePerByte -> "lovelace/byte"
Lovelace -> "lovelace"
Seconds -> "seconds"
Milliseconds -> "milliseconds"
Microseconds -> "microseconds"
Percentage100 -> "percent"
Blocks -> "blocks"
BlocksPerSecond -> "blocks/second"
instance ToJSON UnitOfMeasure where
toJSON = String . T.toStrict . B.toLazyText . build
data MeasuredIn (u :: UnitOfMeasure) a
= MeasuredIn a
deriving (Show, Eq, Ord)
instance (Demote u, Buildable a) => BuildableSafeGen (MeasuredIn u a) where
buildSafeGen _ = build
instance (Demote u, Buildable a) => Buildable (MeasuredIn u a) where
build (MeasuredIn a) = F.bprint
(F.build % " " % F.build)
a
(demote $ Proxy @u)
instance (Demote u, ToJSON a) => ToJSON (MeasuredIn u a) where
toJSON (MeasuredIn a) = object
[ "unit" .= demote (Proxy @u)
, "quantity" .= toJSON a
]
instance (Demote u, FromJSON a) => FromJSON (MeasuredIn u a) where
parseJSON = withObject "MeasuredIn" $ \o -> do
verifyUnit =<< o .: "unit"
MeasuredIn <$> o .: "quantity"
where
unitS = toString $ T.toStrict $ B.toLazyText $ build $ demote $ Proxy @u
verifyUnit = \case
u@(String _) | u == toJSON (demote $ Proxy @u) ->
pure ()
_ ->
fail
$ "failed to parse quantified value. Expected value in '"
<> unitS <> "' but got something else. e.g.: "
<> "{ \"unit\": \"" <> unitS <> "\", \"quantity\": ...}"
instance (Demote u, ToSchema a) => ToSchema (MeasuredIn u a) where
declareNamedSchema _ = do
NamedSchema _ schema <- declareNamedSchema (Proxy @a)
pure $ NamedSchema (Just "MeasuredIn") $ mempty
& type_ .~ SwaggerObject
& required .~ ["quantity", "unit"]
& properties .~ (mempty
& at "quantity" ?~ Inline schema
& at "unit" ?~ (Inline $ mempty
& type_ .~ SwaggerString
& enum_ ?~ [toJSON $ demote $ Proxy @u]
)
)
Internal
class Demote (u :: UnitOfMeasure) where
demote :: Proxy u -> UnitOfMeasure
instance Demote 'Bytes where demote _ = Bytes
instance Demote 'LovelacePerByte where demote _ = LovelacePerByte
instance Demote 'Lovelace where demote _ = Lovelace
instance Demote 'Seconds where demote _ = Seconds
instance Demote 'Milliseconds where demote _ = Milliseconds
instance Demote 'Microseconds where demote _ = Microseconds
instance Demote 'Percentage100 where demote _ = Percentage100
instance Demote 'Blocks where demote _ = Blocks
instance Demote 'BlocksPerSecond where demote _ = BlocksPerSecond
|
fa5d7894cca9b285fd8c9f79274b78ad530f77c89a249d7af2f0ffa5059e6489 | boxer-project/boxer-sunrise | clipboard.lisp | ;;;; Boxer
Copyright 1985 - 2022 Andrea A. diSessa and the Estate of Edward H. Lay
;;;;
Portions of this code may be copyright 1982 - 1985 Massachusetts Institute of Technology . Those portions may be
used for any purpose , including commercial ones , providing that notice of MIT copyright is retained .
;;;;
;;;; Licensed under the 3-Clause BSD license. You may not use this file except in compliance with this license.
;;;;
;;;; -3-Clause
;;;;
;;;;
;;;; +-Data--+
;;;; This file is part of the | BOXER | system
;;;; +-------+
;;;;
versions of Clipboard functionality
;;;;
(in-package :boxer-lw-capi)
;; uncolor = result of color:unconvert-color which is a simple-vector with components of
: RGB , red , green , blue & alpha in 0->1.0 floa format
;; Convert an "uncolor" (result of color:unconvert-color, a simple-vector
;; whose elements 1-4 are red, green, blue, alpha in 0.0-1.0) into an
;; offscreen pixel value.
(defun uncolor->pixel (uncolor)
  (flet ((convert-color-component (cc)
           ;; scale a 0.0-1.0 float component to an 0-255 integer
           (floor (* cc 255))))
    (opengl::make-offscreen-pixel (convert-color-component (svref uncolor 1))
                                  (convert-color-component (svref uncolor 2))
                                  (convert-color-component (svref uncolor 3))
                                  (convert-color-component (svref uncolor 4)))))
capi : clipboard returns IMAGES
;; Copy the pixels of a CAPI IMAGE (w x h) into the offscreen pixmap BM.
;; Rows are written bottom-up via (- h y 1) -- presumably to match OpenGL's
;; bottom-left origin; confirm against the opengl::ogl-pixmap layout.
(defun copy-image-to-bitmap (image bm w h)
  (let ((ia (gp:make-image-access bw::*boxer-pane* image))
        (bdata (opengl::ogl-pixmap-data bm)))
    (unwind-protect
        (progn
          (gp::image-access-transfer-from-image ia)
          (dotimes (y h)
            (dotimes (x w)
              (setf
               (cffi:mem-aref bdata opengl::*pixmap-ffi-type* (+ x (* (- h y 1) w)))
               (uncolor->pixel
                (color:unconvert-color bw::*boxer-pane* (gp:image-access-pixel ia x y)))))))
      ;; always release the image-access object, even on error
      (gp:free-image-access ia))))
;; Allocate an offscreen bitmap the size of IMAGE and copy IMAGE into it.
;; Returns (values bitmap width height), or NIL when IMAGE is NIL.
(defun image-to-bitmap (image)
  (unless (null image)
    (let* ((wid (gp:image-width image)) (hei (gp:image-height image))
           (bim (boxer::make-offscreen-bitmap bw::*boxer-pane* wid hei)))
      (copy-image-to-bitmap image bim wid hei)
      (values bim wid hei))))
System clipboard
;; Paste the system clipboard's string at *point*, character by character;
;; line-break characters become fresh rows.
(defun paste-text ()
  (let ((string (capi::clipboard bw::*boxer-pane* :string)))
    (unless (null string)
      (dotimes (i (length string))
        (let ((char (aref string i)))
          (if (member char '(#\Newline #\Return #\Linefeed))
              (boxer::insert-row boxer::*point*
                                 (boxer::make-initialized-row) :moving)
              (boxer::insert-cha boxer::*point* char :moving)))))))
;; Paste an image (default: the system clipboard's image) at *point* by
;; wrapping it in a new graphics box.
(defun paste-pict (&optional (img (capi::clipboard bw::*boxer-pane* :image)
                                  img-supplied-p))
  (multiple-value-bind (bm wid hei)
      (image-to-bitmap img)
    (unless (null bm)
      ;; memory leak ?  (original author's note: only clipboard-fetched
      ;; images are freed; caller-supplied ones are the caller's to manage)
      (unless img-supplied-p (gp:free-image bw::*boxer-pane* img))
      (let* ((gb (boxer::make-box '(())))
             (gs (boxer::make-graphics-sheet wid hei gb)))
        (setf (boxer::graphics-sheet-bit-array gs) bm)
        (setf (boxer::graphics-sheet-bit-array-dirty? gs) T)
        (setf (boxer::graphics-info gb) gs)
        ;; display the new box in graphics mode
        (setf (boxer::display-style-graphics-mode?
               (boxer::display-style-list gb)) T)
        (boxer::insert-cha boxer::*point* gb :moving)))))
;; Frame-level paste: prefer Boxer's own kill ring when Boxer was the last
;; writer of the clipboard, otherwise paste clipboard text, then image,
;; finally falling back to com-yank.  Repaints afterwards.
(defmethod paste ((self bw::boxer-frame))
  (cond ((equal '(nil :lisp) (multiple-value-list (capi:clipboard-empty self :value)))
         ;; We are looking an undocumented multiple return value for type :value where
         ;; the second return value will be symbol :lisp if it came from Lisp.  If
         ;; this is the case we know we were the last one to set the clipboard and yank
         ;; in our most recent item.
         ;; http://www.lispworks.com/documentation/lw71/CAPI-W/html/capi-w-206.htm#82688
         (boxer::com-yank))
        ((not (capi:clipboard-empty self :string))
         (paste-text))
        ((not (capi:clipboard-empty self :image))
         (paste-pict))
        (t (boxer::com-yank)))
  (boxer::repaint))
| null | https://raw.githubusercontent.com/boxer-project/boxer-sunrise/93dc8ca848a67f5fd409daf9b2859136d3b81b98/src/boxwin/lw-capi/clipboard.lisp | lisp | Boxer
Licensed under the 3-Clause BSD license. You may not use this file except in compliance with this license.
-3-Clause
+-Data--+
This file is part of the | BOXER | system
+-------+
uncolor = result of color:unconvert-color which is a simple-vector with components of
memory leak ?
We are looking an undocumented multiple return value for type :value where
this is the case we know we were the last one to set the clipboard and yank
in our most recent item.
-W/html/capi-w-206.htm#82688 | Copyright 1985 - 2022 Andrea A. diSessa and the Estate of Edward H. Lay
Portions of this code may be copyright 1982 - 1985 Massachusetts Institute of Technology . Those portions may be
used for any purpose , including commercial ones , providing that notice of MIT copyright is retained .
versions of Clipboard functionality
(in-package :boxer-lw-capi)
: RGB , red , green , blue & alpha in 0->1.0 floa format
(defun uncolor->pixel (uncolor)
(flet ((convert-color-component (cc)
(floor (* cc 255))))
(opengl::make-offscreen-pixel (convert-color-component (svref uncolor 1))
(convert-color-component (svref uncolor 2))
(convert-color-component (svref uncolor 3))
(convert-color-component (svref uncolor 4)))))
capi : clipboard returns IMAGES
(defun copy-image-to-bitmap (image bm w h)
(let ((ia (gp:make-image-access bw::*boxer-pane* image))
(bdata (opengl::ogl-pixmap-data bm)))
(unwind-protect
(progn
(gp::image-access-transfer-from-image ia)
(dotimes (y h)
(dotimes (x w)
(setf
(cffi:mem-aref bdata opengl::*pixmap-ffi-type* (+ x (* (- h y 1) w)))
(uncolor->pixel
(color:unconvert-color bw::*boxer-pane* (gp:image-access-pixel ia x y)))))))
(gp:free-image-access ia))))
(defun image-to-bitmap (image)
(unless (null image)
(let* ((wid (gp:image-width image)) (hei (gp:image-height image))
(bim (boxer::make-offscreen-bitmap bw::*boxer-pane* wid hei)))
(copy-image-to-bitmap image bim wid hei)
(values bim wid hei))))
System clipboard
(defun paste-text ()
(let ((string (capi::clipboard bw::*boxer-pane* :string)))
(unless (null string)
(dotimes (i (length string))
(let ((char (aref string i)))
(if (member char '(#\Newline #\Return #\Linefeed))
(boxer::insert-row boxer::*point*
(boxer::make-initialized-row) :moving)
(boxer::insert-cha boxer::*point* char :moving)))))))
(defun paste-pict (&optional (img (capi::clipboard bw::*boxer-pane* :image)
img-supplied-p))
(multiple-value-bind (bm wid hei)
(image-to-bitmap img)
(unless (null bm)
(unless img-supplied-p (gp:free-image bw::*boxer-pane* img))
(let* ((gb (boxer::make-box '(())))
(gs (boxer::make-graphics-sheet wid hei gb)))
(setf (boxer::graphics-sheet-bit-array gs) bm)
(setf (boxer::graphics-sheet-bit-array-dirty? gs) T)
(setf (boxer::graphics-info gb) gs)
(setf (boxer::display-style-graphics-mode?
(boxer::display-style-list gb)) T)
(boxer::insert-cha boxer::*point* gb :moving)))))
(defmethod paste ((self bw::boxer-frame))
(cond ((equal '(nil :lisp) (multiple-value-list (capi:clipboard-empty self :value)))
the second return value will be symbol : lisp if it came from . If
(boxer::com-yank))
((not (capi:clipboard-empty self :string))
(paste-text))
((not (capi:clipboard-empty self :image))
(paste-pict))
(t (boxer::com-yank)))
(boxer::repaint))
|
019b45d6094a4daadfeacd5edb3d2358512e73bea515e568b8c8083cbf150e06 | samsergey/formica | circuit.rkt | #lang formica
;; (sum f lst): the sum of (f x) over the elements of lst.
(:: sum ((Any -> Num) list? -> Num)
  (define (sum f lst)
    (apply + (map f lst))))
;; Find a root of f on [a, b] by recursive bisection.  Returns #f when f
;; does not change sign on the interval.  Stops when the relative interval
;; width |(b-a)/c| drops below 1e-10.
;; NOTE(review): the relative-tolerance test divides by the midpoint c, so a
;; root at (or extremely near) 0 would divide by zero -- confirm intended.
(:: bisection ((Real -> Real) Real Real -> (∪ Real #f))
  (define (bisection f a b)
    (and (<= (* (f a) (f b)) 0)
         (let ([c (* 0.5 (+ a b))])
           (if (< (abs (/ (- b a) c)) 1e-10)
               c
               (or (bisection f a c)
                   (bisection f c b)))))))
;; Component types: resistance, capacitance, inductance (positive values),
;; and the series (--) / parallel (==) combinators forming a Circuit.
(define-type (R positive?))
(define-type (C positive?))
(define-type (L positive?))
(define-type --)
(define-type ==)
(define-type Circuit
  R? C? L?
  (--: Circuit ..)
  (==: Circuit ..))
;; ω: angular frequency; Imp: complex impedance.
(define-type ω positive?)
(define-type Imp complex?)
;; Impedance of a circuit as a function of angular frequency w:
;;   R -> r,  C -> -i/(c w),  L -> +i l w,
;;   series (--) sums impedances, parallel (==) sums admittances.
(:: impedance (Circuit -> (ω -> Imp))
  (define (impedance cir)
    (λ (w)
      (define/. Z
        (R r) --> r
        (C c) --> (/ -i c w)
        (L l) --> (* +i l w)
        (-- i ...) --> (sum Z i)
        (==  i ...) --> (/ (sum / (map Z i))))
      (Z cir))))
;; Example network: 10 Ω in series with ((3 Ω + 0.5 µH) parallel to a
;; capacitor of c farads).
(define (S c)
  (-- (R 10)
      (== (-- (R 3)
              (L 0.5e-6))
          (C c))))
;; Resonant angular frequency of circuit c in [w1, w2]: the zero crossing of
;; the imaginary (reactive) part of the impedance.  Memoized for repeated use.
(:: resonance (Circuit positive? positive? -> positive?)
  (define/memo (resonance c w1 w2)
    (bisection (∘ imag-part (memoized (impedance c))) w1 w2)))
(require plot)
;; Demo: plot (and time) the impedance phase of (S 10nF) from 1 MHz to 50 MHz.
(time (plot (function (∘ angle (impedance (S 10e-9))) 1e6 50e6)))
| null | https://raw.githubusercontent.com/samsergey/formica/b4410b4b6da63ecb15b4c25080951a7ba4d90d2c/examples/circuit.rkt | racket | #lang formica
(:: sum ((Any -> Num) list? -> Num)
(define (sum f lst)
(apply + (map f lst))))
(:: bisection ((Real -> Real) Real Real -> (∪ Real #f))
(define (bisection f a b)
(and (<= (* (f a) (f b)) 0)
(let ([c (* 0.5 (+ a b))])
(if (< (abs (/ (- b a) c)) 1e-10)
c
(or (bisection f a c)
(bisection f c b)))))))
(define-type (R positive?))
(define-type (C positive?))
(define-type (L positive?))
(define-type --)
(define-type ==)
(define-type Circuit
R? C? L?
(--: Circuit ..)
(==: Circuit ..))
(define-type ω positive?)
(define-type Imp complex?)
(:: impedance (Circuit -> (ω -> Imp))
(define (impedance cir)
(λ (w)
(define/. Z
(R r) --> r
(C c) --> (/ -i c w)
(L l) --> (* +i l w)
(-- i ...) --> (sum Z i)
(== i ...) --> (/ (sum / (map Z i))))
(Z cir))))
(define (S c)
(-- (R 10)
(== (-- (R 3)
(L 0.5e-6))
(C c))))
(:: resonance (Circuit positive? positive? -> positive?)
(define/memo (resonance c w1 w2)
(bisection (∘ imag-part (memoized (impedance c))) w1 w2)))
(require plot)
(time (plot (function (∘ angle (impedance (S 10e-9))) 1e6 50e6)))
| |
76b960c8b2e463c6f93d2f1f6489869cd205a90a400246fc2b627931839b002d | yapsterapp/er-cassandra | coerce_value_callback.cljc | (ns er-cassandra.model.callbacks.coerce-value-callback)
(defn coerce-value-callback
  "Builds a record callback: when the record has key `col`, its value is
  passed through `f`; records without the key are returned untouched."
  [col f]
  (fn [record]
    (cond-> record
      (contains? record col) (update col f))))
| null | https://raw.githubusercontent.com/yapsterapp/er-cassandra/1d059f47bdf8654c7a4dd6f0759f1a114fdeba81/src/er_cassandra/model/callbacks/coerce_value_callback.cljc | clojure | (ns er-cassandra.model.callbacks.coerce-value-callback)
(defn coerce-value-callback
[col f]
(fn [r]
(if (contains? r col)
(assoc r col (f (get r col)))
r)))
| |
d8d08a873e5148d644129c1246a0683d48b1132fc1f3b9c1df091afc5b670b92 | rawleyfowler/sluj | core_test.clj | (ns sluj.core-test
(:require
[clojure.test :refer [deftest is testing]]
[sluj.core :refer [sluj]]))
;; Plain-ASCII and exotic-UTF-8 slugs: lowercased, punctuation dropped,
;; words joined by "-"; Cyrillic is transliterated.
(deftest basic
  (testing "Sluggify a given plain text string"
    (is (= (sluj "My name is mud") "my-name-is-mud"))
    (is (= (sluj "My name is definitley mud!") "my-name-is-definitley-mud")))
  (testing "Sluggify a given string with exotic UTF characters"
    (is (= (sluj "🧡 epic") "epic"))
    (is (= (sluj "пo pomegranates") "po-pomegranates"))
    (is (= (sluj "маленький подъезд") "malenkij-poduezd"))))
;; Japanese kana are romanized before slugging.
(deftest japanese
  (testing "hiragana"
    (is (= (sluj "あなたはばかです") "anatahabakadesu"))
    (is (= (sluj "せんぱいがすきです") "senpaigasukidesu")))
  (testing "katakana"
    (is (= (sluj "アナタハバカ") "anatahabaka"))
    (is (= (sluj "センパイガスキ") "senpaigasuki"))))
;; Option handling: custom separator, per-character overrides, charmap maps,
;; locale-specific transliteration and output casing.
;; NOTE(review): the "UTF-18" description below is presumably a typo for
;; UTF-8; it is only a test label, so behavior is unaffected.
(deftest opts
  (testing "Sluggify a given plain text string with custom separator"
    (is (= (sluj "Super awesome clojure code" :separator "_") "super_awesome_clojure_code"))
    (is (= (sluj "Massive crab" :separator "~") "massive~crab")))
  (testing "Sluggify a given string with a custom UTF-18 mapping"
    (is (= (sluj "I 🧡 Clojure" :🧡 "orange-heart") "i-orange-heart-clojure"))
    (is (= (sluj "Super awesome sentence!" :e "eee") "supeeer-aweeesomeee-seeenteeenceee"))
    (testing "Sluggify with a charmap map"
      (is (= (sluj "🐮 goes moo" :charmap {:🐮 "cow"}) "cow-goes-moo"))
      (is (= (sluj "🐮 loves 🦊" :charmap {:🦊 "fox" :🐮 "cow"}) "cow-loves-fox"))))
  (testing "Sluggify with a given locale"
    (is (= (sluj "маленький подъезд" :locale "bg") "malenykiy-podaezd")))
  (testing "Changing case"
    (is (= (sluj "make me uppercase" :casing "upper") "MAKE-ME-UPPERCASE"))
    ;; This is the default
    (is (= (sluj "make me lowercase" :casing "lower") "make-me-lowercase"))))
| null | https://raw.githubusercontent.com/rawleyfowler/sluj/fd32770585a9b8acbc864edd6767d41a988dd8a9/test/sluj/core_test.clj | clojure | This is the default | (ns sluj.core-test
(:require
[clojure.test :refer [deftest is testing]]
[sluj.core :refer [sluj]]))
(deftest basic
(testing "Sluggify a given plain text string"
(is (= (sluj "My name is mud") "my-name-is-mud"))
(is (= (sluj "My name is definitley mud!") "my-name-is-definitley-mud")))
(testing "Sluggify a given string with exotic UTF characters"
(is (= (sluj "🧡 epic") "epic"))
(is (= (sluj "пo pomegranates") "po-pomegranates"))
(is (= (sluj "маленький подъезд") "malenkij-poduezd"))))
(deftest japanese
(testing "hiragana"
(is (= (sluj "あなたはばかです") "anatahabakadesu"))
(is (= (sluj "せんぱいがすきです") "senpaigasukidesu")))
(testing "katakana"
(is (= (sluj "アナタハバカ") "anatahabaka"))
(is (= (sluj "センパイガスキ") "senpaigasuki"))))
(deftest opts
(testing "Sluggify a given plain text string with custom separator"
(is (= (sluj "Super awesome clojure code" :separator "_") "super_awesome_clojure_code"))
(is (= (sluj "Massive crab" :separator "~") "massive~crab")))
(testing "Sluggify a given string with a custom UTF-18 mapping"
(is (= (sluj "I 🧡 Clojure" :🧡 "orange-heart") "i-orange-heart-clojure"))
(is (= (sluj "Super awesome sentence!" :e "eee") "supeeer-aweeesomeee-seeenteeenceee"))
(testing "Sluggify with a charmap map"
(is (= (sluj "🐮 goes moo" :charmap {:🐮 "cow"}) "cow-goes-moo"))
(is (= (sluj "🐮 loves 🦊" :charmap {:🦊 "fox" :🐮 "cow"}) "cow-loves-fox"))))
(testing "Sluggify with a given locale"
(is (= (sluj "маленький подъезд" :locale "bg") "malenykiy-podaezd")))
(testing "Changing case"
(is (= (sluj "make me uppercase" :casing "upper") "MAKE-ME-UPPERCASE"))
(is (= (sluj "make me lowercase" :casing "lower") "make-me-lowercase"))))
|
7228b1fcef6dd51d161db5982c4f8e1a2190a0005dd89f29d5e80fc863b0f5ce | TerrorJack/ghc-alter | hFlush001.hs | -- !!! Flushing
module Main(main) where
import Control.Monad
import System.Directory ( removeFile, doesFileExist )
import System.IO
import System.IO.Error
main = do
hFlush stdin `catchIOError` \ _ -> putStrLn "No can do - flushing read-only handles isn't legal"
putStr "Hello,"
hFlush stdout
putStr "Hello - "
hFlush stderr
hdl <- openFile "hFlush001.hs" ReadMode
hFlush hdl `catchIOError` \ _ -> putStrLn "No can do - flushing read-only handles isn't legal"
hClose hdl
remove
hdl <- openFile "hFlush001.out" WriteMode
hFlush hdl
hClose hdl
remove
hdl <- openFile "hFlush001.out" AppendMode
hFlush hdl
hClose hdl
remove
hdl <- openFile "hFlush001.out" ReadWriteMode
hFlush hdl
hClose hdl
where remove = do
f <- doesFileExist "hFlush001.out"
when f (removeFile "hFlush001.out")
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/base/tests/IO/hFlush001.hs | haskell | !!! Flushing | module Main(main) where
import Control.Monad
import System.Directory ( removeFile, doesFileExist )
import System.IO
import System.IO.Error
-- | Exercise 'hFlush' on every kind of handle state: read-only standard
-- input, writable standard output/error, a file opened read-only, and
-- files opened in Write, Append and ReadWrite modes.  Flushing a
-- read-only handle is expected to fail; that failure is caught and
-- reported instead of aborting the run.
main = do
  -- stdin is read-only, so flushing it must raise an IOError.
  hFlush stdin `catchIOError` \ _ -> putStrLn "No can do - flushing read-only handles isn't legal"
  putStr "Hello,"
  hFlush stdout
  putStr "Hello - "
  hFlush stderr
  -- A file handle opened in ReadMode is also read-only: flush must fail.
  hdl <- openFile "hFlush001.hs" ReadMode
  hFlush hdl `catchIOError` \ _ -> putStrLn "No can do - flushing read-only handles isn't legal"
  hClose hdl
  remove
  -- Flushing writable handles (Write/Append/ReadWrite) must succeed.
  hdl <- openFile "hFlush001.out" WriteMode
  hFlush hdl
  hClose hdl
  remove
  hdl <- openFile "hFlush001.out" AppendMode
  hFlush hdl
  hClose hdl
  remove
  hdl <- openFile "hFlush001.out" ReadWriteMode
  hFlush hdl
  hClose hdl
  where remove = do
          -- Delete the scratch file between modes if a previous step created it.
          f <- doesFileExist "hFlush001.out"
          when f (removeFile "hFlush001.out")
|
1092bf9537f19c0cf6d07f7360058b70cc526672a61455c1b69072082a32396e | aligusnet/mltool | GradientDescentTest.hs | module MachineLearning.Optimization.GradientDescentTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit
import Test.HUnit.Approx
import Test.HUnit.Plus
import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import qualified MachineLearning as ML
import MachineLearning.Regularization (Regularization(..))
import MachineLearning.LeastSquaresModel (LeastSquaresModel(..))
import MachineLearning.Optimization.GradientDescent
import MachineLearning.DataSets (dataset1)
(x, y) = ML.splitToXY dataset1
muSigma = ML.meanStddev x
xNorm = ML.featureNormalization muSigma x
x1 = ML.addBiasDimension xNorm
initialTheta = LA.konst 0 (LA.cols x1)
-- Normal Equation's Result: 340412.660,110631.050,-6649.474
lsExpectedTheta = LA.vector [340412.660, 110630.886, -6649.310]
eps = 1e-3
-- | True when the list never increases from one element to the next.
-- The comparison chain is seeded with positive infinity, so the empty
-- list and any first element trivially satisfy it.
isInDescendingOrder :: [Double] -> Bool
isInDescendingOrder xs = and (zipWith (>=) (1 / 0 : xs) xs)
-- | Run gradient descent (learning rate 0.01, tolerance 'eps', at most
-- 5000 iterations, no regularization) for the given model over the
-- normalized dataset and check that (a) the resulting theta matches
-- 'expectedTheta' within 0.01 and (b) the recorded error values never
-- increase between iterations.
testGradientDescent model expectedTheta = do
  let (theta, optPath) = gradientDescent 0.01 model eps 5000 RegNone x1 y initialTheta
      -- Column 1 of the optimization path: presumably the error/cost per
      -- iteration (per the assertion message below) — TODO confirm.
      js = V.toList $ (LA.toColumns optPath) !! 1
  assertVector "theta" 0.01 expectedTheta theta
  assertBool "non-increasing errors" $ isInDescendingOrder js
-- | Test tree: gradient descent over the least-squares model must
-- converge to the reference theta 'lsExpectedTheta'.
tests = [testGroup "gradientDescent" [
           testCase "leastSquares" $ testGradientDescent LeastSquares lsExpectedTheta
           ]
        ]
| null | https://raw.githubusercontent.com/aligusnet/mltool/92d74c4cc79221bfdcfb76aa058a2e8992ecfe2b/test/MachineLearning/Optimization/GradientDescentTest.hs | haskell | module MachineLearning.Optimization.GradientDescentTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit
import Test.HUnit.Approx
import Test.HUnit.Plus
import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import qualified MachineLearning as ML
import MachineLearning.Regularization (Regularization(..))
import MachineLearning.LeastSquaresModel (LeastSquaresModel(..))
import MachineLearning.Optimization.GradientDescent
import MachineLearning.DataSets (dataset1)
(x, y) = ML.splitToXY dataset1
muSigma = ML.meanStddev x
xNorm = ML.featureNormalization muSigma x
x1 = ML.addBiasDimension xNorm
initialTheta = LA.konst 0 (LA.cols x1)
Normal Equation 's Result : 340412.660,110631.050,-6649.474
lsExpectedTheta = LA.vector [340412.660, 110630.886, -6649.310]
eps = 1e-3
isInDescendingOrder :: [Double] -> Bool
isInDescendingOrder lst = and . snd . unzip $ scanl (\(prev, _) current -> (current, prev >= current)) (1/0, True) lst
testGradientDescent model expectedTheta = do
let (theta, optPath) = gradientDescent 0.01 model eps 5000 RegNone x1 y initialTheta
js = V.toList $ (LA.toColumns optPath) !! 1
assertVector "theta" 0.01 expectedTheta theta
assertBool "non-increasing errors" $ isInDescendingOrder js
tests = [testGroup "gradientDescent" [
testCase "leastSquares" $ testGradientDescent LeastSquares lsExpectedTheta
]
]
| |
8c70dbda4e2d7422c3c55908492d4bdac1d9b78e7df7833550181e2496e16fda | lem-project/lem | package.lisp | (defpackage :lem-base
(:use :cl)
#+sbcl
(:lock t)
;; utils.lisp
(:export
:collect-subclasses
:utf8-bytes
:bests-if
:max-if
:min-if
:find-tree
:do-sequence
:if-push)
;; string-width-utils
(:export :+default-tab-size+
:control-char
:wide-char-p
:char-width
:string-width
:wide-index)
file-utils.lisp
(:export :expand-file-name
:tail-of-pathname
:directory-files
:list-directory
:file-size
:virtual-probe-file
:with-open-virtual-file)
;; errors.lisp
(:export :editor-condition
:directory-does-not-exist
:directory-does-not-exist-directory
:read-only-error
:editor-error
:scan-error
:editor-interrupt)
;; hooks.lisp
(:export :run-hooks
:add-hook
:remove-hook)
;; var.lisp
(:export
:editor-variable
:define-editor-variable
:clear-editor-local-variables
:variable-value
:variable-documentation
:find-editor-variable
:with-global-variable-value)
;; editor-variables.lisp
(:export
:tab-width)
macros.lisp
(:export
:save-excursion
:with-point
:with-buffer-read-only
:without-interrupts)
;; mark.lisp
(:export
:mark
:mark-point
:mark-active-p
:mark-cancel
:mark-set-point)
;; buffer.lisp
(:export
:fundamental-mode
:primordial-buffer
:current-buffer
:make-buffer
:buffer
:bufferp
:buffer-start-point
:buffer-end-point
:deleted-buffer-p
:buffer-name
:buffer-temporary-p
:buffer-modified-tick
:buffer-modified-p
:buffer-read-only-p
:buffer-syntax-table
:buffer-major-mode
:buffer-minor-modes
:buffer-mark-object
:buffer-mark-p
:buffer-mark
:buffer-point
:buffer-nlines
:buffer-enable-undo-p
:buffer-enable-undo
:buffer-disable-undo
:buffer-filename
:buffer-directory
:buffer-unmark
:buffer-mark-cancel
:buffer-rename
:buffer-undo
:buffer-redo
:buffer-undo-boundary
:buffer-value
:buffer-unbound
:clear-buffer-variables)
;; buffer-insert.lisp
(:export
:*inhibit-read-only*
:*inhibit-modification-hooks*
:before-change-functions
:after-change-functions)
;; buffer-list-manager.lisp
(:export
:delete-buffer-using-manager
:buffer-list-manager
:with-current-buffer)
;; buffers.lisp
(:export
:kill-buffer-hook
:buffer-list
:any-modified-buffer-p
:get-buffer
:unique-buffer-name
:delete-buffer
:get-next-buffer
:get-previous-buffer
:unbury-buffer
:bury-buffer
:get-file-buffer)
;; point.lisp
(:export
:current-point
:point
:pointp
:copy-point-using-class
:copy-point
:delete-point
:point-buffer
:point-charpos
:point-kind
:point=
:point/=
:point<
:point<=
:point>
:point>=
:point-min
:point-max)
;; basic.lisp
(:export
:first-line-p
:last-line-p
:start-line-p
:end-line-p
:start-buffer-p
:end-buffer-p
:same-line-p
:move-point
:line-start
:line-end
:buffer-start
:buffer-end
:line-offset
:character-offset
:character-at
:line-string
:text-property-at
:put-text-property
:remove-text-property
:next-single-property-change
:previous-single-property-change
:insert-character
:insert-string
:delete-character
:erase-buffer
:region-beginning
:region-end
:map-region
:points-to-string
:count-characters
:delete-between-points
:count-lines
:apply-region-lines
:line-number-at-point
:point-column
:move-to-column
:position-at-point
:move-to-position
:point-bytes
:move-to-bytes
:move-to-line
:check-marked
:set-current-mark
:blank-line-p
:skip-chars-forward
:skip-chars-backward
:insert-buffer
:buffer-text)
;; syntax-table.lisp
(:export
:syntax-table
:set-syntax-parser
:fundamental-syntax-table
:current-syntax
:with-current-syntax
:make-syntax-table
:syntax-word-char-p
:syntax-space-char-p
:syntax-symbol-char-p
:syntax-open-paren-char-p
:syntax-closed-paren-char-p
:syntax-string-quote-char-p
:syntax-escape-char-p
:syntax-expr-prefix-char-p
:syntax-skip-expr-prefix-forward
:syntax-skip-expr-prefix-backward)
;; search.lisp
(:export
:*case-fold-search*
:search-forward
:search-backward
:search-forward-regexp
:search-backward-regexp
:search-forward-symbol
:search-backward-symbol
:looking-at
:match-string-at)
;; syntax-scan.lisp
(:export
:skip-space-and-comment-forward
:skip-space-and-comment-backward
:form-offset
:scan-lists
:skip-whitespace-forward
:skip-whitespace-backward
:skip-symbol-forward
:skip-symbol-backward
:symbol-string-at-point
:make-pps-state
:pps-state-type
:pps-state-token-start-point
:pps-state-end-char
:pps-state-block-comment-depth
:pps-state-block-pair
:pps-state-paren-stack
:pps-state-paren-depth
:parse-partial-sexp
:syntax-ppss
:pps-state-string-p
:pps-state-comment-p
:pps-state-string-or-comment-p
:in-string-p
:in-comment-p
:in-string-or-comment-p
:maybe-beginning-of-string
:maybe-beginning-of-comment
:maybe-beginning-of-string-or-comment)
;; syntax-parser.lisp
(:export
:syntax-string-attribute
:syntax-comment-attribute
:syntax-keyword-attribute
:syntax-constant-attribute
:syntax-function-name-attribute
:syntax-variable-attribute
:syntax-type-attribute
:*global-syntax-highlight*
:before-syntax-scan-hook
:after-syntax-scan-hook
:enable-syntax-highlight
:enable-syntax-highlight-p
:syntax-scan-region)
;; tmlanguage.lisp
(:export
:make-tmlanguage
:make-tm-repository
:make-tm-match
:make-tm-region
:make-tm-include
:make-tm-patterns
:make-tm-name
:add-tm-repository
:add-tm-pattern)
;; encodings.lisp
(:export
:encoding
:encoding-read
:encoding-write
:register-encoding
:encoding-end-of-line
:unregister-encoding)
;; file.lisp
(:export
:*find-file-hook*
:before-save-hook
:after-save-hook
:*external-format-function*
:*find-directory-function*
:*default-external-format*
:insert-file-contents
:find-file-buffer
:write-to-file
:write-region-to-file
:update-changed-disk-date
:changed-disk-p)
;; indent.lisp
(:export
:back-to-indentation
:indent-tabs-mode
:calc-indent-function
:indent-line
:indent-points
:indent-buffer))
| null | https://raw.githubusercontent.com/lem-project/lem/b471ee1881a275323f4839acf0e940ba37e874e4/src/base/package.lisp | lisp | utils.lisp
string-width-utils
errors.lisp
hooks.lisp
var.lisp
editor-variables.lisp
mark.lisp
buffer.lisp
buffer-insert.lisp
buffer-list-manager.lisp
buffers.lisp
point.lisp
basic.lisp
syntax-table.lisp
search.lisp
syntax-scan.lisp
syntax-parser.lisp
tmlanguage.lisp
encodings.lisp
file.lisp
indent.lisp | (defpackage :lem-base
(:use :cl)
#+sbcl
(:lock t)
(:export
:collect-subclasses
:utf8-bytes
:bests-if
:max-if
:min-if
:find-tree
:do-sequence
:if-push)
(:export :+default-tab-size+
:control-char
:wide-char-p
:char-width
:string-width
:wide-index)
file-utils.lisp
(:export :expand-file-name
:tail-of-pathname
:directory-files
:list-directory
:file-size
:virtual-probe-file
:with-open-virtual-file)
(:export :editor-condition
:directory-does-not-exist
:directory-does-not-exist-directory
:read-only-error
:editor-error
:scan-error
:editor-interrupt)
(:export :run-hooks
:add-hook
:remove-hook)
(:export
:editor-variable
:define-editor-variable
:clear-editor-local-variables
:variable-value
:variable-documentation
:find-editor-variable
:with-global-variable-value)
(:export
:tab-width)
macros.lisp
(:export
:save-excursion
:with-point
:with-buffer-read-only
:without-interrupts)
(:export
:mark
:mark-point
:mark-active-p
:mark-cancel
:mark-set-point)
(:export
:fundamental-mode
:primordial-buffer
:current-buffer
:make-buffer
:buffer
:bufferp
:buffer-start-point
:buffer-end-point
:deleted-buffer-p
:buffer-name
:buffer-temporary-p
:buffer-modified-tick
:buffer-modified-p
:buffer-read-only-p
:buffer-syntax-table
:buffer-major-mode
:buffer-minor-modes
:buffer-mark-object
:buffer-mark-p
:buffer-mark
:buffer-point
:buffer-nlines
:buffer-enable-undo-p
:buffer-enable-undo
:buffer-disable-undo
:buffer-filename
:buffer-directory
:buffer-unmark
:buffer-mark-cancel
:buffer-rename
:buffer-undo
:buffer-redo
:buffer-undo-boundary
:buffer-value
:buffer-unbound
:clear-buffer-variables)
(:export
:*inhibit-read-only*
:*inhibit-modification-hooks*
:before-change-functions
:after-change-functions)
(:export
:delete-buffer-using-manager
:buffer-list-manager
:with-current-buffer)
(:export
:kill-buffer-hook
:buffer-list
:any-modified-buffer-p
:get-buffer
:unique-buffer-name
:delete-buffer
:get-next-buffer
:get-previous-buffer
:unbury-buffer
:bury-buffer
:get-file-buffer)
(:export
:current-point
:point
:pointp
:copy-point-using-class
:copy-point
:delete-point
:point-buffer
:point-charpos
:point-kind
:point=
:point/=
:point<
:point<=
:point>
:point>=
:point-min
:point-max)
(:export
:first-line-p
:last-line-p
:start-line-p
:end-line-p
:start-buffer-p
:end-buffer-p
:same-line-p
:move-point
:line-start
:line-end
:buffer-start
:buffer-end
:line-offset
:character-offset
:character-at
:line-string
:text-property-at
:put-text-property
:remove-text-property
:next-single-property-change
:previous-single-property-change
:insert-character
:insert-string
:delete-character
:erase-buffer
:region-beginning
:region-end
:map-region
:points-to-string
:count-characters
:delete-between-points
:count-lines
:apply-region-lines
:line-number-at-point
:point-column
:move-to-column
:position-at-point
:move-to-position
:point-bytes
:move-to-bytes
:move-to-line
:check-marked
:set-current-mark
:blank-line-p
:skip-chars-forward
:skip-chars-backward
:insert-buffer
:buffer-text)
(:export
:syntax-table
:set-syntax-parser
:fundamental-syntax-table
:current-syntax
:with-current-syntax
:make-syntax-table
:syntax-word-char-p
:syntax-space-char-p
:syntax-symbol-char-p
:syntax-open-paren-char-p
:syntax-closed-paren-char-p
:syntax-string-quote-char-p
:syntax-escape-char-p
:syntax-expr-prefix-char-p
:syntax-skip-expr-prefix-forward
:syntax-skip-expr-prefix-backward)
(:export
:*case-fold-search*
:search-forward
:search-backward
:search-forward-regexp
:search-backward-regexp
:search-forward-symbol
:search-backward-symbol
:looking-at
:match-string-at)
(:export
:skip-space-and-comment-forward
:skip-space-and-comment-backward
:form-offset
:scan-lists
:skip-whitespace-forward
:skip-whitespace-backward
:skip-symbol-forward
:skip-symbol-backward
:symbol-string-at-point
:make-pps-state
:pps-state-type
:pps-state-token-start-point
:pps-state-end-char
:pps-state-block-comment-depth
:pps-state-block-pair
:pps-state-paren-stack
:pps-state-paren-depth
:parse-partial-sexp
:syntax-ppss
:pps-state-string-p
:pps-state-comment-p
:pps-state-string-or-comment-p
:in-string-p
:in-comment-p
:in-string-or-comment-p
:maybe-beginning-of-string
:maybe-beginning-of-comment
:maybe-beginning-of-string-or-comment)
(:export
:syntax-string-attribute
:syntax-comment-attribute
:syntax-keyword-attribute
:syntax-constant-attribute
:syntax-function-name-attribute
:syntax-variable-attribute
:syntax-type-attribute
:*global-syntax-highlight*
:before-syntax-scan-hook
:after-syntax-scan-hook
:enable-syntax-highlight
:enable-syntax-highlight-p
:syntax-scan-region)
(:export
:make-tmlanguage
:make-tm-repository
:make-tm-match
:make-tm-region
:make-tm-include
:make-tm-patterns
:make-tm-name
:add-tm-repository
:add-tm-pattern)
(:export
:encoding
:encoding-read
:encoding-write
:register-encoding
:encoding-end-of-line
:unregister-encoding)
(:export
:*find-file-hook*
:before-save-hook
:after-save-hook
:*external-format-function*
:*find-directory-function*
:*default-external-format*
:insert-file-contents
:find-file-buffer
:write-to-file
:write-region-to-file
:update-changed-disk-date
:changed-disk-p)
(:export
:back-to-indentation
:indent-tabs-mode
:calc-indent-function
:indent-line
:indent-points
:indent-buffer))
|
590354a1dafad4459185ba7684d2116feb4bd83c7c3eb676f42ad6b4a0a9b85b | rbkmoney/fistful-server | ff_withdrawal_session_handler.erl | -module(ff_withdrawal_session_handler).
-behaviour(ff_woody_wrapper).
-include_lib("fistful_proto/include/ff_proto_withdrawal_session_thrift.hrl").
%% ff_woody_wrapper callbacks
-export([handle_function/3]).
%%
%% ff_woody_wrapper callbacks
%%
-spec handle_function(woody:func(), woody:args(), woody:options()) -> {ok, woody:result()} | no_return().
handle_function(Func, Args, Opts) ->
scoper:scope(
withdrawal_session,
#{},
fun() ->
handle_function_(Func, Args, Opts)
end
).
%%
%% Internals
%%
handle_function_('Get', {ID, EventRange}, _Opts) ->
case ff_withdrawal_session_machine:get(ID, ff_codec:unmarshal(event_range, EventRange)) of
{ok, Machine} ->
State = ff_withdrawal_session_machine:session(Machine),
Ctx = ff_withdrawal_session_machine:ctx(Machine),
Response = ff_withdrawal_session_codec:marshal_state(State, ID, Ctx),
{ok, Response};
{error, notfound} ->
woody_error:raise(business, #fistful_WithdrawalSessionNotFound{})
end;
handle_function_('GetContext', {ID}, _Opts) ->
case ff_withdrawal_session_machine:get(ID, {undefined, 0}) of
{ok, Machine} ->
Ctx = ff_withdrawal_session_machine:ctx(Machine),
Response = ff_withdrawal_session_codec:marshal(ctx, Ctx),
{ok, Response};
{error, notfound} ->
woody_error:raise(business, #fistful_WithdrawalSessionNotFound{})
end.
| null | https://raw.githubusercontent.com/rbkmoney/fistful-server/60b964d0e07f911c841903bc61d8d9fb20a32658/apps/ff_server/src/ff_withdrawal_session_handler.erl | erlang | ff_woody_wrapper callbacks
ff_woody_wrapper callbacks
Internals
| -module(ff_withdrawal_session_handler).
-behaviour(ff_woody_wrapper).
-include_lib("fistful_proto/include/ff_proto_withdrawal_session_thrift.hrl").
-export([handle_function/3]).
%% @doc Woody handler entry point.  Wraps every incoming call in a
%% scoper scope named `withdrawal_session' (with empty initial metadata)
%% and delegates the actual dispatch to handle_function_/3.
-spec handle_function(woody:func(), woody:args(), woody:options()) -> {ok, woody:result()} | no_return().
handle_function(Func, Args, Opts) ->
    scoper:scope(
        withdrawal_session,
        #{},
        fun() ->
            handle_function_(Func, Args, Opts)
        end
    ).
%% 'Get': fetch the withdrawal session machine by ID within the given
%% event range and marshal its full state (plus context) for the
%% response; a missing machine is reported as the
%% WithdrawalSessionNotFound woody business error.
handle_function_('Get', {ID, EventRange}, _Opts) ->
    case ff_withdrawal_session_machine:get(ID, ff_codec:unmarshal(event_range, EventRange)) of
        {ok, Machine} ->
            State = ff_withdrawal_session_machine:session(Machine),
            Ctx = ff_withdrawal_session_machine:ctx(Machine),
            Response = ff_withdrawal_session_codec:marshal_state(State, ID, Ctx),
            {ok, Response};
        {error, notfound} ->
            woody_error:raise(business, #fistful_WithdrawalSessionNotFound{})
    end;
%% 'GetContext': fetch only the stored context of the session machine;
%% the empty event range {undefined, 0} is sufficient for that.
handle_function_('GetContext', {ID}, _Opts) ->
    case ff_withdrawal_session_machine:get(ID, {undefined, 0}) of
        {ok, Machine} ->
            Ctx = ff_withdrawal_session_machine:ctx(Machine),
            Response = ff_withdrawal_session_codec:marshal(ctx, Ctx),
            {ok, Response};
        {error, notfound} ->
            woody_error:raise(business, #fistful_WithdrawalSessionNotFound{})
    end.
end.
|
3ba640c505aac6310f0a73cf9b6d31766a2c81a302ab279ab72e26d93c5dc386 | parapluu/monadic-typechecker | AST.hs | -- |
-- Module : PhantomPhases.AST
Copyright : © 2019 and
License : MIT
--
-- Stability : experimental
-- Portability : portable
--
-- This module includes functionality for creating an Abstract Syntax Tree (AST),
-- as well as helper functions for checking different
-- aspects of the AST. The AST abstract over their kind 'Phase', where
' Phase ' represent the current state of the AST . For example , after parsing
the AST is of ' ' @Phase@ ; after type checking with ' PhantomFunctors.tcProgram ' the
returned AST is of ' Checked ' @Phase@ , indicating that the AST has been
-- type checked.
# LANGUAGE NamedFieldPuns , KindSignatures , DataKinds #
module PhantomPhases.AST where
import Data.Maybe
import Data.List
import Text.Printf (printf)
type Name = String
-- | Check if a name is a constructor name.
-- Constructors are identified purely by the reserved name @init@.
isConstructorName :: String -> Bool  -- 'Name' is a synonym for String
isConstructorName name = name == "init"
-- * AST declarations
-- $
-- Declaration for the Abstract Syntax Tree of the language. This section
-- contains the type, class, methods, fields and expressions represented
as an AST . The AST is produced by a parser . For more information on
-- building parsers, we recommend to read
< -7.0.5 > .
-- | Representation of types abstracting over the 'Phase'
data Type (p :: Phase) =
ClassType Name
-- ^ Represents a class of name 'Name'
| IntType
-- ^ Represents integers
| BoolType
-- ^ Represents booleans
| Arrow {tparams :: [Type p], tresult :: Type p}
-- ^ Represents a function type
| UnitType
-- ^ Represents the unit (void) type
deriving (Eq)
instance Show (Type p) where
show (ClassType c) = c
show IntType = "int"
show BoolType = "bool"
show (Arrow ts t) = "(" ++ commaSep ts ++ ")" ++ " -> " ++ show t
show UnitType = "unit"
-- | The representation of a program in the form of an AST node.
newtype Program (ip :: Phase) =
| Programs are simply a list of class definitions ( ' ClassDef ' ) in a certain ' Phase '
Program [ClassDef ip] deriving (Show)
-- | Phases that have already been passed. This has been thought as going
-- through different phases of a compiler. We assume that there is a total order
-- between phases.
data Phase = Parsed -- ^ -- ^ Initial status of an AST node after parsing
| Checked -- ^ Status of an AST node after type checking
-- | A representation of a class in the form of an AST node. As an example:
--
> class :
> x : Int
> var y :
-- > def main(): Int
> 42
--
-- the code above, after parsing, would generate the following AST:
--
-- > ClassDef {cname = "Foo"
-- > ,fields = [FieldDef {fname = "x"
> , ftype = IntType
-- > ,fmod = Val}]
> , methods = [ MethodDef { mname = " main "
-- > ,mparams = []
> , = IntType
> , = [ IntLit { etype = Nothing , ival = 42 } ]
-- > }]}
data ClassDef (ip :: Phase) =
ClassDef {cname :: Name -- ^ String that represents the name of the class
,fields :: [FieldDef ip] -- ^ List of field definitions of a class
,methods :: [MethodDef ip] -- ^ List of method definitions of a class
}
instance Show (ClassDef ip) where
show ClassDef {cname, fields, methods} =
"class " ++ cname ++ concatMap show fields ++ concatMap show methods ++ "end"
-- | Field qualifiers in a class. It is thought for a made up syntax such as:
--
> class :
> x : Int
> var y :
--
-- This indicates that the variable @x@ is immutable, and @y@ can be mutated.
--
data Mod = Var -- ^ Indicates that the field can be mutated
| Val -- ^ Indicates that the field is immutable
deriving (Eq)
instance Show Mod where
show Var = "var"
show Val = "val"
-- | Representation of a field declaration in the form of an AST node.
-- As an example, the following code:
--
> class :
> x : Int
--
-- could be parsed to the following field representation:
--
-- > FieldDef {fname = "x"
> , ftype = IntType
-- > ,fmod = Val}
--
data FieldDef (p :: Phase) =
FieldDef {fname :: Name -- ^ Name of the field name
,ftype :: Type p -- ^ Type of the field
,fmod :: Mod -- ^ Field qualifier
}
-- | Helper function to check whether a 'FieldDef' is immutable.
isValField :: FieldDef p -> Bool
isValField FieldDef{fmod} = fmod == Val
-- | Helper function to check whether a 'FieldDef' is mutable.
isVarField :: FieldDef p -> Bool
isVarField = not . isValField
instance Show (FieldDef p) where
show FieldDef{fname, ftype, fmod} =
show fmod ++ " " ++ fname ++ " : " ++ show ftype
-- | Representation of parameters in the form of an AST.
data Param (p :: Phase) = Param {pname :: Name -- ^ Name of the parameter
,ptype :: Type p -- ^ Type of the parameter
}
instance Show (Param p) where
show Param{pname, ptype} = pname ++ " : " ++ show ptype
-- | Representation of a method declaration in the form of an AST. For example:
--
> class :
-- > def main(): Int
> 42
--
-- the code above, after parsing, would generate the following AST:
--
-- > ClassDef {cname = "Foo"
-- > ,fields = []
> , methods = [ MethodDef { mname = " main "
-- > ,mparams = []
> , = IntType
> , = [ IntLit { etype = Nothing , ival = 42 } ]
-- > }]}
--
data MethodDef (ip :: Phase) =
MethodDef {mname :: Name -- ^ Name of the method definition
,mparams :: [Param ip] -- ^ List of arguments to the method
,mtype :: Type ip -- ^ Return type
,mbody :: Expr ip -- ^ Body of the method
}
-- | Render every element with 'show' and join the results with a
-- comma followed by a space.
commaSep :: Show t => [t] -> String
commaSep xs = intercalate ", " [show x | x <- xs]
instance Show (MethodDef ip) where
show MethodDef{mname, mparams, mtype, mbody} =
"def " ++ mname ++ "(" ++ commaSep mparams ++ ") : " ++
show mtype ++ show mbody
-- | Representation of integer operations
data Op = Add | Sub | Mul | Div deriving (Eq)
instance Show Op where
show Add = "+"
show Sub = "-"
show Mul = "*"
show Div = "/"
-- | Representation of expressions in the form of an AST node. The language
-- is expression-based, so there are no statements. As an example, the following
-- identity function:
--
> let i d = \x : Int - > x
-- > in id 42
--
generates this ' ' :
--
-- > Let {etype = Nothing
-- > ,name = "id"
> , = Lambda { etype = Nothing
> , params = [ Param " x " IntType ]
> , body = Nothing
-- > ,target = VarAccess Nothing "id"
-- > ,args = [IntLit Nothing 42]}
-- > }
> , body : :
-- > }
-- >
data Expr (p :: Phase) =
-- | Representation of a boolean literal
BoolLit {etype :: Maybe (Type p) -- ^ Type of the expression
^ The " Haskell " ' Bool ' data constructor
}
| IntLit {etype :: Maybe (Type p) -- ^ Type of the expression
,ival :: Int
}
-- ^ Representation of an integer literal
| Null {etype :: Maybe (Type p) -- ^ Type of the expression
}
| Lambda {etype :: Maybe (Type p) -- ^ Type of the expression
,params :: [Param p] -- ^ List of arguments with their types ('Param')
,body :: Expr p -- ^ The body of the lambda abstraction
}
| VarAccess {etype :: Maybe (Type p) -- ^ Type of the expression
,name :: Name -- ^ Variable name
}
| FieldAccess {etype :: Maybe (Type p) -- ^ Type of the expression
,target :: Expr p -- ^ The target in a field access, e.g., @x.foo@, then @x@ is the target.
,name :: Name -- ^ Field name, e.g., @x.foo@, then @foo@ is the 'Name'
}
| Assignment {etype :: Maybe (Type p) -- ^ Type of the expression
,lhs :: Expr p -- ^ Left-hand side expression
,rhs :: Expr p -- ^ Right-hand side expression
}
| MethodCall {etype :: Maybe (Type p) -- ^ Type of the expression
^ The target of a method call , e.g. , @x.bar()@ , then @x@ is the target
,name :: Name -- ^ The method name
,args :: [Expr p] -- ^ The arguments of the method call
}
| FunctionCall {etype :: Maybe (Type p) -- ^ Type of the expression
^ The target of the function call , e.g. , @bar()@ , then @bar@ is the target
,args :: [Expr p] -- ^ The function arguments
}
| If {etype :: Maybe (Type p) -- ^ Type of the expression
^ The condition in the @if - else@ expression
^ The body of the @then@ branch
,els :: Expr p -- ^ The body of the @else@ branch
}
| Let {etype :: Maybe (Type p) -- ^ Type of the expression
,name :: Name -- ^ Variable name to bound a value to
,val :: Expr p -- ^ Expression that will bound variable @name@ with value @val@
,body :: Expr p -- ^ Body of the let expression
}
| BinOp {etype :: Maybe (Type p) -- ^ Type of the expression
,op :: Op -- ^ Binary operation
,lhs :: Expr p -- ^ Left-hand side expression
,rhs :: Expr p -- ^ Right-hand side expression
}
| New {etype :: Maybe (Type p) -- ^ The type of the expression
,ty :: Type p -- ^ The class that one instantiates, e.g., `new C`
,args :: [Expr p] -- ^ Constructor arguments
}
-- ^ It is useful to decouple the type of the expression from the type of the
-- instantiated class. This distinction becomes important whenever we have
-- subtyping, e.g., an interface `Animal` where `Animal x = new Dog`
| Cast {etype :: Maybe (Type p) -- ^ Type of the expression
^ Body that will be casted to type @ty@
,ty :: Type p -- ^ The casting type
}
-- * Helper functions
-- $helper-functions
-- The helper functions of this section operate on AST nodes to check
-- for different properties. As an example, to check whether an expression
-- is a field, instead of having to pattern match in all places, i.e.,
--
-- > exampleFunction :: Expr -> Bool
-- > exampleFunction expr =
-- > -- does some stuff
-- > ...
-- > case expr of
-- > FieldAccess expr -> True
-- > _ -> False
-- >
we define the ' isFieldAccess ' helper function , which checks
-- whether a given expression is a 'FieldAccess':
--
-- > exampleFunction :: Expr -> Bool
-- > exampleFunction expr =
-- > -- does some stuff
-- > ...
> isFieldAccess expr
-- >
-- | Constant for the name @this@, commonly used in object-oriented languages.
thisName :: Name
thisName = "this"
-- | Checks whether a 'Type' is a function (arrow) type
isArrowType :: (Type p) -> Bool
isArrowType Arrow {} = True
isArrowType _ = False
-- | Checks whether an expression is a 'FieldAccess'.
isFieldAccess :: Expr p -> Bool
isFieldAccess FieldAccess{} = True
isFieldAccess _ = False
| Checks whether an expression is a ' VarAccess ' .
isVarAccess :: Expr p -> Bool
isVarAccess VarAccess{} = True
isVarAccess _ = False
| Checks whether an expression is a ' VarAccess ' of ' this ' .
isThisAccess :: Expr p -> Bool
isThisAccess VarAccess{name} = name == thisName
isThisAccess _ = False
-- | Checks whether an expression is an lval.
isLVal :: Expr p -> Bool
isLVal e = isFieldAccess e || isVarAccess e
instance Show (Expr p) where
show BoolLit{bval} = show bval
show IntLit{ival} = show ival
show Null{} = "null"
show Lambda{params, body} =
printf "fun (%s) => %s" (commaSep params) (show body)
show VarAccess{name} = name
show FieldAccess{target, name} =
printf "%s.%s" (show target) name
show Assignment{lhs, rhs} =
printf "%s = %s" (show lhs) (show rhs)
show MethodCall{target, name, args} =
printf "%s.%s(%s)" (show target) name (commaSep args)
show FunctionCall{target, args} =
printf "%s(%s)" (show target) (commaSep args)
show If{cond, thn, els} =
printf "if %s then %s else %s" (show cond) (show thn) (show els)
show Let{name, val, body} =
printf "let %s = %s in %s" name (show val) (show body)
show BinOp{op, lhs, rhs} =
printf "%s %s %s" (show lhs) (show op) (show rhs)
show New {ty, args} =
printf "new %s(%s)" (show ty) (commaSep args)
show Cast{body, ty} =
printf "%s : %s" (show body) (show ty)
-- | Helper function to check whether a 'Type' is a class
isClassType :: Type p -> Bool
isClassType (ClassType _) = True
isClassType _ = False
-- | Helper function to extract the type from an expression.
-- Partial: uses 'fromJust', so it crashes on an expression whose
-- 'etype' is 'Nothing' — safe only for 'Checked' nodes, which are
-- expected to carry a type after type checking.
getType :: Expr 'Checked -> Type 'Checked
getType = fromJust . etype
-- | Sets the type of an expression to the given type, wrapping it in
-- 'Just' because 'etype' is optional until type checking fills it in.
setType :: Type 'Checked -> Expr 'Checked -> Expr 'Checked
setType t e = e{etype = Just t}
| null | https://raw.githubusercontent.com/parapluu/monadic-typechecker/9f737a9ed2a3ac4ff5245e2e48deeac7bc2ee73d/artifact/typechecker-oopl/src/PhantomPhases/AST.hs | haskell | |
Module : PhantomPhases.AST
Stability : experimental
Portability : portable
This module includes functionality for creating an Abstract Syntax Tree (AST),
as well as helper functions for checking different
aspects of the AST. The AST abstract over their kind 'Phase', where
type checked.
| Check if a name is a constructor name
* AST declarations
$
Declaration for the Abstract Syntax Tree of the language. This section
contains the type, class, methods, fields and expressions represented
building parsers, we recommend to read
| Representation of types abstracting over the 'Phase'
^ Represents a class of name 'Name'
^ Represents integers
^ Represents booleans
^ Represents a function type
^ Represents the unit (void) type
| The representation of a program in the form of an AST node.
| Phases that have already been passed. This has been thought as going
through different phases of a compiler. We assume that there is a total order
between phases.
^ -- ^ Initial status of an AST node after parsing
^ Status of an AST node after type checking
| A representation of a class in the form of an AST node. As an example:
> def main(): Int
the code above, after parsing, would generate the following AST:
> ClassDef {cname = "Foo"
> ,fields = [FieldDef {fname = "x"
> ,fmod = Val}]
> ,mparams = []
> }]}
^ String that represents the name of the class
^ List of field definitions of a class
^ List of method definitions of a class
| Field qualifiers in a class. It is thought for a made up syntax such as:
This indicates that the variable @x@ is immutable, and @y@ can be mutated.
^ Indicates that the field can be mutated
^ Indicates that the field is immutable
| Representation of a field declaration in the form of an AST node.
As an example, the following code:
could be parsed to the following field representation:
> FieldDef {fname = "x"
> ,fmod = Val}
^ Name of the field name
^ Type of the field
^ Field qualifier
| Helper function to check whether a 'FieldDef' is immutable.
| Helper function to check whether a 'FieldDef' is mutable.
| Representation of parameters in the form of an AST.
^ Name of the parameter
^ Type of the parameter
| Representation of a method declaration in the form of an AST. For example:
> def main(): Int
the code above, after parsing, would generate the following AST:
> ClassDef {cname = "Foo"
> ,fields = []
> ,mparams = []
> }]}
^ Name of the method definition
^ List of arguments to the method
^ Return type
^ Body of the method
| Takes a list of things that can be shown, and creates a comma
separated string.
| Representation of integer operations
| Representation of expressions in the form of an AST node. The language
is expression-based, so there are no statements. As an example, the following
identity function:
> in id 42
> Let {etype = Nothing
> ,name = "id"
> ,target = VarAccess Nothing "id"
> ,args = [IntLit Nothing 42]}
> }
> }
>
| Representation of a boolean literal
^ Type of the expression
^ Type of the expression
^ Representation of an integer literal
^ Type of the expression
^ Type of the expression
^ List of arguments with their types ('Param')
^ The body of the lambda abstraction
^ Type of the expression
^ Variable name
^ Type of the expression
^ The target in a field access, e.g., @x.foo@, then @x@ is the target.
^ Field name, e.g., @x.foo@, then @foo@ is the 'Name'
^ Type of the expression
^ Left-hand side expression
^ Right-hand side expression
^ Type of the expression
^ The method name
^ The arguments of the method call
^ Type of the expression
^ The function arguments
^ Type of the expression
^ The body of the @else@ branch
^ Type of the expression
^ Variable name to bound a value to
^ Expression that will bound variable @name@ with value @val@
^ Body of the let expression
^ Type of the expression
^ Binary operation
^ Left-hand side expression
^ Right-hand side expression
^ The type of the expression
^ The class that one instantiates, e.g., `new C`
^ Constructor arguments
^ It is useful to decouple the type of the expression from the type of the
instantiated class. This distinction becomes important whenever we have
subtyping, e.g., an interface `Animal` where `Animal x = new Dog`
^ Type of the expression
^ The casting type
* Helper functions
$helper-functions
The helper functions of this section operate on AST nodes to check
for different properties. As an example, to check whether an expression
is a field, instead of having to pattern match in all places, i.e.,
> exampleFunction :: Expr -> Bool
> exampleFunction expr =
> -- does some stuff
> ...
> case expr of
> FieldAccess expr -> True
> _ -> False
>
whether a given expression is a 'FieldAccess':
> exampleFunction :: Expr -> Bool
> exampleFunction expr =
> -- does some stuff
> ...
>
| Constant for the name @this@, commonly used in object-oriented languages.
| Checks whether a 'Type' is a function (arrow) type
| Checks whether an expression is a 'FieldAccess'.
| Checks whether an expression is an lval.
| Helper function to check whether a 'Type' is a class
| Helper function to extract the type from an expression. | Copyright : © 2019 and
License : MIT
' Phase ' represent the current state of the AST . For example , after parsing
the AST is of ' ' @Phase@ ; after type checking with ' PhantomFunctors.tcProgram ' the
returned AST is of ' Checked ' @Phase@ , indicating that the AST has been
# LANGUAGE NamedFieldPuns , KindSignatures , DataKinds #
module PhantomPhases.AST where
import Data.Maybe
import Data.List
import Text.Printf (printf)
type Name = String
isConstructorName = (=="init")
as an AST . The AST is produced by a parser . For more information on
< -7.0.5 > .
data Type (p :: Phase) =
ClassType Name
| IntType
| BoolType
| Arrow {tparams :: [Type p], tresult :: Type p}
| UnitType
deriving (Eq)
instance Show (Type p) where
show (ClassType c) = c
show IntType = "int"
show BoolType = "bool"
show (Arrow ts t) = "(" ++ commaSep ts ++ ")" ++ " -> " ++ show t
show UnitType = "unit"
newtype Program (ip :: Phase) =
| Programs are simply a list of class definitions ( ' ClassDef ' ) in a certain ' Phase '
Program [ClassDef ip] deriving (Show)
> class :
> x : Int
> var y :
> 42
> , ftype = IntType
> , methods = [ MethodDef { mname = " main "
> , = IntType
> , = [ IntLit { etype = Nothing , ival = 42 } ]
data ClassDef (ip :: Phase) =
}
instance Show (ClassDef ip) where
show ClassDef {cname, fields, methods} =
"class " ++ cname ++ concatMap show fields ++ concatMap show methods ++ "end"
> class :
> x : Int
> var y :
deriving (Eq)
instance Show Mod where
show Var = "var"
show Val = "val"
> class :
> x : Int
> , ftype = IntType
data FieldDef (p :: Phase) =
}
isValField :: FieldDef p -> Bool
isValField FieldDef{fmod} = fmod == Val
isVarField :: FieldDef p -> Bool
isVarField = not . isValField
instance Show (FieldDef p) where
show FieldDef{fname, ftype, fmod} =
show fmod ++ " " ++ fname ++ " : " ++ show ftype
}
instance Show (Param p) where
show Param{pname, ptype} = pname ++ " : " ++ show ptype
> class :
> 42
> , methods = [ MethodDef { mname = " main "
> , = IntType
> , = [ IntLit { etype = Nothing , ival = 42 } ]
data MethodDef (ip :: Phase) =
}
commaSep :: Show t => [t] -> String
commaSep = intercalate ", " . map show
instance Show (MethodDef ip) where
show MethodDef{mname, mparams, mtype, mbody} =
"def " ++ mname ++ "(" ++ commaSep mparams ++ ") : " ++
show mtype ++ show mbody
data Op = Add | Sub | Mul | Div deriving (Eq)
instance Show Op where
show Add = "+"
show Sub = "-"
show Mul = "*"
show Div = "/"
> let i d = \x : Int - > x
generates this ' ' :
> , = Lambda { etype = Nothing
> , params = [ Param " x " IntType ]
> , body = Nothing
> , body : :
data Expr (p :: Phase) =
^ The " Haskell " ' Bool ' data constructor
}
,ival :: Int
}
}
}
}
}
}
^ The target of a method call , e.g. , @x.bar()@ , then @x@ is the target
}
^ The target of the function call , e.g. , @bar()@ , then @bar@ is the target
}
^ The condition in the @if - else@ expression
^ The body of the @then@ branch
}
}
}
}
^ Body that will be casted to type @ty@
}
we define the ' isFieldAccess ' helper function , which checks
> isFieldAccess expr
thisName :: Name
thisName = "this"
isArrowType :: (Type p) -> Bool
isArrowType Arrow {} = True
isArrowType _ = False
isFieldAccess :: Expr p -> Bool
isFieldAccess FieldAccess{} = True
isFieldAccess _ = False
| Checks whether an expression is a ' VarAccess ' .
isVarAccess :: Expr p -> Bool
isVarAccess VarAccess{} = True
isVarAccess _ = False
| Checks whether an expression is a ' VarAccess ' of ' this ' .
isThisAccess :: Expr p -> Bool
isThisAccess VarAccess{name} = name == thisName
isThisAccess _ = False
isLVal :: Expr p -> Bool
isLVal e = isFieldAccess e || isVarAccess e
instance Show (Expr p) where
show BoolLit{bval} = show bval
show IntLit{ival} = show ival
show Null{} = "null"
show Lambda{params, body} =
printf "fun (%s) => %s" (commaSep params) (show body)
show VarAccess{name} = name
show FieldAccess{target, name} =
printf "%s.%s" (show target) name
show Assignment{lhs, rhs} =
printf "%s = %s" (show lhs) (show rhs)
show MethodCall{target, name, args} =
printf "%s.%s(%s)" (show target) name (commaSep args)
show FunctionCall{target, args} =
printf "%s(%s)" (show target) (commaSep args)
show If{cond, thn, els} =
printf "if %s then %s else %s" (show cond) (show thn) (show els)
show Let{name, val, body} =
printf "let %s = %s in %s" name (show val) (show body)
show BinOp{op, lhs, rhs} =
printf "%s %s %s" (show lhs) (show op) (show rhs)
show New {ty, args} =
printf "new %s(%s)" (show ty) (commaSep args)
show Cast{body, ty} =
printf "%s : %s" (show body) (show ty)
isClassType :: Type p -> Bool
isClassType (ClassType _) = True
isClassType _ = False
getType :: Expr 'Checked -> Type 'Checked
getType = fromJust . etype
| Sets the type of an expression to
setType :: Type 'Checked -> Expr 'Checked -> Expr 'Checked
setType t e = e{etype = Just t}
|
edd98650991f9b854d8476dee7e8b01e4425118f93d2e2a2afeb8b18b4dafffe | racket/typed-racket | subst-tests.rkt | #lang racket/base
(require "test-utils.rkt"
typed-racket/rep/type-rep
typed-racket/types/utils
typed-racket/types/abbrev
typed-racket/types/numeric-tower
typed-racket/types/substitute
rackunit)
(provide tests)
(gen-test-main)
(define-syntax-rule (s img var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute img 'var tgt)
result))
(define-syntax-rule (s* imgs rest var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute-dots (list . imgs) rest 'var tgt)
result))
(define-syntax-rule (s... imgs var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute-dots (list . imgs) #f 'var tgt)
result))
(define tests
(test-suite "Tests for type substitution"
(s -Number a (-v a) -Number)
(s -Number a (-pair (-v a) -String) (-pair -Number -String))
(s -Number a (-pair -String (-v a)) (-pair -String -Number))
(s* (-Symbol -String) #f a (make-ListDots (-v a) 'a) (-lst* -Symbol -String))
(s* (-Symbol -String) Univ a (make-ListDots (-v a) 'a) (-lst* -Symbol -String #:tail (-lst Univ)))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots null -Number (-v a) 'a))) (-Number -Boolean . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v a) 'a))) (-String -Number -Boolean . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'a))) (-String (-v b) (-v b) . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'b)))
(make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'b))))))
| null | https://raw.githubusercontent.com/racket/typed-racket/6ea20bec8d41e1a188d7f831c35423293a89c98e/typed-racket-test/unit-tests/subst-tests.rkt | racket | #lang racket/base
(require "test-utils.rkt"
typed-racket/rep/type-rep
typed-racket/types/utils
typed-racket/types/abbrev
typed-racket/types/numeric-tower
typed-racket/types/substitute
rackunit)
(provide tests)
(gen-test-main)
(define-syntax-rule (s img var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute img 'var tgt)
result))
(define-syntax-rule (s* imgs rest var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute-dots (list . imgs) rest 'var tgt)
result))
(define-syntax-rule (s... imgs var tgt result)
(test-equal? (format "~a" '(img tgt))
(substitute-dots (list . imgs) #f 'var tgt)
result))
(define tests
(test-suite "Tests for type substitution"
(s -Number a (-v a) -Number)
(s -Number a (-pair (-v a) -String) (-pair -Number -String))
(s -Number a (-pair -String (-v a)) (-pair -String -Number))
(s* (-Symbol -String) #f a (make-ListDots (-v a) 'a) (-lst* -Symbol -String))
(s* (-Symbol -String) Univ a (make-ListDots (-v a) 'a) (-lst* -Symbol -String #:tail (-lst Univ)))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots null -Number (-v a) 'a))) (-Number -Boolean . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v a) 'a))) (-String -Number -Boolean . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'a))) (-String (-v b) (-v b) . -> . -Number))
(s... (-Number -Boolean) a (make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'b)))
(make-Fun (list (make-arr-dots (list -String) -Number (-v b) 'b))))))
| |
ef09bf423e393cf69a8bb2a438a7296f93e5936d9ad5f5749892ea8945abf6f1 | uw-unsat/serval-sosp19 | nickel-ni.rkt | #lang rosette/safe
(require
serval/lib/unittest
serval/lib/core
(prefix-in serval: serval/spec/ni)
"state.rkt"
"spec.rkt"
"invariants.rkt"
(only-in racket/base exn:fail? struct-copy string-append parameterize)
(prefix-in certikos: "generated/monitors/certikos/verif/asm-offsets.rkt")
rackunit
rackunit/text-ui
rosette/lib/roseunit
rosette/lib/angelic
)
(struct yield-domain (pid) #:transparent)
(struct spawn-domain (parent child) #:transparent)
(struct regular-domain (pid) #:transparent)
(struct antiyield-domain (pid) #:transparent)
(define (fresh-domain)
(define-symbolic* pid child (bitvector 64))
(choose*
(yield-domain pid)
(spawn-domain pid child)
(antiyield-domain pid)
(regular-domain pid)))
(define (active u s)
(equal? u (state-current-pid s)))
(define (dom a s)
(define spec (car a))
(define args (cdr a))
(define current (state-current-pid s))
(cond
[(eq? spec spec-sys_yield) (yield-domain current)]
[(eq? spec spec-sys_spawn)
(define child-pid (list-ref args 2))
(if (&& (pid-valid? child-pid)
(proc-parent? s current child-pid)
(not (proc-runnable? s child-pid)))
(spawn-domain current child-pid)
(regular-domain current))]
[else (regular-domain current)]))
(define (flowsto u v)
(cond
yield interferes with everyone _ except _ others ' domains
[(yield-domain? u)
(cond
[(antiyield-domain? v) (equal? (antiyield-domain-pid v) (yield-domain-pid u))]
[else #t])]
; Any domain except yield can interfere with antiyield
[(antiyield-domain? v) #t]
; Spawn(parent, child) interferes with
; yield(parent) yield(child)
; regular(parent) regular(child)
; spawn(parent, _) spawn(child, _)
[(spawn-domain? u)
(define parent (spawn-domain-parent u))
(define child (spawn-domain-child u))
(define reader (cond
[(yield-domain? v) (yield-domain-pid v)]
[(regular-domain? v) (regular-domain-pid v)]
[(spawn-domain? v) (spawn-domain-parent v)]))
(|| (equal? reader parent) (equal? reader child))]
; Regular(pid) interferes with
; Yield(pid) Regular(pid) Spawn(pid, _)
[(regular-domain? u)
(define pid (regular-domain-pid u))
(cond
[(yield-domain? v) #f]
[(regular-domain? v) (equal? pid (regular-domain-pid v))]
[(spawn-domain? v) (equal? pid (spawn-domain-parent v))])]))
(define (unwinding dom s t)
(define d dom)
(cond
[(spawn-domain? d) (spawn-eqv (spawn-domain-parent d) (spawn-domain-child d) s t)]
[(yield-domain? d) (yield-eqv (yield-domain-pid d) s t)]
[(regular-domain? d) (eqv (regular-domain-pid d) s t)]
[(antiyield-domain? d) (antiyield-eqv (antiyield-domain-pid d) s t)]))
(define (eqv u s t)
(define-symbolic q pid page offset (bitvector 64))
(=> (pid-valid? u)
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.owner s) u) ((state-proc.owner t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(equal? ((state-proc.saved-regs s) u) ((state-proc.saved-regs t) u))
(equal? (active u s) (active u t))
(forall (var pid)
(=> (pid-valid? pid)
(equal?
(proc-parent? s u pid)
(proc-parent? t u pid))))
(forall (var page offset)
(=> (&& (bvuge page ((state-proc.lower s) u))
(bvult page ((state-proc.upper s) u))
(poffset-valid? offset))
(equal? ((state-pages s) page offset) ((state-pages t) page offset))))
(forall (var pid)
(=> (&& (pid-valid? pid) (proc-parent? s u pid))
(equal? ((state-proc.state s) pid) ((state-proc.state t) pid))))
(=> (active u s)
(&& (equal? (state-regs s) (state-regs t)))))))
(define (antiyield-eqv u s t)
(define-symbolic q pid page offset (bitvector 64))
(=> (pid-valid? u)
(&&
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.owner s) u) ((state-proc.owner t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(forall (var pid)
(=> (pid-valid? pid)
(equal?
(proc-parent? s u pid)
(proc-parent? t u pid))))
(forall (var page offset)
(=> (&& (bvuge page ((state-proc.lower s) u))
(bvult page ((state-proc.upper s) u))
(poffset-valid? offset))
(equal? ((state-pages s) page offset) ((state-pages t) page offset))))
(forall (var pid)
(=> (&& (pid-valid? pid) (proc-parent? s u pid))
(equal? ((state-proc.state s) pid) ((state-proc.state t) pid)))))))
(define (verify-dom-respect spec [args null])
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(define dom-a-s (dom (cons spec args) s))
(define dom-a-t (dom (cons spec args) s))
(define pre (&& (spec-invariants s)
(spec-invariants t)
(unwinding u s t)))
(define post (<=> (flowsto dom-a-s u)
(flowsto dom-a-t u)))
(check-unsat? (verify (assert (=> pre post)))))
(define (verify-dom-consistency spec [args null])
(define s (make-havoc-state))
(define t (make-havoc-state))
(define dom-a-s (dom (cons spec args) s))
(define dom-a-t (dom (cons spec args) t))
(define pre (&& (spec-invariants s)
(spec-invariants t)
(unwinding dom-a-s s t)))
(define post (equal? dom-a-s dom-a-t))
(check-unsat? (verify (assert (=> pre post)))))
(define (spawn-eqv u child s t)
(=> (&& (pid-valid? u) (pid-valid? child))
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? (active u s) (active u t))
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(equal? (proc-parent? s u child) (proc-parent? t u child))
(=> (proc-parent? s u child)
(equal? ((state-proc.state s) child) ((state-proc.state t) child))))))
(define (yield-eqv u s t)
(=> (pid-valid? u)
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? ((state-proc.next s) u) ((state-proc.next t) u)))))
(define (verify-unwinding-symmetry)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert (=> (unwinding u s t) (unwinding u t s))))))
(define (verify-unwinding-reflexivity)
(define s (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert (unwinding u s s)))))
(define (verify-unwinding-transitivity)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define v (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert
(=> (&& (unwinding u s t) (unwinding u t v))
(unwinding u s v))))))
(define (verify-unwinding-negatable)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(check-sat? (solve (assert (not (unwinding u s t))))))
(define (verify-weak-step-consistency spec [args null])
(serval:check-local-respect
#:state-init make-havoc-state
#:state-copy state-copy
#:invariants spec-invariants
#:dom dom
#:u (fresh-domain)
#:unwinding unwinding
#:flowsto flowsto
(cons spec args)
spec
args))
(define (verify-local-respect spec [args null])
(serval:check-local-respect
#:state-init make-havoc-state
#:state-copy state-copy
#:invariants spec-invariants
#:dom dom
#:u (fresh-domain)
#:unwinding unwinding
#:flowsto flowsto
(cons spec args)
spec
args))
(define-syntax-rule (ni-case+ name op args)
(begin
(test-case+ (string-append name " weak-step-consistency") (verify-weak-step-consistency op args))
(test-case+ (string-append name " local-respect") (verify-local-respect op args))
(test-case+ (string-append name " dom consistency") (verify-dom-consistency op args))
(test-case+ (string-append name " dom respect") (verify-dom-respect op args))
))
(define certikos-ni-tests
(test-suite+ "certikos NI tests"
(test-case+ "unwinding negatable" (verify-unwinding-negatable))
(test-case+ "unwinding symmetry" (verify-unwinding-symmetry))
(test-case+ "unwinding reflexivity" (verify-unwinding-reflexivity))
(test-case+ "unwinding transitivity" (verify-unwinding-transitivity))
(ni-case+ "spec-write-regs" spec-write-regs (list (make-havoc-regs)))
(ni-case+ "spec-write" spec-write (list (make-bv64) (make-bv64) (make-bv64)))
(ni-case+ "spec-read" spec-read (list (make-bv64) (make-bv64)))
(ni-case+ "spec-write-regs" spec-write-regs (list (make-havoc-regs)))
(ni-case+ "spawn" spec-sys_spawn (list (make-bv64) (make-bv64) (make-bv64)))
(ni-case+ "get_quota" spec-sys_get_quota null)
(ni-case+ "getpid" spec-sys_getpid null)
(ni-case+ "yield" spec-sys_yield null)
))
(module+ test
(time (run-tests certikos-ni-tests)))
| null | https://raw.githubusercontent.com/uw-unsat/serval-sosp19/175c42660fad84b44e4c9f6f723fd3c9450d65d4/monitors/certikos/verif/nickel-ni.rkt | racket | Any domain except yield can interfere with antiyield
Spawn(parent, child) interferes with
yield(parent) yield(child)
regular(parent) regular(child)
spawn(parent, _) spawn(child, _)
Regular(pid) interferes with
Yield(pid) Regular(pid) Spawn(pid, _) | #lang rosette/safe
(require
serval/lib/unittest
serval/lib/core
(prefix-in serval: serval/spec/ni)
"state.rkt"
"spec.rkt"
"invariants.rkt"
(only-in racket/base exn:fail? struct-copy string-append parameterize)
(prefix-in certikos: "generated/monitors/certikos/verif/asm-offsets.rkt")
rackunit
rackunit/text-ui
rosette/lib/roseunit
rosette/lib/angelic
)
(struct yield-domain (pid) #:transparent)
(struct spawn-domain (parent child) #:transparent)
(struct regular-domain (pid) #:transparent)
(struct antiyield-domain (pid) #:transparent)
(define (fresh-domain)
(define-symbolic* pid child (bitvector 64))
(choose*
(yield-domain pid)
(spawn-domain pid child)
(antiyield-domain pid)
(regular-domain pid)))
(define (active u s)
(equal? u (state-current-pid s)))
(define (dom a s)
(define spec (car a))
(define args (cdr a))
(define current (state-current-pid s))
(cond
[(eq? spec spec-sys_yield) (yield-domain current)]
[(eq? spec spec-sys_spawn)
(define child-pid (list-ref args 2))
(if (&& (pid-valid? child-pid)
(proc-parent? s current child-pid)
(not (proc-runnable? s child-pid)))
(spawn-domain current child-pid)
(regular-domain current))]
[else (regular-domain current)]))
(define (flowsto u v)
(cond
yield interferes with everyone _ except _ others ' domains
[(yield-domain? u)
(cond
[(antiyield-domain? v) (equal? (antiyield-domain-pid v) (yield-domain-pid u))]
[else #t])]
[(antiyield-domain? v) #t]
[(spawn-domain? u)
(define parent (spawn-domain-parent u))
(define child (spawn-domain-child u))
(define reader (cond
[(yield-domain? v) (yield-domain-pid v)]
[(regular-domain? v) (regular-domain-pid v)]
[(spawn-domain? v) (spawn-domain-parent v)]))
(|| (equal? reader parent) (equal? reader child))]
[(regular-domain? u)
(define pid (regular-domain-pid u))
(cond
[(yield-domain? v) #f]
[(regular-domain? v) (equal? pid (regular-domain-pid v))]
[(spawn-domain? v) (equal? pid (spawn-domain-parent v))])]))
(define (unwinding dom s t)
(define d dom)
(cond
[(spawn-domain? d) (spawn-eqv (spawn-domain-parent d) (spawn-domain-child d) s t)]
[(yield-domain? d) (yield-eqv (yield-domain-pid d) s t)]
[(regular-domain? d) (eqv (regular-domain-pid d) s t)]
[(antiyield-domain? d) (antiyield-eqv (antiyield-domain-pid d) s t)]))
(define (eqv u s t)
(define-symbolic q pid page offset (bitvector 64))
(=> (pid-valid? u)
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.owner s) u) ((state-proc.owner t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(equal? ((state-proc.saved-regs s) u) ((state-proc.saved-regs t) u))
(equal? (active u s) (active u t))
(forall (var pid)
(=> (pid-valid? pid)
(equal?
(proc-parent? s u pid)
(proc-parent? t u pid))))
(forall (var page offset)
(=> (&& (bvuge page ((state-proc.lower s) u))
(bvult page ((state-proc.upper s) u))
(poffset-valid? offset))
(equal? ((state-pages s) page offset) ((state-pages t) page offset))))
(forall (var pid)
(=> (&& (pid-valid? pid) (proc-parent? s u pid))
(equal? ((state-proc.state s) pid) ((state-proc.state t) pid))))
(=> (active u s)
(&& (equal? (state-regs s) (state-regs t)))))))
(define (antiyield-eqv u s t)
(define-symbolic q pid page offset (bitvector 64))
(=> (pid-valid? u)
(&&
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.owner s) u) ((state-proc.owner t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(forall (var pid)
(=> (pid-valid? pid)
(equal?
(proc-parent? s u pid)
(proc-parent? t u pid))))
(forall (var page offset)
(=> (&& (bvuge page ((state-proc.lower s) u))
(bvult page ((state-proc.upper s) u))
(poffset-valid? offset))
(equal? ((state-pages s) page offset) ((state-pages t) page offset))))
(forall (var pid)
(=> (&& (pid-valid? pid) (proc-parent? s u pid))
(equal? ((state-proc.state s) pid) ((state-proc.state t) pid)))))))
(define (verify-dom-respect spec [args null])
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(define dom-a-s (dom (cons spec args) s))
(define dom-a-t (dom (cons spec args) s))
(define pre (&& (spec-invariants s)
(spec-invariants t)
(unwinding u s t)))
(define post (<=> (flowsto dom-a-s u)
(flowsto dom-a-t u)))
(check-unsat? (verify (assert (=> pre post)))))
(define (verify-dom-consistency spec [args null])
(define s (make-havoc-state))
(define t (make-havoc-state))
(define dom-a-s (dom (cons spec args) s))
(define dom-a-t (dom (cons spec args) t))
(define pre (&& (spec-invariants s)
(spec-invariants t)
(unwinding dom-a-s s t)))
(define post (equal? dom-a-s dom-a-t))
(check-unsat? (verify (assert (=> pre post)))))
(define (spawn-eqv u child s t)
(=> (&& (pid-valid? u) (pid-valid? child))
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? (active u s) (active u t))
(equal? ((state-proc.upper s) u) ((state-proc.upper t) u))
(equal? ((state-proc.lower s) u) ((state-proc.lower t) u))
(equal? ((state-proc.state s) u) ((state-proc.state t) u))
(equal? ((state-proc.next s) u) ((state-proc.next t) u))
(equal? (proc-parent? s u child) (proc-parent? t u child))
(=> (proc-parent? s u child)
(equal? ((state-proc.state s) child) ((state-proc.state t) child))))))
(define (yield-eqv u s t)
(=> (pid-valid? u)
(&&
(equal? (state-current-pid s) (state-current-pid t))
(equal? ((state-proc.next s) u) ((state-proc.next t) u)))))
(define (verify-unwinding-symmetry)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert (=> (unwinding u s t) (unwinding u t s))))))
(define (verify-unwinding-reflexivity)
(define s (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert (unwinding u s s)))))
(define (verify-unwinding-transitivity)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define v (make-havoc-state))
(define u (fresh-domain))
(check-unsat? (verify (assert
(=> (&& (unwinding u s t) (unwinding u t v))
(unwinding u s v))))))
(define (verify-unwinding-negatable)
(define s (make-havoc-state))
(define t (make-havoc-state))
(define u (fresh-domain))
(check-sat? (solve (assert (not (unwinding u s t))))))
(define (verify-weak-step-consistency spec [args null])
(serval:check-local-respect
#:state-init make-havoc-state
#:state-copy state-copy
#:invariants spec-invariants
#:dom dom
#:u (fresh-domain)
#:unwinding unwinding
#:flowsto flowsto
(cons spec args)
spec
args))
(define (verify-local-respect spec [args null])
(serval:check-local-respect
#:state-init make-havoc-state
#:state-copy state-copy
#:invariants spec-invariants
#:dom dom
#:u (fresh-domain)
#:unwinding unwinding
#:flowsto flowsto
(cons spec args)
spec
args))
(define-syntax-rule (ni-case+ name op args)
(begin
(test-case+ (string-append name " weak-step-consistency") (verify-weak-step-consistency op args))
(test-case+ (string-append name " local-respect") (verify-local-respect op args))
(test-case+ (string-append name " dom consistency") (verify-dom-consistency op args))
(test-case+ (string-append name " dom respect") (verify-dom-respect op args))
))
(define certikos-ni-tests
(test-suite+ "certikos NI tests"
(test-case+ "unwinding negatable" (verify-unwinding-negatable))
(test-case+ "unwinding symmetry" (verify-unwinding-symmetry))
(test-case+ "unwinding reflexivity" (verify-unwinding-reflexivity))
(test-case+ "unwinding transitivity" (verify-unwinding-transitivity))
(ni-case+ "spec-write-regs" spec-write-regs (list (make-havoc-regs)))
(ni-case+ "spec-write" spec-write (list (make-bv64) (make-bv64) (make-bv64)))
(ni-case+ "spec-read" spec-read (list (make-bv64) (make-bv64)))
(ni-case+ "spec-write-regs" spec-write-regs (list (make-havoc-regs)))
(ni-case+ "spawn" spec-sys_spawn (list (make-bv64) (make-bv64) (make-bv64)))
(ni-case+ "get_quota" spec-sys_get_quota null)
(ni-case+ "getpid" spec-sys_getpid null)
(ni-case+ "yield" spec-sys_yield null)
))
(module+ test
(time (run-tests certikos-ni-tests)))
|
e78363b10bfec37bff72fe18896c08f3d3829d43d002feabd68d1e4cafb879b8 | OCamlPro/ocp-build | buildOCamlGlobals.ml | (**************************************************************************)
(* *)
(* Typerex Tools *)
(* *)
Copyright 2011 - 2017 OCamlPro SAS
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU General Public License version 3 described in the file
(* LICENSE. *)
(* *)
(**************************************************************************)
open OcpCompat
open BuildTypes
open BuildOCPTypes
open BuildOCamlTypes
open BuildOptions
open BuildOCamlVariables
open BuildValue.TYPES
let list_byte_targets_arg = ref false
let list_asm_targets_arg = ref false
let ocaml_packages = Hashtbl.create 111
let reset () =
Hashtbl.clear ocaml_packages
let create_package lib opk =
let envs = opk.opk_options in
let b = lib.lib_context in
let p = lib.lib_package in
let bc = lib.lib_builder_context in
let pk = opk.opk_package in
let lib_archive = BuildValue.get_string_with_default envs "archive" lib.lib_name in
let lib_stubarchive = BuildValue.get_string_with_default envs "stubarchive" ("ml" ^ lib_archive) in
let lib_requires = List.map (fun dep ->
let pk2 = dep.dep_project.opk_package in
let lib2 =
try
Hashtbl.find ocaml_packages pk2.package_id
with Not_found ->
Printf.eprintf "Unknown dependency %d (%s) of package %S\n%!"
pk2.package_id
pk2.package_name
lib.lib_name;
BuildMisc.clean_exit 2
in
{ dep with dep_project = lib2 }
) opk.opk_requires
in
let lib_autolink = match lib.lib_type with
| TestPackage
| ObjectsPackage
| RulesPackage
-> BuildValue.get_bool_with_default envs "autolink" false
| ProgramPackage
| LibraryPackage
| SyntaxPackage ->
BuildValue.get_bool_with_default envs "autolink" true
in
let lib_ready =
if opk.opk_installed then [] else
let file_ready =
BuildEngineContext.add_virtual_file p lib.lib_dst_dir
(lib.lib_name ^ " validated") in
let r = BuildEngineRules.new_rule b lib.lib_loc file_ready [] in
List.iter (fun filename ->
BuildEngineRules.add_rule_source r
(BuildGlobals.config_filename_validated bc lib.lib_loc filename)
) pk.package_filenames;
[file_ready]
in
let lib_meta = BuildValue.get_bool_with_default envs "meta" false in
let lib_alias = BuildValue.get_string_option_with_default
opk.opk_options "alias" None
in
let lib_aliases = StringMap.empty in
let lib = {
lib = lib;
lib_opk = opk;
lib_alias;
lib_aliases;
lib_autolink;
lib_byte_targets = [];
lib_asm_targets = [];
lib_intf_targets = [];
lib_stub_targets = [];
lib_modules = [];
lib_internal_modules = StringsMap.empty;
(* lib_dep_deps = IntMap.empty; *)
lib_includes = None;
lib_linkdeps = [];
lib_sources = BuildValue.get_local_prop_list_with_default envs "files" [];
lib_tests = BuildValue.get_local_prop_list_with_default envs "tests" [];
lib_doc_targets = ref [];
lib_test_targets = ref [];
lib_build_targets = ref [];
lib_archive;
lib_stubarchive;
lib_ready;
lib_meta ;
lib_requires;
}
in
Hashtbl.add ocaml_packages lib.lib.lib_id lib;
if BuildGlobals.verbose 5 then begin
Printf.eprintf "BuildOCamlGlobals.create_package %S\n" lib.lib.lib_name;
List.iter (fun (s, _) ->
Printf.eprintf " MOD %S\n%!" s;
) lib.lib_sources;
end;
lib
let get_by_id lib =
try
Some (Hashtbl.find ocaml_packages lib.lib_id)
with Not_found -> None
let get_by_name bc name =
try
let lib =
StringMap.find name bc.packages_by_name
in
get_by_id lib
with Not_found -> None
let make_build_targets lib cin =
match get_by_id lib with
| None -> []
| Some lib ->
(if cin.cin_bytecode then
List.map fst lib.lib_byte_targets
else []) @
(if cin.cin_native then
List.map fst lib.lib_asm_targets
else []) @
(List.fold_left (fun list (file, kind) ->
match kind with
| CMI -> file :: list
| CMX when cin.cin_native -> file :: list
| _ -> list
) [] lib.lib_intf_targets) @
(List.map fst lib.lib_stub_targets) @
!(lib.lib_build_targets)
let make_doc_targets lib _cin =
match get_by_id lib with
| None -> []
| Some lib -> !(lib.lib_doc_targets)
let make_test_targets lib _cin =
match get_by_id lib with
| None -> []
| Some lib -> !(lib.lib_test_targets)
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/tools/ocp-build/ocaml/buildOCamlGlobals.ml | ocaml | ************************************************************************
Typerex Tools
All rights reserved. This file is distributed under the terms of
LICENSE.
************************************************************************
lib_dep_deps = IntMap.empty; | Copyright 2011 - 2017 OCamlPro SAS
the GNU General Public License version 3 described in the file
open OcpCompat
open BuildTypes
open BuildOCPTypes
open BuildOCamlTypes
open BuildOptions
open BuildOCamlVariables
open BuildValue.TYPES
let list_byte_targets_arg = ref false
let list_asm_targets_arg = ref false
let ocaml_packages = Hashtbl.create 111
let reset () =
Hashtbl.clear ocaml_packages
let create_package lib opk =
let envs = opk.opk_options in
let b = lib.lib_context in
let p = lib.lib_package in
let bc = lib.lib_builder_context in
let pk = opk.opk_package in
let lib_archive = BuildValue.get_string_with_default envs "archive" lib.lib_name in
let lib_stubarchive = BuildValue.get_string_with_default envs "stubarchive" ("ml" ^ lib_archive) in
let lib_requires = List.map (fun dep ->
let pk2 = dep.dep_project.opk_package in
let lib2 =
try
Hashtbl.find ocaml_packages pk2.package_id
with Not_found ->
Printf.eprintf "Unknown dependency %d (%s) of package %S\n%!"
pk2.package_id
pk2.package_name
lib.lib_name;
BuildMisc.clean_exit 2
in
{ dep with dep_project = lib2 }
) opk.opk_requires
in
let lib_autolink = match lib.lib_type with
| TestPackage
| ObjectsPackage
| RulesPackage
-> BuildValue.get_bool_with_default envs "autolink" false
| ProgramPackage
| LibraryPackage
| SyntaxPackage ->
BuildValue.get_bool_with_default envs "autolink" true
in
let lib_ready =
if opk.opk_installed then [] else
let file_ready =
BuildEngineContext.add_virtual_file p lib.lib_dst_dir
(lib.lib_name ^ " validated") in
let r = BuildEngineRules.new_rule b lib.lib_loc file_ready [] in
List.iter (fun filename ->
BuildEngineRules.add_rule_source r
(BuildGlobals.config_filename_validated bc lib.lib_loc filename)
) pk.package_filenames;
[file_ready]
in
let lib_meta = BuildValue.get_bool_with_default envs "meta" false in
let lib_alias = BuildValue.get_string_option_with_default
opk.opk_options "alias" None
in
let lib_aliases = StringMap.empty in
let lib = {
lib = lib;
lib_opk = opk;
lib_alias;
lib_aliases;
lib_autolink;
lib_byte_targets = [];
lib_asm_targets = [];
lib_intf_targets = [];
lib_stub_targets = [];
lib_modules = [];
lib_internal_modules = StringsMap.empty;
lib_includes = None;
lib_linkdeps = [];
lib_sources = BuildValue.get_local_prop_list_with_default envs "files" [];
lib_tests = BuildValue.get_local_prop_list_with_default envs "tests" [];
lib_doc_targets = ref [];
lib_test_targets = ref [];
lib_build_targets = ref [];
lib_archive;
lib_stubarchive;
lib_ready;
lib_meta ;
lib_requires;
}
in
Hashtbl.add ocaml_packages lib.lib.lib_id lib;
if BuildGlobals.verbose 5 then begin
Printf.eprintf "BuildOCamlGlobals.create_package %S\n" lib.lib.lib_name;
List.iter (fun (s, _) ->
Printf.eprintf " MOD %S\n%!" s;
) lib.lib_sources;
end;
lib
let get_by_id lib =
try
Some (Hashtbl.find ocaml_packages lib.lib_id)
with Not_found -> None
let get_by_name bc name =
try
let lib =
StringMap.find name bc.packages_by_name
in
get_by_id lib
with Not_found -> None
let make_build_targets lib cin =
match get_by_id lib with
| None -> []
| Some lib ->
(if cin.cin_bytecode then
List.map fst lib.lib_byte_targets
else []) @
(if cin.cin_native then
List.map fst lib.lib_asm_targets
else []) @
(List.fold_left (fun list (file, kind) ->
match kind with
| CMI -> file :: list
| CMX when cin.cin_native -> file :: list
| _ -> list
) [] lib.lib_intf_targets) @
(List.map fst lib.lib_stub_targets) @
!(lib.lib_build_targets)
let make_doc_targets lib _cin =
match get_by_id lib with
| None -> []
| Some lib -> !(lib.lib_doc_targets)
let make_test_targets lib _cin =
match get_by_id lib with
| None -> []
| Some lib -> !(lib.lib_test_targets)
|
70f52b797b225fc5144badb3e10a97e9e9067919c37ffebee0a78a2cb904802c | duct-framework/middleware.buddy | project.clj | (defproject duct/middleware.buddy "0.2.0"
:description "Duct library for Buddy middleware"
:url "-framework/middleware.buddy"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[buddy/buddy-auth "2.2.0"]
[duct/core "0.8.0"]
[integrant "0.8.0"]]
:profiles
{:dev {:dependencies [[ring/ring-mock "0.4.0"]]}})
| null | https://raw.githubusercontent.com/duct-framework/middleware.buddy/f311128af9f18c25f12b24baf5bdc6f91f9e7042/project.clj | clojure | (defproject duct/middleware.buddy "0.2.0"
:description "Duct library for Buddy middleware"
:url "-framework/middleware.buddy"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[buddy/buddy-auth "2.2.0"]
[duct/core "0.8.0"]
[integrant "0.8.0"]]
:profiles
{:dev {:dependencies [[ring/ring-mock "0.4.0"]]}})
| |
5a87e51b6a58c1dbb4a73c819ccab994bbf47cf897f33f174580dee020612cc5 | ragkousism/Guix-on-Hurd | lua.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 < >
Copyright © 2014 < >
Copyright © 2014 < >
Copyright © 2014 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2016 doncatnip < >
Copyright © 2016 Clément < >
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages lua)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages readline)
#:use-module (gnu packages tls)
#:use-module (gnu packages xml)
#:use-module (gnu packages glib)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages xorg)
#:use-module (gnu packages gtk))
(define-public lua
(package
(name "lua")
(version "5.3.4")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0320a8dg3aci4hxla380dx1ifkw8gj4gbw5c4dz41g1kh98sm0gn"))
(patches (search-patches "lua-pkgconfig.patch"
"lua-liblua-so.patch"))))
(build-system gnu-build-system)
(inputs `(("readline" ,readline)))
(arguments
'(#:modules ((guix build gnu-build-system)
(guix build utils)
(srfi srfi-1))
#:test-target "test"
#:make-flags
'("MYCFLAGS=-fPIC -DLUA_DL_DLOPEN"
"linux")
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(zero? (system* "make" "install"
(string-append "INSTALL_TOP=" out)
(string-append "INSTALL_MAN=" out
"/share/man/man1")))))))))
(home-page "/")
(synopsis "Embeddable scripting language")
(description
"Lua is a powerful, fast, lightweight, embeddable scripting language. Lua
combines simple procedural syntax with powerful data description constructs
based on associative arrays and extensible semantics. Lua is dynamically typed,
runs by interpreting bytecode for a register-based virtual machine, and has
automatic memory management with incremental garbage collection, making it ideal
for configuration, scripting, and rapid prototyping.")
(license license:x11)))
(define-public lua-5.2
(package (inherit lua)
(version "5.2.4")
(source
(origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0jwznq0l8qg9wh5grwg07b5cy3lzngvl5m2nl1ikp6vqssmf9qmr"))
(patches (search-patches "lua-pkgconfig.patch"
"lua-liblua-so.patch"))))))
(define-public lua-5.1
(package (inherit lua)
(version "5.1.5")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0cskd4w0g6rdm2q8q3i4n1h3j8kylhs3rq8mxwl9vwlmlxbgqh16"))
(patches (search-patches "lua51-liblua-so.patch"
"lua-CVE-2014-5461.patch"
"lua51-pkgconfig.patch"))))))
(define-public luajit
(package
(name "luajit")
(version "2.0.4")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0zc0y7p6nx1c0pp4nhgbdgjljpfxsb5kgwp4ysz22l1p2bms83v2"))
(patches (search-patches "luajit-symlinks.patch"
"luajit-no_ldconfig.patch"))))
(build-system gnu-build-system)
(arguments
is distributed without tests
#:phases (alist-delete 'configure %standard-phases)
#:make-flags (list (string-append "PREFIX=" (assoc-ref %outputs "out")))))
(home-page "/")
(synopsis "Just in time compiler for Lua programming language version 5.1")
(description
"LuaJIT is a Just-In-Time Compiler (JIT) for the Lua
programming language. Lua is a powerful, dynamic and light-weight programming
language. It may be embedded or used as a general-purpose, stand-alone
language.")
(license license:x11)))
(define-public lua5.1-expat
(package
(name "lua5.1-expat")
(version "1.3.0")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luaexpat/luaexpat-" version ".tar.gz"))
(sha256
(base32
"1hvxqngn0wf5642i5p3vcyhg3pmp102k63s9ry4jqyyqc1wkjq6h"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list "CC=gcc"
(string-append "LUA_LDIR=" out "/share/lua/$(LUA_V)")
(string-append "LUA_CDIR=" out "/lib/lua/$(LUA_V)")))
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'check
(lambda _
(setenv "LUA_CPATH" "src/?.so;;")
(setenv "LUA_PATH" "src/?.lua;;")
(and (zero? (system* "lua" "tests/test.lua"))
(zero? (system* "lua" "tests/test-lom.lua"))))))))
(inputs
`(("lua" ,lua-5.1)
("expat" ,expat)))
(home-page "/")
(synopsis "SAX XML parser based on the Expat library")
(description "LuaExpat is a SAX XML parser based on the Expat library.")
(license (package-license lua-5.1))))
(define-public lua5.1-socket
(package
(name "lua5.1-socket")
(version "2.0.2")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasocket/luasocket/luasocket-"
version "/luasocket-" version ".tar.gz"))
(sha256
(base32
"19ichkbc4rxv00ggz8gyf29jibvc2wq9pqjik0ll326rrxswgnag"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list (string-append "INSTALL_TOP_SHARE=" out "/share/lua/5.1")
(string-append "INSTALL_TOP_LIB=" out "/lib/lua/5.1")))
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'check
(lambda _
(setenv "LUA_CPATH" (string-append "src/?.so." ,version ";;"))
(setenv "LUA_PATH" "src/?.lua;;")
(when (zero? (primitive-fork))
(system* "lua" "test/testsrvr.lua"))
(zero? (system* "lua" "test/testclnt.lua")))))))
(inputs
`(("lua" ,lua-5.1)))
(home-page "-rio.br/~diego/professional/luasocket/")
(synopsis "Socket library for Lua")
(description "LuaSocket is a Lua extension library that is composed by two
parts: a C core that provides support for the TCP and UDP transport layers,
and a set of Lua modules that add support for functionality commonly needed by
applications that deal with the Internet.
Among the supported modules, the most commonly used implement the
SMTP (sending e-mails), HTTP (WWW access) and FTP (uploading and downloading
files) client protocols. These provide a very natural and generic interface
to the functionality defined by each protocol. In addition, you will find
that the MIME (common encodings), URL (anything you could possible want to do
with one) and LTN12 (filters, sinks, sources and pumps) modules can be very
handy.")
(license (package-license lua-5.1))))
(define-public lua5.1-filesystem
(package
(name "lua5.1-filesystem")
(version "1.6.3")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luafilesystem/archive/v_"
"1_6_3" ".tar.gz"))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"0s10ckxin0bysd6gaywqhxkpw3ybjhprr8m655b8cx3pxjwd49am"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(list (string-append "PREFIX=" (assoc-ref %outputs "out")))
#:test-target "test"
#:phases
(modify-phases %standard-phases
(delete 'configure))))
(inputs
`(("lua" ,lua-5.1)))
(home-page "")
(synopsis "File system library for Lua")
(description "LuaFileSystem is a Lua library developed to complement the
set of functions related to file systems offered by the standard Lua
distribution. LuaFileSystem offers a portable way to access the underlying
directory structure and file attributes.")
(license (package-license lua-5.1))))
(define-public lua5.1-sec
(package
(name "lua5.1-sec")
(version "0.6")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasec-" version ".tar.gz"))
(sha256
(base32
"0pgd1anzznl4s0h16wg8dlw9mgdb9h52drlcki6sbf5y31fa7wyf"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list "linux"
"CC=gcc"
"LD=gcc"
(string-append "LUAPATH=" out "/share/lua/5.1")
(string-append "LUACPATH=" out "/lib/lua/5.1")))
#:tests? #f ; no tests included
#:phases
(modify-phases %standard-phases
(delete 'configure))))
(inputs
`(("lua" ,lua-5.1)
("openssl" ,openssl)))
(propagated-inputs
`(("lua-socket" ,lua5.1-socket)))
(home-page "")
(synopsis "OpenSSL bindings for Lua")
(description "LuaSec is a binding for OpenSSL library to provide TLS/SSL
communication. It takes an already established TCP connection and creates a
secure session between the peers.")
(license (package-license lua-5.1))))
(define-public lua5.1-sec-0.5
(package
(inherit lua5.1-sec)
(version "0.5.1")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasec-" version ".tar.gz"))
(sha256
(base32
"01llf5bcrjmqqy6m65avqkajz7h79rvkka6rd131kwr10n75yp3d"))))))
(define-public lua-lgi
(package
(name "lua-lgi")
(version "0.9.1")
(source
(origin
(method url-fetch)
(uri (string-append
"/"
version ".tar.gz"))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"1fmgdl5y4ph3yc6ycg865s3vai1rjkyda61cgqxk6zd13hmznw0c"))))
(build-system gnu-build-system)
(arguments
'(#:make-flags (list "CC=gcc"
(string-append "PREFIX=" (assoc-ref %outputs "out")))
#:phases
(modify-phases %standard-phases
(delete 'configure) ; no configure script
(add-before 'build 'set-env
(lambda* (#:key inputs #:allow-other-keys)
we need to load cairo dynamically
(let* ((cairo (string-append
(assoc-ref inputs "cairo") "/lib" )))
(setenv "LD_LIBRARY_PATH" cairo)
#t)))
(add-before 'build 'set-lua-version
(lambda _
lua version and therefore install directories are hardcoded
FIXME : This breaks when we update lua to > = 5.3
(substitute* "./lgi/Makefile"
(("LUA_VERSION=5.1") "LUA_VERSION=5.2"))
#t))
(add-before 'check 'skip-test-gtk
(lambda _
;; FIXME: Skip GTK tests:
gtk3 - ca n't get it to run with the xorg - server config below
;; and some non-gtk tests will also fail
gtk2 - lots of functions are n't implemented
We choose gtk2 as the lesser evil and simply skip the test .
;; Currently, awesome is the only package that uses lua-lgi but
it does n't need or interact with GTK using lua - lgi .
(substitute* "./tests/test.lua"
(("'gtk.lua',") "-- 'gtk.lua',"))
#t))
(add-before 'check 'start-xserver-instance
(lambda* (#:key inputs #:allow-other-keys)
;; There must be a running X server during tests.
(system (format #f "~a/bin/Xvfb :1 &"
(assoc-ref inputs "xorg-server")))
(setenv "DISPLAY" ":1")
#t)))))
(inputs
`(("gobject-introspection" ,gobject-introspection)
("glib" ,glib)
("pango", pango)
("gtk", gtk+-2)
("lua" ,lua)
("cairo" ,cairo)
("libffi" ,libffi)
("xorg-server", xorg-server)))
(native-inputs
`(("pkg-config" ,pkg-config)))
(home-page "/")
(synopsis "Lua bridge to GObject based libraries")
(description
"LGI is gobject-introspection based dynamic Lua binding to GObject
based libraries. It allows using GObject-based libraries directly from Lua.
Notable examples are GTK+, GStreamer and Webkit.")
(license license:expat)))
(define-public lua-lpeg
(package
(name "lua-lpeg")
(version "1.0.1")
(source (origin
(method url-fetch)
(uri (string-append "-rio.br/~roberto/lpeg/lpeg-"
version ".tar.gz"))
(sha256
(base32 "0sq25z3r324a324ky73izgq9mbf66j2xvjp0fxf227rwxalzgnb2"))))
(build-system gnu-build-system)
(arguments
`(#:phases
(modify-phases %standard-phases
(delete 'configure)
;; `make install` isn't available, so we have to do it manually
(replace 'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out"))
(lua-version ,(version-major+minor (package-version lua))))
(install-file "lpeg.so"
(string-append out "/lib/lua/" lua-version))
(install-file "re.lua"
(string-append out "/share/lua/" lua-version))
#t))))
#:test-target "test"))
(inputs `(("lua", lua)))
(synopsis "Pattern-matching library for Lua")
(description
"LPeg is a pattern-matching library for Lua, based on Parsing Expression
Grammars (PEGs).")
(home-page "-rio.br/~roberto/lpeg")
(license license:expat)))
(define-public lua5.2-lpeg
(package (inherit lua-lpeg)
(name "lua5.2-lpeg")
;; XXX: The arguments field is almost an exact copy of the field in
;; "lua-lpeg", except for the version string, which was derived from "lua"
;; and now is taken from "lua-5.2". See this discussion for context:
;; -devel/2017-01/msg02048.html
(arguments
`(#:phases
(modify-phases %standard-phases
(delete 'configure)
;; `make install` isn't available, so we have to do it manually
(replace 'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out"))
(lua-version ,(version-major+minor (package-version lua-5.2))))
(install-file "lpeg.so"
(string-append out "/lib/lua/" lua-version))
(install-file "re.lua"
(string-append out "/share/lua/" lua-version))
#t))))
#:test-target "test"))
(inputs `(("lua", lua-5.2)))))
;; Lua 5.3 is not supported.
(define-public lua5.2-bitop
(package
(name "lua5.2-bitop")
(version "1.0.2")
(source (origin
(method url-fetch)
(uri (string-append "/"
"LuaBitOp-" version ".tar.gz"))
(sha256
(base32
"16fffbrgfcw40kskh2bn9q7m3gajffwd2f35rafynlnd7llwj1qj"))))
(build-system gnu-build-system)
(arguments
`(#:test-target "test"
#:make-flags
(list "INSTALL=install -pD"
(string-append "INSTALLPATH=printf "
(assoc-ref %outputs "out")
"/lib/lua/"
,(version-major+minor (package-version lua-5.2))
"/bit/bit.so"))
#:phases
(modify-phases %standard-phases
(delete 'configure))))
(inputs `(("lua", lua-5.2)))
(home-page "")
(synopsis "Bitwise operations on numbers for Lua")
(description
"Lua BitOp is a C extension module for Lua which adds bitwise operations
on numbers.")
(license license:expat)))
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/gnu/packages/lua.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
no tests included
no configure script
FIXME: Skip GTK tests:
and some non-gtk tests will also fail
Currently, awesome is the only package that uses lua-lgi but
There must be a running X server during tests.
`make install` isn't available, so we have to do it manually
XXX: The arguments field is almost an exact copy of the field in
"lua-lpeg", except for the version string, which was derived from "lua"
and now is taken from "lua-5.2". See this discussion for context:
-devel/2017-01/msg02048.html
`make install` isn't available, so we have to do it manually
Lua 5.3 is not supported. | Copyright © 2013 < >
Copyright © 2014 < >
Copyright © 2014 < >
Copyright © 2014 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2016 doncatnip < >
Copyright © 2016 Clément < >
Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages lua)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages readline)
#:use-module (gnu packages tls)
#:use-module (gnu packages xml)
#:use-module (gnu packages glib)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages xorg)
#:use-module (gnu packages gtk))
(define-public lua
(package
(name "lua")
(version "5.3.4")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0320a8dg3aci4hxla380dx1ifkw8gj4gbw5c4dz41g1kh98sm0gn"))
(patches (search-patches "lua-pkgconfig.patch"
"lua-liblua-so.patch"))))
(build-system gnu-build-system)
(inputs `(("readline" ,readline)))
(arguments
'(#:modules ((guix build gnu-build-system)
(guix build utils)
(srfi srfi-1))
#:test-target "test"
#:make-flags
'("MYCFLAGS=-fPIC -DLUA_DL_DLOPEN"
"linux")
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(zero? (system* "make" "install"
(string-append "INSTALL_TOP=" out)
(string-append "INSTALL_MAN=" out
"/share/man/man1")))))))))
(home-page "/")
(synopsis "Embeddable scripting language")
(description
"Lua is a powerful, fast, lightweight, embeddable scripting language. Lua
combines simple procedural syntax with powerful data description constructs
based on associative arrays and extensible semantics. Lua is dynamically typed,
runs by interpreting bytecode for a register-based virtual machine, and has
automatic memory management with incremental garbage collection, making it ideal
for configuration, scripting, and rapid prototyping.")
(license license:x11)))
(define-public lua-5.2
(package (inherit lua)
(version "5.2.4")
(source
(origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0jwznq0l8qg9wh5grwg07b5cy3lzngvl5m2nl1ikp6vqssmf9qmr"))
(patches (search-patches "lua-pkgconfig.patch"
"lua-liblua-so.patch"))))))
(define-public lua-5.1
(package (inherit lua)
(version "5.1.5")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0cskd4w0g6rdm2q8q3i4n1h3j8kylhs3rq8mxwl9vwlmlxbgqh16"))
(patches (search-patches "lua51-liblua-so.patch"
"lua-CVE-2014-5461.patch"
"lua51-pkgconfig.patch"))))))
(define-public luajit
(package
(name "luajit")
(version "2.0.4")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.gz"))
(sha256
(base32 "0zc0y7p6nx1c0pp4nhgbdgjljpfxsb5kgwp4ysz22l1p2bms83v2"))
(patches (search-patches "luajit-symlinks.patch"
"luajit-no_ldconfig.patch"))))
(build-system gnu-build-system)
(arguments
is distributed without tests
#:phases (alist-delete 'configure %standard-phases)
#:make-flags (list (string-append "PREFIX=" (assoc-ref %outputs "out")))))
(home-page "/")
(synopsis "Just in time compiler for Lua programming language version 5.1")
(description
"LuaJIT is a Just-In-Time Compiler (JIT) for the Lua
programming language. Lua is a powerful, dynamic and light-weight programming
language. It may be embedded or used as a general-purpose, stand-alone
language.")
(license license:x11)))
(define-public lua5.1-expat
(package
(name "lua5.1-expat")
(version "1.3.0")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luaexpat/luaexpat-" version ".tar.gz"))
(sha256
(base32
"1hvxqngn0wf5642i5p3vcyhg3pmp102k63s9ry4jqyyqc1wkjq6h"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list "CC=gcc"
(string-append "LUA_LDIR=" out "/share/lua/$(LUA_V)")
(string-append "LUA_CDIR=" out "/lib/lua/$(LUA_V)")))
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'check
(lambda _
(setenv "LUA_CPATH" "src/?.so;;")
(setenv "LUA_PATH" "src/?.lua;;")
(and (zero? (system* "lua" "tests/test.lua"))
(zero? (system* "lua" "tests/test-lom.lua"))))))))
(inputs
`(("lua" ,lua-5.1)
("expat" ,expat)))
(home-page "/")
(synopsis "SAX XML parser based on the Expat library")
(description "LuaExpat is a SAX XML parser based on the Expat library.")
(license (package-license lua-5.1))))
(define-public lua5.1-socket
(package
(name "lua5.1-socket")
(version "2.0.2")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasocket/luasocket/luasocket-"
version "/luasocket-" version ".tar.gz"))
(sha256
(base32
"19ichkbc4rxv00ggz8gyf29jibvc2wq9pqjik0ll326rrxswgnag"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list (string-append "INSTALL_TOP_SHARE=" out "/share/lua/5.1")
(string-append "INSTALL_TOP_LIB=" out "/lib/lua/5.1")))
#:phases
(modify-phases %standard-phases
(delete 'configure)
(replace 'check
(lambda _
(setenv "LUA_CPATH" (string-append "src/?.so." ,version ";;"))
(setenv "LUA_PATH" "src/?.lua;;")
(when (zero? (primitive-fork))
(system* "lua" "test/testsrvr.lua"))
(zero? (system* "lua" "test/testclnt.lua")))))))
(inputs
`(("lua" ,lua-5.1)))
(home-page "-rio.br/~diego/professional/luasocket/")
(synopsis "Socket library for Lua")
(description "LuaSocket is a Lua extension library that is composed by two
parts: a C core that provides support for the TCP and UDP transport layers,
and a set of Lua modules that add support for functionality commonly needed by
applications that deal with the Internet.
Among the supported modules, the most commonly used implement the
SMTP (sending e-mails), HTTP (WWW access) and FTP (uploading and downloading
files) client protocols. These provide a very natural and generic interface
to the functionality defined by each protocol. In addition, you will find
that the MIME (common encodings), URL (anything you could possible want to do
with one) and LTN12 (filters, sinks, sources and pumps) modules can be very
handy.")
(license (package-license lua-5.1))))
(define-public lua5.1-filesystem
(package
(name "lua5.1-filesystem")
(version "1.6.3")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luafilesystem/archive/v_"
"1_6_3" ".tar.gz"))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"0s10ckxin0bysd6gaywqhxkpw3ybjhprr8m655b8cx3pxjwd49am"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(list (string-append "PREFIX=" (assoc-ref %outputs "out")))
#:test-target "test"
#:phases
(modify-phases %standard-phases
(delete 'configure))))
(inputs
`(("lua" ,lua-5.1)))
(home-page "")
(synopsis "File system library for Lua")
(description "LuaFileSystem is a Lua library developed to complement the
set of functions related to file systems offered by the standard Lua
distribution. LuaFileSystem offers a portable way to access the underlying
directory structure and file attributes.")
(license (package-license lua-5.1))))
(define-public lua5.1-sec
(package
(name "lua5.1-sec")
(version "0.6")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasec-" version ".tar.gz"))
(sha256
(base32
"0pgd1anzznl4s0h16wg8dlw9mgdb9h52drlcki6sbf5y31fa7wyf"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags
(let ((out (assoc-ref %outputs "out")))
(list "linux"
"CC=gcc"
"LD=gcc"
(string-append "LUAPATH=" out "/share/lua/5.1")
(string-append "LUACPATH=" out "/lib/lua/5.1")))
#:phases
(modify-phases %standard-phases
(delete 'configure))))
(inputs
`(("lua" ,lua-5.1)
("openssl" ,openssl)))
(propagated-inputs
`(("lua-socket" ,lua5.1-socket)))
(home-page "")
(synopsis "OpenSSL bindings for Lua")
(description "LuaSec is a binding for OpenSSL library to provide TLS/SSL
communication. It takes an already established TCP connection and creates a
secure session between the peers.")
(license (package-license lua-5.1))))
(define-public lua5.1-sec-0.5
(package
(inherit lua5.1-sec)
(version "0.5.1")
(source (origin
(method url-fetch)
(uri (string-append "/"
"luasec-" version ".tar.gz"))
(sha256
(base32
"01llf5bcrjmqqy6m65avqkajz7h79rvkka6rd131kwr10n75yp3d"))))))
;; Dynamic GObject-introspection bridge for Lua.  Several in-phase comments
;; had lost their ";;" markers (leaving bare symbols that would not read);
;; they are restored below.  Input-list unquote style is normalised to the
;; conventional ("name" ,value) form.
(define-public lua-lgi
  (package
    (name "lua-lgi")
    (version "0.9.1")
    (source
     (origin
       (method url-fetch)
       ;; NOTE(review): the URI host was stripped in this copy; only the
       ;; trailing "/" of the upstream URL remains — restore it.
       (uri (string-append
             "/"
             version ".tar.gz"))
       (file-name (string-append name "-" version ".tar.gz"))
       (sha256
        (base32
         "1fmgdl5y4ph3yc6ycg865s3vai1rjkyda61cgqxk6zd13hmznw0c"))))
    (build-system gnu-build-system)
    (arguments
     '(#:make-flags (list "CC=gcc"
                          (string-append "PREFIX=" (assoc-ref %outputs "out")))
       #:phases
       (modify-phases %standard-phases
         (add-before 'build 'set-env
           (lambda* (#:key inputs #:allow-other-keys)
             ;; We need to load cairo dynamically.
             (let* ((cairo (string-append
                            (assoc-ref inputs "cairo") "/lib" )))
               (setenv "LD_LIBRARY_PATH" cairo)
               #t)))
         (add-before 'build 'set-lua-version
           (lambda _
             ;; The Lua version and therefore the install directories are
             ;; hardcoded in the makefile.
             ;; FIXME: This breaks when we update lua to >= 5.3.
             (substitute* "./lgi/Makefile"
               (("LUA_VERSION=5.1") "LUA_VERSION=5.2"))
             #t))
         (add-before 'check 'skip-test-gtk
           (lambda _
             ;; Skip the GTK test:
             ;;   gtk3 - can't get it to run with the xorg-server config below
             ;;   gtk2 - lots of functions aren't implemented
             ;; We choose gtk2 as the lesser evil and simply skip the test,
             ;; since the dependent package doesn't need or interact with
             ;; GTK using lua-lgi.
             (substitute* "./tests/test.lua"
               (("'gtk.lua',") "-- 'gtk.lua',"))
             #t))
         (add-before 'check 'start-xserver-instance
           (lambda* (#:key inputs #:allow-other-keys)
             ;; The test suite needs a running X server.
             (system (format #f "~a/bin/Xvfb :1 &"
                             (assoc-ref inputs "xorg-server")))
             (setenv "DISPLAY" ":1")
             #t)))))
    (inputs
     `(("gobject-introspection" ,gobject-introspection)
       ("glib" ,glib)
       ("pango" ,pango)
       ("gtk" ,gtk+-2)
       ("lua" ,lua)
       ("cairo" ,cairo)
       ("libffi" ,libffi)
       ("xorg-server" ,xorg-server)))
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (home-page "/")
    (synopsis "Lua bridge to GObject based libraries")
    (description
     "LGI is gobject-introspection based dynamic Lua binding to GObject
based libraries.  It allows using GObject-based libraries directly from Lua.
Notable examples are GTK+, GStreamer and Webkit.")
    (license license:expat)))
;; LPeg parsing-expression-grammar library for the default Lua version.
;; Upstream ships no configure script and no install target, so the two
;; artefacts are copied manually into the Lua-versioned directories.
(define-public lua-lpeg
  (package
    (name "lua-lpeg")
    (version "1.0.1")
    (source (origin
              (method url-fetch)
              ;; NOTE(review): the URI scheme/host was stripped in this
              ;; copy; only a fragment of the upstream URL remains.
              (uri (string-append "-rio.br/~roberto/lpeg/lpeg-"
                                  version ".tar.gz"))
              (sha256
               (base32 "0sq25z3r324a324ky73izgq9mbf66j2xvjp0fxf227rwxalzgnb2"))))
    (build-system gnu-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (delete 'configure)            ; no configure script upstream
         (replace 'install
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   ;; install into the directory matching the Lua version
                   ;; we build against, e.g. lib/lua/5.3
                   (lua-version ,(version-major+minor (package-version lua))))
               (install-file "lpeg.so"
                             (string-append out "/lib/lua/" lua-version))
               (install-file "re.lua"
                             (string-append out "/share/lua/" lua-version))
               #t))))
       #:test-target "test"))
    (inputs `(("lua", lua)))
    (synopsis "Pattern-matching library for Lua")
    (description
     "LPeg is a pattern-matching library for Lua, based on Parsing Expression
Grammars (PEGs).")
    (home-page "-rio.br/~roberto/lpeg")
    (license license:expat)))

;; The same package built against Lua 5.2: only the install directories
;; (derived from the Lua version) and the Lua input differ.
(define-public lua5.2-lpeg
  (package (inherit lua-lpeg)
    (name "lua5.2-lpeg")
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (delete 'configure)
         (replace 'install
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   (lua-version ,(version-major+minor (package-version lua-5.2))))
               (install-file "lpeg.so"
                             (string-append out "/lib/lua/" lua-version))
               (install-file "re.lua"
                             (string-append out "/share/lua/" lua-version))
               #t))))
       #:test-target "test"))
    (inputs `(("lua", lua-5.2)))))
;; Bitwise-operations extension module for Lua 5.2 (which has no native
;; bit operators).  The makefile installs a single shared object, so
;; INSTALL and INSTALLPATH are overridden to target the versioned
;; lib/lua/<version>/bit directory.
(define-public lua5.2-bitop
  (package
    (name "lua5.2-bitop")
    (version "1.0.2")
    (source (origin
              (method url-fetch)
              ;; NOTE(review): the URI host was stripped in this copy;
              ;; only the "/" prefix of the upstream URL remains.
              (uri (string-append "/"
                                  "LuaBitOp-" version ".tar.gz"))
              (sha256
               (base32
                "16fffbrgfcw40kskh2bn9q7m3gajffwd2f35rafynlnd7llwj1qj"))))
    (build-system gnu-build-system)
    (arguments
     `(#:test-target "test"
       #:make-flags
       (list "INSTALL=install -pD"
             (string-append "INSTALLPATH=printf "
                            (assoc-ref %outputs "out")
                            "/lib/lua/"
                            ,(version-major+minor (package-version lua-5.2))
                            "/bit/bit.so"))
       #:phases
       (modify-phases %standard-phases
         (delete 'configure))))         ; no configure script upstream
    (inputs `(("lua", lua-5.2)))
    (home-page "")
    (synopsis "Bitwise operations on numbers for Lua")
    (description
     "Lua BitOp is a C extension module for Lua which adds bitwise operations
on numbers.")
    (license license:expat)))
|
fe89123b3692823f35dd4356b0408d61108d6e09c58082a4b44ca9ad767ff1ec | PEZ/rich4clojure | problem_112.clj | (ns rich4clojure.medium.problem-112
(:require [hyperfiddle.rcf :refer [tests]]))
= =
By 4Clojure user :
;; Difficulty: Medium
;; Tags: [seqs]
;;
;; Create a function which takes an integer and a nested
;; collection of integers as arguments. Analyze the
;; elements of the input collection and return a sequence
;; which maintains the nested structure, and which
;; includes all elements starting from the head whose sum
;; is less than or equal to the input integer.
(def __ :tests-will-fail)
(comment
)
(tests
(__ 10 [1 2 [3 [4 5] 6] 7]) :=
'(1 2 (3 (4)))
(__ 30 [1 2 [3 [4 [5 [6 [7 8]] 9]] 10] 11]) :=
'(1 2 (3 (4 (5 (6 (7))))))
(__ 9 (range)) :=
'(0 1 2 3)
(__ 1 [[[[[1]]]]]) :=
'(((((1)))))
(__ 0 [1 2 [3 [4 5] 6] 7]) :=
'()
(__ 0 [0 0 [0 [0]]]) :=
'(0 0 (0 (0)))
(__ 1 [-10 [1 [2 3 [4 5 [6 7 [8]]]]]]) :=
'(-10 (1 (2 3 (4)))))
;; Share your solution, and/or check how others did it:
;; | null | https://raw.githubusercontent.com/PEZ/rich4clojure/2ccfac041840e9b1550f0a69b9becbdb03f9525b/src/rich4clojure/medium/problem_112.clj | clojure | Difficulty: Medium
Tags: [seqs]
Create a function which takes an integer and a nested
collection of integers as arguments. Analyze the
elements of the input collection and return a sequence
which maintains the nested structure, and which
includes all elements starting from the head whose sum
is less than or equal to the input integer.
Share your solution, and/or check how others did it:
| (ns rich4clojure.medium.problem-112
(:require [hyperfiddle.rcf :refer [tests]]))
= =
By 4Clojure user :
(def __ :tests-will-fail)
(comment
)
(tests
(__ 10 [1 2 [3 [4 5] 6] 7]) :=
'(1 2 (3 (4)))
(__ 30 [1 2 [3 [4 [5 [6 [7 8]] 9]] 10] 11]) :=
'(1 2 (3 (4 (5 (6 (7))))))
(__ 9 (range)) :=
'(0 1 2 3)
(__ 1 [[[[[1]]]]]) :=
'(((((1)))))
(__ 0 [1 2 [3 [4 5] 6] 7]) :=
'()
(__ 0 [0 0 [0 [0]]]) :=
'(0 0 (0 (0)))
(__ 1 [-10 [1 [2 3 [4 5 [6 7 [8]]]]]]) :=
'(-10 (1 (2 3 (4)))))
|
cb02b3a82b57e7e1cc70cea30413938615d58febfbbbf7d97e6cad262f21e4b3 | DSiSc/why3 | glob.mli | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
open Ident
val flag: Debug.flag
val dummy_id: ident
val def: kind:string -> ident -> unit
(** [def id] registers that [id] is defined at position [id.id_loc] *)
val use: kind:string -> Loc.position -> ident -> unit
(** [use loc id] registers that [id] is used at position [loc] *)
type def_use = Def | Use
val find: Loc.position -> ident * def_use * string
(** [find pos] returns the ident at position [pos], if any, as well as its
used/defined status and its kind, or raises [Not_found] *)
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/parser/glob.mli | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
* [def id] registers that [id] is defined at position [id.id_loc]
* [use loc id] registers that [id] is used at position [loc]
* [find pos] returns the ident at position [pos], if any, as well as its
used/defined status and its kind, or raises [Not_found] | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
open Ident
val flag: Debug.flag
val dummy_id: ident
val def: kind:string -> ident -> unit
val use: kind:string -> Loc.position -> ident -> unit
type def_use = Def | Use
val find: Loc.position -> ident * def_use * string
|
73649ffbb94fc5a67677ee55eb7aa0c33950aa3d148a068b55bd96aebbe212c9 | haskell/c2hs | Trav.hs | C->Haskell Compiler : traversals of C structure tree
--
Author :
Created : 16 October 99
--
Copyright ( c ) [ 1999 .. 2001 ]
--
-- This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This modules provides for traversals of C structure trees. The C
-- traversal monad supports traversals that need convenient access to the
-- attributes of an attributed C structure tree. The monads state can still
-- be extended.
--
--- DOCU ----------------------------------------------------------------------
--
language : 98
--
-- Handling of redefined tag values
-- --------------------------------
--
-- Structures allow both
--
-- struct s {...} ...;
-- struct s ...;
--
-- and
--
-- struct s ...; /* this is called a forward reference */
-- struct s {...} ...;
--
-- In contrast enumerations only allow (in ANSI C)
--
-- enum e {...} ...;
-- enum e ...;
--
-- The function `defTag' handles both types and establishes an object
association from the tag identifier in the empty declaration ( i.e. the one
-- without `{...}') to the actually definition of the structure of
-- enumeration. This implies that when looking for the details of a
-- structure or enumeration, possibly a chain of references on tag
-- identifiers has to be chased. Note that the object association attribute
-- is _not_defined_ when the `{...}' part is present in a declaration.
--
--- TODO ----------------------------------------------------------------------
--
-- * `extractStruct' doesn't account for forward declarations that have no
-- full declaration yet; if `extractStruct' is called on such a declaration,
-- we have a user error, but currently an internal error is raised
--
module C2HS.C.Trav (CT, readCT, transCT, runCT, throwCTExc, ifCTExc,
raiseErrorCTExc,
enter, enterObjs, leave, leaveObjs, defObj, findObj,
findObjShadow, defTag, findTag, findTagShadow,
applyPrefixToNameSpaces, getDefOf, refersToDef, refersToNewDef,
getDeclOf, findTypeObjMaybe, findTypeObj, findValueObj,
findFunObj,
--
-- C structure tree query functions
--
isTypedef, simplifyDecl, declrFromDecl, declrNamed,
declaredDeclr, initDeclr, declaredName, structMembers, expandDecl,
structName, enumName, tagName, isPtrDeclr, isArrDeclr,
dropPtrDeclr, isPtrDecl, isArrDecl, isFunDeclr, structFromDecl,
funResultAndArgs, chaseDecl, findAndChaseDecl,
findAndChaseDeclOrTag, checkForAlias, checkForOneCUName,
checkForOneAliasName, lookupEnum, lookupStructUnion,
lookupDeclOrTag)
where
import Data.List (find)
import Control.Monad (liftM)
import Control.Exception (assert)
import Language.C.Data
import Language.C.Data.Ident (dumpIdent)
import Language.C.Syntax
import Data.Attributes
import Data.Errors
import C2HS.State (CST, readCST, transCST, runCST, raiseError, catchExc,
throwExc, Traces(..), putTraceStr)
import C2HS.C.Attrs (AttrC(..), enterNewRangeC, enterNewObjRangeC,
leaveRangeC, leaveObjRangeC, addDefObjC, lookupDefObjC,
lookupDefObjCShadow, addDefTagC, lookupDefTagC,
lookupDefTagCShadow, applyPrefix, getDefOfIdentC,
setDefOfIdentC, updDefOfIdentC, CObj(..), CTag(..),
CDef(..))
-- the C traversal monad
-- ---------------------

-- | C traversal monad: a `CST' computation whose state couples the
-- attributed C structure tree with a client-supplied state component
--
type CState s = (AttrC, s)
type CT s a = CST (CState s) a

-- | project a value out of the attributed structure tree
--
readAttrCCT :: (AttrC -> a) -> CT s a
readAttrCCT reader = readCST (\(attrs, _) -> reader attrs)

-- | update the attributed structure tree, yielding a result
--
transAttrCCT :: (AttrC -> (AttrC, a)) -> CT s a
transAttrCCT trans = transCST $ \(attrs, ustate) ->
  let (attrs', res) = trans attrs
  in  ((attrs', ustate), res)

-- access to the user-defined state
--

-- | project a value out of the user-defined state
--
readCT :: (s -> a) -> CT s a
readCT reader = readCST (\(_, ustate) -> reader ustate)

-- | update the user-defined state, yielding a result
--
transCT :: (s -> (s, a)) -> CT s a
transCT trans = transCST $ \(attrs, ustate) ->
  let (ustate', res) = trans ustate
  in  ((attrs, ustate'), res)
-- usage of a traversal monad
--

-- | execute a traversal monad
--
-- * given a traversal monad, an attributed structure tree, and a user
--   state, the transformed structure tree and the monad's result are
--   returned
--
runCT :: CT s a -> AttrC -> s -> CST t (AttrC, a)
runCT m ac s = runCST collect (ac, s)
  where
    -- run the traversal, then pair its result with the final tree
    collect = do
      res <- m
      (ac', _) <- readCST id
      return (ac', res)
-- exception handling
-- ------------------

-- | identifier of the exception used by C traversals
--
ctExc :: String
ctExc = "ctExc"

-- | abort the current traversal by throwing a `ctExc'
--
throwCTExc :: CT s a
throwCTExc =
  throwExc ctExc "Error during traversal of a C structure tree"

-- | run the first computation; if it throws a `ctExc', run the handler
-- instead
--
ifCTExc :: CT s a -> CT s a -> CT s a
ifCTExc m handler = m `catchExc` (ctExc, \_ -> handler)

-- | report an error at the given position, then throw a `ctExc'
--
raiseErrorCTExc :: Position -> [String] -> CT s a
raiseErrorCTExc pos errs = do
  raiseError pos errs
  throwCTExc
-- attribute manipulation
-- ----------------------

-- name spaces
--

-- | open a new local range in both name spaces
--
enter :: CT s ()
enter = transAttrCCT (\ac -> (enterNewRangeC ac, ()))

-- | open a new local range in the object name space only
--
enterObjs :: CT s ()
enterObjs = transAttrCCT (\ac -> (enterNewObjRangeC ac, ()))

-- | close the current local range in both name spaces
--
leave :: CT s ()
leave = transAttrCCT (\ac -> (leaveRangeC ac, ()))

-- | close the current local range in the object name space only
--
leaveObjs :: CT s ()
leaveObjs = transAttrCCT (\ac -> (leaveObjRangeC ac, ()))

-- | enter an object definition into the object name space
--
-- * if a definition of the same name was already present, it is returned
--
defObj :: Ident -> CObj -> CT s (Maybe CObj)
defObj ide obj =
  traceCTrav ("Defining object " ++ show ide ++ "...\n") >>
  transAttrCCT (\ac -> addDefObjC ac ide obj)

-- | look an identifier up in the object name space
--
findObj :: Ident -> CT s (Maybe CObj)
findObj ide = readAttrCCT (\ac -> lookupDefObjC ac ide)

-- | look an identifier up in the object name space; if nothing is found,
-- also try a matching shadow identifier
--
findObjShadow :: Ident -> CT s (Maybe (CObj, Ident))
findObjShadow ide = readAttrCCT (\ac -> lookupDefObjCShadow ac ide)
-- | enter a tag definition into the tag name space
--
-- * empty definitions of structures get overwritten with complete ones and a
--   forward reference is added to their tag identifier; furthermore, both
--   structures and enums may be referenced using an empty definition when
--   there was a full definition earlier and in this case there is also an
--   object association added; otherwise, if a definition of the same name
--   was already present, it is returned (see DOCU section)
--
-- * it is checked that the first occurrence of an enumeration tag is
--   accompanied by a full definition of the enumeration
--
defTag :: Ident -> CTag -> CT s (Maybe CTag)
defTag ide tag =
  do
    traceCTrav $ "Defining tag "++show ide++"...\n"
    otag <- transAttrCCT $ \ac -> addDefTagC ac ide tag
    case otag of
      Nothing      -> return Nothing            -- no collision
      Just prevTag -> case isRefinedOrUse prevTag tag of
                        Nothing                 -> return otag
                        Just (fullTag, foreIde) -> do
                          -- overwrite the entry with the full definition and
                          -- link the forward tag identifier to it
                          _ <- transAttrCCT $ \ac -> addDefTagC ac ide fullTag
                          foreIde `refersToDef` TagCD fullTag
                          return Nothing        -- transparent for env
  where
    -- compute whether we have the case of a non-conflicting redefined tag
    -- definition, and if so, return the full definition and the foreward
    -- definition's tag identifier
    --
    -- * the first argument contains the _previous_ definition
    --
    -- * in the case of a structure, a foreward definition after a full
    --   definition is allowed, so we have to handle this case; enumerations
    --   don't allow foreward definitions
    --
    -- * there may also be multiple foreward definitions; if we have two of
    --   them here, one is arbitrarily selected to take the role of the full
    --   definition
    --
    isRefinedOrUse (StructUnionCT (CStruct _ (Just ide') Nothing _ _))
                   tag'@(StructUnionCT (CStruct _ (Just _) _ _ _))     =
      Just (tag', ide')
    isRefinedOrUse tag'@(StructUnionCT (CStruct _ (Just _) _ _ _))
                   (StructUnionCT (CStruct _ (Just ide') Nothing _ _)) =
      Just (tag', ide')
    isRefinedOrUse (EnumCT (CEnum (Just ide') Nothing _ _))
                   tag'@(EnumCT (CEnum (Just _) _ _ _))                =
      Just (tag', ide')
    isRefinedOrUse tag'@(EnumCT (CEnum (Just ide') _ _ _))
                   (EnumCT (CEnum (Just _) _ _ _))                     =
      Just (tag', ide')
    isRefinedOrUse _ _                                                 =
      Nothing
-- | look an identifier up in the tag name space
--
findTag :: Ident -> CT s (Maybe CTag)
findTag ide = readAttrCCT (\ac -> lookupDefTagC ac ide)

-- | look an identifier up in the tag name space; if nothing is found, also
-- try a matching shadow identifier
--
findTagShadow :: Ident -> CT s (Maybe (CTag, Ident))
findTagShadow ide = readAttrCCT (\ac -> lookupDefTagCShadow ac ide)

-- | enrich the object and tag name space with identifiers obtained by
-- dropping the given prefix from the identifiers already in the name space
--
-- * if a new identifier would collide with an existing one, the new one is
--   discarded, i.e. all associations that existed before the transformation
--   started are still in effect after the transformation
--
applyPrefixToNameSpaces :: String -> String -> CT s ()
applyPrefixToNameSpaces prefix repprefix =
  transAttrCCT (\ac -> (applyPrefix ac prefix repprefix, ()))

-- definition attribute
--

-- | get the definition of an identifier
--
-- * the attribute must be defined, i.e. a definition must be associated with
--   the given identifier
--
getDefOf :: Ident -> CT s CDef
getDefOf ide = do
  def <- readAttrCCT (\ac -> getDefOfIdentC ac ide)
  assert (not (isUndef def)) $ return def

-- | set the definition of an identifier
--
refersToDef :: Ident -> CDef -> CT s ()
refersToDef ide def = do
  traceCTrav ("linking identifier: " ++ dumpIdent ide ++ " --> " ++ show def)
  transAttrCCT (\akl -> (setDefOfIdentC akl ide def, ()))

-- | update the definition of an identifier
--
refersToNewDef :: Ident -> CDef -> CT s ()
refersToNewDef ide def =
  transAttrCCT (\akl -> (updDefOfIdentC akl ide def, ()))
-- | get the declarator of an identifier
--
-- * the identifier must refer to a type or vanilla object (or a builtin
--   with a proxy declaration); tags, enumerators, and proxy-less builtins
--   are internal errors here
--
getDeclOf :: Ident -> CT s CDecl
getDeclOf ide =
  do
    traceEnter
    def <- getDefOf ide
    case def of
      UndefCD    -> interr "CTrav.getDeclOf: Undefined!"
      DontCareCD -> interr "CTrav.getDeclOf: Don't care!"
      TagCD _    -> interr "CTrav.getDeclOf: Illegal tag!"
      ObjCD obj  -> case obj of
                      TypeCO decl           -> traceTypeCO decl >>
                                               return decl
                      ObjCO decl            -> traceObjCO decl >>
                                               return decl
                      EnumCO _ _            -> illegalEnum
                      BuiltinCO Nothing     -> illegalBuiltin
                      BuiltinCO (Just decl) -> traceBuiltinCO >>
                                               return decl
  where
    illegalEnum    = interr "CTrav.getDeclOf: Illegal enum!"
    illegalBuiltin = interr "CTrav.getDeclOf: Attempted to get declarator of \
                            \builtin entity!"
    -- if the latter ever becomes necessary, we have to
    -- change the representation of builtins and give them
    -- some dummy declarator
    traceEnter       = traceCTrav
                       $ "Entering `getDeclOf' for `" ++ identToString ide
                         ++ "'...\n"
    traceTypeCO decl = traceCTrav
                       $ "...found a type object:\n" ++ show decl ++ "\n"
    traceObjCO decl  = traceCTrav
                       $ "...found a vanilla object:\n" ++ show decl ++ "\n"
    traceBuiltinCO   = traceCTrav
                       $ "...found a builtin object with a proxy decl.\n"
-- convenience functions
--

-- | look up a type object; `soft' controls whether a non-type hit yields
-- `Nothing' (soft) or raises an error (not soft)
--
findTypeObjMaybeWith :: Bool -> Ident -> Bool -> CT s (Maybe (CObj, Ident))
findTypeObjMaybeWith soft ide useShadows =
  do
    oobj <- if useShadows
              then findObjShadow ide
              else liftM (fmap (\obj -> (obj, ide))) (findObj ide)
    case oobj of
      Just hit@(TypeCO _   , _) -> return (Just hit)
      Just hit@(BuiltinCO _, _) -> return (Just hit)
      Just _ | soft             -> return Nothing
             | otherwise        -> typedefExpectedErr ide
      Nothing                   -> return Nothing

-- | find a type object in the object name space; returns 'Nothing' if the
-- identifier is not defined
--
-- * if the second argument is 'True', use 'findObjShadow'
--
findTypeObjMaybe :: Ident -> Bool -> CT s (Maybe (CObj, Ident))
findTypeObjMaybe = findTypeObjMaybeWith False

-- | find a type object in the object name space; raises an error and
-- exception if the identifier is not defined
--
-- * if the second argument is 'True', use 'findObjShadow'
--
findTypeObj :: Ident -> Bool -> CT s (CObj, Ident)
findTypeObj ide useShadows =
  findTypeObjMaybe ide useShadows >>= maybe (unknownObjErr ide) return

-- | find an object, function, or enumerator in the object name space;
-- raises an error and exception if the identifier is not defined
--
-- * if the second argument is 'True', use 'findObjShadow'
--
findValueObj :: Ident -> Bool -> CT s (CObj, Ident)
findValueObj ide useShadows =
  do
    oobj <- if useShadows
              then findObjShadow ide
              else liftM (fmap (\obj -> (obj, ide))) (findObj ide)
    case oobj of
      Just hit@(ObjCO  _  , _) -> return hit
      Just hit@(EnumCO _ _, _) -> return hit
      Just _                   -> unexpectedTypedefErr (posOf ide)
      Nothing                  -> unknownObjErr ide

-- | find a function in the object name space; raises an error and exception
-- if the identifier is not defined
--
-- * if the second argument is 'True', use 'findObjShadow'
--
findFunObj :: Ident -> Bool -> CT s (CObj, Ident)
findFunObj ide useShadows = do
  found@(obj, ide') <- findValueObj ide useShadows
  case obj of
    EnumCO _ _ -> funExpectedErr (posOf ide)
    ObjCO decl -> do
      -- make sure the declarator indeed declares a function
      assertFunDeclr (posOf ide) (ide' `declrFromDecl` decl)
      return found
-- C structure tree query routines
-- -------------------------------
-- | test if this is a type definition specification
--
isTypedef :: CDecl -> Bool
isTypedef (CDecl specs _ _) =
not . null $ [() | CStorageSpec (CTypedef _) <- specs]
-- | discard all declarators but the one declaring the given identifier
--
-- * the declaration must contain the identifier
--
simplifyDecl :: Ident -> CDecl -> CDecl
ide `simplifyDecl` (CDecl specs declrs at) =
case find (`declrPlusNamed` ide) declrs of
Nothing -> err
Just declr -> CDecl specs [declr] at
where
(Just declr, _, _) `declrPlusNamed` ide' = declr `declrNamed` ide'
_ `declrPlusNamed` _ = False
--
err = interr $ "CTrav.simplifyDecl: Wrong C object!\n\
\ Looking for `" ++ identToString ide ++ "' in decl \
\at " ++ show (posOf at)
-- | extract the declarator that declares the given identifier
--
-- * the declaration must contain the identifier
--
declrFromDecl :: Ident -> CDecl -> CDeclr
ide `declrFromDecl` decl =
let CDecl _ [(Just declr, _, _)] _ = ide `simplifyDecl` decl
in
declr
-- | tests whether the given declarator has the given name
--
declrNamed :: CDeclr -> Ident -> Bool
declr `declrNamed` ide = declrName declr == Just ide
| get the declarator of a declaration that has at most one declarator
--
declaredDeclr :: CDecl -> Maybe CDeclr
declaredDeclr (CDecl _ [] _) = Nothing
declaredDeclr (CDecl _ [(odeclr, _, _)] _) = odeclr
declaredDeclr decl =
interr $ "CTrav.declaredDeclr: Too many declarators!\n\
\ Declaration at " ++ show (posOf decl)
| get the initialiser of a declaration that has at most one initialiser
--
initDeclr :: CDecl -> Maybe (CInitializer NodeInfo)
initDeclr (CDecl _ [] _) = Nothing
initDeclr (CDecl _ [(_, ini, _)] _) = ini
initDeclr decl =
interr $ "CTrav.initDeclr: Too many declarators!\n\
\ Declaration at " ++ show (posOf decl)
| get the name declared by a declaration that has exactly one declarator
--
declaredName :: CDecl -> Maybe Ident
declaredName decl = declaredDeclr decl >>= declrName
-- | obtains the member definitions and the tag of a struct
--
-- * member definitions are expanded
--
structMembers :: CStructUnion -> ([CDecl], CStructTag)
structMembers (CStruct tag _ members _ _) =
  (concatMap expandDecl (maybe [] id members), tag)

-- | expand declarators declaring more than one identifier into multiple
-- declarators, e.g. `int x, y;' becomes `int x; int y;'
--
-- * a declarator that declares no identifier at all is preserved as is
--
expandDecl :: CDecl -> [CDecl]
expandDecl decl@(CDecl _ [] _) =
  [decl]                                -- no-name member stays as is
expandDecl (CDecl specs decls at) =
  [CDecl specs [declr] at | declr <- decls]

-- | get a struct's name
--
structName :: CStructUnion -> Maybe Ident
structName (CStruct _ oide _ _ _) = oide

-- | get an enum's name
--
enumName :: CEnum -> Maybe Ident
enumName (CEnum oide _ _ _) = oide

-- | get a tag's name
--
-- * fail if the tag is anonymous
--
tagName :: CTag -> Ident
tagName tag = maybe anonymous id nameOf
  where
    nameOf = case tag of
               StructUnionCT struct -> structName struct
               EnumCT        enum   -> enumName enum
    anonymous = interr "CTrav.tagName: Anonymous tag definition"
-- | checks whether the given declarator defines an object that is a pointer
-- to some other type
--
-- * as far as parameter passing is concerned, arrays are also pointers
--
isPtrDeclr :: CDeclr -> Bool
isPtrDeclr (CDeclr _ derived _ _ _) =
  case derived of
    CPtrDeclr _ _   : _ -> True
    CArrDeclr _ _ _ : _ -> True
    _                   -> False

-- | Need to distinguish between pointer and array declarations within
-- structures.
--
isArrDeclr :: CDeclr -> Maybe Int
isArrDeclr (CDeclr _ (CArrDeclr _ sz _ : _) _ _ _) = Just (arrSize sz)
  where
    -- only a literal integer bound yields the real size; any other
    -- (missing or non-constant) size is treated as 1
    arrSize (CArrSize _ (CConst (CIntConst s _))) =
      fromIntegral (getCInteger s)
    arrSize _ = 1
isArrDeclr _ = Nothing

-- | drops the first pointer level from the given declarator
--
-- * the declarator must declare a pointer object
--
-- * arrays are considered to be pointers
--
-- FIXME: this implementation isn't nice, because we retain the 'CVarDeclr'
--        unchanged; as the declarator is changed, we should maybe make this
--        into an anonymous declarator and also change its attributes
--
dropPtrDeclr :: CDeclr -> CDeclr
dropPtrDeclr (CDeclr ide (outermost:derived) asm ats node)
  | isIndirection outermost = CDeclr ide derived asm ats node
  | otherwise               = interr "CTrav.dropPtrDeclr: No pointer!"
  where
    isIndirection (CPtrDeclr _ _)   = True
    isIndirection (CArrDeclr _ _ _) = True
    isIndirection _                 = False

-- | checks whether the given declaration defines a pointer object
--
-- * there may only be a single declarator in the declaration
--
isPtrDecl :: CDecl -> Bool
isPtrDecl (CDecl _ declrs _) =
  case declrs of
    []                   -> False
    [(Just declr, _, _)] -> isPtrDeclr declr
    _                    ->
      interr "CTrav.isPtrDecl: There was more than one declarator!"

-- | like 'isPtrDecl', but yields the array size as per 'isArrDeclr'
--
isArrDecl :: CDecl -> Maybe Int
isArrDecl (CDecl _ declrs _) =
  case declrs of
    []                   -> Nothing
    [(Just declr, _, _)] -> isArrDeclr declr
    _                    ->
      interr "CTrav.isArrDecl: There was more than one declarator!"

-- | checks whether the given declarator defines a function object
--
isFunDeclr :: CDeclr -> Bool
isFunDeclr (CDeclr _ derived _ _ _) =
  case derived of
    CFunDeclr _ _ _ : _ -> True
    _                   -> False
-- | extract the structure from the type specifiers of a declaration
--
-- * raises a user-level error (instead of crashing in `head') when the
--   declaration carries no type specifier at all
--
structFromDecl :: Position -> CDecl -> CT s CStructUnion
structFromDecl pos (CDecl specs _ _) =
  case [ts | CTypeSpec ts <- specs] of
    CSUType su _ : _ -> extractStruct pos (StructUnionCT su)
    _                -> structExpectedErr pos

-- | variant of 'structFromDecl' that may yield 'Nothing' (via
-- 'extractStruct'') for forward declarations instead of raising an error
--
structFromDecl' :: Position -> CDecl -> CT s (Maybe CStructUnion)
structFromDecl' pos (CDecl specs _ _) =
  case [ts | CTypeSpec ts <- specs] of
    CSUType su _ : _ -> extractStruct' pos (StructUnionCT su)
    _                -> structExpectedErr pos
-- | extracts the arguments from a function declaration (must be a unique
-- declarator) and constructs a declaration for the result of the function
--
-- * the boolean result indicates whether the function is variadic
--
-- * returns an abstract declarator
--
-- * precondition: the declaration carries exactly one named declarator
--   whose outermost derivation is a new-style function declarator;
--   anything else is an internal error (the top-level pattern is
--   deliberately non-exhaustive)
--
funResultAndArgs :: CDecl -> ([CDecl], CDecl, Bool)
funResultAndArgs cdecl@(CDecl specs [(Just declr, _, _)] _) =
  let (args, declr', variadic) = funArgs declr
      -- the result type is the original declaration stripped of its
      -- outermost function derivation
      result = CDecl specs [(Just declr', Nothing, Nothing)]
                     (newAttrsOnlyPos (posOf cdecl))
  in
  (args, result, variadic)
  where
    funArgs (CDeclr _ide derived _asm _ats node) =
      case derived of
        (CFunDeclr (Right (args,variadic)) _ats' _dnode : derived') ->
          (args, CDeclr Nothing derived' Nothing [] node, variadic)
        (CFunDeclr (Left _) _ _ : _) ->
          interr "CTrav.funResultAndArgs: Old style function definition"
        _ -> interr "CTrav.funResultAndArgs: Illegal declarator!"
-- name chasing
--

-- | find the declarator identified by the given identifier; if the
-- declarator is itself only a 'typedef'ed name, the operation recursively
-- searches for the declarator associated with that name (this is called
-- ``typedef chasing'')
--
-- * if `ind = True', we have to hop over one indirection
--
-- * remove all declarators except the one we just looked up
--
chaseDecl :: Ident -> Bool -> CT s CDecl
--
-- * cycles are no issue, as they cannot occur in a correct C header (we
--   would have spotted the problem during name analysis)
--
chaseDecl ide ind =
  do
    traceEnter
    cdecl <- getDeclOf ide
    let sdecl = ide `simplifyDecl` cdecl
    case extractAlias sdecl ind of
      Just (ide', ind') -> chaseDecl ide' ind'
      Nothing           -> return sdecl
  where
    traceEnter = traceCTrav $
                   "Entering `chaseDecl' for `" ++ identToString ide
                   ++ "' " ++ (if ind then "" else "not ")
                   ++ "following indirections...\n"

-- | find type object in object name space and then chase it
--
-- * see also 'chaseDecl'
--
-- * also create an object association from the given identifier to the
--   object that it _directly_ represents
--
-- * if the third argument is 'True', use 'findObjShadow'
--
findAndChaseDecl :: Ident -> Bool -> Bool -> CT s CDecl
findAndChaseDecl ide ind useShadows =
  do
    traceCTrav $ "findAndChaseDecl: " ++ show ide ++ " (" ++
                 show useShadows ++ ")\n"
    (obj, ide') <- findTypeObj ide useShadows   -- is there an object def?
    ide  `refersToNewDef` ObjCD obj
    ide' `refersToNewDef` ObjCD obj             -- assoc needed for chasing
    chaseDecl ide' ind

-- | like 'findAndChaseDecl', but when no object definition is found, also
-- try to interpret the identifier as a struct/union tag and synthesise a
-- declaration from the tag definition
--
findAndChaseDeclOrTag :: Ident -> Bool -> Bool -> CT s CDecl
findAndChaseDeclOrTag ide ind useShadows =
  do
    traceCTrav $ "findAndChaseDeclOrTag: " ++ show ide ++ " (" ++
                 show useShadows ++ ")\n"
    mobjide <- findTypeObjMaybeWith True ide useShadows -- is there an object def?
    case mobjide of
      Just (obj, ide') -> do
        ide  `refersToNewDef` ObjCD obj
        ide' `refersToNewDef` ObjCD obj         -- assoc needed for chasing
        chaseDecl ide' ind
      Nothing -> do
        otag <- if useShadows
                  then findTagShadow ide
                  else liftM (fmap (\tag -> (tag, ide))) $ findTag ide
        case otag of
          Just (StructUnionCT su, _) -> do
            -- build a declaration consisting solely of the struct/union
            -- type specifier
            let (CStruct _ _ _ _ nodeinfo) = su
            return $ CDecl [CTypeSpec (CSUType su nodeinfo)] [] nodeinfo
          _ -> unknownObjErr ide
-- | given a declaration (which must have exactly one declarator), if the
-- declarator is an alias, chase it to the actual declaration
--
checkForAlias :: CDecl -> CT s (Maybe CDecl)
checkForAlias decl =
  case extractAlias decl False of
    Just (ide', _) -> Just `liftM` chaseDecl ide' False
    Nothing        -> return Nothing

-- | given a declaration (which must have exactly one declarator), if the
-- declarator is an alias, yield the alias name; *no* chasing
--
checkForOneAliasName :: CDecl -> Maybe Ident
checkForOneAliasName decl = fst `fmap` extractAlias decl False

-- | given a declaration, find the name of the struct/union type
--
checkForOneCUName :: CDecl -> Maybe Ident
checkForOneCUName decl@(CDecl specs _ _) =
  case [ts | CTypeSpec ts <- specs] of
    [CSUType (CStruct _ n _ _ _) _] ->
      -- only accept the name when no type derivations are involved
      case declaredDeclr decl of
        Nothing                  -> n
        Just (CDeclr _ [] _ _ _) -> n
        _                        -> Nothing
    _ -> Nothing
-- smart lookup
--

-- | for the given identifier, either find an enumeration in the tag name
-- space or a type definition referring to an enumeration in the object name
-- space; raises an error and exception if the identifier is not defined
--
-- * if the second argument is 'True', use 'findTagShadow'
--
-- * raises a user-level error (instead of crashing in `head') when the
--   chased declaration carries no type specifier at all
--
lookupEnum :: Ident -> Bool -> CT s CEnum
lookupEnum ide useShadows =
  do
    otag <- if useShadows
              then liftM (fmap fst) $ findTagShadow ide
              else findTag ide
    case otag of
      Just (StructUnionCT _   ) -> enumExpectedErr ide  -- wrong tag definition
      Just (EnumCT        enum) -> return enum          -- enum tag definition
      Nothing                   -> do                   -- no tag definition
        oobj <- if useShadows
                  then liftM (fmap fst) $ findObjShadow ide
                  else findObj ide
        case oobj of
          Just (EnumCO _ enum) -> return enum           -- anonymous enum
          _                    -> do                    -- no value definition
            (CDecl specs _ _) <- findAndChaseDecl ide False useShadows
            case [ts | CTypeSpec ts <- specs] of
              CEnumType enum _ : _ -> return enum
              _                    -> enumExpectedErr ide
-- | for the given identifier, either find a struct/union in the tag name space
-- or a type definition referring to a struct/union in the object name space;
-- raises an error and exception if the identifier is not defined
--
-- * the parameter `preferTag' determines whether tags or typedefs are
searched first
--
* if the third argument is ` True ' , use ` findTagShadow '
--
-- * when finding a forward definition of a tag, follow it to the real
-- definition
--
lookupStructUnion :: Ident -> Bool -> Bool -> CT s CStructUnion
lookupStructUnion ide preferTag useShadows = do
  traceCTrav $ "lookupStructUnion: ide=" ++ show ide ++ " preferTag=" ++
    show preferTag ++ " useShadows=" ++ show useShadows ++ "\n"
  -- consult the tag name space (through shadow identifiers if requested)
  otag <- if useShadows
          then liftM (fmap fst) $ findTagShadow ide
          else findTag ide
  -- consult the object name space; pair the result with the identifier that
  -- actually matched (a shadow lookup may match a different identifier)
  mobj <- if useShadows
          then findObjShadow ide
          else liftM (fmap (\obj -> (obj, ide))) $ findObj ide
  -- only typedef and builtin objects can denote a struct/union here
  let oobj = case mobj of
        Just obj@(TypeCO{}, _) -> Just obj
        Just obj@(BuiltinCO{}, _) -> Just obj
        _ -> Nothing
  case preferTag of
    True -> case otag of
      -- tag found: follow a possible forward reference to the full definition
      Just tag -> extractStruct (posOf ide) tag
      -- no tag: chase the typedef chain down to a struct/union declaration
      Nothing -> do
        decl <- findAndChaseDecl ide True useShadows
        structFromDecl (posOf ide) decl
    False -> case oobj of
      -- a typedef/builtin exists: try it first, falling back to the tag
      Just _ -> do
        decl <- findAndChaseDecl ide True useShadows
        mres <- structFromDecl' (posOf ide) decl
        case mres of
          Just su -> return su
          Nothing -> case otag of
            Just tag -> extractStruct (posOf ide) tag
            Nothing -> unknownObjErr ide
      -- no suitable object: the tag is the only remaining possibility
      Nothing -> case otag of
        Just tag -> extractStruct (posOf ide) tag
        Nothing -> unknownObjErr ide
-- | for the given identifier, check for the existence of both a type definition
-- or a struct, union, or enum definition
--
-- * if a typedef and a tag exists, the typedef takes precedence
--
-- * typedefs are chased
--
-- * if the second argument is `True', look for shadows, too
--
lookupDeclOrTag :: Ident -> Bool -> CT s (Either CDecl CTag)
lookupDeclOrTag ide useShadows = do
  typeObj <- findTypeObjMaybeWith True ide useShadows
  case typeObj of
    -- a typedef wins: chase it down to the underlying declaration
    -- (shadow matching was already performed by the lookup above)
    Just (_, ide') -> Left `fmap` findAndChaseDecl ide' False False
    -- otherwise fall back to the tag name space
    Nothing -> do
      tagDef <- if useShadows
                  then fmap (fmap fst) (findTagShadow ide)
                  else findTag ide
      maybe (unknownObjErr ide) (return . Right) tagDef
-- auxiliary routines (internal)
--
-- | if the given declaration (which may have at most one declarator) is a
-- `typedef' alias, yield the referenced name
--
-- * a `typedef' alias has one of the following forms
--
-- <specs> at x, ...;
-- <specs> at *x, ...;
--
-- where `at' is the alias type, which has been defined by a `typedef', and
-- <specs> are arbitrary specifiers and qualifiers. Note that `x' may be a
-- variable, a type name (if `typedef' is in <specs>), or be entirely
-- omitted.
--
-- * if `ind = True', the alias may be via an indirection
--
-- * if `ind = True' and the alias is _not_ over an indirection, yield `True';
-- otherwise `False' (i.e. the ability to hop over an indirection is consumed)
--
-- * this may be an anonymous declaration, i.e. the name in `CDeclr' may be
-- omitted or there may be no declarator at all
--
extractAlias :: CDecl -> Bool -> Maybe (Ident, Bool)
extractAlias decl@(CDecl specs _ _) ind =
  case [ts | CTypeSpec ts <- specs] of
    [CTypeDef ide' _] ->                          -- type spec is aliased ident
      case declaredDeclr decl of
        Nothing                  -> Just (ide', ind)
        Just (CDeclr _ [] _ _ _) -> Just (ide', ind)  -- no type derivations
        -- one pointer indirection; the declarator pattern below was missing,
        -- leaving the two guards dangling (syntax error) — restored
        Just (CDeclr _ [CPtrDeclr _ _] _ _ _)
          | ind       -> Just (ide', False)
          | otherwise -> Nothing
        _ -> Nothing
    _ -> Nothing
-- | if the given tag is a forward declaration of a structure, follow the
-- reference to the full declaration
--
-- * the recursive call is not dangerous as there can't be any cycles
--
-- NOTE(review): the recursion terminates because tag reference chains are
-- acyclic in a correct header (see comment above).
extractStruct :: Position -> CTag -> CT s CStructUnion
extractStruct pos (EnumCT _ ) = structExpectedErr pos
extractStruct pos (StructUnionCT su) = do
  traceCTrav $ "extractStruct: " ++ show su ++ "\n"
  case su of
    CStruct _ (Just ide') Nothing _ _ -> do -- found forward definition
      -- the tag identifier of a forward declaration is linked to the full
      -- definition (by `defTag'); follow that link
      def <- getDefOf ide'
      traceCTrav $ "def=" ++ show def ++ "\n"
      case def of
        TagCD tag -> extractStruct pos tag
        UndefCD -> incompleteTypeErr pos
        bad_obj -> err ide' bad_obj
    _ -> return su
  where
    -- internal error: the tag identifier links to something other than a tag
    err ide bad_obj =
      do interr $ "CTrav.extractStruct: Illegal reference! Expected " ++ dumpIdent ide ++
                  " to link to TagCD but refers to "++ (show bad_obj) ++ "\n"
-- | variant of 'extractStruct' that yields 'Nothing' instead of raising an
-- internal error when a forward reference cannot be resolved to a tag
--
extractStruct' :: Position -> CTag -> CT s (Maybe CStructUnion)
extractStruct' pos (EnumCT _) = structExpectedErr pos
extractStruct' pos (StructUnionCT su) = do
  traceCTrav $ "extractStruct': " ++ show su ++ "\n"
  case su of
    -- a forward reference: resolve the tag it refers to, if possible
    CStruct _ (Just forwardIde) Nothing _ _ -> do
      def <- getDefOf forwardIde
      traceCTrav $ "def=" ++ show def ++ "\n"
      case def of
        TagCD tag -> fmap Just (extractStruct pos tag)
        _         -> return Nothing
    -- already a full definition
    _ -> return (Just su)
-- | yield the name declared by a declarator if any
--
declrName :: CDeclr -> Maybe Ident
declrName (CDeclr optName _derived _asm _attrs _node) = optName
-- | raise an error if the given declarator does not declare a C function or if
-- the function is supposed to return an array (the latter is illegal in C)
--
assertFunDeclr :: Position -> CDeclr -> CT s ()
assertFunDeclr pos (CDeclr _ (CFunDeclr _ _ _ : resultDerivs) _ _ _)
    -- a function returning an array is illegal in ANSI C
  | arrayResult resultDerivs = illegalFunResultErr pos
  | otherwise                = return ()  -- ok: function, non-array result
  where
    arrayResult (CArrDeclr _ _ _ : _) = True
    arrayResult _                     = False
assertFunDeclr pos _ = funExpectedErr pos
-- | trace for this module
--
traceCTrav :: String -> CT s ()
traceCTrav msg = putTraceStr traceCTravSW msg
-- error messages
-- --------------
-- | fatal: the identifier has no definition at all in the header
--
unknownObjErr :: Ident -> CT s a
unknownObjErr ide =
  raiseErrorCTExc (posOf ide)
    [ "Unknown identifier!"
    , "Cannot find a definition for `" ++ identToString ide
      ++ "' in the header file."
    ]
-- | fatal: a C type name was required, but the identifier denotes another kind
-- of entity
--
typedefExpectedErr :: Ident -> CT s a
typedefExpectedErr ide =
  raiseErrorCTExc (posOf ide)
    [ "Expected type definition!"
    , "The identifier `" ++ identToString ide ++ "' needs to be a C type name."
    ]
-- | fatal: a type name appeared where a value-level entity was required
--
unexpectedTypedefErr :: Position -> CT s a
unexpectedTypedefErr pos =
  raiseErrorCTExc pos
    [ "Unexpected type name!"
    , "An object, function, or enum constant is required here."
    ]
-- | fatal: a function declarator declares an array result (illegal in ANSI C)
--
illegalFunResultErr :: Position -> CT s a
illegalFunResultErr pos =
  raiseErrorCTExc pos
    [ "Function cannot return an array!"
    , "ANSI C does not allow functions to return an array."
    ]
-- | fatal: a function was required, but the declarator declares none
--
funExpectedErr :: Position -> CT s a
funExpectedErr pos =
  raiseErrorCTExc pos
    [ "Function expected!"
    , "A function is needed here, but this declarator does not declare"
    , "a function."
    ]
-- | fatal: an enum was required, but the identifier denotes something else
--
enumExpectedErr :: Ident -> CT s a
enumExpectedErr ide =
  raiseErrorCTExc (posOf ide)
    [ "Expected enum!"
    , "Expected `" ++ identToString ide ++ "' to denote an enum; instead found"
    , "a struct, union, or object."
    ]
-- | fatal: a struct/union was required, but an enum or basic type was found
--
structExpectedErr :: Position -> CT s a
structExpectedErr pos =
  raiseErrorCTExc pos
    [ "Expected a struct!"
    , "Expected a structure or union; instead found an enum or basic type."
    ]
-- | fatal: only a forward declaration exists where a full definition is needed
--
incompleteTypeErr :: Position -> CT s a
incompleteTypeErr pos =
  raiseErrorCTExc pos
    [ "Illegal use of incomplete type!"
    , "Expected a fully defined structure or union tag; instead found incomplete type."
    ]
| null | https://raw.githubusercontent.com/haskell/c2hs/7a4da9e93fc0456f01aa7a94e5eea533b171fd1d/src/C2HS/C/Trav.hs | haskell |
This file is free software; you can redistribute it and/or modify
(at your option) any later version.
This file is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
- DESCRIPTION ---------------------------------------------------------------
This modules provides for traversals of C structure trees. The C
traversal monad supports traversals that need convenient access to the
attributes of an attributed C structure tree. The monads state can still
be extended.
- DOCU ----------------------------------------------------------------------
Handling of redefined tag values
--------------------------------
Structures allow both
struct s {...} ...;
struct s ...;
and
struct s ...; /* this is called a forward reference */
struct s {...} ...;
In contrast enumerations only allow (in ANSI C)
enum e {...} ...;
enum e ...;
The function `defTag' handles both types and establishes an object
without `{...}') to the actually definition of the structure of
enumeration. This implies that when looking for the details of a
structure or enumeration, possibly a chain of references on tag
identifiers has to be chased. Note that the object association attribute
is _not_defined_ when the `{...}' part is present in a declaration.
- TODO ----------------------------------------------------------------------
* `extractStruct' doesn't account for forward declarations that have no
full declaration yet; if `extractStruct' is called on such a declaration,
we have a user error, but currently an internal error is raised
C structure tree query functions
the C traversal monad
---------------------
| C traversal monad
| read attributed struture tree
| transform attributed structure tree
| access to the user-defined state
| read user-defined state
| transform user-defined state
usage of a traversal monad
| execute a traversal monad
* given a traversal monad, an attribute structure tree, and a user
state, the transformed structure tree and monads result are returned
exception handling
------------------
| exception identifier
| throw an exception
| catch a `ctExc'
| raise an error followed by throwing a CT exception
attribute manipulation
----------------------
name spaces
| enter a new local range
| enter a new local range, only for objects
| leave the current local range
| leave the current local range, only for objects
| enter an object definition into the object name space
* if a definition of the same name was already present, it is returned
| find a definition in the object name space
| find a definition in the object name space; if nothing found, try
whether there is a shadow identifier that matches
| enter a tag definition into the tag name space
* empty definitions of structures get overwritten with complete ones and a
forward reference is added to their tag identifier; furthermore, both
structures and enums may be referenced using an empty definition when
there was a full definition earlier and in this case there is also an
object association added; otherwise, if a definition of the same name was
accompanied by a full definition of the enumeration
no collision
transparent for env
compute whether we have the case of a non-conflicting redefined tag
definition, and if so, return the full definition and the foreward
definition's tag identifier
* in the case of a structure, a foreward definition after a full
definition is allowed, so we have to handle this case; enumerations
don't allow foreward definitions
them here, one is arbitrarily selected to take the role of the full
definition
| find an definition in the tag name space
| find an definition in the tag name space; if nothing found, try
whether there is a shadow identifier that matches
| enrich the object and tag name space with identifiers obtained by dropping
the given prefix from the identifiers already in the name space
* if a new identifier would collides with an existing one, the new one is
discarded, i.e. all associations that existed before the transformation
started are still in effect after the transformation
definition attribute
| get the definition of an identifier
* the attribute must be defined, i.e. a definition must be associated with
the given identifier
| set the definition of an identifier
| update the definition of an identifier
| get the declarator of an identifier
if the latter ever becomes necessary, we have to
change the representation of builtins and give them
some dummy declarator
convenience functions
| find a type object in the object name space; returns 'Nothing' if the
identifier is not defined
| find a type object in the object name space; raises an error and exception
if the identifier is not defined
| find an object, function, or enumerator in the object name space; raises an
error and exception if the identifier is not defined
| find a function in the object name space; raises an error and exception if
the identifier is not defined
C structure tree query routines
-------------------------------
| test if this is a type definition specification
| discard all declarators but the one declaring the given identifier
* the declaration must contain the identifier
| extract the declarator that declares the given identifier
* the declaration must contain the identifier
| tests whether the given declarator has the given name
| obtains the member definitions and the tag of a struct
* member definitions are expanded
no name member stays as member without a name.
| get a struct's name
| get an enum's name
| get a tag's name
* fail if the tag is anonymous
| checks whether the given declarator defines an object that is a pointer to
some other type
* as far as parameter passing is concerned, arrays are also pointer
| Need to distinguish between pointer and array declarations within
structures.
* the declarator must declare a pointer object
* arrays are considered to be pointers
FIXME: this implementation isn't nice, because we retain the 'CVarDeclr'
unchanged; as the declarator is changed, we should maybe make this
into an anonymous declarator and also change its attributes
| checks whether the given declaration defines a pointer object
* there may only be a single declarator in the declaration
| checks whether the given declarator defines a function object
| extract the structure from the type specifiers of a declaration
| extracts the arguments from a function declaration (must be a unique
declarator) and constructs a declaration for the result of the function
* the boolean result indicates whether the function is variadic
* returns an abstract declarator
name chasing
| find the declarator identified by the given identifier; if the declarator
is itself only a 'typedef'ed name, the operation recursively searches for
the declarator associated with that name (this is called ``typedef
chasing'')
* remove all declarators except the one we just looked up
* cycles are no issue, as they cannot occur in a correct C header (we would
have spotted the problem during name analysis)
| find type object in object name space and then chase it
* see also 'chaseDecl'
* also create an object association from the given identifier to the object
that it _directly_ represents
is there an object def?
assoc needed for chasing
is there an object def?
assoc needed for chasing
declarator is an alias, chase it to the actual declaration
declarator is an alias, yield the alias name; *no* chasing
| given a declaration, find the name of the struct/union type
no type derivations
smart lookup
| for the given identifier, either find an enumeration in the tag name space
or a type definition referring to an enumeration in the object name space;
raises an error and exception if the identifier is not defined
wrong tag definition
enum tag definition
no tag definition
anonymous enum
no value definition
| for the given identifier, either find a struct/union in the tag name space
or a type definition referring to a struct/union in the object name space;
raises an error and exception if the identifier is not defined
* the parameter `preferTag' determines whether tags or typedefs are
* when finding a forward definition of a tag, follow it to the real
definition
| for the given identifier, check for the existence of both a type definition
or a struct, union, or enum definition
* if a typedef and a tag exists, the typedef takes precedence
* typedefs are chased
already did check shadows
auxiliary routines (internal)
`typedef' alias, yield the referenced name
<specs> at x, ...;
<specs> at *x, ...;
where `at' is the alias type, which has been defined by a `typedef', and
<specs> are arbitrary specifiers and qualifiers. Note that `x' may be a
variable, a type name (if `typedef' is in <specs>), or be entirely
omitted.
* if `ind = True', the alias may be via an indirection
* if `ind = True' and the alias is _not_ over an indirection, yield `True';
otherwise `False' (i.e. the ability to hop over an indirection is consumed)
omitted or there may be no declarator at all
type spec is aliased ident
no type derivations
| if the given tag is a forward declaration of a structure, follow the
reference to the full declaration
* the recursive call is not dangerous as there can't be any cycles
found forward definition
| yield the name declared by a declarator if any
| raise an error if the given declarator does not declare a C function or if
the function is supposed to return an array (the latter is illegal in C)
ok, we have a function which doesn't return an array
| trace for this module
error messages
-------------- | C->Haskell Compiler : traversals of C structure tree
Author :
Created : 16 October 99
Copyright ( c ) [ 1999 .. 2001 ]
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
language : 98
association from the tag identifier in the empty declaration ( i.e. the one
module C2HS.C.Trav (CT, readCT, transCT, runCT, throwCTExc, ifCTExc,
raiseErrorCTExc,
enter, enterObjs, leave, leaveObjs, defObj, findObj,
findObjShadow, defTag, findTag, findTagShadow,
applyPrefixToNameSpaces, getDefOf, refersToDef, refersToNewDef,
getDeclOf, findTypeObjMaybe, findTypeObj, findValueObj,
findFunObj,
isTypedef, simplifyDecl, declrFromDecl, declrNamed,
declaredDeclr, initDeclr, declaredName, structMembers, expandDecl,
structName, enumName, tagName, isPtrDeclr, isArrDeclr,
dropPtrDeclr, isPtrDecl, isArrDecl, isFunDeclr, structFromDecl,
funResultAndArgs, chaseDecl, findAndChaseDecl,
findAndChaseDeclOrTag, checkForAlias, checkForOneCUName,
checkForOneAliasName, lookupEnum, lookupStructUnion,
lookupDeclOrTag)
where
import Data.List (find)
import Control.Monad (liftM)
import Control.Exception (assert)
import Language.C.Data
import Language.C.Data.Ident (dumpIdent)
import Language.C.Syntax
import Data.Attributes
import Data.Errors
import C2HS.State (CST, readCST, transCST, runCST, raiseError, catchExc,
throwExc, Traces(..), putTraceStr)
import C2HS.C.Attrs (AttrC(..), enterNewRangeC, enterNewObjRangeC,
leaveRangeC, leaveObjRangeC, addDefObjC, lookupDefObjC,
lookupDefObjCShadow, addDefTagC, lookupDefTagC,
lookupDefTagCShadow, applyPrefix, getDefOfIdentC,
setDefOfIdentC, updDefOfIdentC, CObj(..), CTag(..),
CDef(..))
type CState s = (AttrC, s)
type CT s a = CST (CState s) a
readAttrCCT :: (AttrC -> a) -> CT s a
readAttrCCT reader = readCST $ \(ac, _) -> reader ac
transAttrCCT :: (AttrC -> (AttrC, a)) -> CT s a
transAttrCCT trans = transCST $ \(ac, s) -> let
(ac', r) = trans ac
in
((ac', s), r)
readCT :: (s -> a) -> CT s a
readCT reader = readCST $ \(_, s) -> reader s
transCT :: (s -> (s, a)) -> CT s a
transCT trans = transCST $ \(ac, s) -> let
(s', r) = trans s
in
((ac, s'), r)
runCT :: CT s a -> AttrC -> s -> CST t (AttrC, a)
runCT m ac s = runCST m' (ac, s)
where
m' = do
r <- m
(ac', _) <- readCST id
return (ac', r)
ctExc :: String
ctExc = "ctExc"
throwCTExc :: CT s a
throwCTExc = throwExc ctExc "Error during traversal of a C structure tree"
ifCTExc :: CT s a -> CT s a -> CT s a
ifCTExc m handler = m `catchExc` (ctExc, const handler)
raiseErrorCTExc :: Position -> [String] -> CT s a
raiseErrorCTExc pos errs = raiseError pos errs >> throwCTExc
enter :: CT s ()
enter = transAttrCCT $ \ac -> (enterNewRangeC ac, ())
enterObjs :: CT s ()
enterObjs = transAttrCCT $ \ac -> (enterNewObjRangeC ac, ())
leave :: CT s ()
leave = transAttrCCT $ \ac -> (leaveRangeC ac, ())
leaveObjs :: CT s ()
leaveObjs = transAttrCCT $ \ac -> (leaveObjRangeC ac, ())
defObj :: Ident -> CObj -> CT s (Maybe CObj)
defObj ide obj = do
traceCTrav $ "Defining object "++show ide++"...\n"
transAttrCCT $ \ac -> addDefObjC ac ide obj
findObj :: Ident -> CT s (Maybe CObj)
findObj ide = readAttrCCT $ \ac -> lookupDefObjC ac ide
findObjShadow :: Ident -> CT s (Maybe (CObj, Ident))
findObjShadow ide = readAttrCCT $ \ac -> lookupDefObjCShadow ac ide
already present , it is returned ( see DOCU section )
* it is checked that the first occurrence of an enumeration tag is
defTag :: Ident -> CTag -> CT s (Maybe CTag)
defTag ide tag =
do
traceCTrav $ "Defining tag "++show ide++"...\n"
otag <- transAttrCCT $ \ac -> addDefTagC ac ide tag
case otag of
Just prevTag -> case isRefinedOrUse prevTag tag of
Nothing -> return otag
Just (fullTag, foreIde) -> do
_ <- transAttrCCT $ \ac -> addDefTagC ac ide fullTag
foreIde `refersToDef` TagCD fullTag
where
* the first argument contains the _ previous _ definition
* there may also be multiple foreward definition ; if we have two of
isRefinedOrUse (StructUnionCT (CStruct _ (Just ide') Nothing _ _))
tag'@(StructUnionCT (CStruct _ (Just _ ) _ _ _)) =
Just (tag', ide')
isRefinedOrUse tag'@(StructUnionCT (CStruct _ (Just _ ) _ _ _))
(StructUnionCT (CStruct _ (Just ide') Nothing _ _)) =
Just (tag', ide')
isRefinedOrUse (EnumCT (CEnum (Just ide') Nothing _ _))
tag'@(EnumCT (CEnum (Just _ ) _ _ _)) =
Just (tag', ide')
isRefinedOrUse tag'@(EnumCT (CEnum (Just ide') _ _ _))
(EnumCT (CEnum (Just _ ) _ _ _)) =
Just (tag', ide')
isRefinedOrUse _ _ = Nothing
findTag :: Ident -> CT s (Maybe CTag)
findTag ide = readAttrCCT $ \ac -> lookupDefTagC ac ide
findTagShadow :: Ident -> CT s (Maybe (CTag, Ident))
findTagShadow ide = readAttrCCT $ \ac -> lookupDefTagCShadow ac ide
applyPrefixToNameSpaces :: String -> String -> CT s ()
applyPrefixToNameSpaces prefix repprefix =
transAttrCCT $ \ac -> (applyPrefix ac prefix repprefix, ())
getDefOf :: Ident -> CT s CDef
getDefOf ide = do
def <- readAttrCCT $ \ac -> getDefOfIdentC ac ide
assert (not . isUndef $ def) $
return def
refersToDef :: Ident -> CDef -> CT s ()
refersToDef ide def =
do traceCTrav $ "linking identifier: "++ dumpIdent ide ++ " --> " ++ show def
transAttrCCT $ \akl -> (setDefOfIdentC akl ide def, ())
refersToNewDef :: Ident -> CDef -> CT s ()
refersToNewDef ide def =
transAttrCCT $ \akl -> (updDefOfIdentC akl ide def, ())
getDeclOf :: Ident -> CT s CDecl
getDeclOf ide =
do
traceEnter
def <- getDefOf ide
case def of
UndefCD -> interr "CTrav.getDeclOf: Undefined!"
DontCareCD -> interr "CTrav.getDeclOf: Don't care!"
TagCD _ -> interr "CTrav.getDeclOf: Illegal tag!"
ObjCD obj -> case obj of
TypeCO decl -> traceTypeCO decl >>
return decl
ObjCO decl -> traceObjCO decl >>
return decl
EnumCO _ _ -> illegalEnum
BuiltinCO Nothing -> illegalBuiltin
BuiltinCO (Just decl) -> traceBuiltinCO >>
return decl
where
illegalEnum = interr "CTrav.getDeclOf: Illegal enum!"
illegalBuiltin = interr "CTrav.getDeclOf: Attempted to get declarator of \
\builtin entity!"
traceEnter = traceCTrav
$ "Entering `getDeclOf' for `" ++ identToString ide
++ "'...\n"
traceTypeCO decl = traceCTrav
$ "...found a type object:\n" ++ show decl ++ "\n"
traceObjCO decl = traceCTrav
$ "...found a vanilla object:\n" ++ show decl ++ "\n"
traceBuiltinCO = traceCTrav
$ "...found a builtin object with a proxy decl.\n"
findTypeObjMaybeWith :: Bool -> Ident -> Bool -> CT s (Maybe (CObj, Ident))
findTypeObjMaybeWith soft ide useShadows =
do
oobj <- if useShadows
then findObjShadow ide
else liftM (fmap (\obj -> (obj, ide))) $ findObj ide
case oobj of
Just obj@(TypeCO _ , _) -> return $ Just obj
Just obj@(BuiltinCO _, _) -> return $ Just obj
Just _ -> if soft
then return Nothing
else typedefExpectedErr ide
Nothing -> return $ Nothing
* if the second argument is ' True ' , use ' findObjShadow '
findTypeObjMaybe :: Ident -> Bool -> CT s (Maybe (CObj, Ident))
findTypeObjMaybe = findTypeObjMaybeWith False
* if the second argument is ' True ' , use ' findObjShadow '
findTypeObj :: Ident -> Bool -> CT s (CObj, Ident)
findTypeObj ide useShadows = do
oobj <- findTypeObjMaybe ide useShadows
case oobj of
Nothing -> unknownObjErr ide
Just obj -> return obj
* if the second argument is ' True ' , use ' findObjShadow '
findValueObj :: Ident -> Bool -> CT s (CObj, Ident)
findValueObj ide useShadows =
do
oobj <- if useShadows
then findObjShadow ide
else liftM (fmap (\obj -> (obj, ide))) $ findObj ide
case oobj of
Just obj@(ObjCO _ , _) -> return obj
Just obj@(EnumCO _ _, _) -> return obj
Just _ -> unexpectedTypedefErr (posOf ide)
Nothing -> unknownObjErr ide
* if the second argument is ' True ' , use ' findObjShadow '
findFunObj :: Ident -> Bool -> CT s (CObj, Ident)
findFunObj ide useShadows =
do
(obj, ide') <- findValueObj ide useShadows
case obj of
EnumCO _ _ -> funExpectedErr (posOf ide)
ObjCO decl -> do
let declr = ide' `declrFromDecl` decl
assertFunDeclr (posOf ide) declr
return (obj, ide')
isTypedef :: CDecl -> Bool
isTypedef (CDecl specs _ _) =
not . null $ [() | CStorageSpec (CTypedef _) <- specs]
simplifyDecl :: Ident -> CDecl -> CDecl
ide `simplifyDecl` (CDecl specs declrs at) =
case find (`declrPlusNamed` ide) declrs of
Nothing -> err
Just declr -> CDecl specs [declr] at
where
(Just declr, _, _) `declrPlusNamed` ide' = declr `declrNamed` ide'
_ `declrPlusNamed` _ = False
err = interr $ "CTrav.simplifyDecl: Wrong C object!\n\
\ Looking for `" ++ identToString ide ++ "' in decl \
\at " ++ show (posOf at)
declrFromDecl :: Ident -> CDecl -> CDeclr
ide `declrFromDecl` decl =
let CDecl _ [(Just declr, _, _)] _ = ide `simplifyDecl` decl
in
declr
declrNamed :: CDeclr -> Ident -> Bool
declr `declrNamed` ide = declrName declr == Just ide
| get the declarator of a declaration that has at most one declarator
declaredDeclr :: CDecl -> Maybe CDeclr
declaredDeclr (CDecl _ [] _) = Nothing
declaredDeclr (CDecl _ [(odeclr, _, _)] _) = odeclr
declaredDeclr decl =
interr $ "CTrav.declaredDeclr: Too many declarators!\n\
\ Declaration at " ++ show (posOf decl)
| get the initialiser of a declaration that has at most one initialiser
initDeclr :: CDecl -> Maybe (CInitializer NodeInfo)
initDeclr (CDecl _ [] _) = Nothing
initDeclr (CDecl _ [(_, ini, _)] _) = ini
initDeclr decl =
interr $ "CTrav.initDeclr: Too many declarators!\n\
\ Declaration at " ++ show (posOf decl)
| get the name declared by a declaration that has exactly one declarator
declaredName :: CDecl -> Maybe Ident
declaredName decl = declaredDeclr decl >>= declrName
structMembers :: CStructUnion -> ([CDecl], CStructTag)
structMembers (CStruct tag _ members _ _) = (concat . map expandDecl $ maybe [] id members, tag)
| expand declarators declaring more than one identifier into multiple
declarators , e.g. ` int x , y ; ' becomes ` int x ; int y ; '
For case of a declarator that declares no identifier , preserve the no - identifier .
expandDecl :: CDecl -> [CDecl]
expandDecl decl@(CDecl _ [] _) =
expandDecl (CDecl specs decls at) =
map (\decl -> CDecl specs [decl] at) decls
structName :: CStructUnion -> Maybe Ident
structName (CStruct _ oide _ _ _) = oide
enumName :: CEnum -> Maybe Ident
enumName (CEnum oide _ _ _) = oide
tagName :: CTag -> Ident
tagName tag =
case tag of
StructUnionCT struct -> maybe err id $ structName struct
EnumCT enum -> maybe err id $ enumName enum
where
err = interr "CTrav.tagName: Anonymous tag definition"
isPtrDeclr :: CDeclr -> Bool
isPtrDeclr (CDeclr _ (CPtrDeclr _ _:_) _ _ _) = True
isPtrDeclr (CDeclr _ (CArrDeclr _ _ _:_) _ _ _) = True
isPtrDeclr _ = False
isArrDeclr :: CDeclr -> Maybe Int
isArrDeclr (CDeclr _ (CArrDeclr _ sz _:_) _ _ _) = Just $ szToInt sz
where szToInt (CArrSize _ (CConst (CIntConst s _))) =
fromIntegral $ getCInteger s
szToInt _ = 1
isArrDeclr _ = Nothing
| drops the first pointer level from the given declarator
dropPtrDeclr :: CDeclr -> CDeclr
dropPtrDeclr (CDeclr ide (outermost:derived) asm ats node) =
case outermost of
(CPtrDeclr _ _) -> CDeclr ide derived asm ats node
(CArrDeclr _ _ _) -> CDeclr ide derived asm ats node
_ -> interr "CTrav.dropPtrDeclr: No pointer!"
isPtrDecl :: CDecl -> Bool
isPtrDecl (CDecl _ [] _) = False
isPtrDecl (CDecl _ [(Just declr, _, _)] _) = isPtrDeclr declr
isPtrDecl _ =
interr "CTrav.isPtrDecl: There was more than one declarator!"
isArrDecl :: CDecl -> Maybe Int
isArrDecl (CDecl _ [] _) = Nothing
isArrDecl (CDecl _ [(Just declr, _, _)] _) = isArrDeclr declr
isArrDecl _ =
interr "CTrav.isArrDecl: There was more than one declarator!"
isFunDeclr :: CDeclr -> Bool
isFunDeclr (CDeclr _ (CFunDeclr _ _ _:_) _ _ _) = True
isFunDeclr _ = False
structFromDecl :: Position -> CDecl -> CT s CStructUnion
structFromDecl pos (CDecl specs _ _) =
case head [ts | CTypeSpec ts <- specs] of
CSUType su _ -> extractStruct pos (StructUnionCT su)
_ -> structExpectedErr pos
structFromDecl' :: Position -> CDecl -> CT s (Maybe CStructUnion)
structFromDecl' pos (CDecl specs _ _) =
case head [ts | CTypeSpec ts <- specs] of
CSUType su _ -> extractStruct' pos (StructUnionCT su)
_ -> structExpectedErr pos
funResultAndArgs :: CDecl -> ([CDecl], CDecl, Bool)
funResultAndArgs cdecl@(CDecl specs [(Just declr, _, _)] _) =
let (args, declr', variadic) = funArgs declr
result = CDecl specs [(Just declr', Nothing, Nothing)]
(newAttrsOnlyPos (posOf cdecl))
in
(args, result, variadic)
where
funArgs (CDeclr _ide derived _asm _ats node) =
case derived of
(CFunDeclr (Right (args,variadic)) _ats' _dnode : derived') ->
(args, CDeclr Nothing derived' Nothing [] node, variadic)
(CFunDeclr (Left _) _ _ : _) ->
interr "CTrav.funResultAndArgs: Old style function definition"
_ -> interr "CTrav.funResultAndArgs: Illegal declarator!"
* if ` ind = True ' , we have to hop over one indirection
chaseDecl :: Ident -> Bool -> CT s CDecl
chaseDecl ide ind =
do
traceEnter
cdecl <- getDeclOf ide
let sdecl = ide `simplifyDecl` cdecl
case extractAlias sdecl ind of
Just (ide', ind') -> chaseDecl ide' ind'
Nothing -> return sdecl
where
traceEnter = traceCTrav $
"Entering `chaseDecl' for `" ++ identToString ide
++ "' " ++ (if ind then "" else "not ")
++ "following indirections...\n"
* if the third argument is ' True ' , use ' findObjShadow '
findAndChaseDecl :: Ident -> Bool -> Bool -> CT s CDecl
findAndChaseDecl ide ind useShadows =
do
traceCTrav $ "findAndChaseDecl: " ++ show ide ++ " (" ++
show useShadows ++ ")\n"
ide `refersToNewDef` ObjCD obj
chaseDecl ide' ind
findAndChaseDeclOrTag :: Ident -> Bool -> Bool -> CT s CDecl
findAndChaseDeclOrTag ide ind useShadows =
do
traceCTrav $ "findAndChaseDeclOrTag: " ++ show ide ++ " (" ++
show useShadows ++ ")\n"
case mobjide of
Just (obj, ide') -> do
ide `refersToNewDef` ObjCD obj
chaseDecl ide' ind
Nothing -> do
otag <- if useShadows
then findTagShadow ide
else liftM (fmap (\tag -> (tag, ide))) $ findTag ide
case otag of
Just (StructUnionCT su, _) -> do
let (CStruct _ _ _ _ nodeinfo) = su
return $ CDecl [CTypeSpec (CSUType su nodeinfo)] [] nodeinfo
_ -> unknownObjErr ide
| given a declaration ( which must have exactly one declarator ) , if the
checkForAlias :: CDecl -> CT s (Maybe CDecl)
checkForAlias decl =
case extractAlias decl False of
Nothing -> return Nothing
Just (ide', _) -> liftM Just $ chaseDecl ide' False
| given a declaration ( which must have exactly one declarator ) , if the
checkForOneAliasName :: CDecl -> Maybe Ident
checkForOneAliasName decl = fmap fst $ extractAlias decl False
checkForOneCUName :: CDecl -> Maybe Ident
checkForOneCUName decl@(CDecl specs _ _) =
case [ts | CTypeSpec ts <- specs] of
[CSUType (CStruct _ n _ _ _) _] ->
case declaredDeclr decl of
Nothing -> n
_ -> Nothing
_ -> Nothing
* if the second argument is ' True ' , use ' findTagShadow '
lookupEnum :: Ident -> Bool -> CT s CEnum
lookupEnum ide useShadows =
do
otag <- if useShadows
then liftM (fmap fst) $ findTagShadow ide
else findTag ide
case otag of
oobj <- if useShadows
then liftM (fmap fst) $ findObjShadow ide
else findObj ide
case oobj of
(CDecl specs _ _) <- findAndChaseDecl ide False useShadows
case head [ts | CTypeSpec ts <- specs] of
CEnumType enum _ -> return enum
_ -> enumExpectedErr ide
searched first
* if the third argument is ` True ' , use ` findTagShadow '
lookupStructUnion :: Ident -> Bool -> Bool -> CT s CStructUnion
lookupStructUnion ide preferTag useShadows = do
traceCTrav $ "lookupStructUnion: ide=" ++ show ide ++ " preferTag=" ++
show preferTag ++ " useShadows=" ++ show useShadows ++ "\n"
otag <- if useShadows
then liftM (fmap fst) $ findTagShadow ide
else findTag ide
mobj <- if useShadows
then findObjShadow ide
else liftM (fmap (\obj -> (obj, ide))) $ findObj ide
let oobj = case mobj of
Just obj@(TypeCO{}, _) -> Just obj
Just obj@(BuiltinCO{}, _) -> Just obj
_ -> Nothing
case preferTag of
True -> case otag of
Just tag -> extractStruct (posOf ide) tag
Nothing -> do
decl <- findAndChaseDecl ide True useShadows
structFromDecl (posOf ide) decl
False -> case oobj of
Just _ -> do
decl <- findAndChaseDecl ide True useShadows
mres <- structFromDecl' (posOf ide) decl
case mres of
Just su -> return su
Nothing -> case otag of
Just tag -> extractStruct (posOf ide) tag
Nothing -> unknownObjErr ide
Nothing -> case otag of
Just tag -> extractStruct (posOf ide) tag
Nothing -> unknownObjErr ide
* if the second argument is ` True ' , look for shadows , too
lookupDeclOrTag :: Ident -> Bool -> CT s (Either CDecl CTag)
lookupDeclOrTag ide useShadows = do
oobj <- findTypeObjMaybeWith True ide useShadows
case oobj of
Just (_, ide') -> liftM Left $ findAndChaseDecl ide' False False
Nothing -> do
otag <- if useShadows
then liftM (fmap fst) $ findTagShadow ide
else findTag ide
case otag of
Nothing -> unknownObjErr ide
Just tag -> return $ Right tag
| if the given declaration ( which may have at most one declarator ) is a
* a ` typedef ' alias has one of the following forms
* this may be an anonymous declaration , i.e. the name in ` ' may be
extractAlias :: CDecl -> Bool -> Maybe (Ident, Bool)
extractAlias decl@(CDecl specs _ _) ind =
case [ts | CTypeSpec ts <- specs] of
case declaredDeclr decl of
Nothing -> Just (ide', ind)
one pointer indirection
| ind -> Just (ide', False)
| otherwise -> Nothing
_ -> Nothing
_ -> Nothing
extractStruct :: Position -> CTag -> CT s CStructUnion
extractStruct pos (EnumCT _ ) = structExpectedErr pos
extractStruct pos (StructUnionCT su) = do
traceCTrav $ "extractStruct: " ++ show su ++ "\n"
case su of
def <- getDefOf ide'
traceCTrav $ "def=" ++ show def ++ "\n"
case def of
TagCD tag -> extractStruct pos tag
UndefCD -> incompleteTypeErr pos
bad_obj -> err ide' bad_obj
_ -> return su
where
err ide bad_obj =
do interr $ "CTrav.extractStruct: Illegal reference! Expected " ++ dumpIdent ide ++
" to link to TagCD but refers to "++ (show bad_obj) ++ "\n"
extractStruct' :: Position -> CTag -> CT s (Maybe CStructUnion)
extractStruct' pos (EnumCT _ ) = structExpectedErr pos
extractStruct' pos (StructUnionCT su) = do
traceCTrav $ "extractStruct': " ++ show su ++ "\n"
case su of
CStruct _ (Just ide') Nothing _ _ -> do
def <- getDefOf ide'
traceCTrav $ "def=" ++ show def ++ "\n"
case def of
TagCD tag -> do
res <- extractStruct pos tag
return . Just $ res
_ -> return Nothing
_ -> return . Just $ su
declrName :: CDeclr -> Maybe Ident
declrName (CDeclr oide _ _ _ _) = oide
assertFunDeclr :: Position -> CDeclr -> CT s ()
assertFunDeclr pos (CDeclr _ (CFunDeclr _ _ _:retderiv) _ _ _) =
case retderiv of
(CArrDeclr _ _ _:_) -> illegalFunResultErr pos
assertFunDeclr pos _ =
funExpectedErr pos
traceCTrav :: String -> CT s ()
traceCTrav = putTraceStr traceCTravSW
unknownObjErr :: Ident -> CT s a
unknownObjErr ide =
raiseErrorCTExc (posOf ide)
["Unknown identifier!",
"Cannot find a definition for `" ++ identToString ide ++ "' in the \
\header file."]
typedefExpectedErr :: Ident -> CT s a
typedefExpectedErr ide =
raiseErrorCTExc (posOf ide)
["Expected type definition!",
"The identifier `" ++ identToString ide ++ "' needs to be a C type name."]
unexpectedTypedefErr :: Position -> CT s a
unexpectedTypedefErr pos =
raiseErrorCTExc pos
["Unexpected type name!",
"An object, function, or enum constant is required here."]
illegalFunResultErr :: Position -> CT s a
illegalFunResultErr pos =
raiseErrorCTExc pos ["Function cannot return an array!",
"ANSI C does not allow functions to return an array."]
funExpectedErr :: Position -> CT s a
funExpectedErr pos =
raiseErrorCTExc pos
["Function expected!",
"A function is needed here, but this declarator does not declare",
"a function."]
enumExpectedErr :: Ident -> CT s a
enumExpectedErr ide =
raiseErrorCTExc (posOf ide)
["Expected enum!",
"Expected `" ++ identToString ide ++ "' to denote an enum; instead found",
"a struct, union, or object."]
structExpectedErr :: Position -> CT s a
structExpectedErr pos =
raiseErrorCTExc pos
["Expected a struct!",
"Expected a structure or union; instead found an enum or basic type."]
incompleteTypeErr :: Position -> CT s a
incompleteTypeErr pos =
raiseErrorCTExc pos
["Illegal use of incomplete type!",
"Expected a fully defined structure or union tag; instead found incomplete type."]
|
920485ff73f933fdb86e6f965bba59cc4617155c78bc3c4cf7eab8fb3455014a | justinethier/nugget | 00-simplest.scm | (+ 2 1)
| null | https://raw.githubusercontent.com/justinethier/nugget/0c4e3e9944684ea83191671d58b5c8c342f64343/tests/00-simplest.scm | scheme | (+ 2 1)
| |
32b95c6226f8f64b3bd2945c97dd2f64160373c5d2b6e1a63c0bf9931b3d0390 | Gopiandcode/LibreRef | utils.ml | (* * License *)
LibreRef is a free as in freedom digital referencing tool for artists .
Copyright ( C ) < 2021 > < Kiran >
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Also add information on how to contact you by electronic and paper mail .
If your software can interact with users remotely through a computer
network , you should also make sure that it provides a way for users to
get its source . For example , if your program is a web application , its
interface could display a " Source " link that leads users to an archive
of the code . There are many ways you could offer source , and different
solutions will be better for different programs ; see section 13 for the
specific requirements .
You should also get your employer ( if you work as a programmer ) or school ,
if any , to sign a " copyright disclaimer " for the program , if necessary .
For more information on this , and how to apply and follow the GNU AGPL , see
< / > .
LibreRef is a free as in freedom digital referencing tool for artists.
Copyright (C) <2021> <Kiran Gopinathan>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
</>.
*)
(* * Definition *)
let string_to_stb_image str =
let arr = Bigarray.(Array1.create char c_layout (String.length str)) in
String.iteri (fun i c -> arr.{i} <- c) str;
Stb_image.decode arr
let pixbuf_to_stb_image pixbuf =
let width = GdkPixbuf.get_width pixbuf
and height = GdkPixbuf.get_height pixbuf
and channels = GdkPixbuf.get_n_channels pixbuf in
let buffer = Buffer.create (width * height * channels) in
GdkPixbuf.save_to_buffer pixbuf ~typ:"png" buffer;
Buffer.to_bytes buffer |> Bytes.to_string |> string_to_stb_image
let stb_buffer_to_bigarray ?(rearrange_alpha=false) image =
let pad_image_data ?default from to_ data =
let open Bigarray in
let old_data_byte_size = Array1.dim data in
let old_array_size = old_data_byte_size / from in
let new_array_byte_size = old_array_size * to_ in
let new_data = Array1.create int8_unsigned c_layout new_array_byte_size in
let new_pos = ref 0 in
let old_pos = ref 0 in
for _ = 1 to old_array_size do
let fv = data.{!old_pos} in
for offset = 0 to from - 1 do
new_data.{!new_pos + offset} <- data.{!old_pos + from - offset - 1};
done;
for _ = 0 to from - 1 do
incr new_pos; incr old_pos
done;
begin match default with
| None ->
for _ = 1 to to_ - from do
new_data.{!new_pos} <- fv;
incr new_pos
done
| Some deflt ->
for _ = 1 to to_ - from do
new_data.{!new_pos} <- deflt;
incr new_pos
done
end;
done;
new_data in
let rearrange_alpha_data data =
let open Bigarray in
let data_byte_size = Array1.dim data in
let data_size = data_byte_size / 4 in
let new_data = Array1.create int8_unsigned c_layout data_byte_size in
let new_pos = ref 0 in
let old_pos = ref 0 in
for _ = 1 to data_size do
new_data.{!new_pos + 0} <- data.{!old_pos + 2};
new_data.{!new_pos + 1} <- data.{!old_pos + 1};
new_data.{!new_pos + 2} <- data.{!old_pos + 0};
new_data.{!new_pos + 3} <- data.{!old_pos + 3};
for _ = 0 to 3 do
incr new_pos; incr old_pos
done;
done;
new_data in
let channels = Stb_image.channels image in
let data = match channels with
| 4 when rearrange_alpha -> rearrange_alpha_data (Stb_image.data image)
| 4 -> Stb_image.data image
| n when n > 0 && n <= 3 ->
pad_image_data n 4 (Stb_image.data image)
| n ->
invalid_arg (Printf.sprintf "image has an unsupported number of channels %d" n) in
data
let stb_buffer_to_cairo_surface image =
let w = Stb_image.width image and h = Stb_image.height image in
let channels = Stb_image.channels image in
let data = stb_buffer_to_bigarray ~rearrange_alpha:true image in
let format = match channels with 4 -> Cairo.Image.ARGB32 | _ -> Cairo.Image.RGB24 in
~stride:(w * 4 )
img
| null | https://raw.githubusercontent.com/Gopiandcode/LibreRef/bf6d35e1838e63e30e9fc2455d567cffc84a7323/utils.ml | ocaml | * License
* Definition |
LibreRef is a free as in freedom digital referencing tool for artists .
Copyright ( C ) < 2021 > < Kiran >
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Also add information on how to contact you by electronic and paper mail .
If your software can interact with users remotely through a computer
network , you should also make sure that it provides a way for users to
get its source . For example , if your program is a web application , its
interface could display a " Source " link that leads users to an archive
of the code . There are many ways you could offer source , and different
solutions will be better for different programs ; see section 13 for the
specific requirements .
You should also get your employer ( if you work as a programmer ) or school ,
if any , to sign a " copyright disclaimer " for the program , if necessary .
For more information on this , and how to apply and follow the GNU AGPL , see
< / > .
LibreRef is a free as in freedom digital referencing tool for artists.
Copyright (C) <2021> <Kiran Gopinathan>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
</>.
*)
let string_to_stb_image str =
let arr = Bigarray.(Array1.create char c_layout (String.length str)) in
String.iteri (fun i c -> arr.{i} <- c) str;
Stb_image.decode arr
let pixbuf_to_stb_image pixbuf =
let width = GdkPixbuf.get_width pixbuf
and height = GdkPixbuf.get_height pixbuf
and channels = GdkPixbuf.get_n_channels pixbuf in
let buffer = Buffer.create (width * height * channels) in
GdkPixbuf.save_to_buffer pixbuf ~typ:"png" buffer;
Buffer.to_bytes buffer |> Bytes.to_string |> string_to_stb_image
let stb_buffer_to_bigarray ?(rearrange_alpha=false) image =
let pad_image_data ?default from to_ data =
let open Bigarray in
let old_data_byte_size = Array1.dim data in
let old_array_size = old_data_byte_size / from in
let new_array_byte_size = old_array_size * to_ in
let new_data = Array1.create int8_unsigned c_layout new_array_byte_size in
let new_pos = ref 0 in
let old_pos = ref 0 in
for _ = 1 to old_array_size do
let fv = data.{!old_pos} in
for offset = 0 to from - 1 do
new_data.{!new_pos + offset} <- data.{!old_pos + from - offset - 1};
done;
for _ = 0 to from - 1 do
incr new_pos; incr old_pos
done;
begin match default with
| None ->
for _ = 1 to to_ - from do
new_data.{!new_pos} <- fv;
incr new_pos
done
| Some deflt ->
for _ = 1 to to_ - from do
new_data.{!new_pos} <- deflt;
incr new_pos
done
end;
done;
new_data in
let rearrange_alpha_data data =
let open Bigarray in
let data_byte_size = Array1.dim data in
let data_size = data_byte_size / 4 in
let new_data = Array1.create int8_unsigned c_layout data_byte_size in
let new_pos = ref 0 in
let old_pos = ref 0 in
for _ = 1 to data_size do
new_data.{!new_pos + 0} <- data.{!old_pos + 2};
new_data.{!new_pos + 1} <- data.{!old_pos + 1};
new_data.{!new_pos + 2} <- data.{!old_pos + 0};
new_data.{!new_pos + 3} <- data.{!old_pos + 3};
for _ = 0 to 3 do
incr new_pos; incr old_pos
done;
done;
new_data in
let channels = Stb_image.channels image in
let data = match channels with
| 4 when rearrange_alpha -> rearrange_alpha_data (Stb_image.data image)
| 4 -> Stb_image.data image
| n when n > 0 && n <= 3 ->
pad_image_data n 4 (Stb_image.data image)
| n ->
invalid_arg (Printf.sprintf "image has an unsupported number of channels %d" n) in
data
let stb_buffer_to_cairo_surface image =
let w = Stb_image.width image and h = Stb_image.height image in
let channels = Stb_image.channels image in
let data = stb_buffer_to_bigarray ~rearrange_alpha:true image in
let format = match channels with 4 -> Cairo.Image.ARGB32 | _ -> Cairo.Image.RGB24 in
~stride:(w * 4 )
img
|
bfac73886ac567715f1fef65d69579aadd646f385f2d65f08fc9bb8bba1b15ff | full-spectrum/neo4clj | client.clj | (ns neo4clj.client
(:require [clojure.string :as str]
[neo4clj.cypher :as cypher]
[neo4clj.java-interop :as java-interop]
[neo4clj.query-builder :as builder])
(:import [org.neo4j.driver Driver Session QueryRunner SessionConfig Transaction TransactionWork]))
(defrecord Connection [^Driver driver ^String database])
(defn connect
"Connect through bolt to the given neo4j server
Supports the current options:
:log :level [:all :error :warn :info :off] - defaults to :warn
:encryption [:required :none] - defaults to :required
:database - defaults to nil"
(^Connection [^String url]
(Connection. (java-interop/connect url) nil))
(^Connection [^String url ^clojure.lang.IPersistentMap opts]
(Connection. (java-interop/connect url (java-interop/build-config opts)) (:database opts)))
(^Connection [^String url ^String usr ^String pwd]
(Connection. (java-interop/connect url usr pwd) nil))
(^Connection [^String url ^String usr ^String pwd ^clojure.lang.IPersistentMap opts]
(Connection. (java-interop/connect url usr pwd (java-interop/build-config opts)) (:database opts))))
(defn disconnect
"Disconnect the given connection"
[^Connection conn]
(.close ^Driver (:driver conn)))
(defn create-session
"Create a new session on the given Neo4J connection"
(^Session [^Connection {:keys [driver database] :as conn}]
(assert (instance? Driver driver) "Neo4J driver not provided - check DB connection.")
(if database
(.session ^Driver driver (SessionConfig/forDatabase database))
(.session ^Driver driver))))
(defmacro with-session
"Creates a session with the given name on the given connection and executes the body
within the session.
The session can be used with the given name in the rest of the body."
[^Connection conn session & body]
`(with-open [~session (create-session ~conn)]
~@body))
(defn begin-transaction
"Start a new transaction on the given Neo4J session"
^Transaction [^Session session]
(.beginTransaction session))
(defn commit!
"Commits the given transaction"
[^Transaction trans]
(.commit trans))
(defn rollback
"Rolls the given transaction back"
[^Transaction trans]
(.rollback trans))
(defmacro with-transaction
"Create a transaction with given name on the given connection execute the body
within the transaction.
The transaction can be used with the given name in the rest of the body."
[^Connection conn trans & body]
`(with-open [~trans (begin-transaction (create-session ~conn))]
(try
~@body
(catch Exception e#
(rollback ~trans)
(throw e#))
(finally (commit! ~trans)))))
(defmulti execute!
"Execute the given query on the specified connection with optional parameters"
(fn [conn & args] (class conn)))
(defmethod execute! Connection
([^Connection conn ^String query]
(execute! conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-open [session (create-session conn)]
(java-interop/execute session query params))))
(defmethod execute! QueryRunner
([^QueryRunner runner ^String query]
(java-interop/execute runner query))
([^QueryRunner runner ^String query ^clojure.lang.IPersistentMap params]
(java-interop/execute runner query params)))
(defmacro with-read-only-conn
"Create a managed read transaction with the name given as runner-alias and execute the
body within the transaction.
The runner-alias given for the transaction can be used within the body."
[^Connection conn runner-alias & body]
`(with-open [session# (create-session ~conn)]
(.readTransaction
session#
(proxy [TransactionWork] []
(execute [~runner-alias]
~@body)))))
(defn execute-read
([^Connection conn ^String query]
(execute-read conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-read-only-conn conn tx
(java-interop/execute tx query params))))
(defmacro with-write-conn
"Create a managed write transaction with the name given as runner-alias and execute the
body within the transaction.
The runner-alias given for the transaction can be used within the body."
[^Connection conn runner-alias & body]
`(with-open [session# (create-session ~conn)]
(.writeTransaction
session#
(proxy [TransactionWork] []
(execute [~runner-alias]
~@body)))))
(defn execute-write!
([^Connection conn ^String query]
(execute-write! conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-write-conn conn tx
(java-interop/execute tx query params))))
(defn create-index!
"Creates an index on the given combination and properties"
[runner alias label prop-keys]
(execute! runner (builder/create-index-query alias label prop-keys)))
(defn drop-index!
"Delete an index on the given combination and properties"
[runner alias]
(execute! runner (builder/drop-index-query alias)))
(defn create-from-builder!
"Helper function to execute a specific query builder string and return the results"
[runner ^clojure.lang.APersistentMap entity builder]
(let [ref-id (or (:ref-id entity) (cypher/gen-ref-id))]
(-> (execute! runner (builder (assoc entity :ref-id ref-id) true))
first
(get ref-id)
(assoc :ref-id ref-id))))
(defn create-node!
"Create a node based on the given Node representation"
[runner node]
(create-from-builder! runner node builder/create-node-query))
(defn create-rel!
"Create a relationship based on the given Relationship representation"
[runner rel]
(create-from-builder! runner rel builder/create-rel-query))
(defn find-node
"Takes a Node Lookup representation and returns a single matching node"
[runner ^clojure.lang.APersistentMap {:keys [ref-id] :as node}]
(->>
(str (builder/lookup-node node true) " LIMIT 1")
(execute! runner)
(map #(get % ref-id))
first))
(defn find-nodes
"Takes a Node Lookup representation and returns all matching nodes"
[runner ^clojure.lang.APersistentMap node]
(->>
(builder/lookup-node node true)
(execute! runner)
(map #(get % (:ref-id node)))))
(defn find-rel
"Takes a Relationship representation and returns a single matching relationship"
[runner ^clojure.lang.APersistentMap rel]
(->>
(str (builder/lookup-rel rel true) " LIMIT 1")
(execute! runner)
(map #(get % (:ref-id rel)))
first))
(defn find-rels
"Takes a Relationship representation and returns all matching relationships"
[runner ^clojure.lang.APersistentMap rel]
(->>
(builder/lookup-rel rel true)
(execute! runner)
(map #(get % (:ref-id rel)))))
(defn create-graph!
"Optimized function to create a whole graph within a transaction
Format of the graph is:
:lookups - collection of neo4j Node Lookup representations
:nodes - collection of neo4j Node representations
:rels - collection of neo4j Relationship representations
:returns - collection of aliases to return from query"
[runner ^clojure.lang.APersistentMap graph]
(execute! runner (builder/create-graph-query graph)))
(defn get-graph
"Lookups the nodes based on given relationships and returns specified entities
Format of the graph is:
:nodes - collection of neo4j Node representations
:rels - collection of neo4j Relationship representations
:returns - collection of aliases to return from query"
[runner ^clojure.lang.APersistentMap graph]
(execute! runner (builder/get-graph-query graph)))
(defn add-labels!
"Takes a collection of labels and adds them to the found neo4j nodes"
[runner
^clojure.lang.APersistentMap neo4j-node
^clojure.lang.APersistentVector labels]
(execute! runner (builder/modify-labels-query "SET" neo4j-node labels)))
(defn remove-labels!
"Takes a collection of labels and removes them from found neo4j nodes"
[runner
^clojure.lang.APersistentMap neo4j-node
^clojure.lang.APersistentVector labels]
(execute! runner (builder/modify-labels-query "REMOVE" neo4j-node labels)))
(defn update-props!
"Takes a property map and updates the found neo4j objects with it based on the
following rules:
Keys existing only in the given property map is added to the object
Keys existing only in the property map on the found object is kept as is
Keys existing in both property maps are updated with values from the given property map"
[runner
^clojure.lang.APersistentMap neo4j-entity
^clojure.lang.APersistentMap props]
(execute! runner (builder/modify-properties-query "+=" neo4j-entity props)))
(defn replace-props!
"Takes a property map and replaces the properties on all found neo4j objects with it"
[runner
^clojure.lang.APersistentMap neo4j-entity
^clojure.lang.APersistentMap props]
(execute! runner (builder/modify-properties-query "=" neo4j-entity props)))
(defn delete-node!
"Takes a neo4j node representation and deletes nodes found based on it"
[runner ^clojure.lang.APersistentMap neo4j-node]
(execute! runner (builder/delete-node neo4j-node)))
(defn delete-rel!
"Takes a neo4j relationship representation and deletes relationships found based on it"
[runner ^clojure.lang.APersistentMap neo4j-rel]
(execute! runner (builder/delete-rel neo4j-rel)))
(defn create-query
"Takes a cypher query as input and returns a anonymous function that
takes a query runner and return the query result as a map.
The function can also take a optional map of parameters used to replace params in the query string.
This functions can be used together with parameters to ensure better cached queries in Neo4j."
[query]
(fn
([runner] (execute! runner query))
([runner params] (execute! runner query params))))
| null | https://raw.githubusercontent.com/full-spectrum/neo4clj/7dd2466da27cbeccd5494efe4ff20307d83d1153/neo4clj-core/src/neo4clj/client.clj | clojure | (ns neo4clj.client
(:require [clojure.string :as str]
[neo4clj.cypher :as cypher]
[neo4clj.java-interop :as java-interop]
[neo4clj.query-builder :as builder])
(:import [org.neo4j.driver Driver Session QueryRunner SessionConfig Transaction TransactionWork]))
(defrecord Connection [^Driver driver ^String database])
(defn connect
"Connect through bolt to the given neo4j server
Supports the current options:
:log :level [:all :error :warn :info :off] - defaults to :warn
:encryption [:required :none] - defaults to :required
:database - defaults to nil"
(^Connection [^String url]
(Connection. (java-interop/connect url) nil))
(^Connection [^String url ^clojure.lang.IPersistentMap opts]
(Connection. (java-interop/connect url (java-interop/build-config opts)) (:database opts)))
(^Connection [^String url ^String usr ^String pwd]
(Connection. (java-interop/connect url usr pwd) nil))
(^Connection [^String url ^String usr ^String pwd ^clojure.lang.IPersistentMap opts]
(Connection. (java-interop/connect url usr pwd (java-interop/build-config opts)) (:database opts))))
(defn disconnect
"Disconnect the given connection"
[^Connection conn]
(.close ^Driver (:driver conn)))
(defn create-session
"Create a new session on the given Neo4J connection"
(^Session [^Connection {:keys [driver database] :as conn}]
(assert (instance? Driver driver) "Neo4J driver not provided - check DB connection.")
(if database
(.session ^Driver driver (SessionConfig/forDatabase database))
(.session ^Driver driver))))
(defmacro with-session
"Creates a session with the given name on the given connection and executes the body
within the session.
The session can be used with the given name in the rest of the body."
[^Connection conn session & body]
`(with-open [~session (create-session ~conn)]
~@body))
(defn begin-transaction
"Start a new transaction on the given Neo4J session"
^Transaction [^Session session]
(.beginTransaction session))
(defn commit!
"Commits the given transaction"
[^Transaction trans]
(.commit trans))
(defn rollback
"Rolls the given transaction back"
[^Transaction trans]
(.rollback trans))
(defmacro with-transaction
"Create a transaction with given name on the given connection execute the body
within the transaction.
The transaction can be used with the given name in the rest of the body."
[^Connection conn trans & body]
`(with-open [~trans (begin-transaction (create-session ~conn))]
(try
~@body
(catch Exception e#
(rollback ~trans)
(throw e#))
(finally (commit! ~trans)))))
(defmulti execute!
"Execute the given query on the specified connection with optional parameters"
(fn [conn & args] (class conn)))
(defmethod execute! Connection
([^Connection conn ^String query]
(execute! conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-open [session (create-session conn)]
(java-interop/execute session query params))))
(defmethod execute! QueryRunner
([^QueryRunner runner ^String query]
(java-interop/execute runner query))
([^QueryRunner runner ^String query ^clojure.lang.IPersistentMap params]
(java-interop/execute runner query params)))
(defmacro with-read-only-conn
"Create a managed read transaction with the name given as runner-alias and execute the
body within the transaction.
The runner-alias given for the transaction can be used within the body."
[^Connection conn runner-alias & body]
`(with-open [session# (create-session ~conn)]
(.readTransaction
session#
(proxy [TransactionWork] []
(execute [~runner-alias]
~@body)))))
(defn execute-read
([^Connection conn ^String query]
(execute-read conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-read-only-conn conn tx
(java-interop/execute tx query params))))
(defmacro with-write-conn
"Create a managed write transaction with the name given as runner-alias and execute the
body within the transaction.
The runner-alias given for the transaction can be used within the body."
[^Connection conn runner-alias & body]
`(with-open [session# (create-session ~conn)]
(.writeTransaction
session#
(proxy [TransactionWork] []
(execute [~runner-alias]
~@body)))))
(defn execute-write!
([^Connection conn ^String query]
(execute-write! conn query {}))
([^Connection conn ^String query ^clojure.lang.IPersistentMap params]
(with-write-conn conn tx
(java-interop/execute tx query params))))
(defn create-index!
"Creates an index on the given combination and properties"
[runner alias label prop-keys]
(execute! runner (builder/create-index-query alias label prop-keys)))
(defn drop-index!
"Delete an index on the given combination and properties"
[runner alias]
(execute! runner (builder/drop-index-query alias)))
(defn create-from-builder!
"Helper function to execute a specific query builder string and return the results"
[runner ^clojure.lang.APersistentMap entity builder]
(let [ref-id (or (:ref-id entity) (cypher/gen-ref-id))]
(-> (execute! runner (builder (assoc entity :ref-id ref-id) true))
first
(get ref-id)
(assoc :ref-id ref-id))))
(defn create-node!
"Create a node based on the given Node representation"
[runner node]
(create-from-builder! runner node builder/create-node-query))
(defn create-rel!
"Create a relationship based on the given Relationship representation"
[runner rel]
(create-from-builder! runner rel builder/create-rel-query))
(defn find-node
"Takes a Node Lookup representation and returns a single matching node"
[runner ^clojure.lang.APersistentMap {:keys [ref-id] :as node}]
(->>
(str (builder/lookup-node node true) " LIMIT 1")
(execute! runner)
(map #(get % ref-id))
first))
(defn find-nodes
"Takes a Node Lookup representation and returns all matching nodes"
[runner ^clojure.lang.APersistentMap node]
(->>
(builder/lookup-node node true)
(execute! runner)
(map #(get % (:ref-id node)))))
(defn find-rel
"Takes a Relationship representation and returns a single matching relationship"
[runner ^clojure.lang.APersistentMap rel]
(->>
(str (builder/lookup-rel rel true) " LIMIT 1")
(execute! runner)
(map #(get % (:ref-id rel)))
first))
(defn find-rels
"Takes a Relationship representation and returns all matching relationships"
[runner ^clojure.lang.APersistentMap rel]
(->>
(builder/lookup-rel rel true)
(execute! runner)
(map #(get % (:ref-id rel)))))
(defn create-graph!
"Optimized function to create a whole graph within a transaction
Format of the graph is:
:lookups - collection of neo4j Node Lookup representations
:nodes - collection of neo4j Node representations
:rels - collection of neo4j Relationship representations
:returns - collection of aliases to return from query"
[runner ^clojure.lang.APersistentMap graph]
(execute! runner (builder/create-graph-query graph)))
(defn get-graph
"Lookups the nodes based on given relationships and returns specified entities
Format of the graph is:
:nodes - collection of neo4j Node representations
:rels - collection of neo4j Relationship representations
:returns - collection of aliases to return from query"
[runner ^clojure.lang.APersistentMap graph]
(execute! runner (builder/get-graph-query graph)))
(defn add-labels!
"Takes a collection of labels and adds them to the found neo4j nodes"
[runner
^clojure.lang.APersistentMap neo4j-node
^clojure.lang.APersistentVector labels]
(execute! runner (builder/modify-labels-query "SET" neo4j-node labels)))
(defn remove-labels!
"Takes a collection of labels and removes them from found neo4j nodes"
[runner
^clojure.lang.APersistentMap neo4j-node
^clojure.lang.APersistentVector labels]
(execute! runner (builder/modify-labels-query "REMOVE" neo4j-node labels)))
(defn update-props!
"Takes a property map and updates the found neo4j objects with it based on the
following rules:
Keys existing only in the given property map is added to the object
Keys existing only in the property map on the found object is kept as is
Keys existing in both property maps are updated with values from the given property map"
[runner
^clojure.lang.APersistentMap neo4j-entity
^clojure.lang.APersistentMap props]
(execute! runner (builder/modify-properties-query "+=" neo4j-entity props)))
(defn replace-props!
"Takes a property map and replaces the properties on all found neo4j objects with it"
[runner
^clojure.lang.APersistentMap neo4j-entity
^clojure.lang.APersistentMap props]
(execute! runner (builder/modify-properties-query "=" neo4j-entity props)))
(defn delete-node!
"Takes a neo4j node representation and deletes nodes found based on it"
[runner ^clojure.lang.APersistentMap neo4j-node]
(execute! runner (builder/delete-node neo4j-node)))
(defn delete-rel!
"Takes a neo4j relationship representation and deletes relationships found based on it"
[runner ^clojure.lang.APersistentMap neo4j-rel]
(execute! runner (builder/delete-rel neo4j-rel)))
(defn create-query
"Takes a cypher query as input and returns a anonymous function that
takes a query runner and return the query result as a map.
The function can also take a optional map of parameters used to replace params in the query string.
This functions can be used together with parameters to ensure better cached queries in Neo4j."
[query]
(fn
([runner] (execute! runner query))
([runner params] (execute! runner query params))))
| |
8253a00f85ca7facdf09c51d4f4cc7e0dd1aa7bc5ea2c80dd44fd42d037a253e | robert-strandh/SICL | defpackage-defmacro.lisp | (cl:in-package #:sicl-package)
(defun check-defpackage-option (option)
(unless (consp option)
(error 'defpackage-option-must-be-a-non-empty-list
:option option))
(unless (cleavir-code-utilities:proper-list-p option)
(error 'defpackage-option-must-be-a-proper-list
:option option))
(unless (member (first option)
'(:nicknames :documentation :use :shadow :import :export
:shadowing-import-from :import-from :intern :size
:local-nicknames))
(error 'unknown-defpackage-option-name
:option-name (first option)))
(destructuring-bind (option-name . arguments)
option
(case option-name
(:nicknames
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'package-nickname-must-be-a-string-designator
:nickname argument)))
(:local-nicknames
(loop for argument in arguments
unless (consp argument)
do (error 'package-local-nickname-argument-must-be-cons
:argument argument)
unless (proper-list-p argument)
do (error 'package-local-nickname-argument-must-be-proper-list
:argument argument)
unless (= (length argument) 2)
do (error 'package-local-nickname-argument-must-have-length-2
:argument argument)
unless (typep (first argument) 'string-designator)
do (error 'package-local-nickname-must-be-a-string-designator
:nickname (first argument))
unless (typep (second argument) 'package-designator)
do (error 'package-local-nickname-package-must-be-a-package-designator
:package-name (second argument))))
(:documentation
(when (null arguments)
(error 'package-documentation-option-requires-an-argument
:option option))
(unless (null (rest arguments))
(error 'package-documentation-option-requres-a-single-argument
:arguments arguments))
(unless (stringp (first arguments))
(error 'package-documentation-must-be-a-string
:documentation (first arguments))))
(:use
(loop for argument in arguments
unless (typep argument 'package-designator)
do (error 'package-use-argument-must-be-a-package-designator
:package-name argument)))
(:shadow
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'shadowed-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:shadowing-import-from
(unless (consp arguments)
(error 'shadowing-import-from-option-must-have-a-package-argument
:option option))
(unless (typep (first arguments) 'package-designator)
(error 'shadowing-import-from-package-name-must-be-a-package-designator
:package-name (first arguments)))
(loop for argument in (rest arguments)
unless (typep argument 'string-designator)
do (error 'shadowed-symbol-must-be-a-string-designator
:symbol-name argument)))
(:import-from
(unless (consp arguments)
(error 'import-from-option-must-have-a-package-argument
:option option))
(unless (typep (first arguments) 'package-designator)
(error 'import-from-package-name-must-be-a-package-designator
:package-name (first arguments)))
(loop for argument in (rest arguments)
unless (typep argument 'string-designator)
do (error 'imported-symbol-must-be-a-string-designator
:symbol-name argument)))
(:export
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'exported-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:intern
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'interned-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:size
(when (null arguments)
(error 'package-size-option-requires-an-argument
:option option))
(unless (null (rest arguments))
(error 'package-size-option-requres-a-single-argument
:arguments arguments))
(unless (integerp (first arguments))
(error 'package-size-must-be-a-string
:size (first arguments)))))))
(defun check-defpackage-options (options)
;; We start by checking that the contents of each option is valid in
;; that it is well formed, that it is a valid option name, and that
;; the option arguments are valid for that kind of option type.
(loop for option in options
do (check-defpackage-option option))
Next , we check that the restrictions on the number of options of
;; a certain type are respected.
(when (> (count :documentation options :key #'car) 1)
(error 'package-documentation-option-may-occur-at-most-once
:options options))
(when (> (count :size options :key #'car) 1)
(error 'package-size-option-may-occur-at-most-once
:options options)))
;;; For options that can occur more than once, return a list of all
;;; the arguments of all the occurrences.
(defun group-options (name options)
(loop for (option-name . arguments) in options
when (eq name option-name)
append arguments))
(defun gather-nicknames (options)
(mapcar #'string
(group-options :nicknames options)))
(defun gather-local-nicknames (options)
(mapcar #'string
(group-options :local-nicknames options)))
(defun make-shadowing-imports (options package-var)
(loop for (option-name . arguments) in options
when (eq option-name :shadowing-import-from)
collect (let ((from-package-var (gensym)))
`(let ((,from-package-var (find-package ,(string (first arguments)))))
(shadowing-import
(list ,@(loop for symbol-name in (rest arguments)
collect `(find-symbol ,(string symbol-name)
,from-package-var)))
,package-var)))))
(defun make-imports (options package-var)
(loop for (option-name . arguments) in options
when (eq option-name :import-from)
collect (let ((from-package-var (gensym)))
`(let ((,from-package-var (find-package ,(string (first arguments)))))
(import
(list ,@(loop for symbol-name in (rest arguments)
collect `(find-symbol ,(string symbol-name)
,from-package-var)))
,package-var)))))
(defun make-shadow (options package-var)
`(shadow
(list ,@(loop for symbol-name in (group-options :shadow options)
collect `(make-instance 'symbol
:name ,(string symbol-name)
:package ,package-var)))
,package-var))
(defun make-use (options package-var)
`(use-package
(list ,@(loop for name in (group-options :use options)
collect `(find-package ,(string name))))
,package-var))
(defun make-intern (options package-var)
`(intern
(list ,@(loop for name in (group-options :intern options)
collect (string name)))
,package-var))
(defun make-export (options package-var)
`(export
(list ,@(loop for name in (group-options :use options)
collect `(find-symbol ,(string name) ,package-var)))
,package-var))
(defun defpackage-expander (name options)
(check-defpackage-options options)
(let ((package-var (gensym)))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(let* ((,package-var (find-package ,(string name)))
(,package-var
(if (null ,package-var)
(make-instance 'package
:name ,(string name)
:nicknames ',(gather-nicknames options)
:local-nicknames ',(gather-local-nicknames options))
,package-var)))
(setf (%find-package ',(string name))
,package-var)
,@(make-shadowing-imports options package-var)
,(make-shadow options package-var)
,(make-use options package-var)
,@(make-imports options package-var)
,(make-intern options package-var)
,(make-export options package-var)))))
(defmacro defpackage (name &rest options)
(defpackage-expander name options))
| null | https://raw.githubusercontent.com/robert-strandh/SICL/b2c3e52fe4c9ce382b35e22e30003d6bf5d928fa/Code/Package/defpackage-defmacro.lisp | lisp | We start by checking that the contents of each option is valid in
that it is well formed, that it is a valid option name, and that
the option arguments are valid for that kind of option type.
a certain type are respected.
For options that can occur more than once, return a list of all
the arguments of all the occurrences. | (cl:in-package #:sicl-package)
(defun check-defpackage-option (option)
(unless (consp option)
(error 'defpackage-option-must-be-a-non-empty-list
:option option))
(unless (cleavir-code-utilities:proper-list-p option)
(error 'defpackage-option-must-be-a-proper-list
:option option))
(unless (member (first option)
'(:nicknames :documentation :use :shadow :import :export
:shadowing-import-from :import-from :intern :size
:local-nicknames))
(error 'unknown-defpackage-option-name
:option-name (first option)))
(destructuring-bind (option-name . arguments)
option
(case option-name
(:nicknames
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'package-nickname-must-be-a-string-designator
:nickname argument)))
(:local-nicknames
(loop for argument in arguments
unless (consp argument)
do (error 'package-local-nickname-argument-must-be-cons
:argument argument)
unless (proper-list-p argument)
do (error 'package-local-nickname-argument-must-be-proper-list
:argument argument)
unless (= (length argument) 2)
do (error 'package-local-nickname-argument-must-have-length-2
:argument argument)
unless (typep (first argument) 'string-designator)
do (error 'package-local-nickname-must-be-a-string-designator
:nickname (first argument))
unless (typep (second argument) 'package-designator)
do (error 'package-local-nickname-package-must-be-a-package-designator
:package-name (second argument))))
(:documentation
(when (null arguments)
(error 'package-documentation-option-requires-an-argument
:option option))
(unless (null (rest arguments))
(error 'package-documentation-option-requres-a-single-argument
:arguments arguments))
(unless (stringp (first arguments))
(error 'package-documentation-must-be-a-string
:documentation (first arguments))))
(:use
(loop for argument in arguments
unless (typep argument 'package-designator)
do (error 'package-use-argument-must-be-a-package-designator
:package-name argument)))
(:shadow
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'shadowed-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:shadowing-import-from
(unless (consp arguments)
(error 'shadowing-import-from-option-must-have-a-package-argument
:option option))
(unless (typep (first arguments) 'package-designator)
(error 'shadowing-import-from-package-name-must-be-a-package-designator
:package-name (first arguments)))
(loop for argument in (rest arguments)
unless (typep argument 'string-designator)
do (error 'shadowed-symbol-must-be-a-string-designator
:symbol-name argument)))
(:import-from
(unless (consp arguments)
(error 'import-from-option-must-have-a-package-argument
:option option))
(unless (typep (first arguments) 'package-designator)
(error 'import-from-package-name-must-be-a-package-designator
:package-name (first arguments)))
(loop for argument in (rest arguments)
unless (typep argument 'string-designator)
do (error 'imported-symbol-must-be-a-string-designator
:symbol-name argument)))
(:export
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'exported-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:intern
(loop for argument in arguments
unless (typep argument 'string-designator)
do (error 'interned-symbol-name-must-be-a-string-designator
:symbol-name argument)))
(:size
(when (null arguments)
(error 'package-size-option-requires-an-argument
:option option))
(unless (null (rest arguments))
(error 'package-size-option-requres-a-single-argument
:arguments arguments))
(unless (integerp (first arguments))
(error 'package-size-must-be-a-string
:size (first arguments)))))))
(defun check-defpackage-options (options)
(loop for option in options
do (check-defpackage-option option))
Next , we check that the restrictions on the number of options of
(when (> (count :documentation options :key #'car) 1)
(error 'package-documentation-option-may-occur-at-most-once
:options options))
(when (> (count :size options :key #'car) 1)
(error 'package-size-option-may-occur-at-most-once
:options options)))
(defun group-options (name options)
(loop for (option-name . arguments) in options
when (eq name option-name)
append arguments))
(defun gather-nicknames (options)
(mapcar #'string
(group-options :nicknames options)))
(defun gather-local-nicknames (options)
(mapcar #'string
(group-options :local-nicknames options)))
(defun make-shadowing-imports (options package-var)
(loop for (option-name . arguments) in options
when (eq option-name :shadowing-import-from)
collect (let ((from-package-var (gensym)))
`(let ((,from-package-var (find-package ,(string (first arguments)))))
(shadowing-import
(list ,@(loop for symbol-name in (rest arguments)
collect `(find-symbol ,(string symbol-name)
,from-package-var)))
,package-var)))))
(defun make-imports (options package-var)
(loop for (option-name . arguments) in options
when (eq option-name :import-from)
collect (let ((from-package-var (gensym)))
`(let ((,from-package-var (find-package ,(string (first arguments)))))
(import
(list ,@(loop for symbol-name in (rest arguments)
collect `(find-symbol ,(string symbol-name)
,from-package-var)))
,package-var)))))
(defun make-shadow (options package-var)
`(shadow
(list ,@(loop for symbol-name in (group-options :shadow options)
collect `(make-instance 'symbol
:name ,(string symbol-name)
:package ,package-var)))
,package-var))
(defun make-use (options package-var)
`(use-package
(list ,@(loop for name in (group-options :use options)
collect `(find-package ,(string name))))
,package-var))
(defun make-intern (options package-var)
`(intern
(list ,@(loop for name in (group-options :intern options)
collect (string name)))
,package-var))
(defun make-export (options package-var)
`(export
(list ,@(loop for name in (group-options :use options)
collect `(find-symbol ,(string name) ,package-var)))
,package-var))
(defun defpackage-expander (name options)
(check-defpackage-options options)
(let ((package-var (gensym)))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(let* ((,package-var (find-package ,(string name)))
(,package-var
(if (null ,package-var)
(make-instance 'package
:name ,(string name)
:nicknames ',(gather-nicknames options)
:local-nicknames ',(gather-local-nicknames options))
,package-var)))
(setf (%find-package ',(string name))
,package-var)
,@(make-shadowing-imports options package-var)
,(make-shadow options package-var)
,(make-use options package-var)
,@(make-imports options package-var)
,(make-intern options package-var)
,(make-export options package-var)))))
(defmacro defpackage (name &rest options)
(defpackage-expander name options))
|
0121e865e2ce2c0c4a71260e951ae910e4c0b594b31c5ada41831328be09b220 | pfdietz/ansi-test | simple-vector-p.lsp | ;-*- Mode: Lisp -*-
Author :
Created : We d Jan 22 21:23:45 2003
;;;; Contains: Tests for SIMPLE-VECTOR-P
;;; More tests for this are in make-array.lsp
(deftest simple-vector-p.1
(check-type-predicate #'simple-vector-p 'simple-vector)
nil)
(deftest simple-vector-p.2
(notnot-mv (simple-vector-p (make-array '(10))))
t)
;; (deftest simple-vector-p.3
( simple - vector - p ( make - array ' ( 5 ) : fill - pointer t ) )
;; nil)
(deftest simple-vector-p.4
(notnot-mv (simple-vector-p (vector 'a 'b 'c)))
t)
;;; (deftest simple-vector-p.5
( simple - vector - p ( make - array ' ( 5 ) : adjustable t ) )
;;; nil)
;;; (deftest simple-vector-p.6
;;; (let ((a #(a b c d e g h)))
( simple - vector - p ( make - array ' ( 5 ) : displaced - to a ) ) )
;;; nil)
(deftest simple-vector-p.7
(simple-vector-p #*001101)
nil)
(deftest simple-vector-p.8
(simple-vector-p "abcdef")
nil)
(deftest simple-vector-p.9
(simple-vector-p (make-array nil))
nil)
(deftest simple-vector-p.10
(simple-vector-p (make-array '(10) :element-type 'base-char))
nil)
(deftest simple-vector-p.11
(simple-vector-p (make-array '(10) :element-type 'character))
nil)
(deftest simple-vector-p.12
(simple-vector-p (make-array '(10) :element-type 'bit))
nil)
;;; Error tests
(deftest simple-vector-p.error.1
(signals-error (simple-vector-p) program-error)
t)
(deftest simple-vector-p.error.2
(signals-error (simple-vector-p #(a b) nil) program-error)
t)
| null | https://raw.githubusercontent.com/pfdietz/ansi-test/3f4b9d31c3408114f0467eaeca4fd13b28e2ce31/arrays/simple-vector-p.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests for SIMPLE-VECTOR-P
More tests for this are in make-array.lsp
(deftest simple-vector-p.3
nil)
(deftest simple-vector-p.5
nil)
(deftest simple-vector-p.6
(let ((a #(a b c d e g h)))
nil)
Error tests | Author :
Created : We d Jan 22 21:23:45 2003
(deftest simple-vector-p.1
(check-type-predicate #'simple-vector-p 'simple-vector)
nil)
(deftest simple-vector-p.2
(notnot-mv (simple-vector-p (make-array '(10))))
t)
( simple - vector - p ( make - array ' ( 5 ) : fill - pointer t ) )
(deftest simple-vector-p.4
(notnot-mv (simple-vector-p (vector 'a 'b 'c)))
t)
( simple - vector - p ( make - array ' ( 5 ) : adjustable t ) )
( simple - vector - p ( make - array ' ( 5 ) : displaced - to a ) ) )
(deftest simple-vector-p.7
(simple-vector-p #*001101)
nil)
(deftest simple-vector-p.8
(simple-vector-p "abcdef")
nil)
(deftest simple-vector-p.9
(simple-vector-p (make-array nil))
nil)
(deftest simple-vector-p.10
(simple-vector-p (make-array '(10) :element-type 'base-char))
nil)
(deftest simple-vector-p.11
(simple-vector-p (make-array '(10) :element-type 'character))
nil)
(deftest simple-vector-p.12
(simple-vector-p (make-array '(10) :element-type 'bit))
nil)
(deftest simple-vector-p.error.1
(signals-error (simple-vector-p) program-error)
t)
(deftest simple-vector-p.error.2
(signals-error (simple-vector-p #(a b) nil) program-error)
t)
|
88a79704b31cf7b123d95a275607bd83a183b787168c7ee0041c5e3f51bedee3 | cbaggers/cepl.examples | pre-release-run.lisp | (in-package :cepl.examples)
hacky script to let me run all the tests for a few seconds each so I can
;; spot any potential issues
(defun run-em-all ()
(swank:set-default-directory
(uiop:pathname-directory-pathname
(asdf:system-relative-pathname :cepl.examples "examples/")))
(with-setf (viewport-resolution (current-viewport)) (v! 800 600)
(flet ((run-test (package path)
(let ((path (print
(asdf:system-relative-pathname
:cepl.examples (format nil "examples/~a" path)))))
(compile-file path)
(load path))
(print "----------------------------------------------------------")
(print path)
(print "starting")
(let ((std-o *standard-output*))
(bt:make-thread
(lambda ()
(sleep 4)
(print "Stopping" std-o)
(force-output)
(funcall (symbol-function (find-symbol "STOP-LOOP" package))))))
(funcall (symbol-function (find-symbol "RUN-LOOP" package)))
(cls)))
(let ((examples '("bloom.lisp"
"game-of-life.lisp"
"inline-glsl.lisp"
"instance-array-triangles.lisp"
"lambda-pipelines.lisp"
"moving-triangles.lisp"
"raymarcher.lisp"
"sampling.lisp"
"shared-context.lisp"
"texture-example.lisp"
"transform-feedback.lisp"
"triangle.lisp"
"ubo-test.lisp"))
(plus-cam '("basic-3d-objects.lisp"
"basic-geometry-shader.lisp"
"blending.lisp"
"cubemap.lisp"
"instancing.lisp"
"normal-mapping.lisp"
"refraction.lisp"
"tessellation-inline-glsl.lisp"
"tessellation.lisp")))
(loop :for path :in examples :do
(run-test :cepl.examples path))
(loop :for path :in plus-cam :do
(run-test :cepl.examples+camera path))))))
| null | https://raw.githubusercontent.com/cbaggers/cepl.examples/87ac3def3e674632662350c809edf09f4820fbd1/pre-release-run.lisp | lisp | spot any potential issues | (in-package :cepl.examples)
hacky script to let me run all the tests for a few seconds each so I can
(defun run-em-all ()
(swank:set-default-directory
(uiop:pathname-directory-pathname
(asdf:system-relative-pathname :cepl.examples "examples/")))
(with-setf (viewport-resolution (current-viewport)) (v! 800 600)
(flet ((run-test (package path)
(let ((path (print
(asdf:system-relative-pathname
:cepl.examples (format nil "examples/~a" path)))))
(compile-file path)
(load path))
(print "----------------------------------------------------------")
(print path)
(print "starting")
(let ((std-o *standard-output*))
(bt:make-thread
(lambda ()
(sleep 4)
(print "Stopping" std-o)
(force-output)
(funcall (symbol-function (find-symbol "STOP-LOOP" package))))))
(funcall (symbol-function (find-symbol "RUN-LOOP" package)))
(cls)))
(let ((examples '("bloom.lisp"
"game-of-life.lisp"
"inline-glsl.lisp"
"instance-array-triangles.lisp"
"lambda-pipelines.lisp"
"moving-triangles.lisp"
"raymarcher.lisp"
"sampling.lisp"
"shared-context.lisp"
"texture-example.lisp"
"transform-feedback.lisp"
"triangle.lisp"
"ubo-test.lisp"))
(plus-cam '("basic-3d-objects.lisp"
"basic-geometry-shader.lisp"
"blending.lisp"
"cubemap.lisp"
"instancing.lisp"
"normal-mapping.lisp"
"refraction.lisp"
"tessellation-inline-glsl.lisp"
"tessellation.lisp")))
(loop :for path :in examples :do
(run-test :cepl.examples path))
(loop :for path :in plus-cam :do
(run-test :cepl.examples+camera path))))))
|
5b87514bc5d74b56beb2229f45a7a277d0835f959a03e5fcb56f98e073a157a3 | emhoracek/smooch | M20220302_add_dolls.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module M20220302_add_dolls where
import Data.String.QQ
import Data.Text (Text)
import qualified Data.Text as T
import Database.Rivet.V0
migrate :: Migration IO ()
migrate = sql up down
up :: Text
up = T.pack $
[s|
DROP TABLE IF EXISTS dolls CASCADE;
CREATE TABLE dolls (
id serial primary key,
name text not null,
otakuworld_url text unique,
hash text not null unique,
location text unique,
error text,
created_at timestamptz default now(),
updated_at timestamptz default now()
);
DROP TABLE IF EXISTS artists CASCADE;
CREATE TABLE artists (
id serial primary key,
name text not null,
email text,
website text,
user_id integer references users(id),
created_at timestamptz default now(),
updated_at timestamptz default now()
);
DROP TABLE IF EXISTS doll_artists CASCADE;
CREATE TABLE doll_artists (
id serial primary key,
doll_id integer references dolls(id),
artist_id integer references artists(id),
created_at timestamptz default now()
);
CREATE UNIQUE INDEX ON dolls (hash);
CREATE UNIQUE INDEX ON dolls (otakuworld_url);
DROP FUNCTION IF EXISTS set_updated_at() CASCADE;
CREATE FUNCTION set_updated_at() RETURNS trigger AS $$
BEGIN
NEW.updated_at := now();
RETURN NEW;
END
$$ LANGUAGE plpgsql;
CREATE TRIGGER dolls_updated
BEFORE UPDATE ON dolls
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
CREATE TRIGGER artists_updated
BEFORE UPDATE ON artists
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
|]
down :: Text
down = T.pack $
[s|
DROP TABLE IF EXISTS doll_artists CASCADE;
DROP TABLE IF EXISTS dolls CASCADE;
DROP TABLE IF EXISTS artists CASCADE;
|] | null | https://raw.githubusercontent.com/emhoracek/smooch/d08ceb7fc65e17c06fd6fc1d3b77fb2f9605cf87/app/migrations/M20220302_add_dolls.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE QuasiQuotes # | module M20220302_add_dolls where
import Data.String.QQ
import Data.Text (Text)
import qualified Data.Text as T
import Database.Rivet.V0
migrate :: Migration IO ()
migrate = sql up down
up :: Text
up = T.pack $
[s|
DROP TABLE IF EXISTS dolls CASCADE;
CREATE TABLE dolls (
id serial primary key,
name text not null,
otakuworld_url text unique,
hash text not null unique,
location text unique,
error text,
created_at timestamptz default now(),
updated_at timestamptz default now()
);
DROP TABLE IF EXISTS artists CASCADE;
CREATE TABLE artists (
id serial primary key,
name text not null,
email text,
website text,
user_id integer references users(id),
created_at timestamptz default now(),
updated_at timestamptz default now()
);
DROP TABLE IF EXISTS doll_artists CASCADE;
CREATE TABLE doll_artists (
id serial primary key,
doll_id integer references dolls(id),
artist_id integer references artists(id),
created_at timestamptz default now()
);
CREATE UNIQUE INDEX ON dolls (hash);
CREATE UNIQUE INDEX ON dolls (otakuworld_url);
DROP FUNCTION IF EXISTS set_updated_at() CASCADE;
CREATE FUNCTION set_updated_at() RETURNS trigger AS $$
BEGIN
NEW.updated_at := now();
RETURN NEW;
END
$$ LANGUAGE plpgsql;
CREATE TRIGGER dolls_updated
BEFORE UPDATE ON dolls
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
CREATE TRIGGER artists_updated
BEFORE UPDATE ON artists
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
|]
down :: Text
down = T.pack $
[s|
DROP TABLE IF EXISTS doll_artists CASCADE;
DROP TABLE IF EXISTS dolls CASCADE;
DROP TABLE IF EXISTS artists CASCADE;
|] |
c4f63cda351abac1d34be7618af35dd8ce44d2d4b5d50e38719dcbf2e06ebeb8 | openbadgefactory/salava | helper.cljs | (ns salava.admin.ui.helper
(:require
[reagent.core :refer [atom cursor create-class]]
[reagent.session :as session]
[salava.core.i18n :refer [t]]
[salava.core.ui.helper :refer [input-valid?]]
[salava.admin.schemas :as schemas]))
(defn valid-item-type? [item]
(input-valid? (:item-type schemas/Url-parser) item))
(defn valid-item-id? [item]
(input-valid? (:item-id schemas/Url-parser) (js/parseInt item)))
(defn checker [url]
(let [url-list (vec(re-seq #"\w+" (str url)))
type (get url-list 1)
id (get url-list 3)]
{:item-type (if (= type "gallery") "badges" type)
:item-id id}))
(defn admin? []
(let [role (session/get-in [:user :role])]
(= role "admin")))
(defn message-form [mail]
(let [message (cursor mail [:message])
subject (cursor mail [:subject])]
[:div
[:div.form-group
[:label {:for "subjectArea"}
(str (t :admin/Subjectforitemowner) ":")]
[:input {:class "form-control"
:value @subject
:onChange #(reset! subject (.-target.value %))
:aria-label (t :admin/Subjectforitemowner)
:id "subjectArea"}]]
[:div.form-group
[:label {:for "textArea"}
(str (t :admin/Messageforitemowner) ":")]
[:textarea {:class "form-control"
:rows "5"
:value @message
:onChange #(reset! message (.-target.value %))
:aria-label (t :admin/Messageforitemowner)
:id "textArea"}]]]))
(defn email-select [emails email-atom]
(let [primary-email (first (filter #(and (:verified %) (:primary_address %)) emails))
secondary-emails (filter #(and (:verified %) (not (:primary_address %))) emails)]
(if (not (pos? (count secondary-emails)))
[:div (:email primary-email)]
[:select {:class "form-control"
:id "emails"
:value @email-atom
:on-change #(reset! email-atom (.-target.value %))
:aria-label "Emails"}
[:optgroup {:label (str (t :admin/Primaryemail) ":")}
[:option {:key (hash (:email primary-email)) :value (:email primary-email)} (:email primary-email)]]
[:optgroup {:label (str (t :admin/Secondaryemail) ":")}
(doall
(for [element-data secondary-emails]
[:option {:key (hash (:email element-data)) :value (:email element-data)} (:email element-data)]))]])))
(defn no-verified-email-select [emails email-atom]
(let [emails (filter #(and (not (:verified %)) (not (:primary_address %))) emails)]
(create-class {:component-did-mount (fn []
(reset! email-atom (:email (first emails))))
:reagent-render (fn [] [:select {:class "form-control"
:id "emails"
:value @email-atom
:on-change #(reset! email-atom (.-target.value %))}
(doall
(for [element-data emails]
[:option {:key (hash (:email element-data)) :value (:email element-data)} (:email element-data)]))])})))
(defn status-handler [status item_type]
(cond
(= "success" @status)[:div {:class "alert alert-success col-xs-6 cos-md-8"}
(t :admin/Messagesentsuccessfully)]
(= "error" @status) [:div {:class "alert alert-warning col-xs-6 cos-md-8"}
(t :admin/Somethingwentwrong)]
:else ""))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/cljs/salava/admin/ui/helper.cljs | clojure | (ns salava.admin.ui.helper
(:require
[reagent.core :refer [atom cursor create-class]]
[reagent.session :as session]
[salava.core.i18n :refer [t]]
[salava.core.ui.helper :refer [input-valid?]]
[salava.admin.schemas :as schemas]))
(defn valid-item-type? [item]
(input-valid? (:item-type schemas/Url-parser) item))
(defn valid-item-id? [item]
(input-valid? (:item-id schemas/Url-parser) (js/parseInt item)))
(defn checker [url]
(let [url-list (vec(re-seq #"\w+" (str url)))
type (get url-list 1)
id (get url-list 3)]
{:item-type (if (= type "gallery") "badges" type)
:item-id id}))
(defn admin? []
(let [role (session/get-in [:user :role])]
(= role "admin")))
(defn message-form [mail]
(let [message (cursor mail [:message])
subject (cursor mail [:subject])]
[:div
[:div.form-group
[:label {:for "subjectArea"}
(str (t :admin/Subjectforitemowner) ":")]
[:input {:class "form-control"
:value @subject
:onChange #(reset! subject (.-target.value %))
:aria-label (t :admin/Subjectforitemowner)
:id "subjectArea"}]]
[:div.form-group
[:label {:for "textArea"}
(str (t :admin/Messageforitemowner) ":")]
[:textarea {:class "form-control"
:rows "5"
:value @message
:onChange #(reset! message (.-target.value %))
:aria-label (t :admin/Messageforitemowner)
:id "textArea"}]]]))
(defn email-select [emails email-atom]
(let [primary-email (first (filter #(and (:verified %) (:primary_address %)) emails))
secondary-emails (filter #(and (:verified %) (not (:primary_address %))) emails)]
(if (not (pos? (count secondary-emails)))
[:div (:email primary-email)]
[:select {:class "form-control"
:id "emails"
:value @email-atom
:on-change #(reset! email-atom (.-target.value %))
:aria-label "Emails"}
[:optgroup {:label (str (t :admin/Primaryemail) ":")}
[:option {:key (hash (:email primary-email)) :value (:email primary-email)} (:email primary-email)]]
[:optgroup {:label (str (t :admin/Secondaryemail) ":")}
(doall
(for [element-data secondary-emails]
[:option {:key (hash (:email element-data)) :value (:email element-data)} (:email element-data)]))]])))
(defn no-verified-email-select [emails email-atom]
(let [emails (filter #(and (not (:verified %)) (not (:primary_address %))) emails)]
(create-class {:component-did-mount (fn []
(reset! email-atom (:email (first emails))))
:reagent-render (fn [] [:select {:class "form-control"
:id "emails"
:value @email-atom
:on-change #(reset! email-atom (.-target.value %))}
(doall
(for [element-data emails]
[:option {:key (hash (:email element-data)) :value (:email element-data)} (:email element-data)]))])})))
(defn status-handler [status item_type]
(cond
(= "success" @status)[:div {:class "alert alert-success col-xs-6 cos-md-8"}
(t :admin/Messagesentsuccessfully)]
(= "error" @status) [:div {:class "alert alert-warning col-xs-6 cos-md-8"}
(t :admin/Somethingwentwrong)]
:else ""))
| |
09d0bc87150980a0b880d4464ab2d62ddcbc18bef2b4aa08fd91ad2944905127 | databrary/databrary | AgeTest.hs | # LANGUAGE OverloadedStrings , ScopedTypeVariables #
module Model.AgeTest where
import Data.Aeson
import Data.Time
import Test.Tasty.HUnit
import Model.Age
import Model.TypeOrphans ()
unit_age_toJSON :: Assertion
unit_age_toJSON =
encode (Age 10) @?= "10"
-- possible property:
-- d + ageTime(age d (d2)) = d2
unit_age :: Assertion
unit_age = do
-- example
age (jan2000day 1) (jan2000day 3) @?= Age 2
-- edge cases
age (jan2000day 10) (jan2000day 3) @?= Age (-7) -- should return nothing instead
age (jan2000day 1) (jan2000day 1) @?= Age 0
type DayOfMonth = Int
jan2000day :: DayOfMonth -> Day
jan2000day = fromGregorian 2000 1
unit_yearsAge :: Assertion
unit_yearsAge =
-- example
yearsAge (1 :: Double) @?= Age 366
unit_ageTime :: Assertion
unit_ageTime =
ageTime (Age 1) @?= 60*60*24
| null | https://raw.githubusercontent.com/databrary/databrary/685f3c625b960268f5d9b04e3d7c6146bea5afda/test/Model/AgeTest.hs | haskell | possible property:
d + ageTime(age d (d2)) = d2
example
edge cases
should return nothing instead
example | # LANGUAGE OverloadedStrings , ScopedTypeVariables #
module Model.AgeTest where
import Data.Aeson
import Data.Time
import Test.Tasty.HUnit
import Model.Age
import Model.TypeOrphans ()
unit_age_toJSON :: Assertion
unit_age_toJSON =
encode (Age 10) @?= "10"
unit_age :: Assertion
unit_age = do
age (jan2000day 1) (jan2000day 3) @?= Age 2
age (jan2000day 1) (jan2000day 1) @?= Age 0
type DayOfMonth = Int
jan2000day :: DayOfMonth -> Day
jan2000day = fromGregorian 2000 1
unit_yearsAge :: Assertion
unit_yearsAge =
yearsAge (1 :: Double) @?= Age 366
unit_ageTime :: Assertion
unit_ageTime =
ageTime (Age 1) @?= 60*60*24
|
402375fccae14a54196a55d0cad69cd19eb9197cacff4f54cadcb1cd71905ef5 | OCamlPro/typerex-lint | lintParsing_Syntaxerr.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1997 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
module Location = LintParsing_Location
(* Auxiliary type for reporting syntax errors *)
type error =
Unclosed of Location.t * string * Location.t * string
| Expecting of Location.t * string
| Not_expecting of Location.t * string
| Applicative_path of Location.t
| Variable_in_scope of Location.t * string
| Other of Location.t
| Ill_formed_ast of Location.t * string
| Invalid_package_type of Location.t * string
exception Error of error
exception Escape_error
let prepare_error = function
| Unclosed(opening_loc, opening, closing_loc, closing) ->
Location.errorf_prefixed ~loc:closing_loc
~sub:[
Location.errorf_prefixed ~loc:opening_loc
"This '%s' might be unmatched" opening
]
~if_highlight:
(Printf.sprintf "Syntax error: '%s' expected, \
the highlighted '%s' might be unmatched"
closing opening)
"Syntax error: '%s' expected" closing
| Expecting (loc, nonterm) ->
Location.errorf_prefixed ~loc "Syntax error: %s expected." nonterm
| Not_expecting (loc, nonterm) ->
Location.errorf_prefixed ~loc "Syntax error: %s not expected." nonterm
| Applicative_path loc ->
Location.errorf_prefixed ~loc
"Syntax error: applicative paths of the form F(X).t \
are not supported when the option -no-app-func is set."
| Variable_in_scope (loc, var) ->
Location.errorf_prefixed ~loc
"In this scoped type, variable '%s \
is reserved for the local type %s."
var var
| Other loc ->
Location.errorf_prefixed ~loc "Syntax error"
| Ill_formed_ast (loc, s) ->
Location.errorf_prefixed ~loc "broken invariant in parsetree: %s" s
| Invalid_package_type (loc, s) ->
Location.errorf_prefixed ~loc "invalid package type: %s" s
let () =
Location.register_error_of_exn
(function
| Error err -> Some (prepare_error err)
| _ -> None
)
let report_error ppf err =
Location.report_error ppf (prepare_error err)
let location_of_error = function
| Unclosed(l,_,_,_)
| Applicative_path l
| Variable_in_scope(l,_)
| Other l
| Not_expecting (l, _)
| Ill_formed_ast (l, _)
| Invalid_package_type (l, _)
| Expecting (l, _) -> l
let ill_formed_ast loc s =
raise (Error (Ill_formed_ast (loc, s)))
| null | https://raw.githubusercontent.com/OCamlPro/typerex-lint/6d9e994c8278fb65e1f7de91d74876531691120c/plugins/ocp-lint-plugin-parsing/lintParsing_Syntaxerr.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Auxiliary type for reporting syntax errors | , projet Cristal , INRIA Rocquencourt
Copyright 1997 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
module Location = LintParsing_Location
type error =
Unclosed of Location.t * string * Location.t * string
| Expecting of Location.t * string
| Not_expecting of Location.t * string
| Applicative_path of Location.t
| Variable_in_scope of Location.t * string
| Other of Location.t
| Ill_formed_ast of Location.t * string
| Invalid_package_type of Location.t * string
exception Error of error
exception Escape_error
let prepare_error = function
| Unclosed(opening_loc, opening, closing_loc, closing) ->
Location.errorf_prefixed ~loc:closing_loc
~sub:[
Location.errorf_prefixed ~loc:opening_loc
"This '%s' might be unmatched" opening
]
~if_highlight:
(Printf.sprintf "Syntax error: '%s' expected, \
the highlighted '%s' might be unmatched"
closing opening)
"Syntax error: '%s' expected" closing
| Expecting (loc, nonterm) ->
Location.errorf_prefixed ~loc "Syntax error: %s expected." nonterm
| Not_expecting (loc, nonterm) ->
Location.errorf_prefixed ~loc "Syntax error: %s not expected." nonterm
| Applicative_path loc ->
Location.errorf_prefixed ~loc
"Syntax error: applicative paths of the form F(X).t \
are not supported when the option -no-app-func is set."
| Variable_in_scope (loc, var) ->
Location.errorf_prefixed ~loc
"In this scoped type, variable '%s \
is reserved for the local type %s."
var var
| Other loc ->
Location.errorf_prefixed ~loc "Syntax error"
| Ill_formed_ast (loc, s) ->
Location.errorf_prefixed ~loc "broken invariant in parsetree: %s" s
| Invalid_package_type (loc, s) ->
Location.errorf_prefixed ~loc "invalid package type: %s" s
let () =
Location.register_error_of_exn
(function
| Error err -> Some (prepare_error err)
| _ -> None
)
let report_error ppf err =
Location.report_error ppf (prepare_error err)
let location_of_error = function
| Unclosed(l,_,_,_)
| Applicative_path l
| Variable_in_scope(l,_)
| Other l
| Not_expecting (l, _)
| Ill_formed_ast (l, _)
| Invalid_package_type (l, _)
| Expecting (l, _) -> l
let ill_formed_ast loc s =
raise (Error (Ill_formed_ast (loc, s)))
|
4bc8f4037999b8441c2f179b806283eb2ffd66d986dc056e07e1ac1908eefc9a | GaloisInc/ivory | SizeOf.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE QuasiQuotes #
# LANGUAGE TypeOperators #
# LANGUAGE DataKinds #
# OPTIONS_GHC -fno - warn - orphans #
module SizeOf where
import Ivory.Language
[ivory|
struct foo
{ f1 :: Stored Uint8
; f2 :: Stored Uint32
}
|]
test :: Def ('[] ':-> Uint8)
test = proc "sizeof_test" (body (ret (sizeOf (Proxy :: Proxy ('Struct "foo")))))
cmodule :: Module
cmodule = package "SizeOf" $ do
defStruct (Proxy :: Proxy "foo")
incl test
| null | https://raw.githubusercontent.com/GaloisInc/ivory/53a0795b4fbeb0b7da0f6cdaccdde18849a78cd6/ivory-examples/examples/SizeOf.hs | haskell | # LANGUAGE FlexibleInstances #
# LANGUAGE QuasiQuotes #
# LANGUAGE TypeOperators #
# LANGUAGE DataKinds #
# OPTIONS_GHC -fno - warn - orphans #
module SizeOf where
import Ivory.Language
[ivory|
struct foo
{ f1 :: Stored Uint8
; f2 :: Stored Uint32
}
|]
test :: Def ('[] ':-> Uint8)
test = proc "sizeof_test" (body (ret (sizeOf (Proxy :: Proxy ('Struct "foo")))))
cmodule :: Module
cmodule = package "SizeOf" $ do
defStruct (Proxy :: Proxy "foo")
incl test
| |
f5d8bb1b34a3c7f095cecde95bc9b29b985ae92aab4ddc099a877dc4d8e8dfd5 | zyrolasting/polyglot | functional.rkt | #lang racket/base
(require racket/file
racket/function
rackunit
polyglot/functional
polyglot/txexpr
polyglot/paths)
(require/expose polyglot/functional
(group-scripts!
script-info-path
script-info-predicate
script-info-element))
(test-equal? "Replace element via parameter"
(parameterize ([current-replace-element-predicate (λ (x) (tag-equal? 's x))])
(tx-replace-me '(root (rooter (rootest (s))))
(λ (x) '((leafy) (greens)))))
'(root (rooter (rootest (leafy) (greens)))))
(test-case "Can group scripts (with file I/O)"
(define tx '(root (script ((type "a") (id "q")) "A1")
(script ((type "b") (id "r")) "B1")
(script ((type "a") (id "s")) "A2")
(script ((type "c") (id "t")) "C1")))
(define tmpd (make-temp-ephmod-directory))
(define (group! type)
(group-scripts! tx
tmpd
(λ (x) (and (list? x)
(equal? (attr-ref x 'type #f)
type)))))
(define a-scripts (group! "a"))
(define b-scripts (group! "b"))
(define combined (append a-scripts b-scripts))
(check-equal? (length a-scripts) 2)
(check-equal? (length b-scripts) 1)
(check-true (andmap file-exists?
(map script-info-path
(append a-scripts
b-scripts))))
(check-true (andmap (λ (si)
((script-info-predicate si)
(script-info-element si)))
combined))
(delete-directory/files tmpd))
| null | https://raw.githubusercontent.com/zyrolasting/polyglot/d27ca7fe90fd4ba2a6c5bcd921fce89e72d2c408/polyglot-test/tests/polyglot/functional.rkt | racket | #lang racket/base
(require racket/file
racket/function
rackunit
polyglot/functional
polyglot/txexpr
polyglot/paths)
(require/expose polyglot/functional
(group-scripts!
script-info-path
script-info-predicate
script-info-element))
(test-equal? "Replace element via parameter"
(parameterize ([current-replace-element-predicate (λ (x) (tag-equal? 's x))])
(tx-replace-me '(root (rooter (rootest (s))))
(λ (x) '((leafy) (greens)))))
'(root (rooter (rootest (leafy) (greens)))))
(test-case "Can group scripts (with file I/O)"
(define tx '(root (script ((type "a") (id "q")) "A1")
(script ((type "b") (id "r")) "B1")
(script ((type "a") (id "s")) "A2")
(script ((type "c") (id "t")) "C1")))
(define tmpd (make-temp-ephmod-directory))
(define (group! type)
(group-scripts! tx
tmpd
(λ (x) (and (list? x)
(equal? (attr-ref x 'type #f)
type)))))
(define a-scripts (group! "a"))
(define b-scripts (group! "b"))
(define combined (append a-scripts b-scripts))
(check-equal? (length a-scripts) 2)
(check-equal? (length b-scripts) 1)
(check-true (andmap file-exists?
(map script-info-path
(append a-scripts
b-scripts))))
(check-true (andmap (λ (si)
((script-info-predicate si)
(script-info-element si)))
combined))
(delete-directory/files tmpd))
| |
c36033d168e0120f0c30464989e1bc72b21d6852a722ee6bbf8e18da98b9c163 | ocurrent/ocluster | log.ml | let src = Logs.Src.create "worker" ~doc:"ocluster-scheduler worker agent"
include (val Logs.src_log src : Logs.LOG)
| null | https://raw.githubusercontent.com/ocurrent/ocluster/43c14b56e1a35cdab6f772f921b6f1f7bed96bbd/worker/log.ml | ocaml | let src = Logs.Src.create "worker" ~doc:"ocluster-scheduler worker agent"
include (val Logs.src_log src : Logs.LOG)
| |
27af92dd997d11edbbe7558b7168f8bdeefcfacc82995869425af7eb877e505d | camlspotter/ocamloscope.2 | t1.ml | let x = 1
type t = Foo
type u = { l : int }
module M = struct
let y = 1
end
let z = Foo
let w = { l = 3 }
| null | https://raw.githubusercontent.com/camlspotter/ocamloscope.2/49b5977a283cdd373021d41cb3620222351a2efe/tests/t1.ml | ocaml | let x = 1
type t = Foo
type u = { l : int }
module M = struct
let y = 1
end
let z = Foo
let w = { l = 3 }
| |
31f7ffd52d4df14dac91290462d826fb79dd0826de1b1720daa58ba0b7eacba1 | gordonguthrie/pometo | interpreter_complex_no_tests.erl | -module(interpreter_complex_no_tests).
-compile([export_all]).
-include_lib("eunit/include/eunit.hrl").
%% Tests
basic_one_line_complex_no_test_() ->
Str = "MyVariable ← 1J3\nMyVariable + 2",
%0123456789012345678901234567890123456789
Got = pometo:interpret_TEST(Str),
Exp = "3J3",
% ?debugFmt("in basic_one_line_format_test_~nGot ~p~nExp ~p~n", [Got, Exp]),
?_assertEqual(Exp, Got). | null | https://raw.githubusercontent.com/gordonguthrie/pometo/5a4ff0c61272ba5ad85adec6605b5c676543975f/test/interpreter_complex_no_tests.erl | erlang | Tests
0123456789012345678901234567890123456789
?debugFmt("in basic_one_line_format_test_~nGot ~p~nExp ~p~n", [Got, Exp]), | -module(interpreter_complex_no_tests).
-compile([export_all]).
-include_lib("eunit/include/eunit.hrl").
basic_one_line_complex_no_test_() ->
Str = "MyVariable ← 1J3\nMyVariable + 2",
Got = pometo:interpret_TEST(Str),
Exp = "3J3",
?_assertEqual(Exp, Got). |
4d1a9e8a0f0f76b0e078d9c3db2dcb3bd40260ecffd0cd4eabfbf96a6b849d98 | tek/chiasma | LensTest.hs | module Chiasma.Test.LensTest where
import Chiasma.Data.Ident (Ident(Str))
import Chiasma.Lens.Tree (leafByIdent, modifyLeafByIdent, treesAndSubs)
import Chiasma.Ui.Data.View (
Pane(Pane),
PaneView,
Tree(Tree),
TreeSub(TreeNode, TreeLeaf),
View(View),
ViewTree,
ViewTreeSub,
consLayout,
consPane,
)
import qualified Chiasma.Ui.Data.View as View (_ident, ident)
import Chiasma.Ui.Data.ViewState (ViewState(ViewState))
import Chiasma.Ui.ViewTree (togglePane)
import qualified Chiasma.Ui.ViewTree as ToggleResult (ToggleResult(..))
import Control.Lens (transformM)
import qualified Control.Lens as Lens (set)
import Hedgehog ((===))
import Test.Tasty (TestTree, testGroup)
import Chiasma.Test.Util (UnitTest, unitTest)
id0, id1, id2, id3, id4 :: Ident
id0 = Str "0"
id1 = Str "1"
id2 = Str "2"
id3 = Str "3"
id4 = Str "4"
tree :: ViewTree
tree =
Tree (consLayout id0) [subtree, TreeLeaf (consPane id2)]
where
subtree = st id2 subtree2
subtree2 = st id3 subtree3
subtree3 = st id4 (TreeLeaf openPane)
openPane = View id1 (ViewState False) def (Pane True False Nothing)
st i s =
TreeNode $ Tree (consLayout i) [s]
test_modify :: UnitTest
test_modify = do
let
ident = Str "changed"
modded = modifyLeafByIdent id1 (Lens.set View.ident ident) tree
Nothing === leafByIdent ident tree
Just ident === (View._ident <$> leafByIdent ident modded)
failOnPaneIdent :: Ident -> ViewTree -> Maybe ViewTree
failOnPaneIdent target t@(Tree _ sub) =
t <$ traverse match sub
where
match (TreeLeaf (View i _ _ _)) = if target == i then Nothing else Just ()
match _ = Just ()
test_monadicModify :: UnitTest
test_monadicModify = do
Nothing === (transformM (failOnPaneIdent id2) tree)
Just tree === (transformM (failOnPaneIdent id4) tree)
insertPane :: Ident -> PaneView -> ViewTree -> ViewTree
insertPane targetLayout pane (Tree l sub) =
if View._ident l == targetLayout then Tree l (TreeLeaf pane : sub) else Tree l sub
ensurePaneUnique :: Ident -> ViewTreeSub -> Maybe ViewTreeSub
ensurePaneUnique paneIdent (TreeLeaf (View ident _ _ _)) | ident == paneIdent = Nothing
ensurePaneUnique _ n = Just n
subtreesTarget :: ViewTree
subtreesTarget =
Tree (consLayout id0) [subtree, TreeLeaf (consPane id2)]
where
subtree = TreeNode $ Tree (consLayout id2) [TreeLeaf $ consPane id4, subtree2]
subtree2 = TreeNode $ Tree (consLayout id3) [subtree3]
subtree3 = TreeNode $ Tree (consLayout id4) [TreeLeaf openPane]
openPane = View id1 (ViewState False) def (Pane True False Nothing)
test_subtrees :: UnitTest
test_subtrees =
Just subtreesTarget === treesAndSubs (Just . insertPane id2 (consPane id4)) (ensurePaneUnique id4) tree
togglePaneTree :: ViewTree
togglePaneTree =
Tree (consLayout id0) [TreeLeaf (consPane id0), TreeLeaf (consPane id0), TreeLeaf (consPane id2)]
test_togglePane :: UnitTest
test_togglePane = do
ToggleResult.Ambiguous 2 === togglePane id0 togglePaneTree
ToggleResult.NotFound === togglePane id1 togglePaneTree
ToggleResult.Success (1 :: Int) === (1 <$ togglePane id2 togglePaneTree)
test_lenses :: TestTree
test_lenses =
testGroup "lenses" [
unitTest "modify leaves by ident" test_modify,
unitTest "monadically transform leaves" test_monadicModify,
unitTest "traverse all subtrees" test_subtrees,
unitTest "toggle a pane" test_togglePane
]
| null | https://raw.githubusercontent.com/tek/chiasma/51751e19a416a9afe12f7797df8a67990b266240/packages/test/test/Chiasma/Test/LensTest.hs | haskell | module Chiasma.Test.LensTest where
import Chiasma.Data.Ident (Ident(Str))
import Chiasma.Lens.Tree (leafByIdent, modifyLeafByIdent, treesAndSubs)
import Chiasma.Ui.Data.View (
Pane(Pane),
PaneView,
Tree(Tree),
TreeSub(TreeNode, TreeLeaf),
View(View),
ViewTree,
ViewTreeSub,
consLayout,
consPane,
)
import qualified Chiasma.Ui.Data.View as View (_ident, ident)
import Chiasma.Ui.Data.ViewState (ViewState(ViewState))
import Chiasma.Ui.ViewTree (togglePane)
import qualified Chiasma.Ui.ViewTree as ToggleResult (ToggleResult(..))
import Control.Lens (transformM)
import qualified Control.Lens as Lens (set)
import Hedgehog ((===))
import Test.Tasty (TestTree, testGroup)
import Chiasma.Test.Util (UnitTest, unitTest)
id0, id1, id2, id3, id4 :: Ident
id0 = Str "0"
id1 = Str "1"
id2 = Str "2"
id3 = Str "3"
id4 = Str "4"
tree :: ViewTree
tree =
Tree (consLayout id0) [subtree, TreeLeaf (consPane id2)]
where
subtree = st id2 subtree2
subtree2 = st id3 subtree3
subtree3 = st id4 (TreeLeaf openPane)
openPane = View id1 (ViewState False) def (Pane True False Nothing)
st i s =
TreeNode $ Tree (consLayout i) [s]
test_modify :: UnitTest
test_modify = do
let
ident = Str "changed"
modded = modifyLeafByIdent id1 (Lens.set View.ident ident) tree
Nothing === leafByIdent ident tree
Just ident === (View._ident <$> leafByIdent ident modded)
failOnPaneIdent :: Ident -> ViewTree -> Maybe ViewTree
failOnPaneIdent target t@(Tree _ sub) =
t <$ traverse match sub
where
match (TreeLeaf (View i _ _ _)) = if target == i then Nothing else Just ()
match _ = Just ()
test_monadicModify :: UnitTest
test_monadicModify = do
Nothing === (transformM (failOnPaneIdent id2) tree)
Just tree === (transformM (failOnPaneIdent id4) tree)
insertPane :: Ident -> PaneView -> ViewTree -> ViewTree
insertPane targetLayout pane (Tree l sub) =
if View._ident l == targetLayout then Tree l (TreeLeaf pane : sub) else Tree l sub
ensurePaneUnique :: Ident -> ViewTreeSub -> Maybe ViewTreeSub
ensurePaneUnique paneIdent (TreeLeaf (View ident _ _ _)) | ident == paneIdent = Nothing
ensurePaneUnique _ n = Just n
subtreesTarget :: ViewTree
subtreesTarget =
Tree (consLayout id0) [subtree, TreeLeaf (consPane id2)]
where
subtree = TreeNode $ Tree (consLayout id2) [TreeLeaf $ consPane id4, subtree2]
subtree2 = TreeNode $ Tree (consLayout id3) [subtree3]
subtree3 = TreeNode $ Tree (consLayout id4) [TreeLeaf openPane]
openPane = View id1 (ViewState False) def (Pane True False Nothing)
test_subtrees :: UnitTest
test_subtrees =
Just subtreesTarget === treesAndSubs (Just . insertPane id2 (consPane id4)) (ensurePaneUnique id4) tree
togglePaneTree :: ViewTree
togglePaneTree =
Tree (consLayout id0) [TreeLeaf (consPane id0), TreeLeaf (consPane id0), TreeLeaf (consPane id2)]
test_togglePane :: UnitTest
test_togglePane = do
ToggleResult.Ambiguous 2 === togglePane id0 togglePaneTree
ToggleResult.NotFound === togglePane id1 togglePaneTree
ToggleResult.Success (1 :: Int) === (1 <$ togglePane id2 togglePaneTree)
test_lenses :: TestTree
test_lenses =
testGroup "lenses" [
unitTest "modify leaves by ident" test_modify,
unitTest "monadically transform leaves" test_monadicModify,
unitTest "traverse all subtrees" test_subtrees,
unitTest "toggle a pane" test_togglePane
]
| |
eb737e66bb898384f99011f19b3b1b4289ea60c6bfbd0377d6818d0e02ef755a | racket/web-server | base64.rkt | #lang racket/base
(require racket/contract
web-server/stuffers/stuffer
net/base64)
(define base64-stuffer
(make-stuffer (λ (x) (base64-encode x #"")) base64-decode))
(provide/contract
[base64-stuffer (stuffer/c bytes? bytes?)])
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-lib/web-server/stuffers/base64.rkt | racket | #lang racket/base
(require racket/contract
web-server/stuffers/stuffer
net/base64)
(define base64-stuffer
(make-stuffer (λ (x) (base64-encode x #"")) base64-decode))
(provide/contract
[base64-stuffer (stuffer/c bytes? bytes?)])
| |
99df7f43396ec44f44c37df40a64458720164265154d18c8f0547d67e5052c04 | acid-state/acid-state | Prelude.hs | module Benchmark.Prelude
(
module Prelude,
module Control.Monad,
module Control.Applicative,
module Control.Arrow,
module Data.Monoid,
module Data.Foldable,
module Data.Traversable,
module Data.Maybe,
module Data.List,
module Data.Data,
mtl
module Control.Monad.State,
module Control.Monad.Reader,
-- exceptions
module Control.Exception,
module System.IO.Error,
)
where
import Prelude hiding (concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, FilePath)
import Control.Monad hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Applicative
import Control.Arrow
import Data.Monoid
import Data.Foldable
import Data.Traversable
import Data.Maybe
import Data.List hiding (concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.Data
mtl
import Control.Monad.State hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.Reader hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
-- exceptions
import Control.Exception
import System.IO.Error
| null | https://raw.githubusercontent.com/acid-state/acid-state/1a9290570bcb48373bfa5f9de16cc694068c4d01/benchmarks/loading/Benchmark/Prelude.hs | haskell | exceptions
exceptions | module Benchmark.Prelude
(
module Prelude,
module Control.Monad,
module Control.Applicative,
module Control.Arrow,
module Data.Monoid,
module Data.Foldable,
module Data.Traversable,
module Data.Maybe,
module Data.List,
module Data.Data,
mtl
module Control.Monad.State,
module Control.Monad.Reader,
module Control.Exception,
module System.IO.Error,
)
where
import Prelude hiding (concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, FilePath)
import Control.Monad hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Applicative
import Control.Arrow
import Data.Monoid
import Data.Foldable
import Data.Traversable
import Data.Maybe
import Data.List hiding (concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.Data
mtl
import Control.Monad.State hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.Reader hiding (mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Exception
import System.IO.Error
|
83dac90796e85f6543ca1182158bbe2f2fda40f074defc479e5f1280b966aae5 | pangloss/pattern | post_process.clj | (ns pattern.r3.post-process
(:require [pattern.r3.rule :refer [->Rule rule-name
*post-processor* *identity-rule-post-processor*]]
[pattern.util :refer [meta?]])
(:import (pattern.r3.rule Rule)))
(defn with-post-processor
"Change the rule to use the given post processor, replacing the old one."
[pp ^Rule rule]
(->Rule (.match-procedure rule) (.handler rule) (.get-values rule) pp
(assoc-in (.metadata rule)
[:rule :post-processor] pp)))
(defn post-processor [^Rule rule]
(.post-processor rule))
(defn comp-post-processors
"Compose multiple post-processors together."
[& fs]
(reduce (fn [c f]
(fn [r v ov e oe]
(let [[v e] (f r v ov e oe)]
(c r v ov e oe))))
(remove nil? fs)))
(defmacro use-post-processor
"Set all rules except identity rules in this scope to use the given post
processor."
[pp & forms]
`(binding [*post-processor* ~pp]
~@forms))
(defmacro use-post-processors
"Set all rules, including identity rules in this scope to use the given post
processor and identity post processor."
[pp ident-rule-pp & forms]
`(binding [*post-processor* ~pp
*identity-rule-post-processor* ~ident-rule-pp]
~@forms))
(defn post-processors
"Get the currently active default post-processors"
[]
[*post-processor* *identity-rule-post-processor*])
(defn merge-metadata*
"Merge the original value's metadata into the new value's metadata.
If a merge strategy is attached to the new value as :rule/merge-meta, use that
fn to do the merge. The :rule/merge-meta key will be removed from the
resulting metadata."
([rule]
(with-post-processor rule merge-metadata*))
([rule value orig-value env orig-env]
(if (or (identical? value orig-value) (not (meta? value)))
[value env]
(if-let [orig-meta (meta orig-value)]
(if-let [m (meta value)]
(if-let [mm (:rule/merge-meta m)]
[(with-meta value (mm orig-meta (dissoc m :rule/merge-meta))) env]
[(with-meta value (merge orig-meta m)) env])
[(with-meta value orig-meta) env])
[value env]))))
(defmacro merge-metadata
"Attach a post processor that will merge the original value's metadata into
the new value's metadata.
If a merge strategy is attached to the new value as :rule/merge-meta, use that
fn to do the merge. The :rule/merge-meta key will be removed from the
resulting metadata."
[& forms]
`(use-post-processors merge-metadata* merge-metadata*
~@forms))
(defmacro raw
"Don't attach any additional post-processing to rules defined within this form
If post processors are attached within the raw form, they will remain."
[& forms]
`(use-post-processors nil nil
~@forms))
(defn mark-success
"Capture in the env that the rule succeeded."
[rule value _ env _]
[value (update env :rule/success (fnil conj []) (rule-name rule))])
| null | https://raw.githubusercontent.com/pangloss/pattern/79a7068dad0c83808c6117f46623caccd64dddb8/src/pattern/r3/post_process.clj | clojure | (ns pattern.r3.post-process
(:require [pattern.r3.rule :refer [->Rule rule-name
*post-processor* *identity-rule-post-processor*]]
[pattern.util :refer [meta?]])
(:import (pattern.r3.rule Rule)))
(defn with-post-processor
"Change the rule to use the given post processor, replacing the old one."
[pp ^Rule rule]
(->Rule (.match-procedure rule) (.handler rule) (.get-values rule) pp
(assoc-in (.metadata rule)
[:rule :post-processor] pp)))
(defn post-processor [^Rule rule]
(.post-processor rule))
(defn comp-post-processors
"Compose multiple post-processors together."
[& fs]
(reduce (fn [c f]
(fn [r v ov e oe]
(let [[v e] (f r v ov e oe)]
(c r v ov e oe))))
(remove nil? fs)))
(defmacro use-post-processor
"Set all rules except identity rules in this scope to use the given post
processor."
[pp & forms]
`(binding [*post-processor* ~pp]
~@forms))
(defmacro use-post-processors
"Set all rules, including identity rules in this scope to use the given post
processor and identity post processor."
[pp ident-rule-pp & forms]
`(binding [*post-processor* ~pp
*identity-rule-post-processor* ~ident-rule-pp]
~@forms))
(defn post-processors
"Get the currently active default post-processors"
[]
[*post-processor* *identity-rule-post-processor*])
(defn merge-metadata*
"Merge the original value's metadata into the new value's metadata.
If a merge strategy is attached to the new value as :rule/merge-meta, use that
fn to do the merge. The :rule/merge-meta key will be removed from the
resulting metadata."
([rule]
(with-post-processor rule merge-metadata*))
([rule value orig-value env orig-env]
(if (or (identical? value orig-value) (not (meta? value)))
[value env]
(if-let [orig-meta (meta orig-value)]
(if-let [m (meta value)]
(if-let [mm (:rule/merge-meta m)]
[(with-meta value (mm orig-meta (dissoc m :rule/merge-meta))) env]
[(with-meta value (merge orig-meta m)) env])
[(with-meta value orig-meta) env])
[value env]))))
(defmacro merge-metadata
"Attach a post processor that will merge the original value's metadata into
the new value's metadata.
If a merge strategy is attached to the new value as :rule/merge-meta, use that
fn to do the merge. The :rule/merge-meta key will be removed from the
resulting metadata."
[& forms]
`(use-post-processors merge-metadata* merge-metadata*
~@forms))
(defmacro raw
"Don't attach any additional post-processing to rules defined within this form
If post processors are attached within the raw form, they will remain."
[& forms]
`(use-post-processors nil nil
~@forms))
(defn mark-success
"Capture in the env that the rule succeeded."
[rule value _ env _]
[value (update env :rule/success (fnil conj []) (rule-name rule))])
| |
af5ea8e953fe38aa3f52b94cd11343eaaaf587826fdb2360cb13180704633266 | aryx/ocamltarzan | pa_type_conv.ml | pp camlp4orf
File : pa_type_conv.ml
Copyright ( C ) 2005-
Jane Street Holding , LLC
Author :
email : mmottl\@janestcapital.com
WWW :
This file is derived from file " pa_tywith.ml " of version 0.45 of the
library " " .
is Copyright ( C ) 2004 , 2005 by
< >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2005-
Jane Street Holding, LLC
Author: Markus Mottl
email: mmottl\@janestcapital.com
WWW:
This file is derived from file "pa_tywith.ml" of version 0.45 of the
library "Tywith".
Tywith is Copyright (C) 2004, 2005 by
Martin Sandin <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
(* Pa_type_conv: Preprocessing Module for Registering Type Conversions *)
open Printf
open Lexing
open Camlp4
open PreCast
open Ast
(* Utility functions *)
let both fa fb (a, b) = fa a, fb b
let get_loc_err loc msg =
sprintf "File \"%s\", line %d, characters %d-%d: %s"
(Loc.file_name loc) (Loc.start_line loc)
(Loc.start_off loc - Loc.start_bol loc)
(Loc.stop_off loc - Loc.stop_bol loc)
msg
let hash_variant str =
let acc_ref = ref 0 in
for i = 0 to String.length str - 1 do
acc_ref := 223 * !acc_ref + Char.code str.[i]
done;
if Sys.word_size = 32 then !acc_ref
else !acc_ref land int_of_string "0x7FFFFFFF"
(* Module/File path management *)
(* Reference storing the path to the currently preprocessed module as
   (dotted path, reversed component list); [None] until a module or a
   TYPE_CONV_PATH directive has been seen. *)
let conv_path_ref : (string * string list) option ref = ref None

(* Both representations of the current path; fails if it is unset. *)
let get_conv_path_el () =
  match !conv_path_ref with
  | None -> failwith "Pa_type_conv: path not set";
  | Some el -> el

(* Get path to the currently preprocessed module *)
let get_conv_path () = fst (get_conv_path_el ())

(* Set path to the currently preprocessed module.  Outside the
   interactive toplevel this may only happen once per compilation. *)
let set_conv_path conv_path =
  if !conv_path_ref <> None && not !Sys.interactive then
    failwith "Pa_type_conv: module name set twice";
  conv_path_ref := Some (conv_path, [conv_path])

(* The toplevel has no source file, so use a fixed path name. *)
let () = if !Sys.interactive then set_conv_path "Toplevel"

(* Enter a submodule: append its name to the dotted path. *)
let push_conv_path mod_name =
  let str, rev_lst = get_conv_path_el () in
  conv_path_ref := Some (str ^ "." ^ mod_name, mod_name :: rev_lst)

(* Leave the current submodule.  The component list is non-empty here
   because every pop is paired with an earlier push. *)
let pop_conv_path () =
  match get_conv_path_el () with
  | _, _ :: rev_lst ->
      conv_path_ref := Some (String.concat "." (List.rev rev_lst), rev_lst)
  | _ -> assert false (* impossible *)
(* Generator registration *)
module GeneratorMap = Map.Make(String)
(* Map of "with"-generators *)
let generators = ref GeneratorMap.empty
let sig_generators = ref GeneratorMap.empty
(* Register a "with"-generator *)
(* Register the structure-level generator [e] under the name [id]. *)
let add_generator id e = generators := GeneratorMap.add id e !generators
(* Removes a "with"-generator *)
(* Unregister the structure-level generator named [id]. *)
let rem_generator id = generators := GeneratorMap.remove id !generators
(* Register a "with"-generator to be used in a module signature *)
(* [e] maps a type declaration to the generated signature items. *)
let add_sig_generator id e =
  sig_generators := GeneratorMap.add id e !sig_generators
(* Removes a signature-level "with"-generator *)
(* Unregister the signature-level generator named [id]. *)
let rem_sig_generator id =
  sig_generators := GeneratorMap.remove id !sig_generators
(* General purpose code generation module *)
module Gen = struct
  (* Collect the type variables of a (possibly applied) type, leftmost
     first, prepended to [acc]; e.g. ('a, 'b) t yields "a" :: "b" :: acc. *)
  let rec ty_var_list_of_ctyp tp acc =
    match tp with
    | <:ctyp< $tp1$ $tp2$ >> ->
        ty_var_list_of_ctyp tp1 (ty_var_list_of_ctyp tp2 acc)
    | <:ctyp< '$param$ >> -> param :: acc
    | _ -> invalid_arg "ty_var_list_of_ctyp"

  (* Flatten a dotted identifier into a reversed component list,
     prepended to [acc]; e.g. A.B.c yields "c" :: "B" :: "A" :: acc. *)
  let rec get_rev_id_path tp acc =
    match tp with
    | <:ident< $id1$ . $id2$ >> -> get_rev_id_path id2 (get_rev_id_path id1 acc)
    | <:ident< $lid:id$ >> | <:ident< $uid:id$ >> -> id :: acc
    | _ -> invalid_arg "get_rev_id_path"

  (* Build an identifier node; upper- or lowercase chosen from the
     first character of [str]. *)
  let mk_ident _loc str =
    let first = str.[0] in
    if first >= 'A' && first <= 'Z' then <:ident< $uid:str$ >>
    else <:ident< $lid:str$ >>

  (* Rebuild a dotted identifier from a reversed component list
     (inverse of get_rev_id_path). *)
  let rec ident_of_rev_path _loc = function
    | [str] -> mk_ident _loc str
    | str :: strs ->
        <:ident< $ident_of_rev_path _loc strs$ . $mk_ident _loc str$ >>
    | _ -> invalid_arg "ident_of_rev_path"

  (* Identifier at the head of a (possibly nested) type application. *)
  let rec get_appl_path _loc = function
    | <:ctyp< $id:id$ >> -> id
    | <:ctyp< $tp$ $_$ >> -> get_appl_path _loc tp
    | _ -> failwith "get_appl_path: unknown type"

  (* [abstract _loc patts body] wraps [body] in one [fun] per pattern;
     [apply _loc f args] is the converse n-ary application. *)
  let abstract _loc = List.fold_right (fun p e -> <:expr< fun $p$ -> $e$ >>)
  let apply _loc = List.fold_left (fun f arg -> <:expr< $f$ $arg$ >>)

  (* Shorthands for lowercase identifier patterns and expressions. *)
  let idp _loc id = <:patt< $lid:id$ >>
  let ide _loc id = <:expr< $lid:id$ >>

  (* Dispatch on the shape of a type-definition body, invoking the
     matching callback: sum type, polymorphic variants, record,
     manifest (t == u), empty, or any alias-like form.  [private] is
     transparent. *)
  let switch_tp_def _loc ~alias ~sum ~record ~variants ~mani ~nil tp =
    let rec loop = function
      | <:ctyp< private $tp$ >> -> loop tp
      | <:ctyp< [ $alts$ ] >> -> sum _loc alts
      | <:ctyp< [= $row_fields$ ] >> -> variants _loc row_fields
      | <:ctyp< $id:_$ >>
      | <:ctyp< ( $tup:_$ ) >>
      | <:ctyp< $_$ -> $_$ >>
      | <:ctyp< '$_$ >>
      | <:ctyp< $_$ $_$ >> as tp_def -> alias _loc tp_def
      | <:ctyp< { $flds$ } >> -> record _loc flds
      | <:ctyp< $tp1$ == $tp2$ >> -> mani _loc tp1 tp2
      | <:ctyp< ? >> -> nil _loc
      | _ -> failwith "switch_tp_def: unknown type"
    in
    loop tp

  (* Literal list expressions / patterns from OCaml lists of AST nodes. *)
  let rec mk_expr_lst _loc = function
    | [] -> <:expr< [] >>
    | e :: es -> <:expr< [$e$ :: $mk_expr_lst _loc es$] >>
  let rec mk_patt_lst _loc = function
    | [] -> <:patt< [] >>
    | p :: ps -> <:patt< [$p$ :: $mk_patt_lst _loc ps$] >>

  (* Name of a type parameter, e.g. 'a yields "a". *)
  let get_tparam_id = function
    | <:ctyp< '$id$ >> -> id
    | _ -> failwith "get_tparam_id: not a type parameter"

  (* Purely syntactic check: does the definition body [tp] mention the
     lowercase identifier [type_name] anywhere?  Unknown constructs
     abort preprocessing with a located error. *)
  let type_is_recursive _loc type_name tp =
    let rec loop = function
      | <:ctyp< private $tp$>> -> loop tp
      | <:ctyp< $tp1$ $tp2$ >>
      | <:ctyp< $tp1$ * $tp2$ >>
      | <:ctyp< $tp1$; $tp2$ >>
      | <:ctyp< $tp1$ -> $tp2$ >>
      | <:ctyp< $tp1$ == $tp2$ >>
      | <:ctyp< $tp1$ and $tp2$ >>
      | <:ctyp< $tp1$ | $tp2$ >> -> loop tp1 || loop tp2
      | <:ctyp< ( $tup:tp$ ) >> | <:ctyp< { $tp$ } >>
      | <:ctyp< [ $tp$ ] >>
      | <:ctyp< $_$ : $tp$ >>
      | <:ctyp< mutable $tp$ >>
      | <:ctyp< $_$ of $tp$ >>
      | <:ctyp< [< $tp$ ] >> | <:ctyp< [> $tp$ ] >> | <:ctyp< [= $tp$ ] >>
      | <:ctyp< ! $_$ . $tp$ >> -> loop tp
      | <:ctyp< $lid:id$ >> -> id = type_name
      | <:ctyp< $id:_$ >>
      | <:ctyp< `$_$ >>
      | <:ctyp< '$_$ >>
      | <:ctyp< ? >> -> false
      | _ ->
          prerr_endline (
            get_loc_err _loc "type_is_recursive: unknown type construct");
          exit 1
    in
    loop tp

  (* Strip +'a / -'a variance annotations down to plain 'a. *)
  let drop_variance_annotations _loc =
    (map_ctyp (function
      | <:ctyp< +'$var$ >> | <:ctyp< -'$var$ >> -> <:ctyp< '$var$ >>
      | tp -> tp))#ctyp
end
(* Functions for interpreting derivation types *)
(* Generates a tuple of lists of functions and types. *)
(* Look up the "with"-generator registered under [drv] and apply it to
   the type declaration [tp].
   Bug fix: the [Not_found] handler previously also covered the
   application of the generator to [tp], so a [Not_found] escaping from
   the generator itself was misreported as an unsupported generator.
   The handler is now scoped to the map lookup only. *)
let generate tp drv =
  let gen =
    try GeneratorMap.find drv !generators
    with Not_found ->
      failwith ("Pa_type_conv: '" ^ drv ^ "' is not a supported generator.")
  in
  gen tp
(* Concatenate the output of every requested generator for [tp] into a
   single structure item; the right fold preserves source order. *)
let gen_derived_defs _loc tp drvs =
  let coll drv der_sis = <:str_item< $der_sis$; $generate tp drv$ >> in
  List.fold_right coll drvs <:str_item< ? >>
(* Signature-level counterpart of [generate].
   Bug fix: scope the [Not_found] handler to the map lookup only, so a
   [Not_found] raised by the generator itself is not misreported as an
   unsupported signature generator. *)
let sig_generate tp drv =
  let gen =
    try GeneratorMap.find drv !sig_generators
    with Not_found ->
      failwith (
        "Pa_type_conv: '" ^ drv ^ "' is not a supported signature generator.")
  in
  gen tp
(* Concatenate the output of every requested signature generator for
   [tp] into a single signature item, preserving source order. *)
let gen_derived_sigs _loc tp drvs =
  let coll drv der_sis = <:sig_item< $der_sis$; $sig_generate tp drv$ >> in
  List.fold_right coll drvs (SgNil _loc)
(* Syntax extension *)
open Syntax
(* Grammar entry that recognizes an uppercase module name, pushes it
   onto the conversion path (popped again by the "module" rule below)
   and consumes the token. *)
let found_module_name =
  Gram.Entry.of_parser "found_module_name" (fun strm ->
    match Stream.npeek 1 strm with
    | [(UIDENT name, _)] ->
        push_conv_path name;
        Stream.junk strm;
        name
    | _ -> raise Stream.Failure)
(* Replace the stock "module" rule so that module names are tracked on
   the conversion path while their bodies are parsed; see
   found_module_name and the re-added rule at the end. *)
DELETE_RULE Gram str_item: "module"; a_UIDENT; module_binding0 END;
EXTEND Gram
  GLOBAL: str_item sig_item;
  (* "type t = ... with gen1, gen2" in structures: emit the type
     declaration followed by the code produced by each generator. *)
  str_item:
    [[
      "type"; tds = type_declaration; "with";
      drvs = LIST1 [ id = LIDENT -> id ] SEP "," ->
        <:str_item< type $tds$; $gen_derived_defs _loc tds drvs$ >>
    ]];
  (* Explicitly set the conversion path, overriding module tracking. *)
  str_item:
    [[
      "TYPE_CONV_PATH"; conv_path = STRING ->
        set_conv_path conv_path;
        <:str_item< ? >>
    ]];
  (* The same "with" support inside module signatures. *)
  sig_item:
    [[
      "type"; tds = type_declaration; "with";
      drvs = LIST1 [ id = LIDENT -> id ] SEP "," ->
        <:sig_item< type $tds$; $gen_derived_sigs _loc tds drvs$ >>
    ]];
  (* Re-added "module" rule: pops the path component pushed by
     found_module_name once the module body has been parsed. *)
  str_item:
    [[
      "module"; i = found_module_name; mb = module_binding0 ->
        pop_conv_path ();
        <:str_item< module $i$ = $mb$ >>
    ]];
END
| null | https://raw.githubusercontent.com/aryx/ocamltarzan/4140f5102cee83a2ca7be996ca2d92e9cb035f9c/pa/old/pa_type_conv.ml | ocaml | Pa_type_conv: Preprocessing Module for Registering Type Conversions
Utility functions
Module/File path management
Reference storing the path to the currently preprocessed module
Get path to the currently preprocessed module
Set path to the currently preprocessed module
impossible
Generator registration
Map of "with"-generators
Register a "with"-generator to be used in a module signature
General purpose code generation module
Functions for interpreting derivation types
Generates a tuple of lists of functions and types.
Syntax extension | pp camlp4orf
File : pa_type_conv.ml
Copyright ( C ) 2005-
Jane Street Holding , LLC
Author :
email : mmottl\@janestcapital.com
WWW :
This file is derived from file " pa_tywith.ml " of version 0.45 of the
library " " .
is Copyright ( C ) 2004 , 2005 by
< >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2005-
Jane Street Holding, LLC
Author: Markus Mottl
email: mmottl\@janestcapital.com
WWW:
This file is derived from file "pa_tywith.ml" of version 0.45 of the
library "Tywith".
Tywith is Copyright (C) 2004, 2005 by
Martin Sandin <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf
open Lexing
open Camlp4
open PreCast
open Ast
let both fa fb (a, b) = fa a, fb b
let get_loc_err loc msg =
sprintf "File \"%s\", line %d, characters %d-%d: %s"
(Loc.file_name loc) (Loc.start_line loc)
(Loc.start_off loc - Loc.start_bol loc)
(Loc.stop_off loc - Loc.stop_bol loc)
msg
let hash_variant str =
let acc_ref = ref 0 in
for i = 0 to String.length str - 1 do
acc_ref := 223 * !acc_ref + Char.code str.[i]
done;
if Sys.word_size = 32 then !acc_ref
else !acc_ref land int_of_string "0x7FFFFFFF"
let conv_path_ref : (string * string list) option ref = ref None
let get_conv_path_el () =
match !conv_path_ref with
| None -> failwith "Pa_type_conv: path not set";
| Some el -> el
let get_conv_path () = fst (get_conv_path_el ())
let set_conv_path conv_path =
if !conv_path_ref <> None && not !Sys.interactive then
failwith "Pa_type_conv: module name set twice";
conv_path_ref := Some (conv_path, [conv_path])
let () = if !Sys.interactive then set_conv_path "Toplevel"
let push_conv_path mod_name =
let str, rev_lst = get_conv_path_el () in
conv_path_ref := Some (str ^ "." ^ mod_name, mod_name :: rev_lst)
let pop_conv_path () =
match get_conv_path_el () with
| _, _ :: rev_lst ->
conv_path_ref := Some (String.concat "." (List.rev rev_lst), rev_lst)
module GeneratorMap = Map.Make(String)
let generators = ref GeneratorMap.empty
let sig_generators = ref GeneratorMap.empty
Register a " with"-generator
let add_generator id e = generators := GeneratorMap.add id e !generators
Removes a " with"-generator
let rem_generator id = generators := GeneratorMap.remove id !generators
let add_sig_generator id e =
sig_generators := GeneratorMap.add id e !sig_generators
Removes a " with"-generator
let rem_sig_generator id =
sig_generators := GeneratorMap.remove id !sig_generators
module Gen = struct
let rec ty_var_list_of_ctyp tp acc =
match tp with
| <:ctyp< $tp1$ $tp2$ >> ->
ty_var_list_of_ctyp tp1 (ty_var_list_of_ctyp tp2 acc)
| <:ctyp< '$param$ >> -> param :: acc
| _ -> invalid_arg "ty_var_list_of_ctyp"
let rec get_rev_id_path tp acc =
match tp with
| <:ident< $id1$ . $id2$ >> -> get_rev_id_path id2 (get_rev_id_path id1 acc)
| <:ident< $lid:id$ >> | <:ident< $uid:id$ >> -> id :: acc
| _ -> invalid_arg "get_rev_id_path"
let mk_ident _loc str =
let first = str.[0] in
if first >= 'A' && first <= 'Z' then <:ident< $uid:str$ >>
else <:ident< $lid:str$ >>
let rec ident_of_rev_path _loc = function
| [str] -> mk_ident _loc str
| str :: strs ->
<:ident< $ident_of_rev_path _loc strs$ . $mk_ident _loc str$ >>
| _ -> invalid_arg "ident_of_rev_path"
let rec get_appl_path _loc = function
| <:ctyp< $id:id$ >> -> id
| <:ctyp< $tp$ $_$ >> -> get_appl_path _loc tp
| _ -> failwith "get_appl_path: unknown type"
let abstract _loc = List.fold_right (fun p e -> <:expr< fun $p$ -> $e$ >>)
let apply _loc = List.fold_left (fun f arg -> <:expr< $f$ $arg$ >>)
let idp _loc id = <:patt< $lid:id$ >>
let ide _loc id = <:expr< $lid:id$ >>
let switch_tp_def _loc ~alias ~sum ~record ~variants ~mani ~nil tp =
let rec loop = function
| <:ctyp< private $tp$ >> -> loop tp
| <:ctyp< [ $alts$ ] >> -> sum _loc alts
| <:ctyp< [= $row_fields$ ] >> -> variants _loc row_fields
| <:ctyp< $id:_$ >>
| <:ctyp< ( $tup:_$ ) >>
| <:ctyp< $_$ -> $_$ >>
| <:ctyp< '$_$ >>
| <:ctyp< $_$ $_$ >> as tp_def -> alias _loc tp_def
| <:ctyp< { $flds$ } >> -> record _loc flds
| <:ctyp< $tp1$ == $tp2$ >> -> mani _loc tp1 tp2
| <:ctyp< ? >> -> nil _loc
| _ -> failwith "switch_tp_def: unknown type"
in
loop tp
let rec mk_expr_lst _loc = function
| [] -> <:expr< [] >>
| e :: es -> <:expr< [$e$ :: $mk_expr_lst _loc es$] >>
let rec mk_patt_lst _loc = function
| [] -> <:patt< [] >>
| p :: ps -> <:patt< [$p$ :: $mk_patt_lst _loc ps$] >>
let get_tparam_id = function
| <:ctyp< '$id$ >> -> id
| _ -> failwith "get_tparam_id: not a type parameter"
let type_is_recursive _loc type_name tp =
let rec loop = function
| <:ctyp< private $tp$>> -> loop tp
| <:ctyp< $tp1$ $tp2$ >>
| <:ctyp< $tp1$ * $tp2$ >>
| <:ctyp< $tp1$; $tp2$ >>
| <:ctyp< $tp1$ -> $tp2$ >>
| <:ctyp< $tp1$ == $tp2$ >>
| <:ctyp< $tp1$ and $tp2$ >>
| <:ctyp< $tp1$ | $tp2$ >> -> loop tp1 || loop tp2
| <:ctyp< ( $tup:tp$ ) >> | <:ctyp< { $tp$ } >>
| <:ctyp< [ $tp$ ] >>
| <:ctyp< $_$ : $tp$ >>
| <:ctyp< mutable $tp$ >>
| <:ctyp< $_$ of $tp$ >>
| <:ctyp< [< $tp$ ] >> | <:ctyp< [> $tp$ ] >> | <:ctyp< [= $tp$ ] >>
| <:ctyp< ! $_$ . $tp$ >> -> loop tp
| <:ctyp< $lid:id$ >> -> id = type_name
| <:ctyp< $id:_$ >>
| <:ctyp< `$_$ >>
| <:ctyp< '$_$ >>
| <:ctyp< ? >> -> false
| _ ->
prerr_endline (
get_loc_err _loc "type_is_recursive: unknown type construct");
exit 1
in
loop tp
let drop_variance_annotations _loc =
(map_ctyp (function
| <:ctyp< +'$var$ >> | <:ctyp< -'$var$ >> -> <:ctyp< '$var$ >>
| tp -> tp))#ctyp
end
let generate tp drv =
try GeneratorMap.find drv !generators tp
with Not_found ->
failwith ("Pa_type_conv: '" ^ drv ^ "' is not a supported generator.")
let gen_derived_defs _loc tp drvs =
let coll drv der_sis = <:str_item< $der_sis$; $generate tp drv$ >> in
List.fold_right coll drvs <:str_item< ? >>
let sig_generate tp drv =
try GeneratorMap.find drv !sig_generators tp
with Not_found ->
failwith (
"Pa_type_conv: '" ^ drv ^ "' is not a supported signature generator.")
let gen_derived_sigs _loc tp drvs =
let coll drv der_sis = <:sig_item< $der_sis$; $sig_generate tp drv$ >> in
List.fold_right coll drvs (SgNil _loc)
open Syntax
let found_module_name =
Gram.Entry.of_parser "found_module_name" (fun strm ->
match Stream.npeek 1 strm with
| [(UIDENT name, _)] ->
push_conv_path name;
Stream.junk strm;
name
| _ -> raise Stream.Failure)
DELETE_RULE Gram str_item: "module"; a_UIDENT; module_binding0 END;
EXTEND Gram
GLOBAL: str_item sig_item;
str_item:
[[
"type"; tds = type_declaration; "with";
drvs = LIST1 [ id = LIDENT -> id ] SEP "," ->
<:str_item< type $tds$; $gen_derived_defs _loc tds drvs$ >>
]];
str_item:
[[
"TYPE_CONV_PATH"; conv_path = STRING ->
set_conv_path conv_path;
<:str_item< ? >>
]];
sig_item:
[[
"type"; tds = type_declaration; "with";
drvs = LIST1 [ id = LIDENT -> id ] SEP "," ->
<:sig_item< type $tds$; $gen_derived_sigs _loc tds drvs$ >>
]];
str_item:
[[
"module"; i = found_module_name; mb = module_binding0 ->
pop_conv_path ();
<:str_item< module $i$ = $mb$ >>
]];
END
|
9b3d38f479a8672f8150a91c11131d5370076fc095324507dfe202c13f0171d3 | microsoft/SLAyer | NSPolySet.mli | Copyright ( c ) Microsoft Corporation . All rights reserved .
open NSLib
(** Sets of ordered values. See also standard
{{:file:../../../doc/ocaml%20manual/libref/Set.html}Set}. *)
module PolySet : sig
  module type S = sig
    type 'a elt
    type 'a t
    (* Construction and membership *)
    val empty : 'a t
    val is_empty : 'a t -> bool
    val add : 'a elt -> 'a t -> 'a t
    val singleton : 'a elt -> 'a t
    (* Per-element traversal and filtering *)
    val iter : ('a elt -> unit) -> 'a t -> unit
    val map : ('a elt -> 'a elt) -> 'a t -> 'a t
    val fold : ('a elt -> 'z -> 'z) -> 'a t -> 'z -> 'z
    val exists : ('a elt -> bool) -> 'a t -> bool
    val filter : ('a elt -> bool) -> 'a t -> 'a t
    val mem : 'a elt -> 'a t -> bool
    val remove : 'a elt -> 'a t -> 'a t
    (* Set algebra; the *_diff variants return several related results
       at once *)
    val union : 'a t -> 'a t -> 'a t
    val unions : 'a t list -> 'a t
    val inter : 'a t -> 'a t -> 'a t
    val inters : 'a t list -> 'a t
    val diff : 'a t -> 'a t -> 'a t
    val diff_diff : 'a t -> 'a t -> 'a t * 'a t
    val inter_diff : 'a t -> 'a t -> 'a t * 'a t
    val diff_inter_diff : 'a t -> 'a t -> 'a t * 'a t * 'a t
    val subset : 'a t -> 'a t -> bool
    val disjoint : 'a t -> 'a t -> bool
    val intersect : 'a t -> 'a t -> bool
    (* Folds, partitioning and conversions *)
    val kfold : 'a t -> ('a elt -> ('y->'z) -> 'y->'z) -> ('y->'z) -> 'y->'z
    val fold2 : ('z -> 'a elt -> 'a elt -> 'z) -> 'z -> 'a t -> 'a t -> 'z
    val for_all : ('a elt -> bool) -> 'a t -> bool
    val partition : ('a elt -> bool) -> 'a t -> 'a t * 'a t
    val cardinal : 'a t -> int
    val to_list : 'a t -> 'a elt list
    val of_list : 'a elt list -> 'a t
    val to_array : 'a t -> 'a elt array
    (* Element selection *)
    val min_elt : 'a t -> 'a elt
    val max_elt : 'a t -> 'a elt
    val choose : 'a t -> 'a elt
    val trychoose : 'a t -> 'a elt option
    val split : 'a elt -> 'a t -> 'a t * bool * 'a t
    val next : 'a elt -> 'a t -> 'a elt
    (* Iteration over pairs of elements *)
    val fold_pairs : ('a elt -> 'a elt -> 'z -> 'z) -> 'a t -> 'z -> 'z
    val fold_product : ('a elt -> 'a elt -> 'z->'z) -> 'a t -> 'a t -> 'z->'z
    val the_only : ('a elt -> bool) -> 'a t -> 'a elt option
    (* val take : ('a elt -> bool) -> 'a t -> 'a elt option *)
    val take_first_pair : ('a elt -> 'a elt -> 'z option) -> 'a t -> 'z option
    (* Structural comparison *)
    val equal : 'a t -> 'a t -> bool
    val compare : 'a t -> 'a t -> int
  end
  module Make (Ord : PolySet.OrderedType) : (S with type 'a elt = 'a Ord.t)
end
| null | https://raw.githubusercontent.com/microsoft/SLAyer/6f46f6999c18f415bc368b43b5ba3eb54f0b1c04/src/Library/NSPolySet.mli | ocaml | * Sets of ordered values. See also standard
{{:file:../../../doc/ocaml%20manual/libref/Set.html}Set}.
val take : ('a elt -> bool) -> 'a t -> 'a elt option | Copyright ( c ) Microsoft Corporation . All rights reserved .
open NSLib
module PolySet : sig
module type S = sig
type 'a elt
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val add : 'a elt -> 'a t -> 'a t
val singleton : 'a elt -> 'a t
val iter : ('a elt -> unit) -> 'a t -> unit
val map : ('a elt -> 'a elt) -> 'a t -> 'a t
val fold : ('a elt -> 'z -> 'z) -> 'a t -> 'z -> 'z
val exists : ('a elt -> bool) -> 'a t -> bool
val filter : ('a elt -> bool) -> 'a t -> 'a t
val mem : 'a elt -> 'a t -> bool
val remove : 'a elt -> 'a t -> 'a t
val union : 'a t -> 'a t -> 'a t
val unions : 'a t list -> 'a t
val inter : 'a t -> 'a t -> 'a t
val inters : 'a t list -> 'a t
val diff : 'a t -> 'a t -> 'a t
val diff_diff : 'a t -> 'a t -> 'a t * 'a t
val inter_diff : 'a t -> 'a t -> 'a t * 'a t
val diff_inter_diff : 'a t -> 'a t -> 'a t * 'a t * 'a t
val subset : 'a t -> 'a t -> bool
val disjoint : 'a t -> 'a t -> bool
val intersect : 'a t -> 'a t -> bool
val kfold : 'a t -> ('a elt -> ('y->'z) -> 'y->'z) -> ('y->'z) -> 'y->'z
val fold2 : ('z -> 'a elt -> 'a elt -> 'z) -> 'z -> 'a t -> 'a t -> 'z
val for_all : ('a elt -> bool) -> 'a t -> bool
val partition : ('a elt -> bool) -> 'a t -> 'a t * 'a t
val cardinal : 'a t -> int
val to_list : 'a t -> 'a elt list
val of_list : 'a elt list -> 'a t
val to_array : 'a t -> 'a elt array
val min_elt : 'a t -> 'a elt
val max_elt : 'a t -> 'a elt
val choose : 'a t -> 'a elt
val trychoose : 'a t -> 'a elt option
val split : 'a elt -> 'a t -> 'a t * bool * 'a t
val next : 'a elt -> 'a t -> 'a elt
val fold_pairs : ('a elt -> 'a elt -> 'z -> 'z) -> 'a t -> 'z -> 'z
val fold_product : ('a elt -> 'a elt -> 'z->'z) -> 'a t -> 'a t -> 'z->'z
val the_only : ('a elt -> bool) -> 'a t -> 'a elt option
val take_first_pair : ('a elt -> 'a elt -> 'z option) -> 'a t -> 'z option
val equal : 'a t -> 'a t -> bool
val compare : 'a t -> 'a t -> int
end
module Make (Ord : PolySet.OrderedType) : (S with type 'a elt = 'a Ord.t)
end
|
52439d0bea031edd5088a1c8946776a5b0cd8c14e09e13c92530495516c54c3e | Gandalf-/coreutils | MkdirSpec.hs | module MkdirSpec where
import Control.Exception
import Control.Monad
import Coreutils.Mkdir
import Data.Bits
import Data.Either
import System.Directory
import Test.Hspec
-- | Test tree for the mkdir implementation: pure bit operations, mode
-- parsing, and runtime behaviour exercised in a scratch directory.
spec :: Spec
spec = do
    describe "bits" $
        it "works" $ do
            -- Sanity-check the Data.Bits semantics the mode logic uses.
            let seven = 7 :: Int
            testBit seven 0 `shouldBe` True
            testBit seven 1 `shouldBe` True
            testBit seven 2 `shouldBe` True

    describe "parse" $ do
        it "int" $ do
            -- Three-digit modes, each digit 0-7; anything else errors.
            parseMode "123" `shouldBe` Right (1, 2, 3)
            parseMode "723" `shouldBe` Right (7, 2, 3)
            parseMode "700" `shouldBe` Right (7, 0, 0)
            parseMode "013" `shouldBe` Right (0, 1, 3)
            parseMode "823" `shouldSatisfy` isLeft
            parseMode "junk" `shouldSatisfy` isLeft
            parseMode "" `shouldSatisfy` isLeft

        it "permssions" $ do
            -- Each octal digit decodes to (read, write, execute) flags.
            rwx (parsePerms 1) `shouldBe` (False, False, True)
            rwx (parsePerms 2) `shouldBe` (False, True, False)
            rwx (parsePerms 3) `shouldBe` (False, True, True)
            rwx (parsePerms 4) `shouldBe` (True, False, False)
            rwx (parsePerms 5) `shouldBe` (True, False, True)
            rwx (parsePerms 6) `shouldBe` (True, True, False)
            rwx (parsePerms 7) `shouldBe` (True, True, True)

    describe "runtime" $ do
        it "defaults" $
            withTempDir $ do
                -- NOTE(review): assumes getRuntime succeeds for
                -- defaultOptions; the partial Right pattern would crash
                -- otherwise -- confirm against Coreutils.Mkdir.
                let (Right rt) = getRuntime defaultOptions
                mkdir rt "a"
                doesDirectoryExist "a" `shouldReturn` True

        it "parents" $
            withTempDir $ do
                let (Right rt) = getRuntime defaultOptions { optParents = True }
                mkdir rt "a/b/c"
                doesDirectoryExist "a" `shouldReturn` True
                doesDirectoryExist "a/b" `shouldReturn` True
                doesDirectoryExist "a/b/c" `shouldReturn` True

        it "permssions" $
            withTempDir $ do
                let (Right rt) = getRuntime defaultOptions {
                        optMode = Just (parsePerms 5)
                    }
                mkdir rt "a"
                chmod rt "a"
                perms <- getPermissions "a"
                rwx perms `shouldBe` (True, False, True)
  where
    -- Project a Permissions record to (read, write, execute/search).
    rwx p = (readable p, writable p, searchable p)
-- | Run an action inside a fresh, empty scratch directory named
-- @mkdir-test@ under the system temp directory.  Any leftover from a
-- previous run is removed first; the working directory is restored
-- afterwards by 'withCurrentDirectory'.
withTempDir :: IO a -> IO a
withTempDir action = do
    base <- getTemporaryDirectory
    let scratch = base <> "/" <> "mkdir-test"
    leftover <- doesDirectoryExist scratch
    when leftover (removeDirectoryRecursive scratch)
    createDirectory scratch
    withCurrentDirectory scratch action
| null | https://raw.githubusercontent.com/Gandalf-/coreutils/d76bd5a2698e9dc8f548698cded1a873a196a4dc/test/MkdirSpec.hs | haskell | module MkdirSpec where
import Control.Exception
import Control.Monad
import Coreutils.Mkdir
import Data.Bits
import Data.Either
import System.Directory
import Test.Hspec
spec :: Spec
spec = do
describe "bits" $
it "works" $ do
let seven = 7 :: Int
testBit seven 0 `shouldBe` True
testBit seven 1 `shouldBe` True
testBit seven 2 `shouldBe` True
describe "parse" $ do
it "int" $ do
parseMode "123" `shouldBe` Right (1, 2, 3)
parseMode "723" `shouldBe` Right (7, 2, 3)
parseMode "700" `shouldBe` Right (7, 0, 0)
parseMode "013" `shouldBe` Right (0, 1, 3)
parseMode "823" `shouldSatisfy` isLeft
parseMode "junk" `shouldSatisfy` isLeft
parseMode "" `shouldSatisfy` isLeft
it "permssions" $ do
rwx (parsePerms 1) `shouldBe` (False, False, True)
rwx (parsePerms 2) `shouldBe` (False, True, False)
rwx (parsePerms 3) `shouldBe` (False, True, True)
rwx (parsePerms 4) `shouldBe` (True, False, False)
rwx (parsePerms 5) `shouldBe` (True, False, True)
rwx (parsePerms 6) `shouldBe` (True, True, False)
rwx (parsePerms 7) `shouldBe` (True, True, True)
describe "runtime" $ do
it "defaults" $
withTempDir $ do
let (Right rt) = getRuntime defaultOptions
mkdir rt "a"
doesDirectoryExist "a" `shouldReturn` True
it "parents" $
withTempDir $ do
let (Right rt) = getRuntime defaultOptions { optParents = True }
mkdir rt "a/b/c"
doesDirectoryExist "a" `shouldReturn` True
doesDirectoryExist "a/b" `shouldReturn` True
doesDirectoryExist "a/b/c" `shouldReturn` True
it "permssions" $
withTempDir $ do
let (Right rt) = getRuntime defaultOptions {
optMode = Just (parsePerms 5)
}
mkdir rt "a"
chmod rt "a"
perms <- getPermissions "a"
rwx perms `shouldBe` (True, False, True)
where
rwx p = (readable p, writable p, searchable p)
withTempDir :: IO a -> IO a
withTempDir f = do
root <- getTemporaryDirectory
let tmp = root <> "/" <> "mkdir-test"
doesDirectoryExist tmp >>=
flip when (removeDirectoryRecursive tmp)
createDirectory tmp
withCurrentDirectory tmp f
| |
86295cd06f91316d5f2bffc27d3f3ab94882dcfacd7eb5a2aa28182c6ff1ccc7 | crategus/cl-cffi-gtk | package.lisp | (defpackage :gtk-widget-factory
(:use :gtk :gdk :gdk-pixbuf :gobject
:glib :gio :pango :cairo :cffi :common-lisp)
(:export #:gtk-widget-factory))
(in-package :gtk-widget-factory)
;; Resolve FILENAME relative to the source directory of the
;; gtk-widget-factory ASDF system and return the result as a string.
(defun rel-path (filename)
  (let ((system-path (asdf:system-source-directory :gtk-widget-factory)))
    (princ-to-string (merge-pathnames filename system-path))))
| null | https://raw.githubusercontent.com/crategus/cl-cffi-gtk/22156e3e2356f71a67231d9868abcab3582356f3/demo/gtk-widget-factory/package.lisp | lisp | (defpackage :gtk-widget-factory
(:use :gtk :gdk :gdk-pixbuf :gobject
:glib :gio :pango :cairo :cffi :common-lisp)
(:export #:gtk-widget-factory))
(in-package :gtk-widget-factory)
(defun rel-path (filename)
(let ((system-path (asdf:system-source-directory :gtk-widget-factory)))
(princ-to-string (merge-pathnames filename system-path))))
| |
e9d55d6a3f79c62a0d2010c17aba21e3097c091d33684ecfc82dfa36b0011958 | gebi/jungerl | ssh_xfer.erl | %%% File : ssh_xfer.erl
Author : < >
%%% Description : SSH File transfer protocol
Created : 23 Aug 2004 by < >
-module(ssh_xfer).
-vsn("$Revision$ ").
-rcsid("$Id$\n").
-compile(export_all).
-include("../include/ssh.hrl").
-include("../include/ssh_xfer.hrl").
-import(lists, [foldl/3, reverse/1]).
-define(is_set(F, Bits),
((F) band (Bits)) == (F)).
-define(XFER_PACKET_SIZE, 32768).
-define(XFER_WINDOW_SIZE, 4*?XFER_PACKET_SIZE).
%% Attach to an already running connection manager CM and open an SFTP
%% subsystem channel on it.
attach(CM) ->
    case ssh_cm:attach(CM) of
        {ok,CMPid} -> open_xfer(CMPid);
        Error -> Error
    end.

%% Connect to Host:Port with a new connection manager and open an SFTP
%% subsystem channel on the fresh connection.
connect(Host, Port, Opts) ->
    case ssh_cm:start_link(undefined, Host, Port, Opts) of
        {ok, CM} -> open_xfer(CM);
        Error -> Error
    end.

%% Open a session channel on CM, request the "sftp" subsystem and run
%% version negotiation.  Returns {ok, #ssh_xfer{}, LeftoverBytes} or an
%% error from any of the three steps.
open_xfer(CM) ->
    case ssh_cm:session_open(CM, ?XFER_WINDOW_SIZE, ?XFER_PACKET_SIZE) of
        {ok, Channel} ->
            case ssh_cm:subsystem(CM, Channel, "sftp") of
                ok ->
                    case init(CM, Channel) of
                        {ok, {Vsn,Ext}, Rest} ->
                            {ok, #ssh_xfer { vsn = Vsn,
                                             ext = Ext,
                                             cm = CM,
                                             channel = Channel },Rest};
                        Error ->
                            Error
                    end;
                Error ->
                    Error
            end;
        Error ->
            Error
    end.

%% Send SSH_FXP_INIT announcing client version 5 and decode the
%% server's SSH_FXP_VERSION reply into {Version, Extensions}.
init(CM, Channel) ->
    XF = #ssh_xfer { cm = CM, channel = Channel},
    xf_request(XF, ?SSH_FXP_INIT, <<?UINT32(5)>>),
    case reply(CM, Channel) of
        {ok, <<?SSH_FXP_VERSION, ?UINT32(Version), Ext/binary>>, Rest} ->
            {ok, {Version, decode_ext(Ext)}, Rest};
        Error ->
            Error
    end.
%% Wait for one complete, length-prefixed SFTP packet on Channel.
reply(CM,Channel) ->
    reply(CM,Channel,<<>>).

%% RBuf accumulates partial channel data until a whole packet (4-byte
%% big-endian length followed by the body) is available.  Returns
%% {ok, Packet, Leftover} | eof | {error, Reason}.
reply(CM,Channel,RBuf) ->
    receive
        {ssh_cm, CM, {data, Channel, 0, Data}} ->
            case <<RBuf/binary, Data/binary>> of
                <<?UINT32(Len),Reply:Len/binary,Rest/binary>> ->
                    {ok, Reply, Rest};
                RBuf2 ->
                    %% Still incomplete: keep buffering.
                    reply(CM,Channel,RBuf2)
            end;
        %% Non-zero data type code = extended data (stderr); log and
        %% keep waiting for the actual reply.
        {ssh_cm, CM, {data, Channel, _, Data}} ->
            io:format("STDERR: ~s\n", [binary_to_list(Data)]),
            reply(CM,Channel,RBuf);
        {ssh_cm, CM, {exit_signal,Channel,SIG,Err,Lang}} ->
            ssh_cm:close(CM, Channel),
            {error, Err};
        {ssh_cm, CM, {exit_status,Channel,Status}} ->
            ssh_cm:close(CM, Channel),
            eof;
        {ssh_cm, CM, {eof, Channel}} ->
            eof;
        {ssh_cm, CM, {closed, Channel}} ->
            {error, closed};
        %% NOTE(review): the two catch-all clauses below only log and
        %% then return the io:format result instead of recursing, so an
        %% unrelated stray message terminates the wait -- confirm
        %% whether a recursive reply/3 call was intended here.
        {ssh_cm, CM, Msg} ->
            io:format("GOT: ssh_cm ~p\n", [Msg]);
        Msg ->
            io:format("GOT: ~p\n", [Msg])
    end.
%% Open FileName on the server.  For protocol version >= 5 an ACE4
%% access mask is sent before the open flags; Attrs are the initial
%% attributes for files the server creates.
open(XF, ReqID, FileName, Access, Flags, Attrs) ->
    Vsn = XF#ssh_xfer.vsn,
    FileName1 = list_to_binary(FileName),
    MBits = if Vsn >= 5 ->
                    M = encode_ace_mask(Access),
                    ?uint32(M);
               true ->
                    (<<>>)
            end,
    F = encode_open_flags(Flags),
    xf_request(XF,?SSH_FXP_OPEN,
               [?uint32(ReqID),
                ?string(FileName1),
                MBits,
                ?uint32(F),
                encode_ATTR(Vsn,Attrs)]).

%% Open a directory for listing with readdir/3.
opendir(XF, ReqID, DirName) ->
    DirName1 = list_to_binary(DirName),
    xf_request(XF, ?SSH_FXP_OPENDIR,
               [?uint32(ReqID),
                ?string(DirName1)]).

%% Close an open file or directory handle.
close(XF, ReqID, Handle) ->
    xf_request(XF, ?SSH_FXP_CLOSE,
               [?uint32(ReqID),
                ?binary(Handle)]).

%% Read up to Length bytes starting at byte Offset.
read(XF, ReqID, Handle, Offset, Length) ->
    xf_request(XF, ?SSH_FXP_READ,
               [?uint32(ReqID),
                ?binary(Handle),
                ?uint64(Offset),
                ?uint32(Length)]).

%% Request the next batch of directory entries for an opendir handle.
readdir(XF, ReqID, Handle) ->
    xf_request(XF, ?SSH_FXP_READDIR,
               [?uint32(ReqID),
                ?binary(Handle)]).
%% Write Data (a binary or a list, converted with list_to_binary) at
%% byte Offset through the open file Handle.
write(XF, ReqID, Handle, Offset, Data) ->
    Payload =
        if
            binary(Data) -> Data;
            list(Data) -> list_to_binary(Data)
        end,
    xf_request(XF, ?SSH_FXP_WRITE,
               [?uint32(ReqID),
                ?binary(Handle),
                ?uint64(Offset),
                ?binary(Payload)]).
%% Remove a file
remove(XF, ReqID, File) ->
    File1 = list_to_binary(File),
    xf_request(XF, ?SSH_FXP_REMOVE,
               [?uint32(ReqID),
                ?string(File1)]).

%% Rename a file/directory.  For protocol version >= 5 the rename
%% flags (overwrite | atomic | native) are encoded and appended.
rename(XF, ReqID, Old, New, Flags) ->
    Vsn = XF#ssh_xfer.vsn,
    OldPath = list_to_binary(Old),
    NewPath = list_to_binary(New),
    FlagBits
        = if Vsn >= 5 ->
                  F0 = encode_rename_flags(Flags),
                  ?uint32(F0);
             true ->
                  (<<>>)
          end,
    xf_request(XF, ?SSH_FXP_RENAME,
               [?uint32(ReqID),
                ?string(OldPath),
                ?string(NewPath),
                FlagBits]).

%% Create directory with the given initial attributes.
mkdir(XF, ReqID, Path, Attrs) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_MKDIR,
               [?uint32(ReqID),
                ?string(Path1),
                encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Remove a directory.
%% Bug fix: this previously sent SSH_FXP_REMOVE (the file-removal
%% opcode), so servers either rejected the request or treated it as a
%% file delete; directories must be removed with SSH_FXP_RMDIR.
rmdir(XF, ReqID, Dir) ->
    Dir1 = list_to_binary(Dir),
    xf_request(XF, ?SSH_FXP_RMDIR,
               [?uint32(ReqID),
                ?string(Dir1)]).
%% Stat file (follows symbolic links).  For protocol version >= 5 the
%% set of requested attributes is sent as an explicit flag word.
stat(XF, ReqID, Path, Flags) ->
    Path1 = list_to_binary(Path),
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
                        F = encode_attr_flags(Vsn, Flags),
                        ?uint32(F);
                   true ->
                        []
                end,
    xf_request(XF, ?SSH_FXP_STAT,
               [?uint32(ReqID),
                ?string(Path1),
                AttrFlags]).

%% Stat file - SSH_FXP_LSTAT stats the link itself rather than
%% following symbolic links.
lstat(XF, ReqID, Path, Flags) ->
    Path1 = list_to_binary(Path),
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
                        F = encode_attr_flags(Vsn, Flags),
                        ?uint32(F);
                   true ->
                        []
                end,
    xf_request(XF, ?SSH_FXP_LSTAT,
               [?uint32(ReqID),
                ?string(Path1),
                AttrFlags]).

%% Stat an open file by its handle.
fstat(XF, ReqID, Handle, Flags) ->
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
                        F = encode_attr_flags(Vsn, Flags),
                        ?uint32(F);
                   true ->
                        []
                end,
    xf_request(XF, ?SSH_FXP_FSTAT,
               [?uint32(ReqID),
                ?binary(Handle),
                AttrFlags]).
%% Modify file attributes by path
%% Set the attributes Attrs on the file or directory named by Path.
setstat(XF, ReqID, Path, Attrs) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_SETSTAT,
               [?uint32(ReqID),
                ?string(Path1),
                encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Modify file attributes by open handle
%% Set the attributes Attrs on an already open file handle.
fsetstat(XF, ReqID, Handle, Attrs) ->
    xf_request(XF, ?SSH_FXP_FSETSTAT,
               [?uint32(ReqID),
                ?binary(Handle),
                encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Read a symbolic link
%% NOTE(review): ?binary is used for the paths below where the protocol
%% expects a string; assuming both macros emit a uint32 length prefix
%% the wire encoding is identical -- confirm against ssh_xfer.hrl.
readlink(XF, ReqID, Path) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_READLINK,
               [?uint32(ReqID),
                ?binary(Path1)]).

%% Create a symbolic link at LinkPath pointing to TargetPath.
symlink(XF, ReqID, LinkPath, TargetPath) ->
    LinkPath1 = list_to_binary(LinkPath),
    TargetPath1 = list_to_binary(TargetPath),
    xf_request(XF, ?SSH_FXP_SYMLINK,
               [?uint32(ReqID),
                ?binary(LinkPath1),
                ?binary(TargetPath1)]).

%% Convert a path into a 'canonical' form
realpath(XF, ReqID, Path) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_REALPATH,
               [?uint32(ReqID),
                ?binary(Path1)]).

%% Vendor extension: Request names the extension, Data is its opaque
%% payload.
extended(XF, ReqID, Request, Data) ->
    xf_request(XF, ?SSH_FXP_EXTENDED,
               [?uint32(ReqID),
                ?string(Request),
                ?binary(Data)]).
%% Send request to connection manager
%% Frame and send one SFTP request as channel data:
%% <<PacketLen:32, Opcode:8, Arg/binary>> where PacketLen covers the
%% opcode byte plus the argument payload.
xf_request(XF, Op, Arg) ->
    CM = XF#ssh_xfer.cm,
    Channel = XF#ssh_xfer.channel,
    Data = if binary(Arg) -> Arg;
              list(Arg) -> list_to_binary(Arg)
           end,
    Size = 1+size(Data),
    ssh_cm:send(CM, Channel, <<?UINT32(Size), Op, Data/binary>>).
%% Decode one complete SFTP reply packet (length prefix already
%% stripped by reply/3) into a tagged tuple
%% {status|handle|data|name|attrs|extended_reply, ReqID, Payload}.
xf_reply(XF, << ?SSH_FXP_STATUS, ?UINT32(ReqID), ?UINT32(Status),
               ?UINT32(ELen), Err:ELen/binary,
               ?UINT32(LLen), Lang:LLen/binary,
               Reply/binary >> ) ->
    Stat = decode_status(Status),
    {status, ReqID, {Stat,binary_to_list(Err),binary_to_list(Lang),
                     Reply}};
%% Some servers send a bare status without error/language strings.
xf_reply(XF, << ?SSH_FXP_STATUS, ?UINT32(ReqID), ?UINT32(Status)>>) ->
    Stat = decode_status(Status),
    {status, ReqID, {Stat,"","",<<>>}};
xf_reply(XF, <<?SSH_FXP_HANDLE, ?UINT32(ReqID),
              ?UINT32(HLen), Handle:HLen/binary>>) ->
    {handle, ReqID, Handle};
xf_reply(XF, <<?SSH_FXP_DATA, ?UINT32(ReqID),
              ?UINT32(DLen), Data:DLen/binary>>) ->
    {data, ReqID, Data};
xf_reply(XF, <<?SSH_FXP_NAME, ?UINT32(ReqID),
              ?UINT32(Count), AData/binary>>) ->
    {name, ReqID, decode_names(XF#ssh_xfer.vsn, Count, AData)};
xf_reply(XF, <<?SSH_FXP_ATTRS, ?UINT32(ReqID),
              AData/binary>>) ->
    {A, _} = decode_ATTR(XF#ssh_xfer.vsn, AData),
    {attrs, ReqID, A};
%% Bug fix: a bare trailing variable in a binary pattern matches
%% exactly one byte (default segment size/type), so extended replies
%% with any other payload length crashed with function_clause.  RData
%% must be tagged /binary to accept the rest of the packet.
xf_reply(XF, <<?SSH_FXP_EXTENDED_REPLY, ?UINT32(ReqID),
              RData/binary>>) ->
    {extended_reply, ReqID, RData}.
%% Map a numeric SSH_FX_* status code to a descriptive atom.  Unknown
%% codes crash with case_clause, surfacing protocol violations loudly.
decode_status(Status) ->
    case Status of
        ?SSH_FX_OK -> ok;
        ?SSH_FX_EOF -> eof;
        ?SSH_FX_NO_SUCH_FILE -> no_such_file;
        ?SSH_FX_PERMISSION_DENIED -> permission_denied;
        ?SSH_FX_FAILURE -> failure;
        ?SSH_FX_BAD_MESSAGE -> bad_message;
        ?SSH_FX_NO_CONNECTION -> no_connection;
        ?SSH_FX_CONNECTION_LOST -> connection_lost;
        ?SSH_FX_OP_UNSUPPORTED -> op_unsupported;
        ?SSH_FX_INVALID_HANDLE -> invalid_handle;
        ?SSH_FX_NO_SUCH_PATH -> no_such_path;
        ?SSH_FX_FILE_ALREADY_EXISTS -> file_already_exists;
        ?SSH_FX_WRITE_PROTECT -> write_protect;
        ?SSH_FX_NO_MEDIA -> no_media;
        ?SSH_FX_NO_SPACE_ON_FILESYSTEM -> no_space_on_filesystem;
        ?SSH_FX_QUOTA_EXCEEDED -> quota_exceeded;
        ?SSH_FX_UNKNOWN_PRINCIPLE -> unknown_principle;
        ?SSH_FX_LOCK_CONFlICT -> lock_conflict
    end.
%% Decode the extension name/value pairs following the version number
%% in SSH_FXP_VERSION into a [{Name, Value}] list of strings.
decode_ext(<<?UINT32(NameLen), Name:NameLen/binary,
            ?UINT32(DataLen), Data:DataLen/binary,
            Tail/binary>>) ->
    [{binary_to_list(Name), binary_to_list(Data)}
     | decode_ext(Tail)];
decode_ext(<<>>) ->
    [].
%%
%% Encode rename flags
%%
%% Map a list of rename-flag atoms (overwrite | atomic | native) to the
%% SSH_FXP_RENAME bit mask sent by rename/5 (SFTP v5+ only).
encode_rename_flags(Flags) ->
    encode_bits(
      fun(overwrite) -> ?SSH_FXP_RENAME_OVERWRITE;
	 (atomic) -> ?SSH_FXP_RENAME_ATOMIC;
	 (native) -> ?SSH_FXP_RENAME_NATIVE
      end, Flags).
%% Inverse of encode_rename_flags/1: expand a rename-flag bit mask into
%% the list of flag atoms whose bits are set.
decode_rename_flags(F) ->
    decode_bits(F,
		[{?SSH_FXP_RENAME_OVERWRITE, overwrite},
		 {?SSH_FXP_RENAME_ATOMIC, atomic},
		 {?SSH_FXP_RENAME_NATIVE, native}]).
%% Map open-flag atoms to the SSH_FXF_* bit mask used by SSH_FXP_OPEN.
%% The table mixes v3-style flags (read/write/append/...) with v5
%% access/disposition flags; callers must pass atoms valid for the
%% negotiated protocol version (an unknown atom crashes in the fun).
encode_open_flags(Flags) ->
    encode_bits(
      fun (read) -> ?SSH_FXF_READ;
	  (write) -> ?SSH_FXF_WRITE;
	  (append) -> ?SSH_FXF_APPEND;
	  (creat) -> ?SSH_FXF_CREAT;
	  (trunc) -> ?SSH_FXF_TRUNC;
	  (excl) -> ?SSH_FXF_EXCL;
	  (create_new) -> ?SSH_FXF_CREATE_NEW;
	  (create_truncate) -> ?SSH_FXF_CREATE_TRUNCATE;
	  (open_existing) -> ?SSH_FXF_OPEN_EXISTING;
	  (open_or_create) -> ?SSH_FXF_OPEN_OR_CREATE;
	  (truncate_existing) -> ?SSH_FXF_TRUNCATE_EXISTING;
	  (append_data) -> ?SSH_FXF_ACCESS_APPEND_DATA;
	  (append_data_atomic) -> ?SSH_FXF_ACCESS_APPEND_DATA_ATOMIC;
	  (text_mode) -> ?SSH_FXF_ACCESS_TEXT_MODE;
	  (read_lock) -> ?SSH_FXF_ACCESS_READ_LOCK;
	  (write_lock) -> ?SSH_FXF_ACCESS_WRITE_LOCK;
	  (delete_lock) -> ?SSH_FXF_ACCESS_DELETE_LOCK
      end, Flags).
%% Map a list of ACE access atoms to the ACE4_* access mask (SFTP v5
%% ACL support; also used as the 'desired-access' word in v5 open).
encode_ace_mask(Access) ->
    encode_bits(
      fun(read_data) -> ?ACE4_READ_DATA;
	 (list_directory) -> ?ACE4_LIST_DIRECTORY;
	 (write_data) -> ?ACE4_WRITE_DATA;
	 (add_file) -> ?ACE4_ADD_FILE;
	 (append_data) -> ?ACE4_APPEND_DATA;
	 (add_subdirectory) -> ?ACE4_ADD_SUBDIRECTORY;
	 (read_named_attrs) -> ?ACE4_READ_NAMED_ATTRS;
	 (write_named_attrs) -> ?ACE4_WRITE_NAMED_ATTRS;
	 (execute) -> ?ACE4_EXECUTE;
	 (delete_child) -> ?ACE4_DELETE_CHILD;
	 (read_attributes) -> ?ACE4_READ_ATTRIBUTES;
	 (write_attributes) -> ?ACE4_WRITE_ATTRIBUTES;
	 (delete) -> ?ACE4_DELETE;
	 (read_acl) -> ?ACE4_READ_ACL;
	 (write_acl) -> ?ACE4_WRITE_ACL;
	 (write_owner) -> ?ACE4_WRITE_OWNER;
	 (synchronize) -> ?ACE4_SYNCHRONIZE
      end, Access).
%% Inverse of encode_ace_mask/1: expand an ACE4 access mask into the
%% list of access atoms whose bits are set.
decode_ace_mask(F) ->
    decode_bits(F,
		[
		 {?ACE4_READ_DATA, read_data},
		 {?ACE4_LIST_DIRECTORY, list_directory},
		 {?ACE4_WRITE_DATA, write_data},
		 {?ACE4_ADD_FILE, add_file},
		 {?ACE4_APPEND_DATA, append_data},
		 {?ACE4_ADD_SUBDIRECTORY, add_subdirectory},
		 {?ACE4_READ_NAMED_ATTRS, read_named_attrs},
		 {?ACE4_WRITE_NAMED_ATTRS, write_named_attrs},
		 {?ACE4_EXECUTE, execute},
		 {?ACE4_DELETE_CHILD, delete_child},
		 {?ACE4_READ_ATTRIBUTES, read_attributes},
		 {?ACE4_WRITE_ATTRIBUTES, write_attributes},
		 {?ACE4_DELETE, delete},
		 {?ACE4_READ_ACL, read_acl},
		 {?ACE4_WRITE_ACL, write_acl},
		 {?ACE4_WRITE_OWNER, write_owner},
		 {?ACE4_SYNCHRONIZE, synchronize}
		]).
%% Map an ACE type atom to its ACE4 wire constant (SFTP v5 ACLs).
encode_ace_type(access_allowed) -> ?ACE4_ACCESS_ALLOWED_ACE_TYPE;
encode_ace_type(access_denied)  -> ?ACE4_ACCESS_DENIED_ACE_TYPE;
encode_ace_type(system_audit)   -> ?ACE4_SYSTEM_AUDIT_ACE_TYPE;
encode_ace_type(system_alarm)   -> ?ACE4_SYSTEM_ALARM_ACE_TYPE.
%% Inverse of encode_ace_type/1: ACE4 wire constant back to its atom.
decode_ace_type(?ACE4_ACCESS_ALLOWED_ACE_TYPE) -> access_allowed;
decode_ace_type(?ACE4_ACCESS_DENIED_ACE_TYPE)  -> access_denied;
decode_ace_type(?ACE4_SYSTEM_AUDIT_ACE_TYPE)   -> system_audit;
decode_ace_type(?ACE4_SYSTEM_ALARM_ACE_TYPE)   -> system_alarm.
%% Map ACE flag atoms to ACE4 flag bits.
%% NOTE: the misspelled atom 'no_propagte_inherit' is the established
%% API (decode_ace_flag/1 yields the same spelling); kept for
%% backward compatibility.
encode_ace_flag(Flag) ->
    encode_bits(
      fun(file_inherit) -> ?ACE4_FILE_INHERIT_ACE;
	 (directory_inherit) -> ?ACE4_DIRECTORY_INHERIT_ACE;
	 (no_propagte_inherit) -> ?ACE4_NO_PROPAGATE_INHERIT_ACE;
	 (inherit_only) -> ?ACE4_INHERIT_ONLY_ACE;
	 (successful_access) -> ?ACE4_SUCCESSFUL_ACCESS_ACE_FLAG;
	 (failed_access) -> ?ACE4_FAILED_ACCESS_ACE_FLAG;
	 (identifier_group) -> ?ACE4_IDENTIFIER_GROUP
      end, Flag).
%% Inverse of encode_ace_flag/1 (the 'no_propagte_inherit' spelling is
%% intentionally kept in sync with the encoder).
decode_ace_flag(F) ->
    decode_bits(F,
		[
		 {?ACE4_FILE_INHERIT_ACE, file_inherit},
		 {?ACE4_DIRECTORY_INHERIT_ACE, directory_inherit},
		 {?ACE4_NO_PROPAGATE_INHERIT_ACE, no_propagte_inherit},
		 {?ACE4_INHERIT_ONLY_ACE, inherit_only},
		 {?ACE4_SUCCESSFUL_ACCESS_ACE_FLAG, successful_access},
		 {?ACE4_FAILED_ACCESS_ACE_FLAG, failed_access},
		 {?ACE4_IDENTIFIER_GROUP, identifier_group}
		]).
%% Build the SSH_FILEXFER_ATTR_* valid-attribute mask for stat/lstat/
%% fstat requests (v5+). 'all' expands to every known flag atom.
%% NOTE(review): the version-guarded fun clauses have no fallback, so a
%% flag not valid for Vsn (e.g. 'acl' with Vsn < 5) crashes instead of
%% being silently dropped -- confirm callers only pass valid flags.
encode_attr_flags(Vsn, all) ->
    encode_attr_flags(Vsn,
		      [size, uidgid, permissions,
		       acmodtime, accesstime, createtime,
		       modifytime, acl, ownergroup, subsecond_times,
		       bits, extended]);
encode_attr_flags(Vsn, Flags) ->
    encode_bits(
      fun(size) -> ?SSH_FILEXFER_ATTR_SIZE;
	 (uidgid) when Vsn >=2 -> ?SSH_FILEXFER_ATTR_UIDGID;
	 (permissions) -> ?SSH_FILEXFER_ATTR_PERMISSIONS;
	 (acmodtime) when Vsn >= 2 -> ?SSH_FILEXFER_ATTR_ACMODTIME;
	 (accesstime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_ACCESSTIME;
	 (createtime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_CREATETIME;
	 (modifytime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_MODIFYTIME;
	 (acl) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_ACL;
	 (ownergroup) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_OWNERGROUP;
	 (subsecond_times) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_SUBSECOND_TIMES;
	 (bits) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_BITS;
	 (extended) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_EXTENDED
      end, Flags).
%% Map a file-type atom to its SSH_FILEXFER_TYPE_* wire byte.
%% Both 'unknown' and 'undefined' encode as TYPE_UNKNOWN.
encode_file_type(regular)      -> ?SSH_FILEXFER_TYPE_REGULAR;
encode_file_type(directory)    -> ?SSH_FILEXFER_TYPE_DIRECTORY;
encode_file_type(symlink)      -> ?SSH_FILEXFER_TYPE_SYMLINK;
encode_file_type(special)      -> ?SSH_FILEXFER_TYPE_SPECIAL;
encode_file_type(unknown)      -> ?SSH_FILEXFER_TYPE_UNKNOWN;
encode_file_type(socket)       -> ?SSH_FILEXFER_TYPE_SOCKET;
encode_file_type(char_device)  -> ?SSH_FILEXFER_TYPE_CHAR_DEVICE;
encode_file_type(block_device) -> ?SSH_FILEXFER_TYPE_BLOCK_DEVICE;
encode_file_type(fifo)         -> ?SSH_FILEXFER_TYPE_FIFO;
encode_file_type(undefined)    -> ?SSH_FILEXFER_TYPE_UNKNOWN.
%% Map an SSH_FILEXFER_TYPE_* wire byte back to its file-type atom.
decode_file_type(?SSH_FILEXFER_TYPE_REGULAR)      -> regular;
decode_file_type(?SSH_FILEXFER_TYPE_DIRECTORY)    -> directory;
decode_file_type(?SSH_FILEXFER_TYPE_SYMLINK)      -> symlink;
decode_file_type(?SSH_FILEXFER_TYPE_SPECIAL)      -> special;
decode_file_type(?SSH_FILEXFER_TYPE_UNKNOWN)      -> unknown;
decode_file_type(?SSH_FILEXFER_TYPE_SOCKET)       -> socket;
decode_file_type(?SSH_FILEXFER_TYPE_CHAR_DEVICE)  -> char_device;
decode_file_type(?SSH_FILEXFER_TYPE_BLOCK_DEVICE) -> block_device;
decode_file_type(?SSH_FILEXFER_TYPE_FIFO)         -> fifo.
%% Map attrib-bit atoms to the SSH_FILEXFER_ATTR_FLAGS_* mask (v5+).
%% NOTE: the misspelled atom 'arcive' (for ARCHIVE) is the established
%% API -- decode_attrib_bits/1 produces the same spelling; kept as-is.
encode_attrib_bits(Bits) ->
    encode_bits(
      fun(readonly) -> ?SSH_FILEXFER_ATTR_FLAGS_READONLY;
	 (system) -> ?SSH_FILEXFER_ATTR_FLAGS_SYSTEM;
	 (hidden) -> ?SSH_FILEXFER_ATTR_FLAGS_HIDDEN;
	 (case_insensitive) -> ?SSH_FILEXFER_ATTR_FLAGS_CASE_INSENSITIVE;
	 (arcive) -> ?SSH_FILEXFER_ATTR_FLAGS_ARCHIVE;
	 (encrypted) -> ?SSH_FILEXFER_ATTR_FLAGS_ENCRYPTED;
	 (compressed) -> ?SSH_FILEXFER_ATTR_FLAGS_COMPRESSED;
	 (sparse) -> ?SSH_FILEXFER_ATTR_FLAGS_SPARSE;
	 (append_only) -> ?SSH_FILEXFER_ATTR_FLAGS_APPEND_ONLY;
	 (immutable) -> ?SSH_FILEXFER_ATTR_FLAGS_IMMUTABLE;
	 (sync) -> ?SSH_FILEXFER_ATTR_FLAGS_SYNC
      end, Bits).
%% Inverse of encode_attrib_bits/1 (the 'arcive' spelling is
%% intentionally kept in sync with the encoder).
decode_attrib_bits(F) ->
    decode_bits(F,
		[{?SSH_FILEXFER_ATTR_FLAGS_READONLY, readonly},
		 {?SSH_FILEXFER_ATTR_FLAGS_SYSTEM, system},
		 {?SSH_FILEXFER_ATTR_FLAGS_HIDDEN, hidden},
		 {?SSH_FILEXFER_ATTR_FLAGS_CASE_INSENSITIVE, case_insensitive},
		 {?SSH_FILEXFER_ATTR_FLAGS_ARCHIVE, arcive},
		 {?SSH_FILEXFER_ATTR_FLAGS_ENCRYPTED, encrypted},
		 {?SSH_FILEXFER_ATTR_FLAGS_COMPRESSED, compressed},
		 {?SSH_FILEXFER_ATTR_FLAGS_SPARSE, sparse},
		 {?SSH_FILEXFER_ATTR_FLAGS_APPEND_ONLY, append_only},
		 {?SSH_FILEXFER_ATTR_FLAGS_IMMUTABLE, immutable},
		 {?SSH_FILEXFER_ATTR_FLAGS_SYNC, sync}]).
%%
%% Encode file attributes
%%
%% Encode an #ssh_xfer_attr{} record as an SFTP ATTRS block:
%% uint32 valid-attribute flags, a type byte (v5+ only), then the
%% version-appropriate encoding of every field that is not 'undefined'
%% (field selection and flag accumulation are done by encode_As/4).
%% Both owner and group are tagged 'ownergroup'; list order (owner
%% first) determines their order on the wire.
encode_ATTR(Vsn, A) ->
    {Flags,As} =
	encode_As(Vsn,
		  [{size, A#ssh_xfer_attr.size},
		   {ownergroup, A#ssh_xfer_attr.owner},
		   {ownergroup, A#ssh_xfer_attr.group},
		   {permissions, A#ssh_xfer_attr.permissions},
		   {acmodtime, A#ssh_xfer_attr.atime},
		   {acmodtime, A#ssh_xfer_attr.mtime},
		   {accesstime, A#ssh_xfer_attr.atime},
		   {subsecond_times, A#ssh_xfer_attr.atime_nseconds},
		   {createtime, A#ssh_xfer_attr.createtime},
		   {subsecond_times, A#ssh_xfer_attr.createtime_nseconds},
		   {modifytime, A#ssh_xfer_attr.mtime},
		   {subsecond_times, A#ssh_xfer_attr.mtime_nseconds},
		   {acl, A#ssh_xfer_attr.acl},
		   {bits, A#ssh_xfer_attr.attrib_bits},
		   {extended, A#ssh_xfer_attr.extensions}],
		  0, []),
    Type = encode_file_type(A#ssh_xfer_attr.type),
    list_to_binary([?uint32(Flags),
		    if Vsn >= 5 ->
			    ?byte(Type);
		       true ->
			    (<<>>)
		    end, As]).
%% Fold over the {AttrName, Value} pairs produced by encode_ATTR/2,
%% OR-ing each encoded attribute's flag bit into Flags and prepending
%% its wire encoding to Acc (reversed at the end, so output preserves
%% list order). 'undefined' values and attributes not valid for the
%% negotiated Vsn are skipped.
%% BUG FIX: the final case branch was the literal atom pattern 'true'
%% (a leftover if-style catch-all) instead of '_', so any attribute
%% whose version guard failed (e.g. createtime with Vsn < 5) crashed
%% with case_clause instead of being skipped.
encode_As(Vsn, [{_AName, undefined}|As], Flags, Acc) ->
    encode_As(Vsn, As, Flags, Acc);
encode_As(Vsn, [{AName, X}|As], Flags, Acc) ->
    case AName of
	size ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_SIZE,
		      [?uint64(X) | Acc]);
	ownergroup when Vsn>=5 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_OWNERGROUP,
		      [?string(X) | Acc]);
	ownergroup when Vsn>=2 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_UIDGID,
		      [?uint32(X) | Acc]);
	permissions ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_PERMISSIONS,
		      [?uint32(X) | Acc]);
	acmodtime when Vsn>=2 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_ACMODTIME,
		      [?uint32(X) | Acc]);
	accesstime when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_ACCESSTIME,
		      [?uint64(X) | Acc]);
	createtime when Vsn>=5->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_CREATETIME,
		      [?uint64(X) | Acc]);
	modifytime when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_MODIFYTIME,
		      [?uint64(X) | Acc]);
	subsecond_times when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_SUBSECOND_TIMES,
		      [?uint64(X) | Acc]);
	acl when Vsn >=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_ACL,
		      [encode_acl(X) | Acc]);
	bits when Vsn>=5 ->
	    F = encode_attrib_bits(X),
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_BITS,
		      [?uint32(F) | Acc]);
	extended ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_EXTENDED,
		      [encode_extensions(X) | Acc]);
	_ ->
	    %% attribute not valid for this protocol version: skip it
	    encode_As(Vsn, As, Flags, Acc)
    end;
encode_As(_Vsn, [], Flags, Acc) ->
    {Flags, reverse(Acc)}.
%% Decode an ATTRS block into {#ssh_xfer_attr{}, RemainingBytes}.
%% v5+ carries an explicit type byte after the flags word; v2-v4 does
%% not (the type is then inferred from the permissions field inside
%% decode_As/5).
%% NOTE(review): the 'if' has no catch-all, so Vsn < 2 crashes with
%% if_clause -- presumably unreachable after version negotiation;
%% confirm against callers.
decode_ATTR(Vsn, <<?UINT32(Flags), Tail/binary>>) ->
    {Type,Tail2} =
	if Vsn >= 5 ->
		<<?BYTE(T), TL/binary>> = Tail,
		{T, TL};
	   Vsn >= 2 ->
		{?SSH_FILEXFER_TYPE_UNKNOWN, Tail}
	end,
    decode_As(Vsn,
	      [{size, #ssh_xfer_attr.size},
	       {ownergroup, #ssh_xfer_attr.owner},
	       {ownergroup, #ssh_xfer_attr.group},
	       {permissions, #ssh_xfer_attr.permissions},
	       {acmodtime, #ssh_xfer_attr.atime},
	       {acmodtime, #ssh_xfer_attr.mtime},
	       {accesstime, #ssh_xfer_attr.atime},
	       {subsecond_times, #ssh_xfer_attr.atime_nseconds},
	       {createtime, #ssh_xfer_attr.createtime},
	       {subsecond_times, #ssh_xfer_attr.createtime_nseconds},
	       {modifytime, #ssh_xfer_attr.mtime},
	       {subsecond_times, #ssh_xfer_attr.mtime_nseconds},
	       {acl, #ssh_xfer_attr.acl},
	       {bits, #ssh_xfer_attr.attrib_bits},
	       {extended, #ssh_xfer_attr.extensions}],
	      #ssh_xfer_attr { type = decode_file_type(Type) },
	      Flags,
	      Tail2).
%% Walk the {AttrName, RecordFieldIndex} list from decode_ATTR/2 and,
%% for every attribute whose flag bit is set (and which is valid for
%% Vsn), consume its wire encoding from Tail and store it in record R.
%% Returns {FilledRecord, RemainingBytes}.
%% For v2-v4 the file type is inferred from the permissions word
%% (S_IFMT bits), since those versions carry no explicit type byte.
%% BUG FIX: the S_IFIFO case produced the misspelled atom 'fifi';
%% every other site in this module (encode_file_type/decode_file_type)
%% uses 'fifo'.
decode_As(Vsn, [{AName, AField}|As], R, Flags, Tail) ->
    case AName of
	size when ?is_set(?SSH_FILEXFER_ATTR_SIZE, Flags) ->
	    <<?UINT64(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	ownergroup when ?is_set(?SSH_FILEXFER_ATTR_OWNERGROUP, Flags),Vsn>=5 ->
	    <<?UINT32(Len), Bin:Len/binary, Tail2/binary>> = Tail,
	    X = binary_to_list(Bin),
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	ownergroup when ?is_set(?SSH_FILEXFER_ATTR_UIDGID, Flags),Vsn>=2 ->
	    <<?UINT32(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	permissions when ?is_set(?SSH_FILEXFER_ATTR_PERMISSIONS,Flags),Vsn>=5->
	    <<?UINT32(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	permissions when ?is_set(?SSH_FILEXFER_ATTR_PERMISSIONS,Flags),Vsn>=2->
	    <<?UINT32(X), Tail2/binary>> = Tail,
	    R1 = setelement(AField, R, X),
	    %% v2-v4: derive the file type from the mode bits
	    Type = case X band ?S_IFMT of
		       ?S_IFDIR -> directory;
		       ?S_IFCHR -> char_device;
		       ?S_IFBLK -> block_device;
		       ?S_IFIFO -> fifo;
		       ?S_IFREG -> regular;
		       ?S_IFSOCK -> socket;
		       ?S_IFLNK -> symlink;
		       _ -> unknown
		   end,
	    decode_As(Vsn, As, R1#ssh_xfer_attr { type=Type}, Flags, Tail2);
	accesstime when ?is_set(?SSH_FILEXFER_ATTR_ACCESSTIME,Flags),Vsn>=5 ->
	    <<?UINT64(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	modifytime when ?is_set(?SSH_FILEXFER_ATTR_MODIFYTIME,Flags),Vsn>=5 ->
	    <<?UINT64(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	createtime when ?is_set(?SSH_FILEXFER_ATTR_CREATETIME,Flags),Vsn>=5 ->
	    <<?UINT64(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	subsecond_times when ?is_set(?SSH_FILEXFER_ATTR_SUBSECOND_TIMES,Flags),Vsn>=5 ->
	    <<?UINT32(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	acmodtime when ?is_set(?SSH_FILEXFER_ATTR_ACMODTIME,Flags),Vsn>=2 ->
	    <<?UINT32(X), Tail2/binary>> = Tail,
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	acl when ?is_set(?SSH_FILEXFER_ATTR_ACL, Flags), Vsn>=5 ->
	    {X,Tail2} = decode_acl(Tail),
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	bits when ?is_set(?SSH_FILEXFER_ATTR_BITS, Flags), Vsn >=5 ->
	    <<?UINT32(Y), Tail2/binary>> = Tail,
	    X = decode_attrib_bits(Y),
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	extended when ?is_set(?SSH_FILEXFER_ATTR_EXTENDED, Flags) ->
	    {X,Tail2} = decode_extended(Tail),
	    decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
	_ ->
	    decode_As(Vsn, As, R, Flags, Tail)
    end;
decode_As(_Vsn, [], R, _, Tail) ->
    {R, Tail}.
%% Decode the payload of an SSH_FXP_NAME reply: Count entries of
%% filename (+ longname for v2-v4) followed by an ATTRS block each.
%% Returns [{FileNameString, #ssh_xfer_attr{}}]; the v2-v4 longname is
%% read but discarded.
decode_names(Vsn, 0, Data) ->
    [];
decode_names(Vsn, I, <<?UINT32(Len), FileName:Len/binary,
		      Tail/binary>>) when Vsn >= 5 ->
    {A, Tail2} = decode_ATTR(Vsn, Tail),
    [{binary_to_list(FileName), A} | decode_names(Vsn, I-1, Tail2)];
decode_names(Vsn, I, <<?UINT32(Len), FileName:Len/binary,
		      ?UINT32(LLen), LongName:LLen/binary,
		      Tail/binary>>) when Vsn >= 2 ->
    {A, Tail2} = decode_ATTR(Vsn, Tail),
    [{binary_to_list(FileName), A} | decode_names(Vsn, I-1, Tail2)].
%% Encode a list of #ssh_xfer_ace{} records as an ACL attribute block:
%% uint32 entry count followed by the encoded ACE entries.
encode_acl(ACLList) ->
    Count = length(ACLList),
    [?uint32(Count) | encode_acl_items(ACLList)].
%% Encode each #ssh_xfer_ace{} as (uint32 type, uint32 flag,
%% uint32 mask, string who); inverse of decode_acl_items/3.
encode_acl_items([ACE|As]) ->
    Type = encode_ace_type(ACE#ssh_xfer_ace.type),
    Flag = encode_ace_flag(ACE#ssh_xfer_ace.flag),
    Mask = encode_ace_mask(ACE#ssh_xfer_ace.mask),
    Who = list_to_binary(ACE#ssh_xfer_ace.who),
    [?uint32(Type), ?uint32(Flag), ?uint32(Mask),
     ?string(Who) | encode_acl_items(As)];
encode_acl_items([]) ->
    [].
%% Decode an ACL attribute block (uint32 count + Count ACE entries)
%% into {[#ssh_xfer_ace{}], RemainingBytes}.
%% BUG FIX: 'Tail' previously had no /binary specifier; a bare trailing
%% variable in a bit pattern is an 8-bit integer segment, so the head
%% only matched exactly 5-byte inputs and bound Tail to an integer,
%% which then crashed decode_acl_items/3.
decode_acl(<<?UINT32(Count), Tail/binary>>) ->
    decode_acl_items(Count, Tail, []).
%% Consume I ACE entries from the binary, accumulating decoded
%% #ssh_xfer_ace{} records (reversed at the end to preserve wire order).
decode_acl_items(0, Tail, Acc) ->
    {reverse(Acc), Tail};
decode_acl_items(I, <<?UINT32(Type),
		     ?UINT32(Flag),
		     ?UINT32(Mask),
		     ?UINT32(WLen), BWho:WLen/binary,
		     Tail/binary>>, Acc) ->
    decode_acl_items(I-1, Tail,
		     [#ssh_xfer_ace { type = decode_ace_type(Type),
				      flag = decode_ace_flag(Flag),
				      mask = decode_ace_mask(Mask),
				      who = binary_to_list(BWho)} | Acc]).
%% Encode a [{Type, Data}] extension list as uint32 count + pairs.
encode_extensions(Exts) ->
    Count = length(Exts),
    [?uint32(Count) | encode_ext(Exts)].
%% Encode each extension pair as (string Type, string Data).
encode_ext([{Type, Data} | Exts]) ->
    [?string(Type), ?string(Data) | encode_ext(Exts)];
encode_ext([]) ->
    [].
%% Decode an extended-attribute block (uint32 count + pairs) into
%% {[{TypeString, DataBinary}], RemainingBytes}.
decode_extended(<<?UINT32(Count), Tail/binary>>) ->
    decode_ext(Count, Tail, []).
%% Consume I (Type, Data) extension pairs. Unlike decode_ext/1, the
%% Data half is kept as a binary (extension data may not be text).
decode_ext(0, Tail, Acc) ->
    {reverse(Acc), Tail};
decode_ext(I, <<?UINT32(TLen), Type:TLen/binary,
	       ?UINT32(DLen), Data:DLen/binary,
	       Tail/binary>>, Acc) ->
    decode_ext(I-1, Tail, [{binary_to_list(Type), Data}|Acc]).
%% Encode bit encoded flags
%% OR together the bit values that Fun assigns to each name in
%% BitNames; encode_bits/3 continues from an initial accumulator.
encode_bits(Fun, BitNames) ->
    encode_bits(Fun, 0, BitNames).

encode_bits(Fun, Acc0, BitNames) ->
    lists:foldl(fun(Bit, Acc) -> Fun(Bit) bor Acc end, Acc0, BitNames).
%% Decode bit encoded flags
decode_bits(F, [{Bit,BitName}|Bits]) ->
if F band Bit == Bit ->
[BitName | decode_bits(F, Bits)];
true ->
decode_bits(F, Bits)
end;
decode_bits(F, []) ->
[].
| null | https://raw.githubusercontent.com/gebi/jungerl/8f5c102295dbe903f47d79fd64714b7de17026ec/lib/ssh/src/ssh_xfer.erl | erlang | File : ssh_xfer.erl
Description : SSH File transfer protocol
Remove a file
Rename a file/directory
Create directory
Remove a directory
Stat file
Stat file - follow symbolic links
Stat open file
Read a symbolic link
Create a symbolic link
Convert a path into a 'canonical' form
Encode rename flags
Encode bit encoded flags
Decode bit encoded flags | Author : < >
Created : 23 Aug 2004 by < >
-module(ssh_xfer).
-vsn("$Revision$ ").
-rcsid("$Id$\n").
-compile(export_all).
-include("../include/ssh.hrl").
-include("../include/ssh_xfer.hrl").
-import(lists, [foldl/3, reverse/1]).
-define(is_set(F, Bits),
((F) band (Bits)) == (F)).
-define(XFER_PACKET_SIZE, 32768).
-define(XFER_WINDOW_SIZE, 4*?XFER_PACKET_SIZE).
%% Attach to an existing connection manager CM and open an SFTP
%% subsystem channel on it. Returns {ok, #ssh_xfer{}, Rest} | Error.
attach(CM) ->
    case ssh_cm:attach(CM) of
	{ok,CMPid} -> open_xfer(CMPid);
	Error -> Error
    end.
%% Start a new SSH connection to Host:Port and open an SFTP subsystem
%% channel on it. Returns {ok, #ssh_xfer{}, Rest} | Error.
connect(Host, Port, Opts) ->
    case ssh_cm:start_link(undefined, Host, Port, Opts) of
	{ok, CM} -> open_xfer(CM);
	Error -> Error
    end.
%% Open a session channel on CM, request the "sftp" subsystem and run
%% version negotiation. On success returns {ok, #ssh_xfer{}, Rest}
%% where Rest is any already-buffered bytes beyond the VERSION packet.
open_xfer(CM) ->
    case ssh_cm:session_open(CM, ?XFER_WINDOW_SIZE, ?XFER_PACKET_SIZE) of
	{ok, Channel} ->
	    case ssh_cm:subsystem(CM, Channel, "sftp") of
		ok ->
		    case init(CM, Channel) of
			{ok, {Vsn,Ext}, Rest} ->
			    {ok, #ssh_xfer { vsn = Vsn,
					     ext = Ext,
					     cm = CM,
					     channel = Channel },Rest};
			Error ->
			    Error
		    end;
		Error ->
		    Error
	    end;
	Error ->
	    Error
    end.
%% SFTP version negotiation: send SSH_FXP_INIT proposing version 5 and
%% wait for the server's SSH_FXP_VERSION reply. Returns
%% {ok, {ServerVersion, Extensions}, Rest} | Error.
init(CM, Channel) ->
    XF = #ssh_xfer { cm = CM, channel = Channel},
    xf_request(XF, ?SSH_FXP_INIT, <<?UINT32(5)>>),
    case reply(CM, Channel) of
	{ok, <<?SSH_FXP_VERSION, ?UINT32(Version), Ext/binary>>, Rest} ->
	    {ok, {Version, decode_ext(Ext)}, Rest};
	Error ->
	    Error
    end.
%% Wait for one complete SFTP packet (uint32 length prefix + payload)
%% on Channel, accumulating partial frames in RBuf.
%% Returns {ok, Packet, RestOfBuffer} | eof | {error, Reason}.
%% BUG FIX: the two catch-all diagnostic clauses previously fell out of
%% the receive after printing, returning io:format/2's result ('ok') to
%% the caller instead of the protocol reply; they now keep waiting.
reply(CM,Channel) ->
    reply(CM,Channel,<<>>).

reply(CM,Channel,RBuf) ->
    receive
	{ssh_cm, CM, {data, Channel, 0, Data}} ->
	    case <<RBuf/binary, Data/binary>> of
		<<?UINT32(Len),Reply:Len/binary,Rest/binary>> ->
		    {ok, Reply, Rest};
		RBuf2 ->
		    %% incomplete frame: keep buffering
		    reply(CM,Channel,RBuf2)
	    end;
	{ssh_cm, CM, {data, Channel, _, Data}} ->
	    %% non-zero data-type channel data = extended (stderr) output
	    io:format("STDERR: ~s\n", [binary_to_list(Data)]),
	    reply(CM,Channel,RBuf);
	{ssh_cm, CM, {exit_signal,Channel,SIG,Err,Lang}} ->
	    ssh_cm:close(CM, Channel),
	    {error, Err};
	{ssh_cm, CM, {exit_status,Channel,Status}} ->
	    %% NOTE(review): Status is ignored; any exit is reported as eof
	    ssh_cm:close(CM, Channel),
	    eof;
	{ssh_cm, CM, {eof, Channel}} ->
	    eof;
	{ssh_cm, CM, {closed, Channel}} ->
	    {error, closed};
	{ssh_cm, CM, Msg} ->
	    io:format("GOT: ssh_cm ~p\n", [Msg]),
	    reply(CM,Channel,RBuf);
	Msg ->
	    io:format("GOT: ~p\n", [Msg]),
	    reply(CM,Channel,RBuf)
    end.
%% Send SSH_FXP_OPEN. For protocol v5+ an ACE4 desired-access mask is
%% included before the open-flags word; earlier versions omit it.
open(XF, ReqID, FileName, Access, Flags, Attrs) ->
    Vsn = XF#ssh_xfer.vsn,
    FileName1 = list_to_binary(FileName),
    MBits = if Vsn >= 5 ->
		    M = encode_ace_mask(Access),
		    ?uint32(M);
	       true ->
		    (<<>>)
	    end,
    F = encode_open_flags(Flags),
    xf_request(XF,?SSH_FXP_OPEN,
	       [?uint32(ReqID),
		?string(FileName1),
		MBits,
		?uint32(F),
		encode_ATTR(Vsn,Attrs)]).
%% Send SSH_FXP_OPENDIR for DirName; the server answers with a handle.
opendir(XF, ReqID, DirName) ->
    DirName1 = list_to_binary(DirName),
    xf_request(XF, ?SSH_FXP_OPENDIR,
	       [?uint32(ReqID),
		?string(DirName1)]).
%% Send SSH_FXP_CLOSE for an open file/directory handle.
close(XF, ReqID, Handle) ->
    xf_request(XF, ?SSH_FXP_CLOSE,
	       [?uint32(ReqID),
		?binary(Handle)]).
%% Send SSH_FXP_READ: read up to Length bytes at Offset from Handle.
read(XF, ReqID, Handle, Offset, Length) ->
    xf_request(XF, ?SSH_FXP_READ,
	       [?uint32(ReqID),
		?binary(Handle),
		?uint64(Offset),
		?uint32(Length)]).
%% Send SSH_FXP_READDIR: fetch the next batch of directory entries.
readdir(XF, ReqID, Handle) ->
    xf_request(XF, ?SSH_FXP_READDIR,
	       [?uint32(ReqID),
		?binary(Handle)]).
%% Send SSH_FXP_WRITE: write Data (iolist or binary) at Offset.
%% Uses the legacy binary/1 and list/1 guard BIFs (pre-is_binary era).
write(XF,ReqID, Handle, Offset, Data) ->
    Data1 = if binary(Data) -> Data;
	       list(Data) -> list_to_binary(Data)
	    end,
    xf_request(XF,?SSH_FXP_WRITE,
	       [?uint32(ReqID),
		?binary(Handle),
		?uint64(Offset),
		?binary(Data1)]).
%% Send SSH_FXP_REMOVE: delete the file named File (not directories;
%% see rmdir/3).
remove(XF, ReqID, File) ->
    File1 = list_to_binary(File),
    xf_request(XF, ?SSH_FXP_REMOVE,
	       [?uint32(ReqID),
		?string(File1)]).
%% Send SSH_FXP_RENAME. For protocol v5+ a rename-flags word
%% (overwrite/atomic/native) is appended; earlier versions omit it.
rename(XF, ReqID, Old, New, Flags) ->
    Vsn = XF#ssh_xfer.vsn,
    OldPath = list_to_binary(Old),
    NewPath = list_to_binary(New),
    FlagBits
	= if Vsn >= 5 ->
		  F0 = encode_rename_flags(Flags),
		  ?uint32(F0);
	     true ->
		  (<<>>)
	  end,
    xf_request(XF, ?SSH_FXP_RENAME,
	       [?uint32(ReqID),
		?string(OldPath),
		?string(NewPath),
		FlagBits]).
%% Send SSH_FXP_MKDIR: create directory Path with the given attributes.
mkdir(XF, ReqID, Path, Attrs) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_MKDIR,
	       [?uint32(ReqID),
		?string(Path1),
		encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Send SSH_FXP_RMDIR: remove the directory named Dir.
%% BUG FIX: this previously sent ?SSH_FXP_REMOVE (the file-deletion
%% opcode, 13) instead of ?SSH_FXP_RMDIR (15), so servers either
%% rejected the request or treated it as a file remove.
rmdir(XF, ReqID, Dir) ->
    Dir1 = list_to_binary(Dir),
    xf_request(XF, ?SSH_FXP_RMDIR,
	       [?uint32(ReqID),
		?string(Dir1)]).
%% Send SSH_FXP_STAT (follows symbolic links). For v5+ a requested-
%% attribute flags word is appended; earlier versions omit it.
stat(XF, ReqID, Path, Flags) ->
    Path1 = list_to_binary(Path),
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
			F = encode_attr_flags(Vsn, Flags),
			?uint32(F);
		   true ->
			[]
		end,
    xf_request(XF, ?SSH_FXP_STAT,
	       [?uint32(ReqID),
		?string(Path1),
		AttrFlags]).
%% Send SSH_FXP_LSTAT (does not follow symbolic links). Same v5+
%% requested-attribute flags handling as stat/4.
lstat(XF, ReqID, Path, Flags) ->
    Path1 = list_to_binary(Path),
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
			F = encode_attr_flags(Vsn, Flags),
			?uint32(F);
		   true ->
			[]
		end,
    xf_request(XF, ?SSH_FXP_LSTAT,
	       [?uint32(ReqID),
		?string(Path1),
		AttrFlags]).
%% Send SSH_FXP_FSTAT: stat an open handle. Same v5+ requested-
%% attribute flags handling as stat/4.
fstat(XF, ReqID, Handle, Flags) ->
    Vsn = XF#ssh_xfer.vsn,
    AttrFlags = if Vsn >= 5 ->
			F = encode_attr_flags(Vsn, Flags),
			?uint32(F);
		   true ->
			[]
		end,
    xf_request(XF, ?SSH_FXP_FSTAT,
	       [?uint32(ReqID),
		?binary(Handle),
		AttrFlags]).
%% Modify file attributes
%% Send SSH_FXP_SETSTAT: modify the attributes of the file at Path.
setstat(XF, ReqID, Path, Attrs) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_SETSTAT,
	       [?uint32(ReqID),
		?string(Path1),
		encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Modify file attributes
%% Send SSH_FXP_FSETSTAT: modify the attributes of an open handle.
fsetstat(XF, ReqID, Handle, Attrs) ->
    xf_request(XF, ?SSH_FXP_FSETSTAT,
	       [?uint32(ReqID),
		?binary(Handle),
		encode_ATTR(XF#ssh_xfer.vsn, Attrs)]).
%% Send SSH_FXP_READLINK: resolve the target of a symbolic link.
%% NOTE(review): uses ?binary for the path where the other requests use
%% ?string -- presumably both macros emit length-prefixed data; confirm
%% against ssh.hrl.
readlink(XF, ReqID, Path) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_READLINK,
	       [?uint32(ReqID),
		?binary(Path1)]).
%% Send SSH_FXP_SYMLINK: create a symbolic link LinkPath -> TargetPath.
symlink(XF, ReqID, LinkPath, TargetPath) ->
    LinkPath1 = list_to_binary(LinkPath),
    TargetPath1 = list_to_binary(TargetPath),
    xf_request(XF, ?SSH_FXP_SYMLINK,
	       [?uint32(ReqID),
		?binary(LinkPath1),
		?binary(TargetPath1)]).
%% Send SSH_FXP_REALPATH: canonicalize Path on the server.
realpath(XF, ReqID, Path) ->
    Path1 = list_to_binary(Path),
    xf_request(XF, ?SSH_FXP_REALPATH,
	       [?uint32(ReqID),
		?binary(Path1)]).
%% Send SSH_FXP_EXTENDED: vendor/extension request named Request with
%% opaque payload Data.
extended(XF, ReqID, Request, Data) ->
    xf_request(XF, ?SSH_FXP_EXTENDED,
	       [?uint32(ReqID),
		?string(Request),
		?binary(Data)]).
%% Send request to connection manager
%% Frame and send one SFTP request: uint32 total length, opcode byte,
%% then the encoded argument (iolist or binary).
%% Uses the legacy binary/1 and list/1 guard BIFs.
xf_request(XF, Op, Arg) ->
    CM = XF#ssh_xfer.cm,
    Channel = XF#ssh_xfer.channel,
    Data = if binary(Arg) -> Arg;
	      list(Arg) -> list_to_binary(Arg)
	   end,
    Size = 1+size(Data),
    ssh_cm:send(CM, Channel, <<?UINT32(Size), Op, Data/binary>>).
%% Decode one SFTP reply packet into a tagged tuple:
%% {status|handle|data|name|attrs|extended_reply, ReqID, Payload}.
%% BUG FIX: the EXTENDED_REPLY clause's trailing RData lacked /binary;
%% a bare trailing variable is an 8-bit integer segment, so any
%% extended reply with more than one payload byte failed to match.
%% Unused XF parameters are underscored to silence warnings.
xf_reply(_XF, << ?SSH_FXP_STATUS, ?UINT32(ReqID), ?UINT32(Status),
	       ?UINT32(ELen), Err:ELen/binary,
	       ?UINT32(LLen), Lang:LLen/binary,
	       Reply/binary >> ) ->
    Stat = decode_status(Status),
    {status, ReqID, {Stat,binary_to_list(Err),binary_to_list(Lang),
		     Reply}};
xf_reply(_XF, << ?SSH_FXP_STATUS, ?UINT32(ReqID), ?UINT32(Status)>>) ->
    %% bare status (no error/language strings)
    Stat = decode_status(Status),
    {status, ReqID, {Stat,"","",<<>>}};
xf_reply(_XF, <<?SSH_FXP_HANDLE, ?UINT32(ReqID),
	       ?UINT32(HLen), Handle:HLen/binary>>) ->
    {handle, ReqID, Handle};
xf_reply(_XF, <<?SSH_FXP_DATA, ?UINT32(ReqID),
	       ?UINT32(DLen), Data:DLen/binary>>) ->
    {data, ReqID, Data};
xf_reply(XF, <<?SSH_FXP_NAME, ?UINT32(ReqID),
	      ?UINT32(Count), AData/binary>>) ->
    {name, ReqID, decode_names(XF#ssh_xfer.vsn, Count, AData)};
xf_reply(XF, <<?SSH_FXP_ATTRS, ?UINT32(ReqID),
	      AData/binary>>) ->
    {A, _} = decode_ATTR(XF#ssh_xfer.vsn, AData),
    {attrs, ReqID, A};
xf_reply(_XF, <<?SSH_FXP_EXTENDED_REPLY, ?UINT32(ReqID),
	       RData/binary>>) ->
    {extended_reply, ReqID, RData}.
decode_status(Status) ->
case Status of
?SSH_FX_OK -> ok;
?SSH_FX_EOF -> eof;
?SSH_FX_NO_SUCH_FILE -> no_such_file;
?SSH_FX_PERMISSION_DENIED -> permission_denied;
?SSH_FX_FAILURE -> failure;
?SSH_FX_BAD_MESSAGE -> bad_message;
?SSH_FX_NO_CONNECTION -> no_connection;
?SSH_FX_CONNECTION_LOST -> connection_lost;
?SSH_FX_OP_UNSUPPORTED -> op_unsupported;
?SSH_FX_INVALID_HANDLE -> invalid_handle;
?SSH_FX_NO_SUCH_PATH -> no_such_path;
?SSH_FX_FILE_ALREADY_EXISTS -> file_already_exists;
?SSH_FX_WRITE_PROTECT -> write_protect;
?SSH_FX_NO_MEDIA -> no_media;
?SSH_FX_NO_SPACE_ON_FILESYSTEM -> no_space_on_filesystem;
?SSH_FX_QUOTA_EXCEEDED -> quota_exceeded;
?SSH_FX_UNKNOWN_PRINCIPLE -> unknown_principle;
?SSH_FX_LOCK_CONFlICT -> lock_conflict
end.
decode_ext(<<?UINT32(NameLen), Name:NameLen/binary,
?UINT32(DataLen), Data:DataLen/binary,
Tail/binary>>) ->
[{binary_to_list(Name), binary_to_list(Data)}
| decode_ext(Tail)];
decode_ext(<<>>) ->
[].
encode_rename_flags(Flags) ->
encode_bits(
fun(overwrite) -> ?SSH_FXP_RENAME_OVERWRITE;
(atomic) -> ?SSH_FXP_RENAME_ATOMIC;
(native) -> ?SSH_FXP_RENAME_NATIVE
end, Flags).
decode_rename_flags(F) ->
decode_bits(F,
[{?SSH_FXP_RENAME_OVERWRITE, overwrite},
{?SSH_FXP_RENAME_ATOMIC, atomic},
{?SSH_FXP_RENAME_NATIVE, native}]).
encode_open_flags(Flags) ->
encode_bits(
fun (read) -> ?SSH_FXF_READ;
(write) -> ?SSH_FXF_WRITE;
(append) -> ?SSH_FXF_APPEND;
(creat) -> ?SSH_FXF_CREAT;
(trunc) -> ?SSH_FXF_TRUNC;
(excl) -> ?SSH_FXF_EXCL;
(create_new) -> ?SSH_FXF_CREATE_NEW;
(create_truncate) -> ?SSH_FXF_CREATE_TRUNCATE;
(open_existing) -> ?SSH_FXF_OPEN_EXISTING;
(open_or_create) -> ?SSH_FXF_OPEN_OR_CREATE;
(truncate_existing) -> ?SSH_FXF_TRUNCATE_EXISTING;
(append_data) -> ?SSH_FXF_ACCESS_APPEND_DATA;
(append_data_atomic) -> ?SSH_FXF_ACCESS_APPEND_DATA_ATOMIC;
(text_mode) -> ?SSH_FXF_ACCESS_TEXT_MODE;
(read_lock) -> ?SSH_FXF_ACCESS_READ_LOCK;
(write_lock) -> ?SSH_FXF_ACCESS_WRITE_LOCK;
(delete_lock) -> ?SSH_FXF_ACCESS_DELETE_LOCK
end, Flags).
encode_ace_mask(Access) ->
encode_bits(
fun(read_data) -> ?ACE4_READ_DATA;
(list_directory) -> ?ACE4_LIST_DIRECTORY;
(write_data) -> ?ACE4_WRITE_DATA;
(add_file) -> ?ACE4_ADD_FILE;
(append_data) -> ?ACE4_APPEND_DATA;
(add_subdirectory) -> ?ACE4_ADD_SUBDIRECTORY;
(read_named_attrs) -> ?ACE4_READ_NAMED_ATTRS;
(write_named_attrs) -> ?ACE4_WRITE_NAMED_ATTRS;
(execute) -> ?ACE4_EXECUTE;
(delete_child) -> ?ACE4_DELETE_CHILD;
(read_attributes) -> ?ACE4_READ_ATTRIBUTES;
(write_attributes) -> ?ACE4_WRITE_ATTRIBUTES;
(delete) -> ?ACE4_DELETE;
(read_acl) -> ?ACE4_READ_ACL;
(write_acl) -> ?ACE4_WRITE_ACL;
(write_owner) -> ?ACE4_WRITE_OWNER;
(synchronize) -> ?ACE4_SYNCHRONIZE
end, Access).
decode_ace_mask(F) ->
decode_bits(F,
[
{?ACE4_READ_DATA, read_data},
{?ACE4_LIST_DIRECTORY, list_directory},
{?ACE4_WRITE_DATA, write_data},
{?ACE4_ADD_FILE, add_file},
{?ACE4_APPEND_DATA, append_data},
{?ACE4_ADD_SUBDIRECTORY, add_subdirectory},
{?ACE4_READ_NAMED_ATTRS, read_named_attrs},
{?ACE4_WRITE_NAMED_ATTRS, write_named_attrs},
{?ACE4_EXECUTE, execute},
{?ACE4_DELETE_CHILD, delete_child},
{?ACE4_READ_ATTRIBUTES, read_attributes},
{?ACE4_WRITE_ATTRIBUTES, write_attributes},
{?ACE4_DELETE, delete},
{?ACE4_READ_ACL, read_acl},
{?ACE4_WRITE_ACL, write_acl},
{?ACE4_WRITE_OWNER, write_owner},
{?ACE4_SYNCHRONIZE, synchronize}
]).
encode_ace_type(Type) ->
case Type of
access_allowed -> ?ACE4_ACCESS_ALLOWED_ACE_TYPE;
access_denied -> ?ACE4_ACCESS_DENIED_ACE_TYPE;
system_audit -> ?ACE4_SYSTEM_AUDIT_ACE_TYPE;
system_alarm -> ?ACE4_SYSTEM_ALARM_ACE_TYPE
end.
decode_ace_type(F) ->
case F of
?ACE4_ACCESS_ALLOWED_ACE_TYPE -> access_allowed;
?ACE4_ACCESS_DENIED_ACE_TYPE -> access_denied;
?ACE4_SYSTEM_AUDIT_ACE_TYPE -> system_audit;
?ACE4_SYSTEM_ALARM_ACE_TYPE -> system_alarm
end.
encode_ace_flag(Flag) ->
encode_bits(
fun(file_inherit) -> ?ACE4_FILE_INHERIT_ACE;
(directory_inherit) -> ?ACE4_DIRECTORY_INHERIT_ACE;
(no_propagte_inherit) -> ?ACE4_NO_PROPAGATE_INHERIT_ACE;
(inherit_only) -> ?ACE4_INHERIT_ONLY_ACE;
(successful_access) -> ?ACE4_SUCCESSFUL_ACCESS_ACE_FLAG;
(failed_access) -> ?ACE4_FAILED_ACCESS_ACE_FLAG;
(identifier_group) -> ?ACE4_IDENTIFIER_GROUP
end, Flag).
decode_ace_flag(F) ->
decode_bits(F,
[
{?ACE4_FILE_INHERIT_ACE, file_inherit},
{?ACE4_DIRECTORY_INHERIT_ACE, directory_inherit},
{?ACE4_NO_PROPAGATE_INHERIT_ACE, no_propagte_inherit},
{?ACE4_INHERIT_ONLY_ACE, inherit_only},
{?ACE4_SUCCESSFUL_ACCESS_ACE_FLAG, successful_access},
{?ACE4_FAILED_ACCESS_ACE_FLAG, failed_access},
{?ACE4_IDENTIFIER_GROUP, identifier_group}
]).
encode_attr_flags(Vsn, all) ->
encode_attr_flags(Vsn,
[size, uidgid, permissions,
acmodtime, accesstime, createtime,
modifytime, acl, ownergroup, subsecond_times,
bits, extended]);
encode_attr_flags(Vsn, Flags) ->
encode_bits(
fun(size) -> ?SSH_FILEXFER_ATTR_SIZE;
(uidgid) when Vsn >=2 -> ?SSH_FILEXFER_ATTR_UIDGID;
(permissions) -> ?SSH_FILEXFER_ATTR_PERMISSIONS;
(acmodtime) when Vsn >= 2 -> ?SSH_FILEXFER_ATTR_ACMODTIME;
(accesstime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_ACCESSTIME;
(createtime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_CREATETIME;
(modifytime) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_MODIFYTIME;
(acl) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_ACL;
(ownergroup) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_OWNERGROUP;
(subsecond_times) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_SUBSECOND_TIMES;
(bits) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_BITS;
(extended) when Vsn >= 5 -> ?SSH_FILEXFER_ATTR_EXTENDED
end, Flags).
encode_file_type(Type) ->
case Type of
regular -> ?SSH_FILEXFER_TYPE_REGULAR;
directory -> ?SSH_FILEXFER_TYPE_DIRECTORY;
symlink -> ?SSH_FILEXFER_TYPE_SYMLINK;
special -> ?SSH_FILEXFER_TYPE_SPECIAL;
unknown -> ?SSH_FILEXFER_TYPE_UNKNOWN;
socket -> ?SSH_FILEXFER_TYPE_SOCKET;
char_device -> ?SSH_FILEXFER_TYPE_CHAR_DEVICE;
block_device -> ?SSH_FILEXFER_TYPE_BLOCK_DEVICE;
fifo -> ?SSH_FILEXFER_TYPE_FIFO;
undefined -> ?SSH_FILEXFER_TYPE_UNKNOWN
end.
decode_file_type(Type) ->
case Type of
?SSH_FILEXFER_TYPE_REGULAR -> regular;
?SSH_FILEXFER_TYPE_DIRECTORY -> directory;
?SSH_FILEXFER_TYPE_SYMLINK -> symlink;
?SSH_FILEXFER_TYPE_SPECIAL -> special;
?SSH_FILEXFER_TYPE_UNKNOWN -> unknown;
?SSH_FILEXFER_TYPE_SOCKET -> socket;
?SSH_FILEXFER_TYPE_CHAR_DEVICE -> char_device;
?SSH_FILEXFER_TYPE_BLOCK_DEVICE -> block_device;
?SSH_FILEXFER_TYPE_FIFO -> fifo
end.
encode_attrib_bits(Bits) ->
encode_bits(
fun(readonly) -> ?SSH_FILEXFER_ATTR_FLAGS_READONLY;
(system) -> ?SSH_FILEXFER_ATTR_FLAGS_SYSTEM;
(hidden) -> ?SSH_FILEXFER_ATTR_FLAGS_HIDDEN;
(case_insensitive) -> ?SSH_FILEXFER_ATTR_FLAGS_CASE_INSENSITIVE;
(arcive) -> ?SSH_FILEXFER_ATTR_FLAGS_ARCHIVE;
(encrypted) -> ?SSH_FILEXFER_ATTR_FLAGS_ENCRYPTED;
(compressed) -> ?SSH_FILEXFER_ATTR_FLAGS_COMPRESSED;
(sparse) -> ?SSH_FILEXFER_ATTR_FLAGS_SPARSE;
(append_only) -> ?SSH_FILEXFER_ATTR_FLAGS_APPEND_ONLY;
(immutable) -> ?SSH_FILEXFER_ATTR_FLAGS_IMMUTABLE;
(sync) -> ?SSH_FILEXFER_ATTR_FLAGS_SYNC
end, Bits).
decode_attrib_bits(F) ->
decode_bits(F,
[{?SSH_FILEXFER_ATTR_FLAGS_READONLY, readonly},
{?SSH_FILEXFER_ATTR_FLAGS_SYSTEM, system},
{?SSH_FILEXFER_ATTR_FLAGS_HIDDEN, hidden},
{?SSH_FILEXFER_ATTR_FLAGS_CASE_INSENSITIVE, case_insensitive},
{?SSH_FILEXFER_ATTR_FLAGS_ARCHIVE, arcive},
{?SSH_FILEXFER_ATTR_FLAGS_ENCRYPTED, encrypted},
{?SSH_FILEXFER_ATTR_FLAGS_COMPRESSED, compressed},
{?SSH_FILEXFER_ATTR_FLAGS_SPARSE, sparse},
{?SSH_FILEXFER_ATTR_FLAGS_APPEND_ONLY, append_only},
{?SSH_FILEXFER_ATTR_FLAGS_IMMUTABLE, immutable},
{?SSH_FILEXFER_ATTR_FLAGS_SYNC, sync}]).
%% Encode file attributes
encode_ATTR(Vsn, A) ->
{Flags,As} =
encode_As(Vsn,
[{size, A#ssh_xfer_attr.size},
{ownergroup, A#ssh_xfer_attr.owner},
{ownergroup, A#ssh_xfer_attr.group},
{permissions, A#ssh_xfer_attr.permissions},
{acmodtime, A#ssh_xfer_attr.atime},
{acmodtime, A#ssh_xfer_attr.mtime},
{accesstime, A#ssh_xfer_attr.atime},
{subsecond_times, A#ssh_xfer_attr.atime_nseconds},
{createtime, A#ssh_xfer_attr.createtime},
{subsecond_times, A#ssh_xfer_attr.createtime_nseconds},
{modifytime, A#ssh_xfer_attr.mtime},
{subsecond_times, A#ssh_xfer_attr.mtime_nseconds},
{acl, A#ssh_xfer_attr.acl},
{bits, A#ssh_xfer_attr.attrib_bits},
{extended, A#ssh_xfer_attr.extensions}],
0, []),
Type = encode_file_type(A#ssh_xfer_attr.type),
list_to_binary([?uint32(Flags),
if Vsn >= 5 ->
?byte(Type);
true ->
(<<>>)
end, As]).
%% Fold over the {AttrName, Value} pairs produced by encode_ATTR/2,
%% OR-ing each encoded attribute's flag bit into Flags and prepending
%% its wire encoding to Acc (reversed at the end, so output preserves
%% list order). 'undefined' values and attributes not valid for the
%% negotiated Vsn are skipped.
%% BUG FIX: the final case branch was the literal atom pattern 'true'
%% (a leftover if-style catch-all) instead of '_', so any attribute
%% whose version guard failed (e.g. createtime with Vsn < 5) crashed
%% with case_clause instead of being skipped.
encode_As(Vsn, [{_AName, undefined}|As], Flags, Acc) ->
    encode_As(Vsn, As, Flags, Acc);
encode_As(Vsn, [{AName, X}|As], Flags, Acc) ->
    case AName of
	size ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_SIZE,
		      [?uint64(X) | Acc]);
	ownergroup when Vsn>=5 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_OWNERGROUP,
		      [?string(X) | Acc]);
	ownergroup when Vsn>=2 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_UIDGID,
		      [?uint32(X) | Acc]);
	permissions ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_PERMISSIONS,
		      [?uint32(X) | Acc]);
	acmodtime when Vsn>=2 ->
	    encode_As(Vsn, As,Flags bor ?SSH_FILEXFER_ATTR_ACMODTIME,
		      [?uint32(X) | Acc]);
	accesstime when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_ACCESSTIME,
		      [?uint64(X) | Acc]);
	createtime when Vsn>=5->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_CREATETIME,
		      [?uint64(X) | Acc]);
	modifytime when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_MODIFYTIME,
		      [?uint64(X) | Acc]);
	subsecond_times when Vsn>=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_SUBSECOND_TIMES,
		      [?uint64(X) | Acc]);
	acl when Vsn >=5 ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_ACL,
		      [encode_acl(X) | Acc]);
	bits when Vsn>=5 ->
	    F = encode_attrib_bits(X),
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_BITS,
		      [?uint32(F) | Acc]);
	extended ->
	    encode_As(Vsn, As, Flags bor ?SSH_FILEXFER_ATTR_EXTENDED,
		      [encode_extensions(X) | Acc]);
	_ ->
	    %% attribute not valid for this protocol version: skip it
	    encode_As(Vsn, As, Flags, Acc)
    end;
encode_As(_Vsn, [], Flags, Acc) ->
    {Flags, reverse(Acc)}.
decode_ATTR(Vsn, <<?UINT32(Flags), Tail/binary>>) ->
{Type,Tail2} =
if Vsn >= 5 ->
<<?BYTE(T), TL/binary>> = Tail,
{T, TL};
Vsn >= 2 ->
{?SSH_FILEXFER_TYPE_UNKNOWN, Tail}
end,
decode_As(Vsn,
[{size, #ssh_xfer_attr.size},
{ownergroup, #ssh_xfer_attr.owner},
{ownergroup, #ssh_xfer_attr.group},
{permissions, #ssh_xfer_attr.permissions},
{acmodtime, #ssh_xfer_attr.atime},
{acmodtime, #ssh_xfer_attr.mtime},
{accesstime, #ssh_xfer_attr.atime},
{subsecond_times, #ssh_xfer_attr.atime_nseconds},
{createtime, #ssh_xfer_attr.createtime},
{subsecond_times, #ssh_xfer_attr.createtime_nseconds},
{modifytime, #ssh_xfer_attr.mtime},
{subsecond_times, #ssh_xfer_attr.mtime_nseconds},
{acl, #ssh_xfer_attr.acl},
{bits, #ssh_xfer_attr.attrib_bits},
{extended, #ssh_xfer_attr.extensions}],
#ssh_xfer_attr { type = decode_file_type(Type) },
Flags,
Tail2).
decode_As(Vsn, [{AName, AField}|As], R, Flags, Tail) ->
case AName of
size when ?is_set(?SSH_FILEXFER_ATTR_SIZE, Flags) ->
<<?UINT64(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
ownergroup when ?is_set(?SSH_FILEXFER_ATTR_OWNERGROUP, Flags),Vsn>=5 ->
<<?UINT32(Len), Bin:Len/binary, Tail2/binary>> = Tail,
X = binary_to_list(Bin),
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
ownergroup when ?is_set(?SSH_FILEXFER_ATTR_UIDGID, Flags),Vsn>=2 ->
<<?UINT32(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
permissions when ?is_set(?SSH_FILEXFER_ATTR_PERMISSIONS,Flags),Vsn>=5->
<<?UINT32(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
permissions when ?is_set(?SSH_FILEXFER_ATTR_PERMISSIONS,Flags),Vsn>=2->
<<?UINT32(X), Tail2/binary>> = Tail,
R1 = setelement(AField, R, X),
Type = case X band ?S_IFMT of
?S_IFDIR -> directory;
?S_IFCHR -> char_device;
?S_IFBLK -> block_device;
?S_IFIFO -> fifi;
?S_IFREG -> regular;
?S_IFSOCK -> socket;
?S_IFLNK -> symlink;
_ -> unknown
end,
decode_As(Vsn, As, R1#ssh_xfer_attr { type=Type}, Flags, Tail2);
accesstime when ?is_set(?SSH_FILEXFER_ATTR_ACCESSTIME,Flags),Vsn>=5 ->
<<?UINT64(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
modifytime when ?is_set(?SSH_FILEXFER_ATTR_MODIFYTIME,Flags),Vsn>=5 ->
<<?UINT64(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
createtime when ?is_set(?SSH_FILEXFER_ATTR_CREATETIME,Flags),Vsn>=5 ->
<<?UINT64(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
subsecond_times when ?is_set(?SSH_FILEXFER_ATTR_SUBSECOND_TIMES,Flags),Vsn>=5 ->
<<?UINT32(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
acmodtime when ?is_set(?SSH_FILEXFER_ATTR_ACMODTIME,Flags),Vsn>=2 ->
<<?UINT32(X), Tail2/binary>> = Tail,
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
acl when ?is_set(?SSH_FILEXFER_ATTR_ACL, Flags), Vsn>=5 ->
{X,Tail2} = decode_acl(Tail),
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
bits when ?is_set(?SSH_FILEXFER_ATTR_BITS, Flags), Vsn >=5 ->
<<?UINT32(Y), Tail2/binary>> = Tail,
X = decode_attrib_bits(Y),
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
extended when ?is_set(?SSH_FILEXFER_ATTR_EXTENDED, Flags) ->
{X,Tail2} = decode_extended(Tail),
decode_As(Vsn, As, setelement(AField, R, X), Flags, Tail2);
_ ->
decode_As(Vsn, As, R, Flags, Tail)
end;
decode_As(Vsn, [], R, _, Tail) ->
{R, Tail}.
decode_names(Vsn, 0, Data) ->
[];
decode_names(Vsn, I, <<?UINT32(Len), FileName:Len/binary,
Tail/binary>>) when Vsn >= 5 ->
{A, Tail2} = decode_ATTR(Vsn, Tail),
[{binary_to_list(FileName), A} | decode_names(Vsn, I-1, Tail2)];
decode_names(Vsn, I, <<?UINT32(Len), FileName:Len/binary,
?UINT32(LLen), LongName:LLen/binary,
Tail/binary>>) when Vsn >= 2 ->
{A, Tail2} = decode_ATTR(Vsn, Tail),
[{binary_to_list(FileName), A} | decode_names(Vsn, I-1, Tail2)].
encode_acl(ACLList) ->
Count = length(ACLList),
[?uint32(Count) | encode_acl_items(ACLList)].
encode_acl_items([ACE|As]) ->
Type = encode_ace_type(ACE#ssh_xfer_ace.type),
Flag = encode_ace_flag(ACE#ssh_xfer_ace.flag),
Mask = encode_ace_mask(ACE#ssh_xfer_ace.mask),
Who = list_to_binary(ACE#ssh_xfer_ace.who),
[?uint32(Type), ?uint32(Flag), ?uint32(Mask),
?string(Who) | encode_acl_items(As)];
encode_acl_items([]) ->
[].
decode_acl(<<?UINT32(Count), Tail>>) ->
decode_acl_items(Count, Tail, []).
decode_acl_items(0, Tail, Acc) ->
{reverse(Acc), Tail};
decode_acl_items(I, <<?UINT32(Type),
?UINT32(Flag),
?UINT32(Mask),
?UINT32(WLen), BWho:WLen/binary,
Tail/binary>>, Acc) ->
decode_acl_items(I-1, Tail,
[#ssh_xfer_ace { type = decode_ace_type(Type),
flag = decode_ace_flag(Flag),
mask = decode_ace_mask(Mask),
who = binary_to_list(BWho)} | Acc]).
encode_extensions(Exts) ->
Count = length(Exts),
[?uint32(Count) | encode_ext(Exts)].
encode_ext([{Type, Data} | Exts]) ->
[?string(Type), ?string(Data) | encode_ext(Exts)];
encode_ext([]) ->
[].
decode_extended(<<?UINT32(Count), Tail/binary>>) ->
decode_ext(Count, Tail, []).
decode_ext(0, Tail, Acc) ->
{reverse(Acc), Tail};
decode_ext(I, <<?UINT32(TLen), Type:TLen/binary,
?UINT32(DLen), Data:DLen/binary,
Tail/binary>>, Acc) ->
decode_ext(I-1, Tail, [{binary_to_list(Type), Data}|Acc]).
encode_bits(Fun, BitNames) ->
encode_bits(Fun, 0, BitNames).
encode_bits(Fun, F, [Bit|BitNames]) ->
encode_bits(Fun, Fun(Bit) bor F, BitNames);
encode_bits(Fun, F, []) ->
F.
decode_bits(F, [{Bit,BitName}|Bits]) ->
if F band Bit == Bit ->
[BitName | decode_bits(F, Bits)];
true ->
decode_bits(F, Bits)
end;
decode_bits(F, []) ->
[].
|
c26e119c3de07224414d4a20b8864ae673d07bf2581aaaa781ab3fefdd9c5825 | webnf/dwn | classloader.clj | (ns webnf.jvm.classloader
(:require [clojure.string :as str]
[webnf.jvm.enumeration :refer [empty-enumeration seq-enumeration]]
[clojure.tools.logging :as log])
(:import (java.net URL URLClassLoader)
(webnf.jvm.classloader IClassLoader CustomClassLoader)))
(defn filtering-classloader
"Filters access to its parent class loader by predicates.
load-class? decides whether a class name is loaded by the parent
load-resource? decides whether a resource name is loaded by the parent
This does not conform to java's classloader model (which says that classes from the parent should take precedence), but it is useful to selectively share classes (mainly interfaces) between classloader worlds,
e.g. Servlet containers share the javax.servlet package between applications, so that the server can directly invoke its methods on objects from the application ClassLoader."
[^ClassLoader parent load-class? load-resource?]
(CustomClassLoader.
(reify IClassLoader
(findClass [_ name]
(if (load-class? name)
(.loadClass parent name)
(throw (ClassNotFoundException. (str "Could not load class" name)))))
(findResources [_ name]
(if (load-resource? name)
(.getResources parent name)
empty-enumeration)))))
(defn overlay-classloader
"Overlays classes over existing classes in parent"
[^ClassLoader parent class-overlay resource-overlay]
(CustomClassLoader.
(reify IClassLoader
(findClass [_ name]
(or (get class-overlay name) (.loadClass parent name)))
(findResources [_ name]
(or (get resource-overlay name) (.getResources parent name))))))
(comment
;; version compiles to a series of else ifs and .startsWith's
(defmacro prefix? [name & pfs]
`(or ~@(for [pf pfs] `(.startsWith ^String ~name ~pf))))
;; unused
(defmacro <-
"Start a -> chain in an ->> chain"
[& arms]
`(-> ~(last arms) ~@(butlast arms))))
(import java.util.regex.Pattern)
(defn prefix-regex [pfs]
(->> (map #(str "(?:" (Pattern/quote %) ".*)") pfs)
(str/join "|")
re-pattern))
;; compiles to a regex match
(defmacro prefix? [name & pfs]
`(re-matches
~(prefix-regex pfs)
~name))
(defn prefix-pred [prefixes]
(if-let [re (and (seq prefixes)
(prefix-regex prefixes))]
#(re-matches re %)
(constantly false)))
(defn package-forwarding-loader
"Create a filtering-classloader, which forwards a set of prefixes"
[parent-classloader
forwarded-classname-prefixes
forwarded-resname-prefixes]
(filtering-classloader
parent-classloader
(prefix-pred forwarded-classname-prefixes)
(prefix-pred forwarded-resname-prefixes)))
Reflection - based FFI
FIXME
(defn as-name [n]
(cond
(string? n) n
(instance? clojure.lang.Named n) (name n)
(instance? Class n) (.getName ^Class n)
:else (throw (ex-info "Not Named" {:named n}))))
(defn ^Class load-class [^ClassLoader cl class-name]
(.loadClass cl (as-name class-name)))
(defn ^java.lang.reflect.Method method-object [cl class-method signature]
(let [cn (namespace class-method)
mn (name class-method)]
(.getMethod (load-class cl cn) mn
(into-array Class (map (comp (partial load-class cl)
as-name)
signature)))))
(defn method [cl class-method signature]
(let [m (method-object cl class-method signature)]
#(.invoke m %1 (into-array Object %&))))
(defn static-method [cl class-method signature]
(let [cls (load-class cl (namespace class-method))
m (method-object cl class-method signature)]
#(.invoke m cls (into-array Object %&))))
(def ^:private invoke-method
(weak-memoize
(fn [cl cnt]
(method cl 'clojure.lang.IFn/invoke (repeat cnt Object)))))
(defn invoke [clj-fn args]
(let [res (apply (invoke-method (.getClassLoader
(class clj-fn))
(count args))
clj-fn
args)]
;; (log/debug clj-fn args " =>" res)
res))
(def clj-read-evaluator
(weak-memoize
(fn [cl]
(let [read-string' (static-method cl :clojure.lang.RT/readString [String])
eval' (static-method cl :clojure.lang.Compiler/eval [Object])]
(log/trace "Constructing read evaluator for cl" cl)
(fn [form-str]
(log/trace "Evaluating" form-str)
(let [res (eval' (read-string' form-str))]
(log/trace "=>" res)
res))))))
(defmacro with-context-classloader [cl & body]
`(let [t# (Thread/currentThread)
cl# (.getContextClassLoader t#)]
(try (.setContextClassLoader t# ~cl)
~@body
(finally
(.setContextClassLoader t# cl#)))))
(defn eval-in*
([cl form] (eval-in* cl form []))
([cl form curried-args]
(let [in (pr-str form)
reval (clj-read-evaluator cl)]
(if (seq curried-args)
(with-context-classloader cl
(reduce #(invoke %1 [%2])
(reval in) curried-args))
(with-context-classloader cl
(reval in))))))
| null | https://raw.githubusercontent.com/webnf/dwn/31ae2b016d2e2978a4f08c0201572225dc72eb53/src/clj/webnf/jvm/classloader.clj | clojure | version compiles to a series of else ifs and .startsWith's
unused
compiles to a regex match
(log/debug clj-fn args " =>" res) | (ns webnf.jvm.classloader
(:require [clojure.string :as str]
[webnf.jvm.enumeration :refer [empty-enumeration seq-enumeration]]
[clojure.tools.logging :as log])
(:import (java.net URL URLClassLoader)
(webnf.jvm.classloader IClassLoader CustomClassLoader)))
(defn filtering-classloader
"Filters access to its parent class loader by predicates.
load-class? decides whether a class name is loaded by the parent
load-resource? decides whether a resource name is loaded by the parent
This does not conform to java's classloader model (which says that classes from the parent should take precedence), but it is useful to selectively share classes (mainly interfaces) between classloader worlds,
e.g. Servlet containers share the javax.servlet package between applications, so that the server can directly invoke its methods on objects from the application ClassLoader."
[^ClassLoader parent load-class? load-resource?]
(CustomClassLoader.
(reify IClassLoader
(findClass [_ name]
(if (load-class? name)
(.loadClass parent name)
(throw (ClassNotFoundException. (str "Could not load class" name)))))
(findResources [_ name]
(if (load-resource? name)
(.getResources parent name)
empty-enumeration)))))
(defn overlay-classloader
"Overlays classes over existing classes in parent"
[^ClassLoader parent class-overlay resource-overlay]
(CustomClassLoader.
(reify IClassLoader
(findClass [_ name]
(or (get class-overlay name) (.loadClass parent name)))
(findResources [_ name]
(or (get resource-overlay name) (.getResources parent name))))))
(comment
(defmacro prefix? [name & pfs]
`(or ~@(for [pf pfs] `(.startsWith ^String ~name ~pf))))
(defmacro <-
"Start a -> chain in an ->> chain"
[& arms]
`(-> ~(last arms) ~@(butlast arms))))
(import java.util.regex.Pattern)
(defn prefix-regex [pfs]
(->> (map #(str "(?:" (Pattern/quote %) ".*)") pfs)
(str/join "|")
re-pattern))
(defmacro prefix? [name & pfs]
`(re-matches
~(prefix-regex pfs)
~name))
(defn prefix-pred [prefixes]
(if-let [re (and (seq prefixes)
(prefix-regex prefixes))]
#(re-matches re %)
(constantly false)))
(defn package-forwarding-loader
"Create a filtering-classloader, which forwards a set of prefixes"
[parent-classloader
forwarded-classname-prefixes
forwarded-resname-prefixes]
(filtering-classloader
parent-classloader
(prefix-pred forwarded-classname-prefixes)
(prefix-pred forwarded-resname-prefixes)))
Reflection - based FFI
FIXME
(defn as-name [n]
(cond
(string? n) n
(instance? clojure.lang.Named n) (name n)
(instance? Class n) (.getName ^Class n)
:else (throw (ex-info "Not Named" {:named n}))))
(defn ^Class load-class [^ClassLoader cl class-name]
(.loadClass cl (as-name class-name)))
(defn ^java.lang.reflect.Method method-object [cl class-method signature]
(let [cn (namespace class-method)
mn (name class-method)]
(.getMethod (load-class cl cn) mn
(into-array Class (map (comp (partial load-class cl)
as-name)
signature)))))
(defn method [cl class-method signature]
(let [m (method-object cl class-method signature)]
#(.invoke m %1 (into-array Object %&))))
(defn static-method [cl class-method signature]
(let [cls (load-class cl (namespace class-method))
m (method-object cl class-method signature)]
#(.invoke m cls (into-array Object %&))))
(def ^:private invoke-method
(weak-memoize
(fn [cl cnt]
(method cl 'clojure.lang.IFn/invoke (repeat cnt Object)))))
(defn invoke [clj-fn args]
(let [res (apply (invoke-method (.getClassLoader
(class clj-fn))
(count args))
clj-fn
args)]
res))
(def clj-read-evaluator
(weak-memoize
(fn [cl]
(let [read-string' (static-method cl :clojure.lang.RT/readString [String])
eval' (static-method cl :clojure.lang.Compiler/eval [Object])]
(log/trace "Constructing read evaluator for cl" cl)
(fn [form-str]
(log/trace "Evaluating" form-str)
(let [res (eval' (read-string' form-str))]
(log/trace "=>" res)
res))))))
(defmacro with-context-classloader [cl & body]
`(let [t# (Thread/currentThread)
cl# (.getContextClassLoader t#)]
(try (.setContextClassLoader t# ~cl)
~@body
(finally
(.setContextClassLoader t# cl#)))))
(defn eval-in*
([cl form] (eval-in* cl form []))
([cl form curried-args]
(let [in (pr-str form)
reval (clj-read-evaluator cl)]
(if (seq curried-args)
(with-context-classloader cl
(reduce #(invoke %1 [%2])
(reval in) curried-args))
(with-context-classloader cl
(reval in))))))
|
5e6f805836c78a88489be540c67e9d2826428ae200fef377f8cf4375ddc42fa1 | onedata/op-worker | events_reliability_test_base.erl | %%%--------------------------------------------------------------------
@author
( C ) 2018 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%--------------------------------------------------------------------
%%% @doc This module holds base cases for tests of reliability of events.
%%% @end
%%%--------------------------------------------------------------------
-module(events_reliability_test_base).
-author("Bartosz Walkowicz").
-include("fuse_test_utils.hrl").
-include("global_definitions.hrl").
-include("proto/common/clproto_message_id.hrl").
-include("proto/oneclient/common_messages.hrl").
-include("proto/oneclient/event_messages.hrl").
-include("proto/oneclient/server_messages.hrl").
-include("proto/oneclient/client_messages.hrl").
-include_lib("ctool/include/logging.hrl").
-include_lib("clproto/include/messages.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/test/test_utils.hrl").
-include_lib("ctool/include/test/assertions.hrl").
-include_lib("ctool/include/test/performance.hrl").
%% export for ct
-export([
init_per_suite/1, init_per_testcase/2,
end_per_testcase/2, end_per_suite/1
]).
%%tests
-export([
events_aggregation_test/1,
events_flush_test/1,
events_aggregation_stream_error_test/1,
events_aggregation_stream_error_test2/1,
events_aggregation_manager_error_test/1,
events_aggregation_manager_error_test2/1,
events_flush_stream_error_test/1,
events_flush_handler_error_test/1
]).
-define(TEST_DATA, <<"TEST_DATA">>).
-define(TEST_DATA_SIZE, size(?TEST_DATA)).
-define(PROVIDER_ID(__Node), rpc:call(__Node, oneprovider, get_id, [])).
-define(SMALL_NUM_OF_ATTEMPTS, 5).
-define(MEDIUM_NUM_OF_ATTEMPTS, 20).
-define(ATTEMPTS_INTERVAL, 200).
%%%===================================================================
%%% Test functions
%%%===================================================================
events_aggregation_stream_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, send,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_read_event{}}, undefined} ->
put(first_tested, true),
meck:passthrough([error, Message]);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_stream).
events_aggregation_stream_error_test2(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, handle_call, fun
(#event{type = #file_read_event{}} = Request, From, State) ->
case op_worker:get_env(?FUNCTION_NAME, undefined) of
undefined ->
op_worker:set_env(?FUNCTION_NAME, true),
throw(test_error);
_ ->
meck:passthrough([Request, From, State])
end;
(Request, From, State) ->
meck:passthrough([Request, From, State])
end),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_stream).
events_aggregation_manager_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_manager, [passthrough]),
test_utils:mock_expect(Workers, event_manager, handle,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_read_event{}}, undefined} ->
put(first_tested, true),
throw(error);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_manager).
events_aggregation_manager_error_test2(_Config) ->
TODO VFS-5383 - test event manager test other way
ok.
%% Workers = ?config(op_worker_nodes, Config),
test_utils : set_env(Workers , ? APP_NAME , fuse_session_grace_period_seconds , 5 ) ,
%% test_utils:mock_new(Workers, event_manager, [passthrough]),
%%
test_utils : mock_expect(Workers , event_manager , handle_call , fun
( # event{type = # file_read_event { } } = Request , From , State ) - >
%% case op_worker:get_env(?FUNCTION_NAME, undefined) of
%% undefined ->
op_worker : , true ) ,
%% throw(test_error);
%% _ ->
meck : passthrough([Request , From , State ] )
%% end;
( Request , From , State ) - >
: passthrough([Request , From , State ] )
%% end),
%%
{ ConnectionWorker , } = get_nodes(Config ) ,
events_aggregation_failed_test_base(Config , ConnectionWorker , AssertionWorker ) ,
%% test_utils:mock_unload(Workers, event_manager),
test_utils : set_env(Workers , ? APP_NAME , fuse_session_grace_period_seconds , 300 ) .
events_aggregation_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker).
events_flush_stream_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, send,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_written_event{}}, undefined} ->
put(first_tested, true),
meck:passthrough([error, Message]);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, false, ok),
test_utils:mock_unload(Workers, event_stream).
events_flush_handler_error_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, true, eagain).
events_flush_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, false, ok).
%%%===================================================================
%%% Test bases
%%%===================================================================
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker) ->
UserId = <<"user1">>,
AccessToken = ?config({access_token, UserId}, Config),
SessionId = ?config({session_id, {UserId, ?GET_DOMAIN(AssertionWorker)}}, Config),
[{_SpaceId, SpaceName} | _] = ?config({spaces, UserId}, Config),
FilePath = filename:join(["/", SpaceName, generator:gen_name()]),
{ok, FileGuid} = lfm_proxy:create(AssertionWorker, SessionId, FilePath),
% Mock function calls to check
mock_event_handler(AssertionWorker),
mock_aggregate_read_events(AssertionWorker),
mock_handle_file_read_events(AssertionWorker),
{ok, {Sock, _}} = fuse_test_utils:connect_via_token(
ConnectionWorker, [{active, true}], crypto:strong_rand_bytes(10), AccessToken
),
Send 2 event with some delay and assert correct aggregation
Block1 = #file_block{offset = 0, size = 4},
Block2 = #file_block{offset = 10, size = 4},
fuse_test_utils:emit_file_read_event(Sock, 0, 1, FileGuid, [Block2]),
timer:sleep(100),
fuse_test_utils:emit_file_read_event(Sock, 0, 0, FileGuid, [Block1]),
assert_aggregate_read_events_called(AssertionWorker, FileGuid, Block1, Block2),
% Assert that file read events handler was not called before aggregation time expires
timer:sleep(500),
assert_handle_file_read_events_not_called(AssertionWorker),
% Assert that file read events handler was called after aggregation time expires
timer:sleep(500),
assert_handle_file_read_events_called(AssertionWorker, [#file_read_event{
counter = 2,
file_guid = FileGuid,
size = 8,
blocks = [Block1, Block2]}
]),
unmock_event_handler(AssertionWorker),
ok = ssl:close(Sock).
events_aggregation_failed_test_base(Config , ConnectionWorker , AssertionWorker ) - >
TODO VFS-5383 - test event manager test other way
%% UserId = <<"user1">>,
= ? , UserId } , Config ) ,
SessionId = ? , { UserId , ? GET_DOMAIN(AssertionWorker ) } } , Config ) ,
[ { _ SpaceId , SpaceName } | _ ] = ? config({spaces , UserId } , Config ) ,
%%
FilePath = filename : join(["/ " , SpaceName , generator : gen_name ( ) ] ) ,
{ ok , FileGuid } = lfm_proxy : create(AssertionWorker , SessionId , FilePath ) ,
%%
%% {ok, {Sock, TestSessionID}} = fuse_test_utils:connect_via_token(
ConnectionWorker , [ { active , true } ] , crypto : strong_rand_bytes(10 ) , AccessToken
%% ),
%%
%% ?assertMatch({ok, _}, rpc:call(ConnectionWorker, session, get, [TestSessionID])),
Block1 = # file_block{offset = 0 , size = 4 } ,
fuse_test_utils : emit_file_read_event(Sock , 0 , 0 , FileGuid , [ Block1 ] ) ,
timer : ) ,
%%
? , not_found } , rpc : call(ConnectionWorker , session , get , [ TestSessionID ] ) ) ,
? , closed } , ssl : send(Sock , < < " test " > > ) ) ,
%% ok = ssl:close(Sock).
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, MockError, FlushCode) ->
UserId = <<"user1">>,
AccessToken = ?config({access_token, UserId}, Config),
SessionId = ?config({session_id, {UserId, ?GET_DOMAIN(AssertionWorker)}}, Config),
[{_SpaceId, SpaceName} | _] = ?config({spaces, UserId}, Config),
FilePath = filename:join(["/", SpaceName, generator:gen_name()]),
{ok, FileGuid} = lfm_proxy:create(AssertionWorker, SessionId, FilePath),
% Mock function calls to check
mock_event_handler(AssertionWorker),
mock_aggregate_written_events(AssertionWorker),
mock_handle_file_written_events(AssertionWorker, MockError),
{ok, {Sock, _}} = fuse_test_utils:connect_via_token(
ConnectionWorker, [{active, true}], crypto:strong_rand_bytes(10), AccessToken
),
[#'Subscription'{id = SubscriptionId}] = fuse_test_utils:get_subscriptions(Sock, [file_written]),
Send 2 event with some delay
Block1 = #file_block{offset = 0, size = 4},
Block2 = #file_block{offset = 10, size = 4},
fuse_test_utils:emit_file_written_event(Sock, 0, 1, FileGuid, [Block2]),
timer:sleep(100),
fuse_test_utils:emit_file_written_event(Sock, 0, 0, FileGuid, [Block1]),
assert_aggregate_written_events_called(AssertionWorker, FileGuid, Block1, Block2),
% Assert that file read events handler was not called before aggregation time expires
timer:sleep(100),
assert_handle_file_written_events_not_called(AssertionWorker),
% Assert that after forcing flush handler is called before aggregation time expires
fuse_test_utils:flush_events(Sock, ?PROVIDER_ID(AssertionWorker), SubscriptionId, FlushCode),
assert_handle_file_written_events_called(AssertionWorker, [#file_written_event{
counter = 2,
file_guid = FileGuid,
size = 8,
blocks = [Block1, Block2]}
]),
unmock_event_handler(AssertionWorker),
ok = ssl:close(Sock).
%%%===================================================================
SetUp and TearDown functions
%%%===================================================================
init_per_suite(Config) ->
Posthook = fun(NewConfig) ->
NewConfig1 = initializer:setup_storage(NewConfig),
application:start(ssl),
application:ensure_all_started(hackney),
FinalConfig = initializer:create_test_users_and_spaces(?TEST_FILE(NewConfig1, "env_desc.json"), NewConfig1),
Time to process events connected with initialization and connect providers
FinalConfig
end,
[{?ENV_UP_POSTHOOK, Posthook}, {?LOAD_MODULES, [initializer, events_reliability_test_base]} | Config].
init_per_testcase(_Case, Config) ->
ct:timetrap({minutes, 5}),
initializer:remove_pending_messages(),
lfm_proxy:init(Config).
end_per_suite(Config) ->
initializer:clean_test_users_and_spaces_no_validate(Config),
ok.
end_per_testcase(_Case, Config) ->
lfm_proxy:teardown(Config).
%%%===================================================================
Internal functions
%%%===================================================================
get_nodes(Config) ->
case ?config(op_worker_nodes, Config) of
[WorkerP1] -> {WorkerP1, WorkerP1};
[WorkerP2, WorkerP1] -> {WorkerP1, WorkerP2}
end.
mock_event_handler(Workers) ->
test_utils:mock_new(Workers, fslogic_event_handler, [passthrough]).
unmock_event_handler(Workers) ->
test_utils:mock_unload(Workers, fslogic_event_handler).
mock_handle_file_read_events(Workers) ->
test_utils:mock_expect(Workers, fslogic_event_handler, handle_file_read_events,
fun(Evts, UserCtxMap) ->
meck:passthrough([Evts, UserCtxMap])
end
).
assert_handle_file_read_events_not_called(Worker) ->
?assertMatch({badrpc, {'EXIT', {not_found, _}}},
rpc:call(Worker, meck, capture, [
1, fslogic_event_handler, handle_file_read_events, '_', 1
])
),
ok.
assert_handle_file_read_events_called(Worker, ExpEvents) ->
?assertMatch(ExpEvents,
rpc:call(Worker, meck, capture, [
1, fslogic_event_handler, handle_file_read_events, '_', 1
]), ?SMALL_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL
),
ok.
mock_handle_file_written_events(Workers, false = _MockError) ->
test_utils:mock_expect(Workers, fslogic_event_handler, handle_file_written_events,
fun(Evts, UserCtxMap) ->
meck:passthrough([Evts, UserCtxMap])
end
);
mock_handle_file_written_events(Workers, _MockError) ->
test_utils:mock_expect(Workers, fslogic_event_handler, handle_file_written_events,
fun
(Evts, #{notify := _NotifyFun} = UserCtxMap) ->
case op_worker:get_env(?FUNCTION_NAME, undefined) of
undefined ->
op_worker:set_env(?FUNCTION_NAME, true),
throw(test_error);
_ ->
meck:passthrough([Evts, UserCtxMap])
end;
(Evts, UserCtxMap) ->
meck:passthrough([Evts, UserCtxMap])
end
).
assert_handle_file_written_events_not_called(Worker) ->
?assertMatch({badrpc, {'EXIT', {not_found, _}}},
rpc:call(Worker, meck, capture, [
1, fslogic_event_handler, handle_file_written_events, '_', 1
])
),
ok.
assert_handle_file_written_events_called(Worker, ExpEvents) ->
?assertMatch(ExpEvents,
rpc:call(Worker, meck, capture, [
1, fslogic_event_handler, handle_file_written_events, '_', 1
]), ?SMALL_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL
),
ok.
mock_aggregate_read_events(Workers) ->
test_utils:mock_expect(Workers, fslogic_event_handler, aggregate_file_read_events,
fun(OldEvt, NewEvt) ->
meck:passthrough([OldEvt, NewEvt])
end
).
assert_aggregate_read_events_called(Worker, FileGuid, ExpBlock1, ExpBlock2) ->
Mod = fslogic_event_handler,
Fun = aggregate_file_read_events,
?assertMatch([{
_, {Mod, Fun, [
#file_read_event{file_guid = FileGuid, blocks = [ExpBlock1]},
#file_read_event{file_guid = FileGuid, blocks = [ExpBlock2]}
]}, #file_read_event{file_guid = FileGuid, blocks = [ExpBlock1, ExpBlock2]}
}], rpc:call(Worker, meck, history, [Mod]), ?MEDIUM_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL).
mock_aggregate_written_events(Workers) ->
test_utils:mock_expect(Workers, fslogic_event_handler, aggregate_file_written_events,
fun(OldEvt, NewEvt) ->
meck:passthrough([OldEvt, NewEvt])
end
).
assert_aggregate_written_events_called(Worker, FileGuid, ExpBlock1, ExpBlock2) ->
Mod = fslogic_event_handler,
Fun = aggregate_file_written_events,
?assertMatch([{
_, {Mod, Fun, [
#file_written_event{file_guid = FileGuid, blocks = [ExpBlock1]},
#file_written_event{file_guid = FileGuid, blocks = [ExpBlock2]}
]}, #file_written_event{file_guid = FileGuid, blocks = [ExpBlock1, ExpBlock2]}
}], rpc:call(Worker, meck, history, [Mod]), ?MEDIUM_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL).
| null | https://raw.githubusercontent.com/onedata/op-worker/4b5d0bec945485800fb17a4b6674ce0dc9892edf/test_distributed/events_reliability_test_base.erl | erlang | --------------------------------------------------------------------
@end
--------------------------------------------------------------------
@doc This module holds base cases for tests of reliability of events.
@end
--------------------------------------------------------------------
export for ct
tests
===================================================================
Test functions
===================================================================
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_manager, [passthrough]),
case op_worker:get_env(?FUNCTION_NAME, undefined) of
undefined ->
throw(test_error);
_ ->
end;
end),
test_utils:mock_unload(Workers, event_manager),
===================================================================
Test bases
===================================================================
Mock function calls to check
Assert that file read events handler was not called before aggregation time expires
Assert that file read events handler was called after aggregation time expires
UserId = <<"user1">>,
{ok, {Sock, TestSessionID}} = fuse_test_utils:connect_via_token(
),
?assertMatch({ok, _}, rpc:call(ConnectionWorker, session, get, [TestSessionID])),
ok = ssl:close(Sock).
Mock function calls to check
Assert that file read events handler was not called before aggregation time expires
Assert that after forcing flush handler is called before aggregation time expires
===================================================================
===================================================================
===================================================================
=================================================================== | @author
( C ) 2018 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(events_reliability_test_base).
-author("Bartosz Walkowicz").
-include("fuse_test_utils.hrl").
-include("global_definitions.hrl").
-include("proto/common/clproto_message_id.hrl").
-include("proto/oneclient/common_messages.hrl").
-include("proto/oneclient/event_messages.hrl").
-include("proto/oneclient/server_messages.hrl").
-include("proto/oneclient/client_messages.hrl").
-include_lib("ctool/include/logging.hrl").
-include_lib("clproto/include/messages.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/test/test_utils.hrl").
-include_lib("ctool/include/test/assertions.hrl").
-include_lib("ctool/include/test/performance.hrl").
-export([
init_per_suite/1, init_per_testcase/2,
end_per_testcase/2, end_per_suite/1
]).
-export([
events_aggregation_test/1,
events_flush_test/1,
events_aggregation_stream_error_test/1,
events_aggregation_stream_error_test2/1,
events_aggregation_manager_error_test/1,
events_aggregation_manager_error_test2/1,
events_flush_stream_error_test/1,
events_flush_handler_error_test/1
]).
-define(TEST_DATA, <<"TEST_DATA">>).
-define(TEST_DATA_SIZE, size(?TEST_DATA)).
-define(PROVIDER_ID(__Node), rpc:call(__Node, oneprovider, get_id, [])).
-define(SMALL_NUM_OF_ATTEMPTS, 5).
-define(MEDIUM_NUM_OF_ATTEMPTS, 20).
-define(ATTEMPTS_INTERVAL, 200).
events_aggregation_stream_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, send,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_read_event{}}, undefined} ->
put(first_tested, true),
meck:passthrough([error, Message]);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_stream).
events_aggregation_stream_error_test2(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, handle_call, fun
(#event{type = #file_read_event{}} = Request, From, State) ->
case op_worker:get_env(?FUNCTION_NAME, undefined) of
undefined ->
op_worker:set_env(?FUNCTION_NAME, true),
throw(test_error);
_ ->
meck:passthrough([Request, From, State])
end;
(Request, From, State) ->
meck:passthrough([Request, From, State])
end),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_stream).
events_aggregation_manager_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_manager, [passthrough]),
test_utils:mock_expect(Workers, event_manager, handle,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_read_event{}}, undefined} ->
put(first_tested, true),
throw(error);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker),
test_utils:mock_unload(Workers, event_manager).
events_aggregation_manager_error_test2(_Config) ->
TODO VFS-5383 - test event manager test other way
ok.
test_utils : set_env(Workers , ? APP_NAME , fuse_session_grace_period_seconds , 5 ) ,
test_utils : mock_expect(Workers , event_manager , handle_call , fun
( # event{type = # file_read_event { } } = Request , From , State ) - >
op_worker : , true ) ,
meck : passthrough([Request , From , State ] )
( Request , From , State ) - >
: passthrough([Request , From , State ] )
{ ConnectionWorker , } = get_nodes(Config ) ,
events_aggregation_failed_test_base(Config , ConnectionWorker , AssertionWorker ) ,
test_utils : set_env(Workers , ? APP_NAME , fuse_session_grace_period_seconds , 300 ) .
events_aggregation_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker).
events_flush_stream_error_test(Config) ->
Workers = ?config(op_worker_nodes, Config),
test_utils:mock_new(Workers, event_stream, [passthrough]),
test_utils:mock_expect(Workers, event_stream, send,
fun(Stream, Message) ->
case {Message, get(first_tested)} of
{#event{type = #file_written_event{}}, undefined} ->
put(first_tested, true),
meck:passthrough([error, Message]);
_ ->
meck:passthrough([Stream, Message])
end
end
),
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, false, ok),
test_utils:mock_unload(Workers, event_stream).
events_flush_handler_error_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, true, eagain).
events_flush_test(Config) ->
{ConnectionWorker, AssertionWorker} = get_nodes(Config),
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, false, ok).
events_aggregation_test_base(Config, ConnectionWorker, AssertionWorker) ->
UserId = <<"user1">>,
AccessToken = ?config({access_token, UserId}, Config),
SessionId = ?config({session_id, {UserId, ?GET_DOMAIN(AssertionWorker)}}, Config),
[{_SpaceId, SpaceName} | _] = ?config({spaces, UserId}, Config),
FilePath = filename:join(["/", SpaceName, generator:gen_name()]),
{ok, FileGuid} = lfm_proxy:create(AssertionWorker, SessionId, FilePath),
mock_event_handler(AssertionWorker),
mock_aggregate_read_events(AssertionWorker),
mock_handle_file_read_events(AssertionWorker),
{ok, {Sock, _}} = fuse_test_utils:connect_via_token(
ConnectionWorker, [{active, true}], crypto:strong_rand_bytes(10), AccessToken
),
Send 2 event with some delay and assert correct aggregation
Block1 = #file_block{offset = 0, size = 4},
Block2 = #file_block{offset = 10, size = 4},
fuse_test_utils:emit_file_read_event(Sock, 0, 1, FileGuid, [Block2]),
timer:sleep(100),
fuse_test_utils:emit_file_read_event(Sock, 0, 0, FileGuid, [Block1]),
assert_aggregate_read_events_called(AssertionWorker, FileGuid, Block1, Block2),
timer:sleep(500),
assert_handle_file_read_events_not_called(AssertionWorker),
timer:sleep(500),
assert_handle_file_read_events_called(AssertionWorker, [#file_read_event{
counter = 2,
file_guid = FileGuid,
size = 8,
blocks = [Block1, Block2]}
]),
unmock_event_handler(AssertionWorker),
ok = ssl:close(Sock).
events_aggregation_failed_test_base(Config , ConnectionWorker , AssertionWorker ) - >
TODO VFS-5383 - test event manager test other way
= ? , UserId } , Config ) ,
SessionId = ? , { UserId , ? GET_DOMAIN(AssertionWorker ) } } , Config ) ,
[ { _ SpaceId , SpaceName } | _ ] = ? config({spaces , UserId } , Config ) ,
FilePath = filename : join(["/ " , SpaceName , generator : gen_name ( ) ] ) ,
{ ok , FileGuid } = lfm_proxy : create(AssertionWorker , SessionId , FilePath ) ,
ConnectionWorker , [ { active , true } ] , crypto : strong_rand_bytes(10 ) , AccessToken
Block1 = # file_block{offset = 0 , size = 4 } ,
fuse_test_utils : emit_file_read_event(Sock , 0 , 0 , FileGuid , [ Block1 ] ) ,
timer : ) ,
? , not_found } , rpc : call(ConnectionWorker , session , get , [ TestSessionID ] ) ) ,
? , closed } , ssl : send(Sock , < < " test " > > ) ) ,
events_flush_test_base(Config, ConnectionWorker, AssertionWorker, MockError, FlushCode) ->
UserId = <<"user1">>,
AccessToken = ?config({access_token, UserId}, Config),
SessionId = ?config({session_id, {UserId, ?GET_DOMAIN(AssertionWorker)}}, Config),
[{_SpaceId, SpaceName} | _] = ?config({spaces, UserId}, Config),
FilePath = filename:join(["/", SpaceName, generator:gen_name()]),
{ok, FileGuid} = lfm_proxy:create(AssertionWorker, SessionId, FilePath),
mock_event_handler(AssertionWorker),
mock_aggregate_written_events(AssertionWorker),
mock_handle_file_written_events(AssertionWorker, MockError),
{ok, {Sock, _}} = fuse_test_utils:connect_via_token(
ConnectionWorker, [{active, true}], crypto:strong_rand_bytes(10), AccessToken
),
[#'Subscription'{id = SubscriptionId}] = fuse_test_utils:get_subscriptions(Sock, [file_written]),
Send 2 event with some delay
Block1 = #file_block{offset = 0, size = 4},
Block2 = #file_block{offset = 10, size = 4},
fuse_test_utils:emit_file_written_event(Sock, 0, 1, FileGuid, [Block2]),
timer:sleep(100),
fuse_test_utils:emit_file_written_event(Sock, 0, 0, FileGuid, [Block1]),
assert_aggregate_written_events_called(AssertionWorker, FileGuid, Block1, Block2),
timer:sleep(100),
assert_handle_file_written_events_not_called(AssertionWorker),
fuse_test_utils:flush_events(Sock, ?PROVIDER_ID(AssertionWorker), SubscriptionId, FlushCode),
assert_handle_file_written_events_called(AssertionWorker, [#file_written_event{
counter = 2,
file_guid = FileGuid,
size = 8,
blocks = [Block1, Block2]}
]),
unmock_event_handler(AssertionWorker),
ok = ssl:close(Sock).
SetUp and TearDown functions
init_per_suite(Config) ->
Posthook = fun(NewConfig) ->
NewConfig1 = initializer:setup_storage(NewConfig),
application:start(ssl),
application:ensure_all_started(hackney),
FinalConfig = initializer:create_test_users_and_spaces(?TEST_FILE(NewConfig1, "env_desc.json"), NewConfig1),
Time to process events connected with initialization and connect providers
FinalConfig
end,
[{?ENV_UP_POSTHOOK, Posthook}, {?LOAD_MODULES, [initializer, events_reliability_test_base]} | Config].
init_per_testcase(_Case, Config) ->
ct:timetrap({minutes, 5}),
initializer:remove_pending_messages(),
lfm_proxy:init(Config).
end_per_suite(Config) ->
initializer:clean_test_users_and_spaces_no_validate(Config),
ok.
end_per_testcase(_Case, Config) ->
lfm_proxy:teardown(Config).
Internal functions
%% @doc Picks the {ConnectionWorker, AssertionWorker} pair from the test
%% config. With one worker both roles fall on it; with two workers the
%% roles are split.
%% NOTE(review): assumes ?config(op_worker_nodes, _) yields the nodes in
%% [WorkerP2, WorkerP1] order for two-provider setups -- confirm against
%% the environment description.
get_nodes(Config) ->
    case ?config(op_worker_nodes, Config) of
        [WorkerP1] -> {WorkerP1, WorkerP1};
        [WorkerP2, WorkerP1] -> {WorkerP1, WorkerP2}
    end.
%% @doc Installs a passthrough meck mock on fslogic_event_handler so that
%% calls can be captured/inspected without altering behaviour.
mock_event_handler(Workers) ->
    test_utils:mock_new(Workers, fslogic_event_handler, [passthrough]).

%% @doc Removes the fslogic_event_handler mock installed by
%% mock_event_handler/1.
unmock_event_handler(Workers) ->
    test_utils:mock_unload(Workers, fslogic_event_handler).
%% @doc Replaces fslogic_event_handler:handle_file_read_events/2 on the
%% given workers with a mock that delegates to the real implementation, so
%% invocations are recorded by meck without changing behaviour.
mock_handle_file_read_events(Workers) ->
    Delegate = fun(Events, UserCtxMap) ->
        meck:passthrough([Events, UserCtxMap])
    end,
    test_utils:mock_expect(
        Workers, fslogic_event_handler, handle_file_read_events, Delegate
    ).
%% @doc Asserts that handle_file_read_events/2 has NOT been called on
%% Worker: when no matching call is recorded, meck:capture/5 exits with
%% not_found, which surfaces through rpc:call as
%% {badrpc, {'EXIT', {not_found, _}}}.
assert_handle_file_read_events_not_called(Worker) ->
    ?assertMatch({badrpc, {'EXIT', {not_found, _}}},
        rpc:call(Worker, meck, capture, [
            1, fslogic_event_handler, handle_file_read_events, '_', 1
        ])
    ),
    ok.
%% @doc Asserts that handle_file_read_events/2 was invoked on Worker and
%% that the first argument of its first recorded call matches ExpEvents.
%% Retried (?SMALL_NUM_OF_ATTEMPTS times, every ?ATTEMPTS_INTERVAL ms)
%% because events are processed asynchronously.
assert_handle_file_read_events_called(Worker, ExpEvents) ->
    ?assertMatch(ExpEvents,
        rpc:call(Worker, meck, capture, [
            1, fslogic_event_handler, handle_file_read_events, '_', 1
        ]), ?SMALL_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL
    ),
    ok.
%% @doc Mocks fslogic_event_handler:handle_file_written_events/2 on the
%% given workers.
%% First clause (MockError =:= false): plain passthrough so calls are only
%% recorded.
%% Second clause: the first invocation that carries a notify fun in its
%% user-context map throws test_error (simulating a handler failure); the
%% "already failed once" flag is persisted via op_worker:set_env under this
%% function's name (?FUNCTION_NAME expands to the enclosing function's name
%% at compile time), so all later calls pass through.
mock_handle_file_written_events(Workers, false = _MockError) ->
    test_utils:mock_expect(Workers, fslogic_event_handler, handle_file_written_events,
        fun(Evts, UserCtxMap) ->
            meck:passthrough([Evts, UserCtxMap])
        end
    );
mock_handle_file_written_events(Workers, _MockError) ->
    test_utils:mock_expect(Workers, fslogic_event_handler, handle_file_written_events,
        fun
            (Evts, #{notify := _NotifyFun} = UserCtxMap) ->
                case op_worker:get_env(?FUNCTION_NAME, undefined) of
                    undefined ->
                        %% fail exactly once, then remember it
                        op_worker:set_env(?FUNCTION_NAME, true),
                        throw(test_error);
                    _ ->
                        meck:passthrough([Evts, UserCtxMap])
                end;
            (Evts, UserCtxMap) ->
                meck:passthrough([Evts, UserCtxMap])
        end
    ).
%% @doc Asserts that handle_file_written_events/2 has NOT been called on
%% Worker: meck:capture/5 exits with not_found when no matching call was
%% recorded, surfacing as {badrpc, {'EXIT', {not_found, _}}}.
assert_handle_file_written_events_not_called(Worker) ->
    ?assertMatch({badrpc, {'EXIT', {not_found, _}}},
        rpc:call(Worker, meck, capture, [
            1, fslogic_event_handler, handle_file_written_events, '_', 1
        ])
    ),
    ok.
assert_handle_file_written_events_called(Worker, ExpEvents) ->
?assertMatch(ExpEvents,
rpc:call(Worker, meck, capture, [
1, fslogic_event_handler, handle_file_written_events, '_', 1
]), ?SMALL_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL
),
ok.
mock_aggregate_read_events(Workers) ->
test_utils:mock_expect(Workers, fslogic_event_handler, aggregate_file_read_events,
fun(OldEvt, NewEvt) ->
meck:passthrough([OldEvt, NewEvt])
end
).
assert_aggregate_read_events_called(Worker, FileGuid, ExpBlock1, ExpBlock2) ->
Mod = fslogic_event_handler,
Fun = aggregate_file_read_events,
?assertMatch([{
_, {Mod, Fun, [
#file_read_event{file_guid = FileGuid, blocks = [ExpBlock1]},
#file_read_event{file_guid = FileGuid, blocks = [ExpBlock2]}
]}, #file_read_event{file_guid = FileGuid, blocks = [ExpBlock1, ExpBlock2]}
}], rpc:call(Worker, meck, history, [Mod]), ?MEDIUM_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL).
mock_aggregate_written_events(Workers) ->
test_utils:mock_expect(Workers, fslogic_event_handler, aggregate_file_written_events,
fun(OldEvt, NewEvt) ->
meck:passthrough([OldEvt, NewEvt])
end
).
assert_aggregate_written_events_called(Worker, FileGuid, ExpBlock1, ExpBlock2) ->
Mod = fslogic_event_handler,
Fun = aggregate_file_written_events,
?assertMatch([{
_, {Mod, Fun, [
#file_written_event{file_guid = FileGuid, blocks = [ExpBlock1]},
#file_written_event{file_guid = FileGuid, blocks = [ExpBlock2]}
]}, #file_written_event{file_guid = FileGuid, blocks = [ExpBlock1, ExpBlock2]}
}], rpc:call(Worker, meck, history, [Mod]), ?MEDIUM_NUM_OF_ATTEMPTS, ?ATTEMPTS_INTERVAL).
|
f0701d0e65a1a9064518e34ba5e12894d902f8ef4034fc41da26ffd55e60f555 | cubicle-model-checker/cubicle | smt.mli | (**************************************************************************)
(* *)
Cubicle
(* *)
Copyright ( C ) 2011 - 2014
(* *)
and
Universite Paris - Sud 11
(* *)
(* *)
This file is distributed under the terms of the Apache Software
(* License version 2.0 *)
(* *)
(**************************************************************************)
(** A module corresponding to the SMT solver selected by the command line
    options *)
include Smt_sig.S
| null | https://raw.githubusercontent.com/cubicle-model-checker/cubicle/00f09bb2d4bb496549775e770d7ada08bc1e4866/smt/smt.mli | ocaml | ************************************************************************
License version 2.0
************************************************************************ | Cubicle
Copyright ( C ) 2011 - 2014
and
Universite Paris - Sud 11
This file is distributed under the terms of the Apache Software
* A module corresponding to the SMT solver selected by the command line
options
options *)
include Smt_sig.S
|
adcd64fbd7676e21fa1af3b88c4c09cea7c8fa22a04449e39171912cf5351303 | bobot/FetedelascienceINRIAsaclay | test_ppm.ml | open Printf
open Graphics
let snapshot_file = "/Users/marku/Desktop/ocaml.ppm"
(* let snapshot_file = "/tmp/ocaml.ppm" *)
(* Capture a webcam frame into [snapshot_file] as a PPM image by shelling
   out to gqcam; returns the command's exit status. *)
let take_snapshot () =
  Sys.command ("gqcam -w 0 -c 128 -b 255 --type PPM --dump " ^ snapshot_file)
(* Split a packed 0xRRGGBB colour integer into its (r, g, b) channels,
   each returned as a float in [0., 255.]. *)
let rgb_components c =
  let channel shift = float ((c lsr shift) land 0xFF) in
  (channel 16, channel 8, channel 0)
(* Pack (r, g, b) float channels back into a single 0xRRGGBB integer;
   each channel is truncated and masked to one byte. *)
let rgb (r, g, b) =
  let byte v = truncate v land 0xFF in
  (byte r lsl 16) lor (byte g lsl 8) lor byte b
(* Apply [f] to every pixel's (r, g, b) float channels in an image matrix,
   repacking each result into a colour integer. *)
let map f img =
  let per_pixel c = rgb (f (rgb_components c)) in
  Array.map (Array.map per_pixel) img
(* Load the snapshot PPM and display, side by side in one Graphics window,
   the original image followed by its red-, green- and blue-only channel
   separations; wait for a key press before closing. *)
let () =
  (* take_snapshot(); *)
  let img = Ppm.as_matrix_exn snapshot_file in
  let height = Array.length img
  and width = Array.length img.(0) in
  (* window wide enough for the original plus three filtered copies *)
  open_graph (sprintf " %ix%i" (4 * width) height);
  (* channel-isolating filters: keep one component, zero the others *)
  let f1 (r,g,b) = (r,0.,0.)
  and f2 (r,g,b) = (0.,g,0.)
  and f3 (r,g,b) = (0.,0.,b) in
  draw_image (make_image img) 0 0;
  draw_image (make_image (map f1 img)) width 0;
  draw_image (make_image (map f2 img)) (2*width) 0;
  draw_image (make_image (map f3 img)) (3*width) 0;
  print_endline "press a key";
  ignore(read_key());
  close_graph()
| null | https://raw.githubusercontent.com/bobot/FetedelascienceINRIAsaclay/87765db9f9c7211a26a09eb93e9c92f99a49b0bc/2010/robot/examples_mindstorm_lab/rubik/test_ppm.ml | ocaml | let snapshot_file = "/tmp/ocaml.ppm"
take_snapshot(); | open Printf
open Graphics
let snapshot_file = "/Users/marku/Desktop/ocaml.ppm"
let take_snapshot () =
Sys.command ("gqcam -w 0 -c 128 -b 255 --type PPM --dump " ^ snapshot_file)
let rgb_components c =
float((c lsr 16) land 0xFF),
float((c lsr 8) land 0xFF),
float(c land 0xFF)
let rgb (r, g, b) =
((truncate r land 0XFF) lsl 16) lor ((truncate g land 0xFF) lsl 8)
lor (truncate b land 0xFF)
let map f img =
Array.map (fun row -> Array.map (fun c -> rgb(f (rgb_components c))) row) img
let () =
let img = Ppm.as_matrix_exn snapshot_file in
let height = Array.length img
and width = Array.length img.(0) in
open_graph (sprintf " %ix%i" (4 * width) height);
let f1 (r,g,b) = (r,0.,0.)
and f2 (r,g,b) = (0.,g,0.)
and f3 (r,g,b) = (0.,0.,b) in
draw_image (make_image img) 0 0;
draw_image (make_image (map f1 img)) width 0;
draw_image (make_image (map f2 img)) (2*width) 0;
draw_image (make_image (map f3 img)) (3*width) 0;
print_endline "press a key";
ignore(read_key());
close_graph()
|
5ca4c52f30a46443c7b665fcac1ed2d6580186625830147fb5cc3ea13a1e205e | nasa/PRECiSA | Expression.hs | -- Notices:
--
Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
-- Disclaimers
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
# LANGUAGE MultiParamTypeClasses #
module Kodiak.Expression (
module Kodiak.Expression,
module PVSTypes
) where
import PVSTypes
import Kodiak.Kodiak
import Kodiak.Runnable
import Kodiak.Runner hiding (runBExpr)
import Common.TypesUtils
import Control.Exception (throw,AssertionFailed(..),assert)
import Data.Bits.Floating (nextUp,nextDown)
import Foreign.C
import Prelude hiding (True,False,LT,GT)
-- | Real-valued arithmetic expressions that can be translated into Kodiak
-- 'PReal' terms by 'runAExpr'.
data AExpr
  = Cnst Rational             -- ^ rational constant
  | Var VarName               -- ^ named variable, resolved via 'VariableMap'
  | Let VarName AExpr AExpr   -- ^ local binding; NOTE(review): not handled by 'runAExpr'
  | Add AExpr AExpr
  | Sub AExpr AExpr
  | Mul AExpr AExpr
  | Div AExpr AExpr
  | Neg AExpr
  | Floor AExpr
  | Sqrt AExpr
  | Abs AExpr
  | Sin AExpr
  | Cos AExpr
  | ATan AExpr
  | Ln AExpr                  -- ^ natural logarithm
  | Exp AExpr                 -- ^ base-e exponential
  | Ulp PVSType AExpr         -- ^ unit in the last place for the given FP format
  | Max [AExpr]               -- ^ maximum of a list of expressions
  deriving (Show,Eq)
-- | An 'AExpr' is runnable: given a variable map it builds a Kodiak 'PReal'.
instance KodiakRunnable AExpr VariableMap PReal where
  run = runAExpr
-- | Translate an 'AExpr' into a Kodiak 'PReal' via the C FFI.
--
-- Constants are turned into enclosing floating-point intervals; all other
-- supported constructors map 1:1 onto Kodiak constructor calls.
-- NOTE(review): 'Let' has no clause here and falls through to the final
-- 'error' case -- confirm whether that is intentional.
runAExpr :: AExpr -> VariableMap -> IO PReal
runAExpr e vmap@(VMap vMap)
  | Cnst r <- e = do
      -- Round outward: if the nearest CDouble is not an exact image of r,
      -- widen the interval endpoint with nextDown/nextUp in the direction
      -- that keeps the true rational inside the interval.
      let dr = fromRational r :: CDouble
      pint <- if (toRational dr == r)
                then interval_create dr dr
                else let rdr = toRational dr in
                  if (rdr > r)
                    then do let lb = nextDown dr
                            assert (toRational lb < rdr) $
                              interval_create lb dr
                    else do let ub = nextUp dr
                            assert (rdr < toRational ub) $
                              interval_create dr ub
      real_create_value pint
  -- Variables must already be registered in the map; otherwise fail loudly.
  | Var v <- e = case lookup v vMap of
      Just i -> newCString v >>= real_create_variable i
      Nothing -> throw $ AssertionFailed $ "Kodiak.Var " ++ v ++ " name not found"
  | Add l r <- e = runBinary l r real_create_addition
  | Sub l r <- e = runBinary l r real_create_subtraction
  | Mul l r <- e = runBinary l r real_create_multiplication
  | Div l r <- e = runBinary l r real_create_division
  | Neg r <- e = runUnary r real_create_negation
  | Floor r <- e = runUnary r real_create_floor
  | Sqrt r <- e = runUnary r real_create_sqrt
  | Abs r <- e = runUnary r real_create_absolute_value
  | Sin r <- e = runUnary r real_create_sine
  | Cos r <- e = runUnary r real_create_cosine
  | ATan r <- e = runUnary r real_create_arctangent
  | Ln r <- e = runUnary r real_create_elogarithm
  | Exp r <- e = runUnary r real_create_eexponent
  -- Ulp is only defined for the single/double FP formats.
  | Ulp p r <- e = runUnary r $ case p of FPSingle -> real_create_single_ulp
                                          FPDouble -> real_create_double_ulp
                                          _ -> error $ "Kodiak Ulp AExpr is not defined for PVSType: " ++ show p
  -- Max builds a Kodiak vector of all sub-expressions first.
  | Max rs <- e = do v <- real_vector_create
                     mapM_ (\r -> runAExpr r vmap >>= real_vector_add v) rs
                     real_create_maximum v
  | otherwise = error $ "Kodiak does not support AExpr: " ++ show e
  where
    -- Evaluate both operands, then combine with the Kodiak constructor.
    runBinary l r op = do pl <- runAExpr l vmap
                          pr <- runAExpr r vmap
                          op pl pr
    runUnary r op = runAExpr r vmap >>= op
-- | Boolean expressions over 'AExpr' comparisons, translated into Kodiak
-- 'PBool' terms by 'runBExpr'.  'True', 'False', 'LT' and 'GT' shadow
-- Prelude names (hidden in this module's import list).
data BExpr
  = True
  | False
  | Not BExpr
  | And BExpr BExpr
  | Or BExpr BExpr
  | Eq AExpr AExpr
  | NEq AExpr AExpr   -- ^ realised as negated equality when run
  | LT AExpr AExpr
  | LE AExpr AExpr
  | GT AExpr AExpr
  | GE AExpr AExpr
  deriving (Show,Eq)
-- | A 'BExpr' is runnable: given a variable map it builds a Kodiak 'PBool'.
instance KodiakRunnable BExpr VariableMap PBool where
  run = runBExpr
-- | Translate a 'BExpr' into a Kodiak 'PBool' via the C FFI.  Every
-- constructor is covered; 'NEq' is built as equality followed by negation
-- since no direct not-equal constructor is used here.
runBExpr :: BExpr -> VariableMap -> IO PBool
runBExpr e vmap
  | True <- e = bool_create_true
  | False <- e = bool_create_false
  | Not b <- e = runBExpr b vmap >>= bool_create_not
  | Or l r <- e = runBooleanBinary l r bool_create_or
  | And l r <- e = runBooleanBinary l r bool_create_and
  | Eq l r <- e = runBinary l r bool_create_equal_to
  | NEq l r <- e = runBinary l r bool_create_equal_to >>= bool_create_not
  | LT l r <- e = runBinary l r bool_create_less_than
  | LE l r <- e = runBinary l r bool_create_less_than_or_equal_to
  | GT l r <- e = runBinary l r bool_create_greater_than
  | GE l r <- e = runBinary l r bool_create_greater_than_or_equal_to
  where
    -- Arithmetic operands go through 'runAExpr'; boolean ones recurse here.
    runBinary l r op = do pl <- runAExpr l vmap
                          pr <- runAExpr r vmap
                          op pl pr
    runBooleanBinary l r op = do pl <- runBExpr l vmap
                                 pr <- runBExpr r vmap
                                 op pl pr
| null | https://raw.githubusercontent.com/nasa/PRECiSA/4f72e50d75528ce81cf0858ca9bf58b352e4f4b9/PRECiSA/src/Kodiak/Expression.hs | haskell | Notices:
Disclaimers | Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
# LANGUAGE MultiParamTypeClasses #
module Kodiak.Expression (
module Kodiak.Expression,
module PVSTypes
) where
import PVSTypes
import Kodiak.Kodiak
import Kodiak.Runnable
import Kodiak.Runner hiding (runBExpr)
import Common.TypesUtils
import Control.Exception (throw,AssertionFailed(..),assert)
import Data.Bits.Floating (nextUp,nextDown)
import Foreign.C
import Prelude hiding (True,False,LT,GT)
data AExpr
= Cnst Rational
| Var VarName
| Let VarName AExpr AExpr
| Add AExpr AExpr
| Sub AExpr AExpr
| Mul AExpr AExpr
| Div AExpr AExpr
| Neg AExpr
| Floor AExpr
| Sqrt AExpr
| Abs AExpr
| Sin AExpr
| Cos AExpr
| ATan AExpr
| Ln AExpr
| Exp AExpr
| Ulp PVSType AExpr
| Max [AExpr]
deriving (Show,Eq)
instance KodiakRunnable AExpr VariableMap PReal where
run = runAExpr
runAExpr :: AExpr -> VariableMap -> IO PReal
runAExpr e vmap@(VMap vMap)
| Cnst r <- e = do
let dr = fromRational r :: CDouble
pint <- if (toRational dr == r)
then interval_create dr dr
else let rdr = toRational dr in
if (rdr > r)
then do let lb = nextDown dr
assert (toRational lb < rdr) $
interval_create lb dr
else do let ub = nextUp dr
assert (rdr < toRational ub) $
interval_create dr ub
real_create_value pint
| Var v <- e = case lookup v vMap of
Just i -> newCString v >>= real_create_variable i
Nothing -> throw $ AssertionFailed $ "Kodiak.Var " ++ v ++ " name not found"
| Add l r <- e = runBinary l r real_create_addition
| Sub l r <- e = runBinary l r real_create_subtraction
| Mul l r <- e = runBinary l r real_create_multiplication
| Div l r <- e = runBinary l r real_create_division
| Neg r <- e = runUnary r real_create_negation
| Floor r <- e = runUnary r real_create_floor
| Sqrt r <- e = runUnary r real_create_sqrt
| Abs r <- e = runUnary r real_create_absolute_value
| Sin r <- e = runUnary r real_create_sine
| Cos r <- e = runUnary r real_create_cosine
| ATan r <- e = runUnary r real_create_arctangent
| Ln r <- e = runUnary r real_create_elogarithm
| Exp r <- e = runUnary r real_create_eexponent
| Ulp p r <- e = runUnary r $ case p of FPSingle -> real_create_single_ulp
FPDouble -> real_create_double_ulp
_ -> error $ "Kodiak Ulp AExpr is not defined for PVSType: " ++ show p
| Max rs <- e = do v <- real_vector_create
mapM_ (\r -> runAExpr r vmap >>= real_vector_add v) rs
real_create_maximum v
| otherwise = error $ "Kodiak does not support AExpr: " ++ show e
where
runBinary l r op = do pl <- runAExpr l vmap
pr <- runAExpr r vmap
op pl pr
runUnary r op = runAExpr r vmap >>= op
data BExpr
= True
| False
| Not BExpr
| And BExpr BExpr
| Or BExpr BExpr
| Eq AExpr AExpr
| NEq AExpr AExpr
| LT AExpr AExpr
| LE AExpr AExpr
| GT AExpr AExpr
| GE AExpr AExpr
deriving (Show,Eq)
instance KodiakRunnable BExpr VariableMap PBool where
run = runBExpr
runBExpr :: BExpr -> VariableMap -> IO PBool
runBExpr e vmap
| True <- e = bool_create_true
| False <- e = bool_create_false
| Not b <- e = runBExpr b vmap >>= bool_create_not
| Or l r <- e = runBooleanBinary l r bool_create_or
| And l r <- e = runBooleanBinary l r bool_create_and
| Eq l r <- e = runBinary l r bool_create_equal_to
| NEq l r <- e = runBinary l r bool_create_equal_to >>= bool_create_not
| LT l r <- e = runBinary l r bool_create_less_than
| LE l r <- e = runBinary l r bool_create_less_than_or_equal_to
| GT l r <- e = runBinary l r bool_create_greater_than
| GE l r <- e = runBinary l r bool_create_greater_than_or_equal_to
where
runBinary l r op = do pl <- runAExpr l vmap
pr <- runAExpr r vmap
op pl pr
runBooleanBinary l r op = do pl <- runBExpr l vmap
pr <- runBExpr r vmap
op pl pr
|
6617351205c4425aa85d31c8c1cb78dfdb059c8b5442f8da8a97d01ccb304137 | fossas/fossa-cli | Docs.hs | module App.Docs (
userGuideUrl,
newIssueUrl,
fossaYmlDocUrl,
strategyLangDocUrl,
platformDocUrl,
fossaSslCertDocsUrl,
fossaContainerScannerUrl,
) where
import App.Version (versionOrBranch)
import Data.Text (Text)
-- | Base URL of the CLI source repository; every documentation link below
-- is built relative to it.
-- NOTE(review): the literal appears truncated to "-cli" -- presumably
-- mangled during extraction; confirm against the upstream file.
sourceCodeUrl :: Text
sourceCodeUrl = "-cli"
-- | Build a link to a repository file at a specific revision:
-- @sourceCodeUrl <> "\/blob\/" <> revision <> repoRelUrl@.
guidePathOf :: Text -> Text -> Text
guidePathOf revision repoRelUrl =
  mconcat [sourceCodeUrl, "/blob/", revision, repoRelUrl]
-- | Link to the user guide at the current CLI version (or branch).
userGuideUrl :: Text
userGuideUrl = guidePathOf versionOrBranch "/docs/README.md"
-- | Link to the @.fossa.yml@ configuration-file reference.
fossaYmlDocUrl :: Text
fossaYmlDocUrl = guidePathOf versionOrBranch "/docs/references/files/fossa-yml.md"
-- | Link for opening a new issue against the source repository.
newIssueUrl :: Text
newIssueUrl = sourceCodeUrl <> "/issues/new"
-- | Link to a language-specific analysis-strategy document; @path@ is
-- relative to the languages strategy directory.
strategyLangDocUrl :: Text -> Text
strategyLangDocUrl path = guidePathOf versionOrBranch ("/docs/references/strategies/languages/" <> path)
-- | Link to a platform-specific strategy document; @path@ is relative to
-- the platforms strategy directory.
platformDocUrl :: Text -> Text
platformDocUrl path = guidePathOf versionOrBranch ("/docs/references/strategies/platforms/" <> path)
-- | Link to the SSL-certificate walkthrough.
fossaSslCertDocsUrl :: Text
fossaSslCertDocsUrl = guidePathOf versionOrBranch "/docs/walkthroughs/ssl-cert.md"
-- | Link to the container-scanner subcommand reference.
fossaContainerScannerUrl :: Text
fossaContainerScannerUrl = guidePathOf versionOrBranch "/docs/references/subcommands/container/scanner.md"
| null | https://raw.githubusercontent.com/fossas/fossa-cli/62c25adda99bab2c80ef78ee78206a14ad0ab0fa/src/App/Docs.hs | haskell | module App.Docs (
userGuideUrl,
newIssueUrl,
fossaYmlDocUrl,
strategyLangDocUrl,
platformDocUrl,
fossaSslCertDocsUrl,
fossaContainerScannerUrl,
) where
import App.Version (versionOrBranch)
import Data.Text (Text)
sourceCodeUrl :: Text
sourceCodeUrl = "-cli"
guidePathOf :: Text -> Text -> Text
guidePathOf revision repoRelUrl = sourceCodeUrl <> "/blob/" <> revision <> repoRelUrl
userGuideUrl :: Text
userGuideUrl = guidePathOf versionOrBranch "/docs/README.md"
fossaYmlDocUrl :: Text
fossaYmlDocUrl = guidePathOf versionOrBranch "/docs/references/files/fossa-yml.md"
newIssueUrl :: Text
newIssueUrl = sourceCodeUrl <> "/issues/new"
strategyLangDocUrl :: Text -> Text
strategyLangDocUrl path = guidePathOf versionOrBranch ("/docs/references/strategies/languages/" <> path)
platformDocUrl :: Text -> Text
platformDocUrl path = guidePathOf versionOrBranch ("/docs/references/strategies/platforms/" <> path)
fossaSslCertDocsUrl :: Text
fossaSslCertDocsUrl = guidePathOf versionOrBranch "/docs/walkthroughs/ssl-cert.md"
fossaContainerScannerUrl :: Text
fossaContainerScannerUrl = guidePathOf versionOrBranch "/docs/references/subcommands/container/scanner.md"
| |
31fdec47eecc0ab71cc5d317bea2eac4789ff985a8231a8b4935408bc0f7195a | cyverse-archive/DiscoveryEnvironmentBackend | c189_2014070701.clj | (ns facepalm.c189-2014070701
(:use [korma.core]))
(def ^:private version
"The destination database version."
"1.8.9:20140707.01")
(defn- convert-avus-table-user-cols
[]
(println "\t* updating user columns in the avus table")
(exec-raw "ALTER TABLE avus DROP CONSTRAINT avus_unique")
(exec-raw "ALTER TABLE avus ADD CONSTRAINT avus_unique UNIQUE (target_id, target_type, attribute, value, unit)")
(exec-raw "DROP INDEX avus_owner_id_idx")
(exec-raw "ALTER TABLE avus RENAME COLUMN owner_id TO created_by")
(exec-raw "ALTER TABLE avus ADD COLUMN modified_by varchar(512)")
(update :avus (set-fields {:modified_by :created_by})))
(defn convert
"Performs the conversion for database version 1.8.9:20140707.01"
[]
(println "Performing the conversion for" version)
(convert-avus-table-user-cols))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/databases/metadata/src/main/conversions/c189_2014070701.clj | clojure | (ns facepalm.c189-2014070701
(:use [korma.core]))
(def ^:private version
"The destination database version."
"1.8.9:20140707.01")
(defn- convert-avus-table-user-cols
[]
(println "\t* updating user columns in the avus table")
(exec-raw "ALTER TABLE avus DROP CONSTRAINT avus_unique")
(exec-raw "ALTER TABLE avus ADD CONSTRAINT avus_unique UNIQUE (target_id, target_type, attribute, value, unit)")
(exec-raw "DROP INDEX avus_owner_id_idx")
(exec-raw "ALTER TABLE avus RENAME COLUMN owner_id TO created_by")
(exec-raw "ALTER TABLE avus ADD COLUMN modified_by varchar(512)")
(update :avus (set-fields {:modified_by :created_by})))
(defn convert
"Performs the conversion for database version 1.8.9:20140707.01"
[]
(println "Performing the conversion for" version)
(convert-avus-table-user-cols))
| |
76b0b2db2739893bf253e965c8c2b3c4f648f272efda6a097f89a2038957ce48 | Opetushallitus/ataru | input_fields_with_lang_component.cljs | (ns ataru.virkailija.editor.components.input-fields-with-lang-component
(:require [clojure.string :as string]))
(defn- add-multi-lang-class [field-spec]
(let [multi-lang-class "editor-form__text-field-wrapper--with-label"]
(if (map? (last field-spec))
(assoc-in field-spec [(dec (count field-spec)) :class] multi-lang-class)
(conj field-spec {:class multi-lang-class}))))
(defn input-fields-with-lang [field-fn languages & {:keys [header?] :or {header? false}}]
(let [multiple-languages? (> (count languages) 1)]
(map-indexed (fn [idx lang]
(let [field-spec (field-fn lang)]
^{:key (str "option-" lang "-" idx)}
[:div.editor-form__text-field-container
(when-not header?
{:class "editor-form__multi-option-wrapper"})
(cond-> field-spec
multiple-languages? add-multi-lang-class)
(when multiple-languages?
[:div.editor-form__text-field-label (-> lang name string/upper-case)])]))
languages)))
| null | https://raw.githubusercontent.com/Opetushallitus/ataru/2d8ef1d3f972621e301a3818567d4e11219d2e82/src/cljs/ataru/virkailija/editor/components/input_fields_with_lang_component.cljs | clojure | (ns ataru.virkailija.editor.components.input-fields-with-lang-component
(:require [clojure.string :as string]))
(defn- add-multi-lang-class [field-spec]
(let [multi-lang-class "editor-form__text-field-wrapper--with-label"]
(if (map? (last field-spec))
(assoc-in field-spec [(dec (count field-spec)) :class] multi-lang-class)
(conj field-spec {:class multi-lang-class}))))
(defn input-fields-with-lang [field-fn languages & {:keys [header?] :or {header? false}}]
(let [multiple-languages? (> (count languages) 1)]
(map-indexed (fn [idx lang]
(let [field-spec (field-fn lang)]
^{:key (str "option-" lang "-" idx)}
[:div.editor-form__text-field-container
(when-not header?
{:class "editor-form__multi-option-wrapper"})
(cond-> field-spec
multiple-languages? add-multi-lang-class)
(when multiple-languages?
[:div.editor-form__text-field-label (-> lang name string/upper-case)])]))
languages)))
| |
80175c0e46c1dc2bc931b50b3e9fabfa85ebc96af4449f662c54158d85e9bbfd | ocaml-obuild/obuild | test_dag.ml | open Obuild
let err = ref 0
simple dag : a - > b - > c
let d1 =
let d = Dag.init () in
Dag.addEdge "A" "B" d;
Dag.addEdge "B" "C" d;
d
DAG with a fork
*
* A - > B - > C - > D - > E - > F
* \ > C'- > D'-/
*
* A -> B -> C -> D -> E -> F
* \> C'-> D'-/
*)
let d2 =
let d = Dag.init () in
Dag.addEdgesConnected ["A";"B";"C";"D";"E";"F"] d;
Dag.addEdges [ ("B","C'"); ("C'","D'"); ("D'", "E") ] d;
d
DAG
* A -------- > C
* \- > B --/
* A --------> C
* \-> B --/
*)
let d3 =
let d = Dag.init () in
Dag.addEdges [("A","C"); ("A","B"); ("B","C")] d;
d
DAG
* A \ /- > C
* - > B
* A ' / \- > C '
* A \ /-> C
* -> B
* A' / \-> C'
*)
let d4 =
let d = Dag.init () in
Dag.addEdges [("A","B"); ("A'","B"); ("B","C"); ("B","C'")] d;
d
let showDeps prefix l = Printf.printf "%s%s\n" prefix (String.concat " -> " l)
let assumeEqF f testname expected got =
if f expected got
then (Printf.printf "SUCCESS %s\n" testname)
else (Printf.printf "FAILED %s\n" testname; showDeps "expected:" (List.concat expected); showDeps "got :" got; err := !err + 1)
let assumeEq testname expected got =
if expected = got
then (Printf.printf "SUCCESS %s\n" testname)
else (Printf.printf "FAILED %s\n" testname; showDeps "expected:" expected; showDeps "got :" got; err := !err + 1)
let listEq a b =
let rec loopElem l r =
match l with
| [] -> (true, r)
| _ -> match r with
| [] -> (false, r)
| e::es ->
if List.mem e l
then loopElem (List.filter (fun z -> z <> e) l) es
else (false, r)
in
let rec loopGroup l r =
match l with
| [] -> if r = [] then true else false
| g::gs ->
let (e,r2) = loopElem g r in
if e = true
then loopGroup gs r2
else false
in
loopGroup a b
let () =
let l1 = Taskdep.linearize d1 Taskdep.FromParent ["A"] in
let l2 = Taskdep.linearize d2 Taskdep.FromParent ["A"] in
let l2' = Taskdep.linearize d2 Taskdep.FromParent ["C'"] in
let l3 = Taskdep.linearize d3 Taskdep.FromParent ["A"] in
let l3' = Taskdep.linearize (Dag.transitive_reduction d3) Taskdep.FromParent ["A"] in
let l4 = Taskdep.linearize d4 Taskdep.FromParent ["A"; "A'"] in
assumeEq "linearization A->B->C" [ "A"; "B"; "C" ] l1;
assumeEq "linearization A->B->(C,C')->(D,D')->E->F" ["A";"B";"C";"D";"C'";"D'";"E";"F"] l2;
assumeEq "linearization C'->D'->E->F" ["C'";"D'";"E";"F"] l2';
assumeEq "linearization A->(B->C)" ["A";"B";"C"] l3;
assumeEq "linearization A->(B->C)" ["A";"B";"C"] l3';
assumeEqF listEq "linearization (A,A')->B->(C,C')" [["A";"A'"];["B"];["C";"C'"]] l4;
if !err > 1
then exit 1
else exit 0
| null | https://raw.githubusercontent.com/ocaml-obuild/obuild/28252e8cee836448e85bfbc9e09a44e7674dae39/tests/test_dag.ml | ocaml | open Obuild
let err = ref 0
simple dag : a - > b - > c
let d1 =
let d = Dag.init () in
Dag.addEdge "A" "B" d;
Dag.addEdge "B" "C" d;
d
DAG with a fork
*
* A - > B - > C - > D - > E - > F
* \ > C'- > D'-/
*
* A -> B -> C -> D -> E -> F
* \> C'-> D'-/
*)
let d2 =
let d = Dag.init () in
Dag.addEdgesConnected ["A";"B";"C";"D";"E";"F"] d;
Dag.addEdges [ ("B","C'"); ("C'","D'"); ("D'", "E") ] d;
d
DAG
* A -------- > C
* \- > B --/
* A --------> C
* \-> B --/
*)
let d3 =
let d = Dag.init () in
Dag.addEdges [("A","C"); ("A","B"); ("B","C")] d;
d
DAG
* A \ /- > C
* - > B
* A ' / \- > C '
* A \ /-> C
* -> B
* A' / \-> C'
*)
let d4 =
let d = Dag.init () in
Dag.addEdges [("A","B"); ("A'","B"); ("B","C"); ("B","C'")] d;
d
let showDeps prefix l = Printf.printf "%s%s\n" prefix (String.concat " -> " l)
let assumeEqF f testname expected got =
if f expected got
then (Printf.printf "SUCCESS %s\n" testname)
else (Printf.printf "FAILED %s\n" testname; showDeps "expected:" (List.concat expected); showDeps "got :" got; err := !err + 1)
let assumeEq testname expected got =
if expected = got
then (Printf.printf "SUCCESS %s\n" testname)
else (Printf.printf "FAILED %s\n" testname; showDeps "expected:" expected; showDeps "got :" got; err := !err + 1)
let listEq a b =
let rec loopElem l r =
match l with
| [] -> (true, r)
| _ -> match r with
| [] -> (false, r)
| e::es ->
if List.mem e l
then loopElem (List.filter (fun z -> z <> e) l) es
else (false, r)
in
let rec loopGroup l r =
match l with
| [] -> if r = [] then true else false
| g::gs ->
let (e,r2) = loopElem g r in
if e = true
then loopGroup gs r2
else false
in
loopGroup a b
let () =
let l1 = Taskdep.linearize d1 Taskdep.FromParent ["A"] in
let l2 = Taskdep.linearize d2 Taskdep.FromParent ["A"] in
let l2' = Taskdep.linearize d2 Taskdep.FromParent ["C'"] in
let l3 = Taskdep.linearize d3 Taskdep.FromParent ["A"] in
let l3' = Taskdep.linearize (Dag.transitive_reduction d3) Taskdep.FromParent ["A"] in
let l4 = Taskdep.linearize d4 Taskdep.FromParent ["A"; "A'"] in
assumeEq "linearization A->B->C" [ "A"; "B"; "C" ] l1;
assumeEq "linearization A->B->(C,C')->(D,D')->E->F" ["A";"B";"C";"D";"C'";"D'";"E";"F"] l2;
assumeEq "linearization C'->D'->E->F" ["C'";"D'";"E";"F"] l2';
assumeEq "linearization A->(B->C)" ["A";"B";"C"] l3;
assumeEq "linearization A->(B->C)" ["A";"B";"C"] l3';
assumeEqF listEq "linearization (A,A')->B->(C,C')" [["A";"A'"];["B"];["C";"C'"]] l4;
if !err > 1
then exit 1
else exit 0
| |
da74053173dd38dadd81bb9c12e941d8f403bce1fc42807f7debf9b635472f8b | melange-re/melange | listLabels.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* NOTE:
If this file is listLabels.mli, run tools/sync_stdlib_docs after editing it
to generate list.mli.
If this file is list.mli, do not edit it directly -- edit
listLabels.mli instead.
*)
* List operations .
Some functions are flagged as not tail - recursive . A tail - recursive
function uses constant stack space , while a non - tail - recursive function
uses stack space proportional to the length of its list argument , which
can be a problem with very long lists . When the function takes several
list arguments , an approximate formula giving stack usage ( in some
unspecified constant unit ) is shown in parentheses .
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements .
The labeled version of this module can be used as described in the
{ ! StdLabels } module .
Some functions are flagged as not tail-recursive. A tail-recursive
function uses constant stack space, while a non-tail-recursive function
uses stack space proportional to the length of its list argument, which
can be a problem with very long lists. When the function takes several
list arguments, an approximate formula giving stack usage (in some
unspecified constant unit) is shown in parentheses.
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements.
The labeled version of this module can be used as described in the
{!StdLabels} module.
*)
type 'a t = 'a list = [] | (::) of 'a * 'a list (**)
(** An alias for the type of lists. *)
val length : 'a list -> int
(** Return the length (number of elements) of the given list. *)
val compare_lengths : 'a list -> 'b list -> int
* Compare the lengths of two lists . [ compare_lengths l1 l2 ] is
equivalent to [ compare ( length l1 ) ( length l2 ) ] , except that
the computation stops after reaching the end of the shortest list .
@since 4.05.0
equivalent to [compare (length l1) (length l2)], except that
the computation stops after reaching the end of the shortest list.
@since 4.05.0
*)
val compare_length_with : 'a list -> len:int -> int
* Compare the length of a list to an integer . [ compare_length_with l len ] is
equivalent to [ compare ( length l ) len ] , except that the computation stops
after at most [ len ] iterations on the list .
@since 4.05.0
equivalent to [compare (length l) len], except that the computation stops
after at most [len] iterations on the list.
@since 4.05.0
*)
val cons : 'a -> 'a list -> 'a list
* [ cons x xs ] is [ x : : xs ]
@since 4.03.0 ( 4.05.0 in ListLabels )
@since 4.03.0 (4.05.0 in ListLabels)
*)
val hd : 'a list -> 'a
* Return the first element of the given list .
@raise Failure if the list is empty .
@raise Failure if the list is empty.
*)
val tl : 'a list -> 'a list
* Return the given list without its first element .
@raise Failure if the list is empty .
@raise Failure if the list is empty.
*)
val nth : 'a list -> int -> 'a
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
@raise Failure if the list is too short .
@raise Invalid_argument if [ n ] is negative .
The first element (head of the list) is at position 0.
@raise Failure if the list is too short.
@raise Invalid_argument if [n] is negative.
*)
val nth_opt : 'a list -> int -> 'a option
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
Return [ None ] if the list is too short .
@raise Invalid_argument if [ n ] is negative .
@since 4.05
The first element (head of the list) is at position 0.
Return [None] if the list is too short.
@raise Invalid_argument if [n] is negative.
@since 4.05
*)
val rev : 'a list -> 'a list
(** List reversal. *)
val init : len:int -> f:(int -> 'a) -> 'a list
* [ init ~len ~f ] is [ f 0 ; f 1 ; ... ; f ( len-1 ) ] , evaluated left to right .
@raise Invalid_argument if ] .
@since 4.06.0
@raise Invalid_argument if [len < 0].
@since 4.06.0
*)
val append : 'a list -> 'a list -> 'a list
* two lists . Same function as the infix operator [ @ ] .
Not tail - recursive ( length of the first argument ) . The [ @ ]
operator is not tail - recursive either .
Not tail-recursive (length of the first argument). The [@]
operator is not tail-recursive either.
*)
val rev_append : 'a list -> 'a list -> 'a list
(** [rev_append l1 l2] reverses [l1] and concatenates it with [l2].
This is equivalent to [(]{!rev}[ l1) @ l2], but [rev_append] is
tail-recursive and more efficient.
*)
val concat : 'a list list -> 'a list
* a list of lists . The elements of the argument are all
concatenated together ( in the same order ) to give the result .
Not tail - recursive
( length of the argument + length of the longest sub - list ) .
concatenated together (in the same order) to give the result.
Not tail-recursive
(length of the argument + length of the longest sub-list).
*)
val flatten : 'a list list -> 'a list
(** Same as {!concat}. Not tail-recursive
(length of the argument + length of the longest sub-list).
*)
* { 1 Comparison }
val equal : eq:('a -> 'a -> bool) -> 'a list -> 'a list -> bool
* [ equal eq [ a1 ; ... ; an ] [ b1 ; .. ; bm ] ] holds when
the two input lists have the same length , and for each
pair of elements [ ai ] , [ bi ] at the same position we have
[ eq ai bi ] .
Note : the [ eq ] function may be called even if the
lists have different length . If you know your equality
function is costly , you may want to check { ! compare_lengths }
first .
@since 4.12.0
the two input lists have the same length, and for each
pair of elements [ai], [bi] at the same position we have
[eq ai bi].
Note: the [eq] function may be called even if the
lists have different length. If you know your equality
function is costly, you may want to check {!compare_lengths}
first.
@since 4.12.0
*)
val compare : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> int
* [ compare cmp [ a1 ; ... ; an ] [ b1 ; ... ; bm ] ] performs
a lexicographic comparison of the two input lists ,
using the same [ ' a - > ' a - > int ] interface as { ! Stdlib.compare } :
- [ a1 : : l1 ] is smaller than [ a2 : : l2 ] ( negative result )
if [ a1 ] is smaller than [ a2 ] , or if they are equal ( 0 result )
and [ l1 ] is smaller than [ l2 ]
- the empty list [ [ ] ] is strictly smaller than non - empty lists
Note : the [ cmp ] function will be called even if the lists have
different lengths .
@since 4.12.0
a lexicographic comparison of the two input lists,
using the same ['a -> 'a -> int] interface as {!Stdlib.compare}:
- [a1 :: l1] is smaller than [a2 :: l2] (negative result)
if [a1] is smaller than [a2], or if they are equal (0 result)
and [l1] is smaller than [l2]
- the empty list [[]] is strictly smaller than non-empty lists
Note: the [cmp] function will be called even if the lists have
different lengths.
@since 4.12.0
*)
(** {1 Iterators} *)
val iter : f:('a -> unit) -> 'a list -> unit
* [ iter ~f [ a1 ; ... ; an ] ] applies function [ f ] in turn to
[ a1 ; ... ; an ] . It is equivalent to
[ begin f a1 ; f a2 ; ... ; f an ; ( ) end ] .
[a1; ...; an]. It is equivalent to
[begin f a1; f a2; ...; f an; () end].
*)
val iteri : f:(int -> 'a -> unit) -> 'a list -> unit
* Same as { ! iter } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.00.0
*)
val map : f:('a -> 'b) -> 'a list -> 'b list
(** [map ~f [a1; ...; an]] applies function [f] to [a1, ..., an],
and builds the list [[f a1; ...; f an]]
with the results returned by [f]. Not tail-recursive.
*)
val mapi : f:(int -> 'a -> 'b) -> 'a list -> 'b list
* Same as { ! map } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument . Not tail - recursive .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument. Not tail-recursive.
@since 4.00.0
*)
val rev_map : f:('a -> 'b) -> 'a list -> 'b list
(** [rev_map ~f l] gives the same result as
{!rev}[ (]{!map}[ f l)], but is tail-recursive and
more efficient.
*)
val filter_map : f:('a -> 'b option) -> 'a list -> 'b list
* [ filter_map ~f l ] applies [ f ] to every element of [ l ] , filters
out the [ None ] elements and returns the list of the arguments of
the [ Some ] elements .
@since 4.08.0
out the [None] elements and returns the list of the arguments of
the [Some] elements.
@since 4.08.0
*)
val concat_map : f:('a -> 'b list) -> 'a list -> 'b list
* [ concat_map ~f l ] gives the same result as
{ ! concat } [ ( ] { ! map } [ f l ) ] . Tail - recursive .
@since 4.10.0
{!concat}[ (]{!map}[ f l)]. Tail-recursive.
@since 4.10.0
*)
val fold_left_map :
f:('a -> 'b -> 'a * 'c) -> init:'a -> 'b list -> 'a * 'c list
* [ fold_left_map ] is a combination of [ fold_left ] and [ map ] that threads an
accumulator through calls to [ f ] .
@since 4.11.0
accumulator through calls to [f].
@since 4.11.0
*)
val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b list -> 'a
(** [fold_left ~f ~init [b1; ...; bn]] is
[f (... (f (f init b1) b2) ...) bn].
*)
val fold_right : f:('a -> 'b -> 'b) -> 'a list -> init:'b -> 'b
(** [fold_right ~f [a1; ...; an] ~init] is
[f a1 (f a2 (... (f an init) ...))]. Not tail-recursive.
*)
* { 1 Iterators on two lists }
val iter2 : f:('a -> 'b -> unit) -> 'a list -> 'b list -> unit
* [ iter2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] calls in turn
[ f a1 b1 ; ... ; f an bn ] .
@raise Invalid_argument if the two lists are determined
to have different lengths .
[f a1 b1; ...; f an bn].
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ map2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ f a1 b1 ; ... ; f an bn ] ] .
@raise Invalid_argument if the two lists are determined
to have different lengths . Not tail - recursive .
[[f a1 b1; ...; f an bn]].
@raise Invalid_argument if the two lists are determined
to have different lengths. Not tail-recursive.
*)
val rev_map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
(** [rev_map2 ~f l1 l2] gives the same result as
{!rev}[ (]{!map2}[ f l1 l2)], but is tail-recursive and
more efficient.
*)
val fold_left2 :
f:('a -> 'b -> 'c -> 'a) -> init:'a -> 'b list -> 'c list -> 'a
* [ ~f ~init [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ f ( ... ( f ( f init a1 b1 ) a2 b2 ) ... ) an bn ] .
@raise Invalid_argument if the two lists are determined
to have different lengths .
[f (... (f (f init a1 b1) a2 b2) ...) an bn].
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val fold_right2 :
f:('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> init:'c -> 'c
* [ fold_right2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ~init ] is
[ f a1 b1 ( f a2 b2 ( ... ( f an bn init ) ... ) ) ] .
@raise Invalid_argument if the two lists are determined
to have different lengths . Not tail - recursive .
[f a1 b1 (f a2 b2 (... (f an bn init) ...))].
@raise Invalid_argument if the two lists are determined
to have different lengths. Not tail-recursive.
*)
* { 1 List scanning }
val for_all : f:('a -> bool) -> 'a list -> bool
* [ for_all ~f [ a1 ; ... ; an ] ] checks if all elements of the list
satisfy the predicate [ f ] . That is , it returns
[ ( f a1 ) & & ( f a2 ) & & ... & & ( f an ) ] for a non - empty list and
[ true ] if the list is empty .
satisfy the predicate [f]. That is, it returns
[(f a1) && (f a2) && ... && (f an)] for a non-empty list and
[true] if the list is empty.
*)
val exists : f:('a -> bool) -> 'a list -> bool
* [ exists ~f [ a1 ; ... ; an ] ] checks if at least one element of
the list satisfies the predicate [ f ] . That is , it returns
[ ( f a1 ) || ( f a2 ) || ... || ( f an ) ] for a non - empty list and
[ false ] if the list is empty .
the list satisfies the predicate [f]. That is, it returns
[(f a1) || (f a2) || ... || (f an)] for a non-empty list and
[false] if the list is empty.
*)
val for_all2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! for_all } , but for a two - argument predicate .
@raise Invalid_argument if the two lists are determined
to have different lengths .
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val exists2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! exists } , but for a two - argument predicate .
@raise Invalid_argument if the two lists are determined
to have different lengths .
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val mem : 'a -> set:'a list -> bool
(** [mem a ~set] is true if and only if [a] is equal
to an element of [set].
*)
val memq : 'a -> set:'a list -> bool
(** Same as {!mem}, but uses physical equality instead of structural
equality to compare list elements.
*)
* { 1 List searching }
val find : f:('a -> bool) -> 'a list -> 'a
* [ find ~f l ] returns the first element of the list [ l ]
that satisfies the predicate [ f ] .
@raise Not_found if there is no value that satisfies [ f ] in the
list [ l ] .
that satisfies the predicate [f].
@raise Not_found if there is no value that satisfies [f] in the
list [l].
*)
val find_opt : f:('a -> bool) -> 'a list -> 'a option
* [ find ~f l ] returns the first element of the list [ l ]
that satisfies the predicate [ f ] .
Returns [ None ] if there is no value that satisfies [ f ] in the
list [ l ] .
@since 4.05
that satisfies the predicate [f].
Returns [None] if there is no value that satisfies [f] in the
list [l].
@since 4.05
*)
val find_map : f:('a -> 'b option) -> 'a list -> 'b option
* [ find_map ~f l ] applies [ f ] to the elements of [ l ] in order ,
and returns the first result of the form [ Some v ] , or [ None ]
if none exist .
@since 4.10.0
and returns the first result of the form [Some v], or [None]
if none exist.
@since 4.10.0
*)
val filter : f:('a -> bool) -> 'a list -> 'a list
(** [filter ~f l] returns all the elements of the list [l]
that satisfy the predicate [f]. The order of the elements
in the input list is preserved.
*)
val find_all : f:('a -> bool) -> 'a list -> 'a list
(** [find_all] is another name for {!filter}.
*)
val filteri : f:(int -> 'a -> bool) -> 'a list -> 'a list
* Same as { ! filter } , but the predicate is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.11.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.11.0
*)
val partition : f:('a -> bool) -> 'a list -> 'a list * 'a list
(** [partition ~f l] returns a pair of lists [(l1, l2)], where
[l1] is the list of all the elements of [l] that
satisfy the predicate [f], and [l2] is the list of all the
elements of [l] that do not satisfy [f].
The order of the elements in the input list is preserved.
*)
val partition_map : f:('a -> ('b, 'c) Either.t) -> 'a list -> 'b list * 'c list
(** [partition_map f l] returns a pair of lists [(l1, l2)] such that,
for each element [x] of the input list [l]:
- if [f x] is [Left y1], then [y1] is in [l1], and
- if [f x] is [Right y2], then [y2] is in [l2].
The output elements are included in [l1] and [l2] in the same
relative order as the corresponding input elements in [l].
In particular, [partition_map (fun x -> if f x then Left x else Right x) l]
is equivalent to [partition f l].
@since 4.12.0
*)
* { 1 Association lists }
val assoc : 'a -> ('a * 'b) list -> 'b
(** [assoc a l] returns the value associated with key [a] in the list of
pairs [l]. That is,
[assoc a [ ...; (a,b); ...] = b]
if [(a,b)] is the leftmost binding of [a] in list [l].
@raise Not_found if there is no value associated with [a] in the
list [l].
*)
val assoc_opt : 'a -> ('a * 'b) list -> 'b option
* [ assoc_opt a l ] returns the value associated with key [ a ] in the list of
pairs [ l ] . That is ,
[ a [ ... ; ( a , b ) ; ... ] = Some b ]
if [ ( a , b ) ] is the leftmost binding of [ a ] in list [ l ] .
Returns [ None ] if there is no value associated with [ a ] in the
list [ l ] .
@since 4.05
pairs [l]. That is,
[assoc_opt a [ ...; (a,b); ...] = Some b]
if [(a,b)] is the leftmost binding of [a] in list [l].
Returns [None] if there is no value associated with [a] in the
list [l].
@since 4.05
*)
val assq : 'a -> ('a * 'b) list -> 'b
(** Same as {!assoc}, but uses physical equality instead of
structural equality to compare keys.
*)
val assq_opt : 'a -> ('a * 'b) list -> 'b option
* Same as { ! } , but uses physical equality instead of
structural equality to compare keys .
@since 4.05.0
structural equality to compare keys.
@since 4.05.0
*)
val mem_assoc : 'a -> map:('a * 'b) list -> bool
(** Same as {!assoc}, but simply return [true] if a binding exists,
and [false] if no bindings exist for the given key.
*)
val mem_assq : 'a -> map:('a * 'b) list -> bool
* Same as { ! , but uses physical equality instead of
structural equality to compare keys .
structural equality to compare keys.
*)
val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
* [ remove_assoc a l ] returns the list of
pairs [ l ] without the first pair with key [ a ] , if any .
Not tail - recursive .
pairs [l] without the first pair with key [a], if any.
Not tail-recursive.
*)
val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
* Same as { ! , but uses physical equality instead
of structural equality to compare keys . Not tail - recursive .
of structural equality to compare keys. Not tail-recursive.
*)
* { 1 Lists of pairs }
val split : ('a * 'b) list -> 'a list * 'b list
* Transform a list of pairs into a pair of lists :
[ split [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] is [ ( [ a1 ; ... ; an ] , [ b1 ; ... ; bn ] ) ] .
Not tail - recursive .
[split [(a1,b1); ...; (an,bn)]] is [([a1; ...; an], [b1; ...; bn])].
Not tail-recursive.
*)
val combine : 'a list -> 'b list -> ('a * 'b) list
* Transform a pair of lists into a list of pairs :
[ combine [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] .
@raise Invalid_argument if the two lists
have different lengths . Not tail - recursive .
[combine [a1; ...; an] [b1; ...; bn]] is
[[(a1,b1); ...; (an,bn)]].
@raise Invalid_argument if the two lists
have different lengths. Not tail-recursive.
*)
(** {1 Sorting} *)
val sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
* Sort a list in increasing order according to a comparison
function . The comparison function must return 0 if its arguments
compare as equal , a positive integer if the first is greater ,
and a negative integer if the first is smaller ( see Array.sort for
a complete specification ) . For example ,
{ ! Stdlib.compare } is a suitable comparison function .
The resulting list is sorted in increasing order .
{ ! sort } is guaranteed to run in constant heap space
( in addition to the size of the result list ) and logarithmic
stack space .
The current implementation uses Merge Sort . It runs in constant
heap space and logarithmic stack space .
function. The comparison function must return 0 if its arguments
compare as equal, a positive integer if the first is greater,
and a negative integer if the first is smaller (see Array.sort for
a complete specification). For example,
{!Stdlib.compare} is a suitable comparison function.
The resulting list is sorted in increasing order.
{!sort} is guaranteed to run in constant heap space
(in addition to the size of the result list) and logarithmic
stack space.
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val stable_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
(** Same as {!sort}, but the sorting algorithm is guaranteed to
be stable (i.e. elements that compare equal are kept in their
original order).
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val fast_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
* Same as { ! sort } or { ! } , whichever is
faster on typical input .
faster on typical input.
*)
val sort_uniq : cmp:('a -> 'a -> int) -> 'a list -> 'a list
* Same as { ! sort } , but also remove duplicates .
@since 4.02.0 ( 4.03.0 in ListLabels )
@since 4.02.0 (4.03.0 in ListLabels)
*)
val merge : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
* Merge two lists :
Assuming that [ l1 ] and [ l2 ] are sorted according to the
comparison function [ cmp ] , [ merge ~cmp l1 l2 ] will return a
sorted list containing all the elements of [ l1 ] and [ l2 ] .
If several elements compare equal , the elements of [ l1 ] will be
before the elements of [ l2 ] .
Not tail - recursive ( sum of the lengths of the arguments ) .
Assuming that [l1] and [l2] are sorted according to the
comparison function [cmp], [merge ~cmp l1 l2] will return a
sorted list containing all the elements of [l1] and [l2].
If several elements compare equal, the elements of [l1] will be
before the elements of [l2].
Not tail-recursive (sum of the lengths of the arguments).
*)
* { 1 Lists and Sequences }
val to_seq : 'a list -> 'a Seq.t
* Iterate on the list .
@since 4.07
@since 4.07
*)
val of_seq : 'a Seq.t -> 'a list
(** Create a list from a sequence.
    @since 4.07
*)
| null | https://raw.githubusercontent.com/melange-re/melange/13edf6108d884e64cd510bce077ef2ce73de6a97/jscomp/stdlib-412/stdlib_modules/listLabels.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
NOTE:
If this file is listLabels.mli, run tools/sync_stdlib_docs after editing it
to generate list.mli.
If this file is list.mli, do not edit it directly -- edit
listLabels.mli instead.
* An alias for the type of lists.
* Return the length (number of elements) of the given list.
* List reversal.
* [rev_append l1 l2] reverses [l1] and concatenates it with [l2].
This is equivalent to [(]{!rev}[ l1) @ l2], but [rev_append] is
tail-recursive and more efficient.
* Same as {!concat}. Not tail-recursive
(length of the argument + length of the longest sub-list).
* {1 Iterators}
* [map ~f [a1; ...; an]] applies function [f] to [a1, ..., an],
and builds the list [[f a1; ...; f an]]
with the results returned by [f]. Not tail-recursive.
* [rev_map ~f l] gives the same result as
{!rev}[ (]{!map}[ f l)], but is tail-recursive and
more efficient.
* [fold_left ~f ~init [b1; ...; bn]] is
[f (... (f (f init b1) b2) ...) bn].
* [fold_right ~f [a1; ...; an] ~init] is
[f a1 (f a2 (... (f an init) ...))]. Not tail-recursive.
* [rev_map2 ~f l1 l2] gives the same result as
{!rev}[ (]{!map2}[ f l1 l2)], but is tail-recursive and
more efficient.
* [mem a ~set] is true if and only if [a] is equal
to an element of [set].
* Same as {!mem}, but uses physical equality instead of structural
equality to compare list elements.
* [filter ~f l] returns all the elements of the list [l]
that satisfy the predicate [f]. The order of the elements
in the input list is preserved.
* [find_all] is another name for {!filter}.
* [partition ~f l] returns a pair of lists [(l1, l2)], where
[l1] is the list of all the elements of [l] that
satisfy the predicate [f], and [l2] is the list of all the
elements of [l] that do not satisfy [f].
The order of the elements in the input list is preserved.
* [partition_map f l] returns a pair of lists [(l1, l2)] such that,
for each element [x] of the input list [l]:
- if [f x] is [Left y1], then [y1] is in [l1], and
- if [f x] is [Right y2], then [y2] is in [l2].
The output elements are included in [l1] and [l2] in the same
relative order as the corresponding input elements in [l].
In particular, [partition_map (fun x -> if f x then Left x else Right x) l]
is equivalent to [partition f l].
@since 4.12.0
* [assoc a l] returns the value associated with key [a] in the list of
pairs [l]. That is,
[assoc a [ ...; (a,b); ...] = b]
if [(a,b)] is the leftmost binding of [a] in list [l].
@raise Not_found if there is no value associated with [a] in the
list [l].
* Same as {!assoc}, but uses physical equality instead of
structural equality to compare keys.
* Same as {!assoc}, but simply return [true] if a binding exists,
and [false] if no bindings exist for the given key.
* {1 Sorting}
* Same as {!sort}, but the sorting algorithm is guaranteed to
be stable (i.e. elements that compare equal are kept in their
original order).
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
| , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* List operations .
Some functions are flagged as not tail - recursive . A tail - recursive
function uses constant stack space , while a non - tail - recursive function
uses stack space proportional to the length of its list argument , which
can be a problem with very long lists . When the function takes several
list arguments , an approximate formula giving stack usage ( in some
unspecified constant unit ) is shown in parentheses .
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements .
The labeled version of this module can be used as described in the
{ ! StdLabels } module .
Some functions are flagged as not tail-recursive. A tail-recursive
function uses constant stack space, while a non-tail-recursive function
uses stack space proportional to the length of its list argument, which
can be a problem with very long lists. When the function takes several
list arguments, an approximate formula giving stack usage (in some
unspecified constant unit) is shown in parentheses.
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements.
The labeled version of this module can be used as described in the
{!StdLabels} module.
*)
val length : 'a list -> int
val compare_lengths : 'a list -> 'b list -> int
* Compare the lengths of two lists . [ compare_lengths l1 l2 ] is
equivalent to [ compare ( length l1 ) ( length l2 ) ] , except that
the computation stops after reaching the end of the shortest list .
@since 4.05.0
equivalent to [compare (length l1) (length l2)], except that
the computation stops after reaching the end of the shortest list.
@since 4.05.0
*)
val compare_length_with : 'a list -> len:int -> int
* Compare the length of a list to an integer . [ compare_length_with l len ] is
equivalent to [ compare ( length l ) len ] , except that the computation stops
after at most [ len ] iterations on the list .
@since 4.05.0
equivalent to [compare (length l) len], except that the computation stops
after at most [len] iterations on the list.
@since 4.05.0
*)
val cons : 'a -> 'a list -> 'a list
* [ cons x xs ] is [ x : : xs ]
@since 4.03.0 ( 4.05.0 in ListLabels )
@since 4.03.0 (4.05.0 in ListLabels)
*)
val hd : 'a list -> 'a
* Return the first element of the given list .
@raise Failure if the list is empty .
@raise Failure if the list is empty.
*)
val tl : 'a list -> 'a list
* Return the given list without its first element .
@raise Failure if the list is empty .
@raise Failure if the list is empty.
*)
val nth : 'a list -> int -> 'a
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
@raise Failure if the list is too short .
@raise Invalid_argument if [ n ] is negative .
The first element (head of the list) is at position 0.
@raise Failure if the list is too short.
@raise Invalid_argument if [n] is negative.
*)
val nth_opt : 'a list -> int -> 'a option
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
Return [ None ] if the list is too short .
@raise Invalid_argument if [ n ] is negative .
@since 4.05
The first element (head of the list) is at position 0.
Return [None] if the list is too short.
@raise Invalid_argument if [n] is negative.
@since 4.05
*)
val rev : 'a list -> 'a list
val init : len:int -> f:(int -> 'a) -> 'a list
* [ init ~len ~f ] is [ f 0 ; f 1 ; ... ; f ( len-1 ) ] , evaluated left to right .
@raise Invalid_argument if ] .
@since 4.06.0
@raise Invalid_argument if [len < 0].
@since 4.06.0
*)
val append : 'a list -> 'a list -> 'a list
* two lists . Same function as the infix operator [ @ ] .
Not tail - recursive ( length of the first argument ) . The [ @ ]
operator is not tail - recursive either .
Not tail-recursive (length of the first argument). The [@]
operator is not tail-recursive either.
*)
val rev_append : 'a list -> 'a list -> 'a list
val concat : 'a list list -> 'a list
* a list of lists . The elements of the argument are all
concatenated together ( in the same order ) to give the result .
Not tail - recursive
( length of the argument + length of the longest sub - list ) .
concatenated together (in the same order) to give the result.
Not tail-recursive
(length of the argument + length of the longest sub-list).
*)
val flatten : 'a list list -> 'a list
* { 1 Comparison }
val equal : eq:('a -> 'a -> bool) -> 'a list -> 'a list -> bool
* [ equal eq [ a1 ; ... ; an ] [ b1 ; .. ; bm ] ] holds when
the two input lists have the same length , and for each
pair of elements [ ai ] , [ bi ] at the same position we have
[ eq ai bi ] .
Note : the [ eq ] function may be called even if the
lists have different length . If you know your equality
function is costly , you may want to check { ! compare_lengths }
first .
@since 4.12.0
the two input lists have the same length, and for each
pair of elements [ai], [bi] at the same position we have
[eq ai bi].
Note: the [eq] function may be called even if the
lists have different length. If you know your equality
function is costly, you may want to check {!compare_lengths}
first.
@since 4.12.0
*)
val compare : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> int
* [ compare cmp [ a1 ; ... ; an ] [ b1 ; ... ; bm ] ] performs
a lexicographic comparison of the two input lists ,
using the same [ ' a - > ' a - > int ] interface as { ! Stdlib.compare } :
- [ a1 : : l1 ] is smaller than [ a2 : : l2 ] ( negative result )
if [ a1 ] is smaller than [ a2 ] , or if they are equal ( 0 result )
and [ l1 ] is smaller than [ l2 ]
- the empty list [ [ ] ] is strictly smaller than non - empty lists
Note : the [ cmp ] function will be called even if the lists have
different lengths .
@since 4.12.0
a lexicographic comparison of the two input lists,
using the same ['a -> 'a -> int] interface as {!Stdlib.compare}:
- [a1 :: l1] is smaller than [a2 :: l2] (negative result)
if [a1] is smaller than [a2], or if they are equal (0 result)
and [l1] is smaller than [l2]
- the empty list [[]] is strictly smaller than non-empty lists
Note: the [cmp] function will be called even if the lists have
different lengths.
@since 4.12.0
*)
val iter : f:('a -> unit) -> 'a list -> unit
* [ iter ~f [ a1 ; ... ; an ] ] applies function [ f ] in turn to
[ a1 ; ... ; an ] . It is equivalent to
[ begin f a1 ; f a2 ; ... ; f an ; ( ) end ] .
[a1; ...; an]. It is equivalent to
[begin f a1; f a2; ...; f an; () end].
*)
val iteri : f:(int -> 'a -> unit) -> 'a list -> unit
* Same as { ! iter } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.00.0
*)
val map : f:('a -> 'b) -> 'a list -> 'b list
val mapi : f:(int -> 'a -> 'b) -> 'a list -> 'b list
* Same as { ! map } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument . Not tail - recursive .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument. Not tail-recursive.
@since 4.00.0
*)
val rev_map : f:('a -> 'b) -> 'a list -> 'b list
val filter_map : f:('a -> 'b option) -> 'a list -> 'b list
* [ filter_map ~f l ] applies [ f ] to every element of [ l ] , filters
out the [ None ] elements and returns the list of the arguments of
the [ Some ] elements .
@since 4.08.0
out the [None] elements and returns the list of the arguments of
the [Some] elements.
@since 4.08.0
*)
val concat_map : f:('a -> 'b list) -> 'a list -> 'b list
* [ concat_map ~f l ] gives the same result as
{ ! concat } [ ( ] { ! map } [ f l ) ] . Tail - recursive .
@since 4.10.0
{!concat}[ (]{!map}[ f l)]. Tail-recursive.
@since 4.10.0
*)
val fold_left_map :
f:('a -> 'b -> 'a * 'c) -> init:'a -> 'b list -> 'a * 'c list
* [ fold_left_map ] is a combination of [ fold_left ] and [ map ] that threads an
accumulator through calls to [ f ] .
@since 4.11.0
accumulator through calls to [f].
@since 4.11.0
*)
val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b list -> 'a
val fold_right : f:('a -> 'b -> 'b) -> 'a list -> init:'b -> 'b
* { 1 Iterators on two lists }
val iter2 : f:('a -> 'b -> unit) -> 'a list -> 'b list -> unit
* [ iter2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] calls in turn
[ f a1 b1 ; ... ; f an bn ] .
@raise Invalid_argument if the two lists are determined
to have different lengths .
[f a1 b1; ...; f an bn].
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ map2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ f a1 b1 ; ... ; f an bn ] ] .
@raise Invalid_argument if the two lists are determined
to have different lengths . Not tail - recursive .
[[f a1 b1; ...; f an bn]].
@raise Invalid_argument if the two lists are determined
to have different lengths. Not tail-recursive.
*)
val rev_map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
val fold_left2 :
f:('a -> 'b -> 'c -> 'a) -> init:'a -> 'b list -> 'c list -> 'a
* [ ~f ~init [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ f ( ... ( f ( f init a1 b1 ) a2 b2 ) ... ) an bn ] .
@raise Invalid_argument if the two lists are determined
to have different lengths .
[f (... (f (f init a1 b1) a2 b2) ...) an bn].
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val fold_right2 :
f:('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> init:'c -> 'c
* [ fold_right2 ~f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ~init ] is
[ f a1 b1 ( f a2 b2 ( ... ( f an bn init ) ... ) ) ] .
@raise Invalid_argument if the two lists are determined
to have different lengths . Not tail - recursive .
[f a1 b1 (f a2 b2 (... (f an bn init) ...))].
@raise Invalid_argument if the two lists are determined
to have different lengths. Not tail-recursive.
*)
* { 1 List scanning }
val for_all : f:('a -> bool) -> 'a list -> bool
* [ for_all ~f [ a1 ; ... ; an ] ] checks if all elements of the list
satisfy the predicate [ f ] . That is , it returns
[ ( f a1 ) & & ( f a2 ) & & ... & & ( f an ) ] for a non - empty list and
[ true ] if the list is empty .
satisfy the predicate [f]. That is, it returns
[(f a1) && (f a2) && ... && (f an)] for a non-empty list and
[true] if the list is empty.
*)
val exists : f:('a -> bool) -> 'a list -> bool
* [ exists ~f [ a1 ; ... ; an ] ] checks if at least one element of
the list satisfies the predicate [ f ] . That is , it returns
[ ( f a1 ) || ( f a2 ) || ... || ( f an ) ] for a non - empty list and
[ false ] if the list is empty .
the list satisfies the predicate [f]. That is, it returns
[(f a1) || (f a2) || ... || (f an)] for a non-empty list and
[false] if the list is empty.
*)
val for_all2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! for_all } , but for a two - argument predicate .
@raise Invalid_argument if the two lists are determined
to have different lengths .
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val exists2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! exists } , but for a two - argument predicate .
@raise Invalid_argument if the two lists are determined
to have different lengths .
@raise Invalid_argument if the two lists are determined
to have different lengths.
*)
val mem : 'a -> set:'a list -> bool
val memq : 'a -> set:'a list -> bool
* { 1 List searching }
val find : f:('a -> bool) -> 'a list -> 'a
* [ find ~f l ] returns the first element of the list [ l ]
that satisfies the predicate [ f ] .
@raise Not_found if there is no value that satisfies [ f ] in the
list [ l ] .
that satisfies the predicate [f].
@raise Not_found if there is no value that satisfies [f] in the
list [l].
*)
val find_opt : f:('a -> bool) -> 'a list -> 'a option
* [ find ~f l ] returns the first element of the list [ l ]
that satisfies the predicate [ f ] .
Returns [ None ] if there is no value that satisfies [ f ] in the
list [ l ] .
@since 4.05
that satisfies the predicate [f].
Returns [None] if there is no value that satisfies [f] in the
list [l].
@since 4.05
*)
val find_map : f:('a -> 'b option) -> 'a list -> 'b option
* [ find_map ~f l ] applies [ f ] to the elements of [ l ] in order ,
and returns the first result of the form [ Some v ] , or [ None ]
if none exist .
@since 4.10.0
and returns the first result of the form [Some v], or [None]
if none exist.
@since 4.10.0
*)
val filter : f:('a -> bool) -> 'a list -> 'a list
val find_all : f:('a -> bool) -> 'a list -> 'a list
val filteri : f:(int -> 'a -> bool) -> 'a list -> 'a list
* Same as { ! filter } , but the predicate is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.11.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.11.0
*)
val partition : f:('a -> bool) -> 'a list -> 'a list * 'a list
val partition_map : f:('a -> ('b, 'c) Either.t) -> 'a list -> 'b list * 'c list
* { 1 Association lists }
val assoc : 'a -> ('a * 'b) list -> 'b
val assoc_opt : 'a -> ('a * 'b) list -> 'b option
* [ assoc_opt a l ] returns the value associated with key [ a ] in the list of
pairs [ l ] . That is ,
[ a [ ... ; ( a , b ) ; ... ] = Some b ]
if [ ( a , b ) ] is the leftmost binding of [ a ] in list [ l ] .
Returns [ None ] if there is no value associated with [ a ] in the
list [ l ] .
@since 4.05
pairs [l]. That is,
[assoc_opt a [ ...; (a,b); ...] = Some b]
if [(a,b)] is the leftmost binding of [a] in list [l].
Returns [None] if there is no value associated with [a] in the
list [l].
@since 4.05
*)
val assq : 'a -> ('a * 'b) list -> 'b
val assq_opt : 'a -> ('a * 'b) list -> 'b option
* Same as { ! } , but uses physical equality instead of
structural equality to compare keys .
@since 4.05.0
structural equality to compare keys.
@since 4.05.0
*)
val mem_assoc : 'a -> map:('a * 'b) list -> bool
val mem_assq : 'a -> map:('a * 'b) list -> bool
* Same as { ! , but uses physical equality instead of
structural equality to compare keys .
structural equality to compare keys.
*)
val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
* [ remove_assoc a l ] returns the list of
pairs [ l ] without the first pair with key [ a ] , if any .
Not tail - recursive .
pairs [l] without the first pair with key [a], if any.
Not tail-recursive.
*)
val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
* Same as { ! , but uses physical equality instead
of structural equality to compare keys . Not tail - recursive .
of structural equality to compare keys. Not tail-recursive.
*)
* { 1 Lists of pairs }
val split : ('a * 'b) list -> 'a list * 'b list
* Transform a list of pairs into a pair of lists :
[ split [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] is [ ( [ a1 ; ... ; an ] , [ b1 ; ... ; bn ] ) ] .
Not tail - recursive .
[split [(a1,b1); ...; (an,bn)]] is [([a1; ...; an], [b1; ...; bn])].
Not tail-recursive.
*)
val combine : 'a list -> 'b list -> ('a * 'b) list
* Transform a pair of lists into a list of pairs :
[ combine [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] .
@raise Invalid_argument if the two lists
have different lengths . Not tail - recursive .
[combine [a1; ...; an] [b1; ...; bn]] is
[[(a1,b1); ...; (an,bn)]].
@raise Invalid_argument if the two lists
have different lengths. Not tail-recursive.
*)
val sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
* Sort a list in increasing order according to a comparison
function . The comparison function must return 0 if its arguments
compare as equal , a positive integer if the first is greater ,
and a negative integer if the first is smaller ( see Array.sort for
a complete specification ) . For example ,
{ ! Stdlib.compare } is a suitable comparison function .
The resulting list is sorted in increasing order .
{ ! sort } is guaranteed to run in constant heap space
( in addition to the size of the result list ) and logarithmic
stack space .
The current implementation uses Merge Sort . It runs in constant
heap space and logarithmic stack space .
function. The comparison function must return 0 if its arguments
compare as equal, a positive integer if the first is greater,
and a negative integer if the first is smaller (see Array.sort for
a complete specification). For example,
{!Stdlib.compare} is a suitable comparison function.
The resulting list is sorted in increasing order.
{!sort} is guaranteed to run in constant heap space
(in addition to the size of the result list) and logarithmic
stack space.
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val stable_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
val fast_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
(** Same as {!sort} or {!stable_sort}, whichever is
    faster on typical input.
*)
val sort_uniq : cmp:('a -> 'a -> int) -> 'a list -> 'a list
(** Same as {!sort}, but also remove duplicates.
    @since 4.02.0 (4.03.0 in ListLabels)
*)
val merge : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
(** Merge two lists:
    Assuming that [l1] and [l2] are sorted according to the
    comparison function [cmp], [merge ~cmp l1 l2] will return a
    sorted list containing all the elements of [l1] and [l2].
    If several elements compare equal, the elements of [l1] will be
    before the elements of [l2].
    Not tail-recursive (sum of the lengths of the arguments).
*)
(** {1 Lists and Sequences} *)
val to_seq : 'a list -> 'a Seq.t
(** Iterate on the list.
    @since 4.07
*)
val of_seq : 'a Seq.t -> 'a list
(** Create a list from a sequence.
    @since 4.07
*)
|
03ffeecc5d5e857675999f7f4dab5bf7ae74986189ff6fc0c95d8813c00b176d | ocaml-multicore/multicoretests | lin_tests_dsl_thread.ml | (* ********************************************************************** *)
(* Tests of in and out channels *)
(* ********************************************************************** *)
open Lin_tests_dsl_common_io.Lin_tests_dsl_common

(* Thread-based linearizability testers, one per channel configuration.
   The [-experimental] alert is silenced as in the rest of the suite. *)
module IC_thread = Lin_thread.Make (ICConf) [@@alert "-experimental"]
module OC_thread = Lin_thread.Make (OCConf) [@@alert "-experimental"]

(* Negative linearizability tests ([neg_lin_test]) over the in-channel
   and out-channel specifications, run under the QCheck base runner. *)
let tests =
  [ IC_thread.neg_lin_test ~count:1000 ~name:"Lin DSL In_channel test with Thread"
  ; OC_thread.neg_lin_test ~count:1000 ~name:"Lin DSL Out_channel test with Thread"
  ]

let _ = QCheck_base_runner.run_tests_main tests
| null | https://raw.githubusercontent.com/ocaml-multicore/multicoretests/01300b78f680544f490d4975b5d57f779f907a58/src/io/lin_tests_dsl_thread.ml | ocaml | **********************************************************************
Tests of in and out channels
********************************************************************** |
open Lin_tests_dsl_common_io.Lin_tests_dsl_common
module IC_thread = Lin_thread.Make(ICConf) [@@alert "-experimental"]
module OC_thread = Lin_thread.Make(OCConf) [@@alert "-experimental"]
let _ =
QCheck_base_runner.run_tests_main [
IC_thread.neg_lin_test ~count:1000 ~name:"Lin DSL In_channel test with Thread";
OC_thread.neg_lin_test ~count:1000 ~name:"Lin DSL Out_channel test with Thread";
]
|
3fb20fdc0b12765bacfefb0a681ed0bf63a596dcb1e8ffd7e9bbeba123d0faeb | MattWindsor91/travesty | or_error.mli | This file is part of ' travesty ' .
Copyright ( c ) 2018 , 2019 by
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation
the rights to use , copy , modify , merge , publish , distribute , sublicense ,
and/or sell copies of the Software , and to permit persons to whom the
Software is furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Copyright (c) 2018, 2019 by Matt Windsor
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE. *)
(** This interface intentionally left blank. *)
| null | https://raw.githubusercontent.com/MattWindsor91/travesty/3f4da33830cc928ad879077e690277088de1f836/base_exts/test/or_error.mli | ocaml | * This interface intentionally left blank. | This file is part of ' travesty ' .
Copyright ( c ) 2018 , 2019 by
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation
the rights to use , copy , modify , merge , publish , distribute , sublicense ,
and/or sell copies of the Software , and to permit persons to whom the
Software is furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Copyright (c) 2018, 2019 by Matt Windsor
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE. *)
|
0ead58d63a62cef0072d6988cb3f0c0d70c8689a86342c7099106a3a01aa834f | engagor/clj-vw | offline.clj | Copyright ( c ) 2014 Engagor
;;
;; The use and distribution terms for this software are covered by the
;; BSD License (-2-Clause)
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Higher level helper functions for interfacing to a local vowpal wabbit installation."}
clj-vw.offline
(:require [clj-vw.core :refer :all]
[clojure.java.io :refer (as-file)])
(:import [java.util UUID]))
(defn- maybe-write-data-file
"Like write-data-file but only executes if (get-option settings :data) doesn't already exist and if (:data settings) is non-empty. In that case, the settings data is written to a new file, which is pushed onto ::tmp-files."
([settings]
(maybe-write-data-file settings {}))
([settings writer-settings]
(if-not (get-option settings :data)
(if (empty? (:data settings))
(throw (Exception. "Unset :data file and empty examples."))
(let [tmp-file (str "/tmp/.vw-temp-data." (java.util.UUID/randomUUID))]
(println "temporarily writing data to" tmp-file)
(-> settings
(set-option :data tmp-file)
(write-data-file)
(update-in [::tmp-files] conj tmp-file))))
(if-not (.exists (clojure.java.io/as-file (get-option settings :data)))
(do (when (empty? (:data settings))
(throw (Exception. "Non-existing :data-file and empty examples.")))
(println "writing data to" (get-option settings :data))
(write-data-file settings))
settings))))
(defn- maybe-set-predictions-file [settings]
(if (get-option settings :predictions)
settings
(let [tmp-file (str "/tmp/.vw-temp-predictions." (java.util.UUID/randomUUID))]
(println "temporarily writing predictions to" tmp-file)
(-> settings
(set-option :predictions tmp-file)
(update-in [::tmp-files] conj tmp-file)))))
(defn- cleanup-tmp-files [settings]
(doseq [f (::tmp-files settings)]
(when (.exists (as-file f))
(println "removing temporary file" f)
(.delete (as-file f))))
(dissoc settings ::tmp-files))
;;; Public API
;;; ==========
(defn train
"Train a vowpal wabbit model from a data file (as specified by (get-option settings :data)) or from a
collection of in memory examples (as specified by (:data settings))."
[settings]
(-> settings
(maybe-write-data-file)
(vw)
(cleanup-tmp-files)
(assoc :options (:options settings))))
(defn predict
"Use an existing vowpal wabbit model (as specified by (get-option
settings :initial-regressor))or (get-option settings :final-regressor), in that order) to compute
predictions for examples in a data file (as specified by (get-option settings :data)) or in
memory (as spefified by (:data settings))."
[settings]
(-> settings
(maybe-write-data-file)
(maybe-set-predictions-file)
(maybe-set-option :test-only true)
(maybe-set-option :initial-regressor (get-option settings :final-regressor))
(vw)
(read-predictions)
(cleanup-tmp-files)
(assoc :options (:options settings))))
| null | https://raw.githubusercontent.com/engagor/clj-vw/ff03de8005495349652cd5f0f2f3f3e0f08a6964/src/clj_vw/offline.clj | clojure |
The use and distribution terms for this software are covered by the
BSD License (-2-Clause)
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Public API
========== | Copyright ( c ) 2014 Engagor
(ns ^{:doc "Higher level helper functions for interfacing to a local vowpal wabbit installation."}
clj-vw.offline
(:require [clj-vw.core :refer :all]
[clojure.java.io :refer (as-file)])
(:import [java.util UUID]))
(defn- maybe-write-data-file
"Like write-data-file but only executes if (get-option settings :data) doesn't already exist and if (:data settings) is non-empty. In that case, the settings data is written to a new file, which is pushed onto ::tmp-files."
([settings]
(maybe-write-data-file settings {}))
([settings writer-settings]
(if-not (get-option settings :data)
(if (empty? (:data settings))
(throw (Exception. "Unset :data file and empty examples."))
(let [tmp-file (str "/tmp/.vw-temp-data." (java.util.UUID/randomUUID))]
(println "temporarily writing data to" tmp-file)
(-> settings
(set-option :data tmp-file)
(write-data-file)
(update-in [::tmp-files] conj tmp-file))))
(if-not (.exists (clojure.java.io/as-file (get-option settings :data)))
(do (when (empty? (:data settings))
(throw (Exception. "Non-existing :data-file and empty examples.")))
(println "writing data to" (get-option settings :data))
(write-data-file settings))
settings))))
(defn- maybe-set-predictions-file [settings]
(if (get-option settings :predictions)
settings
(let [tmp-file (str "/tmp/.vw-temp-predictions." (java.util.UUID/randomUUID))]
(println "temporarily writing predictions to" tmp-file)
(-> settings
(set-option :predictions tmp-file)
(update-in [::tmp-files] conj tmp-file)))))
(defn- cleanup-tmp-files [settings]
(doseq [f (::tmp-files settings)]
(when (.exists (as-file f))
(println "removing temporary file" f)
(.delete (as-file f))))
(dissoc settings ::tmp-files))
(defn train
"Train a vowpal wabbit model from a data file (as specified by (get-option settings :data)) or from a
collection of in memory examples (as specified by (:data settings))."
[settings]
(-> settings
(maybe-write-data-file)
(vw)
(cleanup-tmp-files)
(assoc :options (:options settings))))
(defn predict
"Use an existing vowpal wabbit model (as specified by (get-option
settings :initial-regressor))or (get-option settings :final-regressor), in that order) to compute
predictions for examples in a data file (as specified by (get-option settings :data)) or in
memory (as spefified by (:data settings))."
[settings]
(-> settings
(maybe-write-data-file)
(maybe-set-predictions-file)
(maybe-set-option :test-only true)
(maybe-set-option :initial-regressor (get-option settings :final-regressor))
(vw)
(read-predictions)
(cleanup-tmp-files)
(assoc :options (:options settings))))
|
9ecd70eb77b315a15ffce174b4ffbb8856d47bd59cabcbffce9b1d170b8a6a58 | informatimago/lisp | peek-stream-test.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : peek-stream-test.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
;;;; Tests peek-stream.lisp.
;;;;
< PJB > < >
MODIFICATIONS
2015 - 02 - 25 < PJB > Extracted from .
;;;;LEGAL
AGPL3
;;;;
Copyright 2015 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program. If not, see </>.
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM.TEST"
(:use "COMMON-LISP"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.SIMPLE-TEST"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM")
(:export "TEST/ALL"))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM.TEST")
(define-test test/peek-stream/get-future-char ()
(dotimes (n 10)
(with-input-from-string (in "ComMon-Lisp")
(let* ((ps (make-instance 'peek-stream :stream in))
(nc (loop
:for ch = (get-future-char ps)
:repeat n
:collect ch :into result :finally (return result)))
(gc (loop
:for ch = (getchar ps)
:repeat n
:collect ch :into result :finally (return result))))
(assert-true (equal nc gc))))))
(define-test test/peek-stream/nextchar/1 ()
(with-input-from-string (in "ComMon-Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\C #\o #\m)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3 (nextchar ps))
'(#\M #\o #\n #\-)))
(ungetchar ps c3) (ungetchar ps c2) (ungetchar ps c1)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\M #\o #\n)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\- #\L #\i))))))
(define-test test/peek-stream/nextchar/2 ()
(with-input-from-string (in "Common-Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\C #\o #\m)))
(setf c1 (getchar ps) c2 (getchar ps))
(assert-true (equal (list c1 c2 (nextchar ps))
'(#\m #\o #\n)))
(setf c3 (getchar ps))
(assert-true (equal (list c3 (nextchar ps))
'(#\n #\-)))
(ungetchar ps c3) (ungetchar ps c2) (ungetchar ps c1)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\m #\o #\n)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\- #\L #\i))))))
(define-test test/peek-stream/nextchar/3 ()
(with-input-from-string (in " Common Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (nextchar ps))
(assert-true (equal (list c1 c2 c3) '(#\space #\space #\C)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (nextchar ps #\n))
(assert-true (equal (list c1 c2 c3) '(#\C #\o #\n)))
(setf c1 (getchar ps) c2 (nextchar ps t) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3) '(#\n #\L #\L))))))
(define-test test/all ()
(test/peek-stream/get-future-char)
(test/peek-stream/nextchar/1)
(test/peek-stream/nextchar/2)
(test/peek-stream/nextchar/3))
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/common-lisp/cesarum/peek-stream-test.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
Tests peek-stream.lisp.
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
**************************************************************************
THE END ;;;; | FILE : peek-stream-test.lisp
USER - INTERFACE :
< PJB > < >
MODIFICATIONS
2015 - 02 - 25 < PJB > Extracted from .
AGPL3
Copyright 2015 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM.TEST"
(:use "COMMON-LISP"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.SIMPLE-TEST"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM")
(:export "TEST/ALL"))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.PEEK-STREAM.TEST")
(define-test test/peek-stream/get-future-char ()
(dotimes (n 10)
(with-input-from-string (in "ComMon-Lisp")
(let* ((ps (make-instance 'peek-stream :stream in))
(nc (loop
:for ch = (get-future-char ps)
:repeat n
:collect ch :into result :finally (return result)))
(gc (loop
:for ch = (getchar ps)
:repeat n
:collect ch :into result :finally (return result))))
(assert-true (equal nc gc))))))
(define-test test/peek-stream/nextchar/1 ()
(with-input-from-string (in "ComMon-Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\C #\o #\m)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3 (nextchar ps))
'(#\M #\o #\n #\-)))
(ungetchar ps c3) (ungetchar ps c2) (ungetchar ps c1)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\M #\o #\n)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\- #\L #\i))))))
(define-test test/peek-stream/nextchar/2 ()
(with-input-from-string (in "Common-Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\C #\o #\m)))
(setf c1 (getchar ps) c2 (getchar ps))
(assert-true (equal (list c1 c2 (nextchar ps))
'(#\m #\o #\n)))
(setf c3 (getchar ps))
(assert-true (equal (list c3 (nextchar ps))
'(#\n #\-)))
(ungetchar ps c3) (ungetchar ps c2) (ungetchar ps c1)
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\m #\o #\n)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3)
'(#\- #\L #\i))))))
(define-test test/peek-stream/nextchar/3 ()
(with-input-from-string (in " Common Lisp")
(let ((ps (make-instance 'peek-stream :stream in))
c1 c2 c3)
(setf c1 (getchar ps) c2 (getchar ps) c3 (nextchar ps))
(assert-true (equal (list c1 c2 c3) '(#\space #\space #\C)))
(setf c1 (getchar ps) c2 (getchar ps) c3 (nextchar ps #\n))
(assert-true (equal (list c1 c2 c3) '(#\C #\o #\n)))
(setf c1 (getchar ps) c2 (nextchar ps t) c3 (getchar ps))
(assert-true (equal (list c1 c2 c3) '(#\n #\L #\L))))))
(define-test test/all ()
(test/peek-stream/get-future-char)
(test/peek-stream/nextchar/1)
(test/peek-stream/nextchar/2)
(test/peek-stream/nextchar/3))
|
ace08a29a9aebc266cb741d4c6f72d4b5b2b646dc696afeeb3a0eae2b5be09fa | kakao/hbase-packet-inspector | core.clj | (ns load-generator.core
(:import
org.apache.hadoop.hbase.TableName
[org.apache.hadoop.hbase.client ConnectionFactory Connection Put Scan]
[org.hbase.async HBaseClient AtomicIncrementRequest])
(:gen-class))
(def ^String table-name "t")
(def ^String cf "d")
(defn asynchbase-batch-increment
[]
(let [async (HBaseClient. "localhost")]
(.setFlushInterval async 100)
(doseq [f (doall
(for [_ (range 10)]
(future
(dotimes [i 100]
(.atomicIncrement
async
(AtomicIncrementRequest. table-name (str i) cf "foo" 1))))))]
(deref f))
(.. async shutdown join)))
(defn small-scan
[]
(with-open [connection (ConnectionFactory/createConnection)
table (.getTable connection (TableName/valueOf table-name))]
(dotimes [i 100]
(let [cf (.getBytes cf)
value (.getBytes "value")
put (.. (Put. (.getBytes (str i)))
(addColumn cf (.getBytes "foo") value)
(addColumn cf (.getBytes "bar") value))]
(.put table put)))
(with-open [scanner (.getScanner table (.. (Scan.) (setSmall true)))]
(doseq [_ (seq scanner)]))))
(defn -main
[& args]
(case (first args)
"asynchbase"
(asynchbase-batch-increment)
"small-scan"
(small-scan)))
| null | https://raw.githubusercontent.com/kakao/hbase-packet-inspector/a62ca478c5f59123155377f2a983b7a69dcdd522/dev-resources/load-generator/src/load_generator/core.clj | clojure | (ns load-generator.core
(:import
org.apache.hadoop.hbase.TableName
[org.apache.hadoop.hbase.client ConnectionFactory Connection Put Scan]
[org.hbase.async HBaseClient AtomicIncrementRequest])
(:gen-class))
(def ^String table-name "t")
(def ^String cf "d")
(defn asynchbase-batch-increment
[]
(let [async (HBaseClient. "localhost")]
(.setFlushInterval async 100)
(doseq [f (doall
(for [_ (range 10)]
(future
(dotimes [i 100]
(.atomicIncrement
async
(AtomicIncrementRequest. table-name (str i) cf "foo" 1))))))]
(deref f))
(.. async shutdown join)))
(defn small-scan
[]
(with-open [connection (ConnectionFactory/createConnection)
table (.getTable connection (TableName/valueOf table-name))]
(dotimes [i 100]
(let [cf (.getBytes cf)
value (.getBytes "value")
put (.. (Put. (.getBytes (str i)))
(addColumn cf (.getBytes "foo") value)
(addColumn cf (.getBytes "bar") value))]
(.put table put)))
(with-open [scanner (.getScanner table (.. (Scan.) (setSmall true)))]
(doseq [_ (seq scanner)]))))
(defn -main
[& args]
(case (first args)
"asynchbase"
(asynchbase-batch-increment)
"small-scan"
(small-scan)))
| |
b638ab364cc8555744d633759dbcdd9c5d5ef07ce392a90454ffa1ac95bba9e1 | sbcl/sbcl | sb-rotate-byte.impure.lisp | (require :sb-rotate-byte)
(load "../contrib/sb-rotate-byte/rotate-byte-tests.lisp")
| null | https://raw.githubusercontent.com/sbcl/sbcl/fbd46c52ea5ea4b3ba98a14ebdd4991d35989c6d/tests/sb-rotate-byte.impure.lisp | lisp | (require :sb-rotate-byte)
(load "../contrib/sb-rotate-byte/rotate-byte-tests.lisp")
| |
a17ff18a658094ca2cec885c29db2ee3a0e6931ca6ec537b779bd63d51bb2b7b | haskell-tools/haskell-tools | MultiMatchGuarded_res.hs | module Refactor.InlineBinding.MultiMatchGuarded where
b u v = (case (u, v) of (x, y) | x == y -> x
(x, y) -> x ++ y) | null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/InlineBinding/MultiMatchGuarded_res.hs | haskell | module Refactor.InlineBinding.MultiMatchGuarded where
b u v = (case (u, v) of (x, y) | x == y -> x
(x, y) -> x ++ y) | |
184d0ebf813adcaf1094546e964af5f80050929da36991501a780fbce99cb0c4 | Gbury/archsat | pipe.mli | This file is free software , part of Archsat . See file " LICENSE " for more details .
(** Top-level operations
This module defines top-level operators, i.e functions to act
on top-level statements. *)
(** {2 Types} *)
type 'a tr_stmt = {
contents : 'a;
implicit : Term.id list;
}
(* Used for wrapping translated contents with implicit declarations *)
type +'a stmt = {
id : Dolmen.Id.t;
contents : 'a;
loc : Dolmen.ParseLocation.t option;
}
(** Wrapper around statements. It records implicit type declarations. *)
type executed = [
| `Executed
]
type type_decls = [
| `Type_decl of Expr.Id.TyCstr.t
| `Term_decl of Expr.Id.Const.t
]
(** The type of top-level type declarations. *)
type decl = [
`Decl of Term.id tr_stmt
]
(** The type of proof id declaration. *)
type type_defs = [
| `Type_def of Dolmen.Id.t * Expr.ttype Expr.id list * Expr.ty
| `Term_def of Dolmen.Id.t * Expr.ttype Expr.id list * Expr.ty Expr.id list * Expr.term
]
(** The type of top-level type definitions. *)
type def = [
| `Def of (Dolmen.Id.t * Term.t) tr_stmt
]
(** The type of id definition. *)
type assume = [
| `Hyp of Expr.formula
| `Goal of Expr.formula
| `Clause of Expr.formula list
]
(** The type of top-level assertion statements *)
type solve_sequent = [
| `Left of Solver.id * Expr.formula
| `Right of Solver.id * Expr.formula
]
(** The type of sequent components (for proof output). *)
type proof_sequent = [
| `Left of Term.id tr_stmt
| `Right of (Solver.id * Term.id) tr_stmt
]
type solve = [
| `Solve of Expr.formula list
]
(** Top-level solve instruction *)
type result = [
| `Skipped
| `Unknown
| `Proof of Solver.proof
| `Model of Solver.model
]
(** The type of results for a solve instruction. *)
type typechecked = [ executed | type_defs | type_decls | assume | solve ]
(** The type of statements after typechecking *)
type solved = [ executed | type_defs | type_decls | solve_sequent | result ]
(** The type of solved statement *)
type translated = [ executed | decl | def | proof_sequent | result ]
(** The type of translated statements *)
* { 2 Pipes }
val parse :
Dolmen.Statement.t list -> Options.opts ->
Options.opts * (Options.opts -> Dolmen.Statement.t option)
(** Parsing function. Reads a list of prelude statements, and the input options and
returns a tuple of the new options (including the detected input language),
together with a statement generator. *)
val execute : Options.opts * Dolmen.Statement.t ->
[ `Continue of Options.opts * Dolmen.Statement.t | `Done of Options.opts ]
(** Perform side effects of statement (such as the 'exit' statement. *)
val expand : Options.opts * Dolmen.Statement.t ->
Options.opts * [ `Ok | `Gen of bool * Dolmen.Statement.t Gen.gen ]
(** Expand statements (such as includes). Returns the new options, and either:
- [ `Ok ], which means the statement can be propagated as is
- [ `Gen (flat, g) ], if the statement expands into a generator [g]. The bool [flat]
indicates wether the statements in [g] should be treated as a single group of
statements (with regards to timeouts, etc...), or as a list of independant statements
(each with its own timeout...).
*)
val run_typecheck : Options.opts -> bool
(** Should the typechecker be run ? *)
val typecheck : Options.opts * Dolmen.Statement.t -> typechecked stmt
(** Typechecks a statement. *)
val solve : Options.opts * typechecked stmt -> solved stmt
(** Solve a statement *)
val print_res : Options.opts * solved stmt -> unit
(** Print the results of solved statements *)
val run_translate : Options.opts -> bool
(** Should translation (and subsequent passes) be run ? *)
val translate : Options.opts * solved stmt -> translated stmt
(** Translate statements into proof statements *)
val export : Options.opts * translated stmt -> unit
(** Export various information; usually for debugging purposes. *)
val print_proof : Options.opts * translated stmt -> unit
(** Print the proof according to the options *)
val print_model : Options.opts * translated stmt -> unit
(** Print the proof according to the options *)
| null | https://raw.githubusercontent.com/Gbury/archsat/322fbefa4a58023ddafb3fa1a51f8199c25cde3d/src/middle/pipe.mli | ocaml | * Top-level operations
This module defines top-level operators, i.e functions to act
on top-level statements.
* {2 Types}
Used for wrapping translated contents with implicit declarations
* Wrapper around statements. It records implicit type declarations.
* The type of top-level type declarations.
* The type of proof id declaration.
* The type of top-level type definitions.
* The type of id definition.
* The type of top-level assertion statements
* The type of sequent components (for proof output).
* Top-level solve instruction
* The type of results for a solve instruction.
* The type of statements after typechecking
* The type of solved statement
* The type of translated statements
* Parsing function. Reads a list of prelude statements, and the input options and
returns a tuple of the new options (including the detected input language),
together with a statement generator.
* Perform side effects of statement (such as the 'exit' statement.
* Expand statements (such as includes). Returns the new options, and either:
- [ `Ok ], which means the statement can be propagated as is
- [ `Gen (flat, g) ], if the statement expands into a generator [g]. The bool [flat]
indicates wether the statements in [g] should be treated as a single group of
statements (with regards to timeouts, etc...), or as a list of independant statements
(each with its own timeout...).
* Should the typechecker be run ?
* Typechecks a statement.
* Solve a statement
* Print the results of solved statements
* Should translation (and subsequent passes) be run ?
* Translate statements into proof statements
* Export various information; usually for debugging purposes.
* Print the proof according to the options
* Print the proof according to the options | This file is free software , part of Archsat . See file " LICENSE " for more details .
type 'a tr_stmt = {
contents : 'a;
implicit : Term.id list;
}
type +'a stmt = {
id : Dolmen.Id.t;
contents : 'a;
loc : Dolmen.ParseLocation.t option;
}
type executed = [
| `Executed
]
type type_decls = [
| `Type_decl of Expr.Id.TyCstr.t
| `Term_decl of Expr.Id.Const.t
]
type decl = [
`Decl of Term.id tr_stmt
]
type type_defs = [
| `Type_def of Dolmen.Id.t * Expr.ttype Expr.id list * Expr.ty
| `Term_def of Dolmen.Id.t * Expr.ttype Expr.id list * Expr.ty Expr.id list * Expr.term
]
type def = [
| `Def of (Dolmen.Id.t * Term.t) tr_stmt
]
type assume = [
| `Hyp of Expr.formula
| `Goal of Expr.formula
| `Clause of Expr.formula list
]
type solve_sequent = [
| `Left of Solver.id * Expr.formula
| `Right of Solver.id * Expr.formula
]
type proof_sequent = [
| `Left of Term.id tr_stmt
| `Right of (Solver.id * Term.id) tr_stmt
]
type solve = [
| `Solve of Expr.formula list
]
type result = [
| `Skipped
| `Unknown
| `Proof of Solver.proof
| `Model of Solver.model
]
type typechecked = [ executed | type_defs | type_decls | assume | solve ]
type solved = [ executed | type_defs | type_decls | solve_sequent | result ]
type translated = [ executed | decl | def | proof_sequent | result ]
* { 2 Pipes }
val parse :
Dolmen.Statement.t list -> Options.opts ->
Options.opts * (Options.opts -> Dolmen.Statement.t option)
val execute : Options.opts * Dolmen.Statement.t ->
[ `Continue of Options.opts * Dolmen.Statement.t | `Done of Options.opts ]
val expand : Options.opts * Dolmen.Statement.t ->
Options.opts * [ `Ok | `Gen of bool * Dolmen.Statement.t Gen.gen ]
val run_typecheck : Options.opts -> bool
val typecheck : Options.opts * Dolmen.Statement.t -> typechecked stmt
val solve : Options.opts * typechecked stmt -> solved stmt
val print_res : Options.opts * solved stmt -> unit
val run_translate : Options.opts -> bool
val translate : Options.opts * solved stmt -> translated stmt
val export : Options.opts * translated stmt -> unit
val print_proof : Options.opts * translated stmt -> unit
val print_model : Options.opts * translated stmt -> unit
|
f0976369568c02b0bc2aeb2a99592dc414b8b128e9500bfc28b18eaf735cdd04 | ramalho/lc-with-redex | church-numerals.scm | #lang scheme
(require redex)
(define (number->Church-numeral n)
(define (f^n f n) (if (zero? n) (λ (x) x) (λ (x) ((f^n f (sub1 n)) (f x)))))
(λ (f) (λ (x) ((f^n f n) x))))
(define (Church-numeral->number Ch-n) ((Ch-n add1) 0))
(define (test Church-numeral number)
(test-equal (Church-numeral->number Church-numeral) number))
(let
((Church-add1 (λ(n)(λ(f)(λ(x)(f((n f)x))))))
(Church-sub1 (λ(n)(λ(f)(λ(x)(((n(λ(g)(λ(h)(h(g f)))))(λ(u)x))(λ(u)u))))))
(Church-plus (λ(m)(λ(n)(λ(f)(λ(x)((m f)((n f)x)))))))
(Church-mult (λ(m)(λ(n)(λ(f)(n(m f))))))
(Church-expt (λ(m)(λ(n)(n m))))
(Church-zero? (λ(n)((n(λ(x)#f))#t))))
(let*-values
(((Church-minus) (λ(m)(λ(n)((n Church-sub1)m))))
((first-ten-numbers) (build-list 10 (λ (n) n)))
((first-ten-Church-numerals) (map number->Church-numeral first-ten-numbers))
((C0 C1 C2 C3 C4 C5 C6 C7 C8 C9) (apply values first-ten-Church-numerals)))
(for-each test first-ten-Church-numerals first-ten-numbers)
(test ((Church-plus C5) C7) 12)
(test ((Church-mult C5) C7) 35)
(test ((Church-minus C5) C5) 0)
(test ((Church-minus C7) C5) 2)
(test ((Church-expt C7) C3) 343)
(test-equal (Church-zero? ((Church-minus C5) C5)) #t)
(test-equal (Church-zero? ((Church-minus C5) C4)) #f)))
(test-results)
| null | https://raw.githubusercontent.com/ramalho/lc-with-redex/506190bce2b8309192823b4c824e0b3758f29e46/church-numerals.scm | scheme | #lang scheme
(require redex)
(define (number->Church-numeral n)
(define (f^n f n) (if (zero? n) (λ (x) x) (λ (x) ((f^n f (sub1 n)) (f x)))))
(λ (f) (λ (x) ((f^n f n) x))))
(define (Church-numeral->number Ch-n) ((Ch-n add1) 0))
(define (test Church-numeral number)
(test-equal (Church-numeral->number Church-numeral) number))
(let
((Church-add1 (λ(n)(λ(f)(λ(x)(f((n f)x))))))
(Church-sub1 (λ(n)(λ(f)(λ(x)(((n(λ(g)(λ(h)(h(g f)))))(λ(u)x))(λ(u)u))))))
(Church-plus (λ(m)(λ(n)(λ(f)(λ(x)((m f)((n f)x)))))))
(Church-mult (λ(m)(λ(n)(λ(f)(n(m f))))))
(Church-expt (λ(m)(λ(n)(n m))))
(Church-zero? (λ(n)((n(λ(x)#f))#t))))
(let*-values
(((Church-minus) (λ(m)(λ(n)((n Church-sub1)m))))
((first-ten-numbers) (build-list 10 (λ (n) n)))
((first-ten-Church-numerals) (map number->Church-numeral first-ten-numbers))
((C0 C1 C2 C3 C4 C5 C6 C7 C8 C9) (apply values first-ten-Church-numerals)))
(for-each test first-ten-Church-numerals first-ten-numbers)
(test ((Church-plus C5) C7) 12)
(test ((Church-mult C5) C7) 35)
(test ((Church-minus C5) C5) 0)
(test ((Church-minus C7) C5) 2)
(test ((Church-expt C7) C3) 343)
(test-equal (Church-zero? ((Church-minus C5) C5)) #t)
(test-equal (Church-zero? ((Church-minus C5) C4)) #f)))
(test-results)
| |
a677cb13c3dd4a3fb6d728882b1e0fafc53139963251b4dedcf64197fd496a60 | nojb/llvm-min-caml | main.ml | let limit = ref 1000
let rec iter n e = (* 最適化処理をくりかえす (caml2html: main_iter) *)
Format.eprintf "iteration %d@." n;
if n = 0 then e else
let e' = Elim.f (ConstFold.f (Inline.f (Assoc.f (Beta.f e)))) in
if e = e' then e else
iter (n - 1) e'
let lexbuf outchan l = (* バッファをコンパイルしてチャンネルへ出力する (caml2html: main_lexbuf) *)
Id.counter := 0;
Typing.extenv := M.empty;
Emit.f outchan
(Gc.f
(Closure.f
(iter !limit
(Alpha.f
(KNormal.f
(Typing.f
(Parser.exp Lexer.token l)))))))
let string s = lexbuf stdout (Lexing.from_string s) (* 文字列をコンパイルして標準出力に表示する (caml2html: main_string) *)
let file f = (* ファイルをコンパイルしてファイルに出力する (caml2html: main_file) *)
let inchan = open_in (f ^ ".ml") in
let outchan = open_out (f ^ ".bc") in
try
lexbuf outchan (Lexing.from_channel inchan);
close_in inchan;
close_out outchan;
with e -> (close_in inchan; close_out outchan; raise e)
let () = (* ここからコンパイラの実行が開始される (caml2html: main_entry) *)
let files = ref [] in
Arg.parse
[("-inline", Arg.Int(fun i -> Inline.threshold := i), "maximum size of functions inlined");
("-iter", Arg.Int(fun i -> limit := i), "maximum number of optimizations iterated")]
(fun s -> files := !files @ [s])
("Mitou Min-Caml Compiler (C) Eijiro Sumii\n" ^
Printf.sprintf "usage: %s [-inline m] [-iter n] ...filenames without \".ml\"..." Sys.argv.(0));
List.iter
(fun f -> ignore (file f))
!files
| null | https://raw.githubusercontent.com/nojb/llvm-min-caml/68703b905f8292cb2e20b41bbd90cfea85ca2a19/main.ml | ocaml | 最適化処理をくりかえす (caml2html: main_iter)
バッファをコンパイルしてチャンネルへ出力する (caml2html: main_lexbuf)
文字列をコンパイルして標準出力に表示する (caml2html: main_string)
ファイルをコンパイルしてファイルに出力する (caml2html: main_file)
ここからコンパイラの実行が開始される (caml2html: main_entry) | let limit = ref 1000
Format.eprintf "iteration %d@." n;
if n = 0 then e else
let e' = Elim.f (ConstFold.f (Inline.f (Assoc.f (Beta.f e)))) in
if e = e' then e else
iter (n - 1) e'
Id.counter := 0;
Typing.extenv := M.empty;
Emit.f outchan
(Gc.f
(Closure.f
(iter !limit
(Alpha.f
(KNormal.f
(Typing.f
(Parser.exp Lexer.token l)))))))
let inchan = open_in (f ^ ".ml") in
let outchan = open_out (f ^ ".bc") in
try
lexbuf outchan (Lexing.from_channel inchan);
close_in inchan;
close_out outchan;
with e -> (close_in inchan; close_out outchan; raise e)
let files = ref [] in
Arg.parse
[("-inline", Arg.Int(fun i -> Inline.threshold := i), "maximum size of functions inlined");
("-iter", Arg.Int(fun i -> limit := i), "maximum number of optimizations iterated")]
(fun s -> files := !files @ [s])
("Mitou Min-Caml Compiler (C) Eijiro Sumii\n" ^
Printf.sprintf "usage: %s [-inline m] [-iter n] ...filenames without \".ml\"..." Sys.argv.(0));
List.iter
(fun f -> ignore (file f))
!files
|
5673ddcdccd6206a9ef1cfb8b3a6abd143ea73a23c4b815603eb620701c6b8fb | mflatt/shrubbery-rhombus-0 | lex.rkt | #lang racket/base
(require parser-tools/lex
(for-syntax racket/base)
(prefix-in : parser-tools/lex-sre)
"private/property.rkt"
"private/peek-port.rkt")
(provide lex/status
lex-all
token-name
;; 'identifier
;; 'literal
;; 'comment
;; 'whitespace
;;
;; 'operator
;; 'block-operator
;; 'continue-operator
;; 'bar-operator
;;
;; 'opener
;; 'closer
;; 'comma-operator
;; 'semicolon-operator
;;
;; 's-exp
;;
' EOF
;;
;; 'fail
token?
token-value
token-e
token-line
token-column
token-srcloc
token-rename
syntax->token
stx-for-original-property
current-lexer-source
make-in-text-status
lex-nested-status?
lex-dont-stop-status?)
;; Lexical abbreviations (regular-expression shorthands) for the token
;; lexers below, written with `parser-tools/lex` SRE operators
;; (`:or`, `::`, `:*`, `:/`, `:**`, etc.).
(define-lex-abbrevs
  ;; For case insensitivity
  [e (char-set "eE")]
  [digit (:/ "0" "9")]
  ;; a digit, optionally followed by a `_` separator
  [digit_ (:or digit (:: digit "_"))]
  [digit16 (:/ "af" "AF" "09")]
  [digit16_ (:or digit16 (:: digit16 "_"))]
  [digit8 (:/ "0" "7")]
  ;; characters allowed in a `#lang` language name
  [langchar (:or (:/ "az" "AZ" "09") "+" "-" "_")]
  ;; does not constrain to avoid surrogates:
  [unicode (:or (:: "u" (:** 1 4 digit16))
                (:: "U" (:** 1 6 digit16)))]
  [str (:: "\"" (:* string-element ) "\"")]
  [string-element (:or (:~ "\"" "\\" "\n" "\r")
                       (:: "\\" unicode)
                       string-escape)]
  [byte-str (:: "#\"" (:* byte-string-element) "\"")]
  [byte-string-element (:or (:- (:/ "\x00" "\xFF") "\"" "\\" "\n" "\r")
                            string-escape)]
  ;; the escape sequences recognized inside string and byte-string literals
  [string-escape (:or "\\\""
                      "\\\\"
                      "\\a"
                      "\\b"
                      "\\t"
                      "\\n"
                      "\\v"
                      "\\f"
                      "\\r"
                      "\\e"
                      "\\'"
                      (:: "\\" (:** 1 3 digit8))
                      (:: "\\x" (:** 1 2 digit16)))]
  ;; an unterminated or malformed string (reported as a 'fail token)
  [bad-str (:: (:? "#") "\""
               (:* (:~ "\"" "\\" #\newline)
                   (:: "\\" (:- any-char #\newline)))
               (:? "\\" "\""))]
  [boolean (:or "#true" "#false")]
  [void-const "#void"]
  [special-number (:: "#"
                      (:or "inf"
                           "neginf"
                           "nan"))]
  ;; any other `#...` sequence that is not a recognized constant
  [bad-hash (:- (:or (:: "#" (:* non-delims))
                     "#/")
                boolean
                void-const
                special-number)]
  [exponent-marker e]
  [sign (char-set "+-")]
  ;; a `#!...` script line, with backslash-newline continuation
  [script (:: "#!" (:or #\space #\/) (:* (:~ #\newline) (:: #\\ #\newline)))]
  [identifier (:: (:or alphabetic "_")
                  (:* (:or alphabetic numeric "_")))]
  ;; a single operator character: symbolic/punctuation, minus the
  ;; characters that have their own token categories
  [opchar (:or (:- symbolic (:or "~"))
               (:- punctuation (:or "," ";" "#" "\\" "_" "@" "\""
                                    "(" ")" "[" "]" "{" "}" "«" "»")))]
  [operator (:- (:or opchar
                     (:: (:* opchar) (:- opchar "+" "-" "." "/"))
                     (:+ ".")
                     (:+ "+")
                     (:+ "-"))
                "|" ":"
                (:: (:* any-char) (:or "//" "/*") (:* any-char)))]
  [keyword (:: "~" identifier)]
  [bad-keyword (:: "~")]
  ;; disallows a number that starts +, -, or "."
  [number/continuing (:or decimal-number/continuing
                          hex-number)]
  [number (:: (:? sign)
              (:or decimal-number
                   hex-number))]
  [uinteger (:: (:* digit_) digit)]
  [uinteger16 (:: (:* digit16_) digit16)]
  ;; doesn't match digits ending with "."; that case is handled with
  ;; a follow-up peek to use "." when not part of an multi-char operator
  [decimal-number/continuing (:or (:: uinteger (:? number-exponent))
                                  (:: uinteger "." (:? uinteger) number-exponent)
                                  (:: uinteger "." uinteger))]
  [decimal-number (:or decimal-number/continuing
                       (:: "." uinteger (:? number-exponent)))]
  [number-exponent (:: exponent-marker (:? sign) uinteger)]
  [hex-number (:: "0x" uinteger16)]
  ;; digit-led sequences that are not valid numbers or identifiers
  [bad-number/continuing (:- (:: digit (:+ non-number-delims))
                             identifier
                             number/continuing)]
  [bad-number (:- (:: (:? sign) digit (:+ non-number-delims))
                  identifier
                  (:: identifier ".")
                  number)]
  [bad-comment "*/"]
  [non-number-delims (:or non-delims
                          (:: "." non-delims))]
  [non-delims (:or alphabetic numeric "_")]
  ;; making whitespace end at newlines is for interactive parsing
  ;; where we end at a blank line
  [whitespace-segment (:or (:+ (:- whitespace "\n"))
                           (:: (:* (:- whitespace "\n")) "\n"))])
;; `ret` is sugar for `make-ret`: when both the token name and the
;; coloring type are quoted symbols, it bundles them into an attribute
;; hash (with keys 'type and 'rhombus-type); `#:raw` defaults to #f,
;; meaning "use the lexeme itself as raw text".
(define-syntax (ret stx)
  (syntax-case stx (quote)
    [(_ (quote name) lexeme #:raw raw (quote type) more ...)
     (with-syntax ([ht (hasheq 'type #'type 'rhombus-type #'name)])
       #`(make-ret (quote name) lexeme #:raw raw ht more ...))]
    [(_ name lexeme type more ...)
     #`(ret name lexeme #:raw #f type more ...)]))
;; Produce the 8 values that `lex/status` returns for one token:
;; token, attribs, paren shape, start offset, end offset, backup
;; (always 0 here), new lexer status, and pending-backup amount.
(define (make-ret name lexeme #:raw [raw #f] attribs paren start-pos end-pos status
                  #:pending-backup [pending-backup 0])
  (define backup 0)
  (values (make-token name lexeme start-pos end-pos raw)
          attribs paren (position-offset start-pos) (position-offset end-pos)
          backup status
          pending-backup))
;; A syntax object produced by `read-syntax`, used as the fourth argument
;; of `datum->syntax` so generated token syntax carries the same
;; syntax properties as read ("original") syntax.
(define stx-for-original-property (read-syntax #f (open-input-string "original")))
;; Source name recorded in generated srclocs; `lex-all` parameterizes this.
(define current-lexer-source (make-parameter "input"))
;; Construct a `token` whose value is a syntax object (or list of syntax
;; objects) located at `start-pos`..`end-pos` for the current lexer
;; source. Non-comment tokens get a raw-text property (`raw`, or the
;; element itself when it is a string, or '() otherwise).
(define (make-token name e start-pos end-pos [raw #f])
  (define offset (position-offset start-pos))
  (define loc (vector (current-lexer-source)
                      (position-line start-pos)
                      (position-col start-pos)
                      offset
                      (- (position-offset end-pos)
                         offset)))
  (token name (let loop ([e e] [raw raw])
                (let ([e (if (pair? e)
                             ;; for a list value (e.g. `(op ...)`), attach
                             ;; the raw text only to the last element
                             (let p-loop ([e e])
                               (cond
                                 [(null? (cdr e)) (list (loop (car e) raw))]
                                 [else (cons (loop (car e) #f)
                                             (p-loop (cdr e)))]))
                             e)]
                      [raw (if (pair? e) #f raw)])
                  (define stx (datum->syntax #f
                                             e
                                             loc
                                             stx-for-original-property))
                  (if (eq? name 'comment)
                      stx
                      (syntax-raw-property stx (or raw (if (string? e) e '()))))))))
;; Consume the rest of the current line as a comment token named `name`,
;; prefixed by the already-consumed `lexeme`. Special (non-char) values
;; in the port are skipped; the newline is consumed only when
;; `consume-newline?` is true. Returns the 8 lexer-result values.
(define (read-line-comment name lexeme input-port start-pos
                           #:status [status 'initial]
                           #:consume-newline? [consume-newline? #f]
                           #:pending-backup [pending-backup 0])
  (let ([comment (apply string (append (string->list lexeme) (read-line/skip-over-specials input-port
                                                                                           consume-newline?)))])
    (define-values (end-line end-col end-offset) (port-next-location input-port))
    (values (make-token name comment start-pos (position end-offset end-line end-col))
            'comment #f
            (position-offset start-pos)
            end-offset
            0
            status
            pending-backup)))
;; Scan forward inside a `/* ... */` comment; returns the nesting delta
;; (+1 for `/*`, -1 for `*/`, or 'eof), the end position, and the text
;; consumed. Special values and special comments are skipped.
(define get-next-comment
  (lexer
   ["/*" (values 1 end-pos lexeme)]
   ["*/" (values -1 end-pos lexeme)]
   [(:or "/" "*" (:* (:~ "*" "/")))
    ;; plain comment text: keep scanning and accumulate
    (let-values ([(delta end-pos rest-lexeme) (get-next-comment input-port)])
      (values delta end-pos (string-append lexeme rest-lexeme)))]
   [(eof) (values 'eof end-pos "")]
   [(special)
    (get-next-comment input-port)]
   [(special-comment)
    (get-next-comment input-port)]))
;; Read a (possibly nested) `/* ... */` comment with `num-opens` unclosed
;; openers so far; produce a 'comment token, or a 'fail token at EOF.
(define (read-nested-comment num-opens start-pos lexeme input)
  (define-values (diff end next-lexeme) (get-next-comment input))
  (cond
    [(eq? 'eof diff) (ret 'fail eof 'error #f start-pos end 'initial)]
    [else
     (define all-lexeme (string-append lexeme next-lexeme))
     (define next-num-opens (+ diff num-opens))
     (cond
       [(= 0 next-num-opens) (ret 'comment all-lexeme 'comment #f start-pos end 'initial)]
       [else (read-nested-comment next-num-opens start-pos all-lexeme input)])]))
;; Current position (offset) of input port `i`, discarding line/column.
(define (get-offset i)
  (define-values (line col offset) (port-next-location i))
  offset)
;; Collect the characters up to the next newline (or EOF) as a list,
;; dropping any non-character special values. The newline itself is
;; consumed (and included in the result) only when `consume-newline?`.
(define (read-line/skip-over-specials i consume-newline?)
  (let collect ()
    (define c (peek-char-or-special i))
    (cond
      [(eof-object? c) '()]
      [(eqv? c #\newline)
       (if consume-newline?
           (begin
             (read-char-or-special i)
             (list #\newline))
           '())]
      [(char? c)
       (read-char-or-special i)
       (cons c (collect))]
      [else
       ;; a special value: consume and drop it
       (read-char-or-special i)
       (collect)])))
;; Lexer status while inside a `#{...}` S-expression escape:
;; `depth` counts unclosed parens, `status` is the Racket lexer's status.
(struct s-exp-mode (depth status) #:prefab)
;; Lexer status within an `@` form; `mode` is one of 'initial, 'brackets,
;; 'open, 'inside, 'escape, 'close (see `at-lexer`); `openers` is a stack
;; of pending opener strings in shrubbery modes, or a nesting-depth count
;; while in 'inside mode.
(struct in-at (mode comment? closeable? opener shrubbery-status openers) #:prefab)
;; Status for a shrubbery escape inside `@{...}` text: the shrubbery
;; status plus the `in-at` status to return to afterward.
(struct in-escaped (shrubbery-status at-status) #:prefab)
;; A pending-backup mode causes a non-zero `backup` count for one or
;; more future tokens; for example, when parsing `@|{`, there's a peek
;; triggered by `@` that decides how to proceed next, and if that
;; peek's result changes, then we'll need to go back to the `@` token.
;; The `amount` of pending backup is how many characters need to be
;; consumed before the pending backup expires. For example, with
;; `@|<<<x`, the peek stopped at `x` while looking for `{`, and we'll
;; need to re-lex starting from `@` before operators `|` and `<<<`.
;; A pending backup is not needed if only one character is peeked, since
;; the colorer would check the token just before a change, anyway.
(struct pending-backup-mode (amount status) #:prefab)
;; Initial status for lexing in "text" mode: as if already inside `@{...}`
;; with an empty opener and no closer expected (closeable? is #f).
(define (make-in-text-status)
  (in-at 'inside #f #f "" 'initial 0))
;; Pop one level of S-expression (`#{...}`) mode from a possibly nested
;; status, rebuilding the wrapper statuses around it; errors if no
;; S-expression mode is found.
(define (out-of-s-exp-mode status)
  (cond
    [(pending-backup-mode? status) (struct-copy pending-backup-mode status
                                                [status (out-of-s-exp-mode
                                                         (pending-backup-mode-status status))])]
    [(s-exp-mode? status) (s-exp-mode-status status)]
    [(in-at? status) (struct-copy in-at status
                                  [shrubbery-status (out-of-s-exp-mode (in-at-shrubbery-status status))]
                                  ;; a `#{` pushed "{" onto the opener stack; pop it
                                  [openers (let ([openers (in-at-openers status)])
                                             (unless (and (pair? openers) (equal? "{" (car openers)))
                                               (error 'out-of-s-exp-mode "expected opener not found"))
                                             (cdr openers))])]
    [(in-escaped? status) (struct-copy in-escaped status
                                       [shrubbery-status (out-of-s-exp-mode (in-escaped-shrubbery-status status))])]
    [else (error 'out-of-s-exp-mode "not in S-expression mode!")]))
;; True when `status` describes a nested context (an `@` form, text mode,
;; or S-expression escape) rather than a plain top-level symbol status.
(define (lex-nested-status? status)
  (cond
    [(pending-backup-mode? status)
     (lex-nested-status? (pending-backup-mode-status status))]
    [(not status) #f]
    [(symbol? status) #f]
    [else #t]))
;; True when re-lexing must not stop at this status:
;; anything involving a peek has a pending backup
(define (lex-dont-stop-status? status)
  (pending-backup-mode? status))
;; Lex one token from `in` given the current `status`, returning the
;; seven values of the syntax-coloring lexer protocol: token, type
;; attributes, paren shape, start, end, backup amount, and new status.
;; `racket-lexer*/status` handles `#{...}` S-expression escapes and may
;; be #f when that mode cannot be entered.
(define (lex/status in pos status-in racket-lexer*/status)
  ;; unwrap any pending-backup wrapper before dispatching
  (define prev-pending-backup (if (pending-backup-mode? status-in)
                                  (pending-backup-mode-amount status-in)
                                  0))
  (define status (if (pending-backup-mode? status-in)
                     (pending-backup-mode-status status-in)
                     status-in))
  (let-values ([(tok type paren start end backup status pending-backup)
                (let loop ([status status])
                  (cond
                    [(s-exp-mode? status)
                     ;; within `#{}`
                     (unless racket-lexer*/status
                       (error "shouldn't be in S-expression mode without a Racket lexer"))
                     (define depth (s-exp-mode-depth status))
                     (cond
                       [(and (zero? depth)
                             (eqv? #\} (peek-char in)))
                        ;; go out of S-expression mode by using shrubbery lexer again
                        (shrubbery-lexer/status in)]
                       [else
                        ;; delegate to the Racket lexer, tracking paren depth
                        ;; via its 'open/'close actions
                        (define-values (tok type paren start end backup s-exp-status action)
                          (racket-lexer*/status in pos (s-exp-mode-status status)))
                        (values tok type paren start end backup (case action
                                                                  [(open)
                                                                   (s-exp-mode (add1 depth) s-exp-status)]
                                                                  [(close)
                                                                   (s-exp-mode (sub1 depth) s-exp-status)]
                                                                  [else
                                                                   (s-exp-mode depth s-exp-status)])
                                0)])]
                    [(in-at? status)
                     ;; within an `@` sequence
                     (define-values (tok type paren start end backup new-status pending-backup)
                       (at-lexer in status (lambda (status) (loop status))))
                     ;; inside an `@//` comment region, mark tokens as comments
                     (define new-type (if (and (in-at-comment? status)
                                               (not (eq? type 'eof)))
                                          (hash-set (if (hash? type) type (hash 'type type)) 'comment? #t)
                                          type))
                     (values tok new-type paren start end backup new-status pending-backup)]
                    [(in-escaped? status)
                     ;; a shrubbery escape inside text mode: lex with the
                     ;; inner status and re-wrap
                     (define-values (t type paren start end backup sub-status pending-backup)
                       (loop (in-escaped-shrubbery-status status)))
                     (values t type paren start end backup (struct-copy in-escaped status
                                                                        [shrubbery-status sub-status])
                             pending-backup)]
                    [(eq? status 'continuing)
                     ;; normal mode, after a form
                     (shrubbery-lexer-continuing/status in)]
                    [else
                     ;; normal mode, at start or after an operator or whitespace
                     (shrubbery-lexer/status in)]))])
    (cond
      [(and (token? tok)
            (eq? (token-name tok) 'at-content)
            (eqv? 0 (string-length (token-e tok))))
       ;; a syntax coloring lexer must not return a token that
       ;; consumes no characters, so just drop it by recurring
       (lex/status in pos status racket-lexer*/status)]
      [else
       (define new-backup (cond
                            [(zero? prev-pending-backup) backup]
                            [#t (max 1 backup)]
                            ;; NOTE(review): the clauses below are unreachable,
                            ;; since `[#t ...]` above always matches once a
                            ;; pending backup exists — TODO confirm whether the
                            ;; finer-grained per-token backups were intended.
                            ;; If we have "@/{" and we add a "/" after the existing one,
                            ;; we'll need to back up more:
                            [(not (token? tok)) backup]
                            [(eq? (token-name tok) 'at-opener) 1]
                            [(eq? (token-name tok) 'at-closer) 1]
                            [(eq? (token-name tok) 'at) 1]
                            [(eq? (token-name tok) 'at-comment) 3]
                            [(and (in-at? status) (eq? (token-name tok) 'operator)) 2]
                            [else backup]))
       ;; carry forward any pending backup not yet consumed by this token
       (define new-pending-backup (max pending-backup
                                       (if (and end start)
                                           (- prev-pending-backup (- end start))
                                           0)))
       (define status/backup (if (zero? new-pending-backup)
                                 status
                                 (pending-backup-mode new-pending-backup status)))
       (values tok type paren start end new-backup status/backup)])))
;; Template for the main shrubbery token lexer. Instantiated twice below:
;; once with number patterns that allow a leading sign or `.` (used at the
;; start of a term) and once with "continuing" patterns that don't (used
;; right after a form, so `x-2` lexes `-` as an operator).
(define-syntax-rule (make-lexer/status number bad-number)
  (lexer
   [whitespace-segment
    (ret 'whitespace lexeme 'white-space #f start-pos end-pos 'initial)]
   [str (ret 'literal (parse-string lexeme) #:raw lexeme 'string #f start-pos end-pos 'datum)]
   [byte-str (ret 'literal (parse-byte-string lexeme) #:raw lexeme 'string #f start-pos end-pos 'datum)]
   [bad-number
    (let-values ([(dot? new-lexeme new-end-pos pending-backup) (maybe-consume-trailing-dot input-port lexeme end-pos)])
      (ret 'fail new-lexeme 'error #f start-pos new-end-pos 'continuing
           ;; backup is needed if the next character is `+` or `-`; ok to conservatively back up
           #:pending-backup 1))]
   [number
    ;; a trailing `.` may extend the number, unless it starts an operator
    (let-values ([(dot? new-lexeme new-end-pos pending-backup) (maybe-consume-trailing-dot input-port lexeme end-pos)])
      (cond
        [dot?
         (cond
           [(decimal-integer? lexeme)
            ;; add `.` to end of number
            (ret 'literal (parse-number new-lexeme) #:raw new-lexeme 'constant #f start-pos new-end-pos 'continuing
                 #:pending-backup pending-backup)]
           [else
            ;; count `.` as error
            (ret 'fail new-lexeme 'error #f start-pos new-end-pos 'continuing
                 #:pending-backup pending-backup)])]
        [else
         (ret 'literal (parse-number lexeme) #:raw lexeme 'constant #f start-pos end-pos 'continuing
              #:pending-backup pending-backup)]))]
   [special-number
    (let ([num (case lexeme
                 [("#inf") +inf.0]
                 [("#neginf") -inf.0]
                 [("#nan") +nan.0])])
      (ret 'literal num #:raw lexeme 'constant #f start-pos end-pos 'continuing))]
   [boolean
    (ret 'literal (equal? lexeme "#true") #:raw lexeme 'constant #f start-pos end-pos 'continuing)]
   [void-const
    (ret 'literal (void) #:raw lexeme 'constant #f start-pos end-pos 'continuing)]
   ["//" (read-line-comment 'comment lexeme input-port start-pos)]
   ["/*" (read-nested-comment 1 start-pos lexeme input-port)]
   ["#//"
    ;; comments out the following group
    (ret 'group-comment lexeme 'comment #f start-pos end-pos 'initial)]
   [(:: (:or "#lang " "#!")
        (:or langchar
             (:: langchar (:* (:or langchar "/")) langchar)))
    (ret 'comment lexeme 'other #f start-pos end-pos 'initial)]
   [(:: (:or "#lang " "#!") (:* (:& any-char (complement whitespace))))
    ;; malformed language name
    (ret 'fail lexeme 'error #f start-pos end-pos 'initial)]
   [script
    (ret 'comment lexeme 'comment #f start-pos end-pos 'initial)]
   [(:or "(" "[" "{" "«")
    (ret 'opener lexeme 'parenthesis (string->symbol lexeme) start-pos end-pos 'initial)]
   [(:or ")" "]" "}" "»")
    (ret 'closer lexeme 'parenthesis (string->symbol lexeme) start-pos end-pos 'continuing)]
   ["#{"
    ;; enter S-expression escape mode
    (ret 's-exp lexeme 'parenthesis '|{| start-pos end-pos (s-exp-mode 0 #f))]
   [":"
    (ret 'block-operator lexeme 'block-operator #f start-pos end-pos 'initial)]
   ["|"
    (ret 'bar-operator lexeme 'bar-operator #f start-pos end-pos 'initial)]
   ["\\"
    (ret 'continue-operator lexeme 'continue-operator #f start-pos end-pos 'initial)]
   [","
    (ret 'comma-operator lexeme 'separator #f start-pos end-pos 'initial)]
   [";"
    (ret 'semicolon-operator lexeme 'separator #f start-pos end-pos 'initial)]
   [identifier
    (ret 'identifier (string->symbol lexeme) #:raw lexeme 'symbol #f start-pos end-pos 'continuing)]
   [operator
    (ret 'operator (list 'op (string->symbol lexeme)) #:raw lexeme 'operator #f start-pos end-pos 'initial)]
   [keyword
    ;; `~name` is a keyword; drop the `~` for the keyword value
    (let ([kw (string->keyword (substring lexeme 1))])
      (ret 'identifier kw #:raw lexeme 'hash-colon-keyword #f start-pos end-pos 'continuing))]
   ["@//"
    ;; at-exp comment: either a block comment `@//{...}` or a line comment
    (let ([opener (peek-at-opener input-port)])
      (if opener
          (let ([status (in-at 'open #t #t opener 'initial '())])
            (ret 'at-comment lexeme 'comment (string->symbol lexeme) start-pos end-pos status #:pending-backup 1))
          ;; all characters up to an opener-deciding character are part of
          ;; the comment, so pending-backup = 1
          (read-line-comment 'at-comment lexeme input-port start-pos #:pending-backup 1)))]
   ["@"
    (let-values ([(opener pending-backup) (peek-at-opener* input-port)])
      (define mode (if opener 'open 'initial))
      (ret 'at lexeme 'at #f start-pos end-pos (in-at mode #f #t opener 'initial '())
           #:pending-backup (if opener 1 pending-backup)))]
   [(special)
    ;; non-character values injected into the port
    (cond
      [(or (number? lexeme) (boolean? lexeme) (void? lexeme))
       (ret 'literal lexeme 'constant #f start-pos end-pos 'continuing)]
      [(string? lexeme)
       (ret 'literal lexeme 'string #f start-pos end-pos 'continuing)]
      [(keyword? lexeme)
       (ret 'literal lexeme 'hash-colon-keyword #f start-pos end-pos 'continuing)]
      [else
       (ret 'literal lexeme 'no-color #f start-pos end-pos 'continuing)])]
   [(special-comment)
    (ret 'comment "" 'comment #f start-pos end-pos 'initial)]
   [(eof) (ret-eof start-pos end-pos)]
   [(:or bad-str bad-keyword bad-hash bad-comment)
    (ret 'fail lexeme 'error #f start-pos end-pos 'bad)]
   [any-char (extend-error lexeme start-pos end-pos input-port)]))
;; Result values for end-of-file: an 'EOF token with no status and no
;; start/end offsets.
(define (ret-eof start-pos end-pos)
  (values (make-token 'EOF eof start-pos end-pos) 'eof #f #f #f 0 #f 0))
;; Lexer for the start of a term (allows signed numbers and leading `.`)
(define shrubbery-lexer/status (make-lexer/status number bad-number))
;; Lexer for right after a form, where `+`/`-`/`.` must lex as operators
(define shrubbery-lexer-continuing/status (make-lexer/status number/continuing bad-number/continuing))
;; after reading `@`, we enter an at-exp state machine for whether
;; we're in the initial part, within `[]`, or within `{}`; we have to
;; perform some parsing here to balance openers and closers; we leave
;; whitespace trimming to the parser layer
(define (at-lexer in status recur)
  (define in-mode (in-at-mode status))
  ;; anything that uses `get-expected` should trigger a non-zero backup
  ;; Consume characters that a peek already established: either a string
  ;; opener followed by a char, or a char followed by a string closer.
  (define (get-expected opener/ch ch/closer)
    (define (get-all-expected s)
      (for ([ch (in-string s)])
        (unless (eqv? ch (read-char in))
          (error "inconsistent input" ch))))
    (define start-pos (next-location-as-pos in))
    (define eof?
      (cond
        [(string? opener/ch)
         (get-all-expected opener/ch)
         (unless (eqv? ch/closer (read-char in))
           (error "inconsistent opener input" ch/closer))
         #f]
        [else
         (define ch (read-char in))
         (cond
           [(eof-object? ch) #t]
           [else
            (unless (eqv? opener/ch ch)
              (error "inconsistent closer input" opener/ch))
            (get-all-expected ch/closer)
            #f])]))
    (define end-pos (next-location-as-pos in))
    (values start-pos end-pos eof?))
  (case in-mode
    ;; 'initial mode is right after `@` without immediate `{`, and we
    ;; may transition from 'initial mode to 'brackets mode at `[`
    [(initial brackets)
     ;; recur to parse in shrubbery mode:
     (define-values (t type paren start end backup sub-status pending-backup)
       (recur (in-at-shrubbery-status status)))
     ;; to keep the term and possibly exit 'initial or 'brackets mode:
     (define (ok status)
       (define-values (next-status pending-backup)
         (cond
           [(and (not (s-exp-mode? sub-status))
                 (null? (in-at-openers status)))
            ;; either `{`, `[`, or back to shrubbery mode
            (define-values (opener pending-backup) (peek-at-opener* in))
            (cond
              [opener
               (values (in-at 'open (in-at-comment? status) #t opener sub-status '())
                       1)]
              [else
               (values
                (cond
                  [(and (not (eq? in-mode 'brackets))
                        (eqv? #\[ (peek-char in)))
                   (in-at 'brackets (in-at-comment? status) #t #f sub-status '())]
                  [(in-escaped? sub-status)
                   (in-escaped-at-status sub-status)]
                  [else sub-status])
                pending-backup)])]
           [else
            ;; continue in-at mode
            (values status 0)]))
       (values t type paren start end 0 next-status pending-backup))
     ;; converts a token to an error token:
     (define (error status)
       (values (struct-copy token t [name 'fail]) 'error #f start end 0 status 0))
     ;; update the shrubbery-level status, then keep the term or error,
     ;; tracking nesting depth through the status as we continue:
     (let ([status (struct-copy in-at status
                                [shrubbery-status sub-status])])
       (case (and (token? t) (token-name t))
         [(opener s-exp) (ok (struct-copy in-at status
                                          [openers (cons (if (eq? 's-exp (token-name t))
                                                             "{"
                                                             (token-e t))
                                                         (in-at-openers status))]))]
         [(closer)
          (cond
            [(and (pair? (in-at-openers status))
                  (closer-for? (token-e t) (car (in-at-openers status))))
             (ok (struct-copy in-at status
                              [openers (cdr (in-at-openers status))]))]
            [else
             (error status)])]
         [else (ok status)]))]
    ;; 'open mode is right after `@` when the next character is `{`,
    ;; or after a closing `}` when the next character is `{`
    [(open)
     (define opener (in-at-opener status))
     (define-values (start-pos end-pos eof?) (get-expected opener #\{))
     (ret 'at-opener (string-append opener "{") 'parenthesis '|{| start-pos end-pos
          (struct-copy in-at status [mode 'inside] [openers 0]))]
    ;; 'inside mode means in `{}` and not currently escaped, and we
    ;; transition to 'escape mode on a `@`, and we transition to 'close mode
    ;; on a `}` that is not balancing a `{` within `{}`
    [(inside)
     (define opener (in-at-opener status))
     (define closeable? (in-at-closeable? status))
     (define start-pos (next-location-as-pos in))
     (define o (open-output-string))
     ;; accumulate content text into `o`; `depth` counts nested `{`s
     (let loop ([depth (in-at-openers status)])
       (define ch (peek-char in))
       (cond
         [(eqv? ch #\newline)
          ;; convert a newline into a separate string input
          (define s (get-output-string o))
          (cond
            [(= 0 (string-length s))
             (read-char in)
             (define end-pos (next-location-as-pos in))
             (ret 'at-content "\n" 'text #f start-pos end-pos
                  (struct-copy in-at status [mode 'inside] [openers depth]))]
            [else
             (define end-pos (next-location-as-pos in))
             (ret 'at-content s 'text #f start-pos end-pos
                  (struct-copy in-at status [mode 'inside] [openers depth]))])]
         [(or (eof-object? ch)
              (and closeable?
                   (peek-at-closer in #:opener opener)))
          (cond
            [(or (zero? depth)
                 (eof-object? ch))
             ;; `lex/status` will handle the case that the content is empty
             (define end-pos (next-location-as-pos in))
             (ret 'at-content (get-output-string o) 'text #f start-pos end-pos
                  (struct-copy in-at status [mode 'close])
                  #:pending-backup 1)]
            [else
             ;; a closer balancing a nested opener: keep it as content
             (if (equal? opener "")
                 (write-char (read-char in) o)
                 (write-string (read-string (add1 (string-length opener)) in) o))
             (loop (sub1 depth))])]
         [(peek-at-prefixed #\@ in #:opener opener)
          ;; `lex/status` will handle the case that the content is empty
          (define end-pos (next-location-as-pos in))
          (ret 'at-content (get-output-string o) 'text #f start-pos end-pos
               (struct-copy in-at status [mode 'escape] [openers depth])
               #:pending-backup 1)]
         [(and closeable?
               (peek-at-opener in #:opener opener))
          ;; a nested opener: keep it as content and track depth
          (if (equal? opener "")
              (write-char (read-char in) o)
              (write-string (read-string (add1 (string-length opener)) in) o))
          (loop (add1 depth))]
         [else
          (write-char (read-char in) o)
          (loop depth)]))]
    ;; 'escape mode means in `{}`, not currently escaped, and expect `@` next
    [(escape)
     (define opener (in-at-opener status))
     (define-values (start-pos end-pos eof?) (get-expected opener #\@))
     (cond
       [(read-at-comment in)
        => (lambda (slashes)
             (cond
               [(peek-at-opener in)
                => (lambda (opener)
                     ;; block comment
                     (define end-pos (next-location-as-pos in))
                     (ret 'at-comment (string-append opener "@" slashes) 'comment #f start-pos end-pos
                          (in-at 'open #t #t opener (in-escaped 'initial (struct-copy in-at status [mode 'inside])) '())
                          #:pending-backup 1))]
               [else
                ;; line comment
                (read-line-comment 'comment (string-append opener "@" slashes) in start-pos
                                   #:status (struct-copy in-at status [mode 'inside])
                                   #:consume-newline? #t
                                   #:pending-backup 1)]))]
       [else
        ;; a plain `@` escape; may open another `{` immediately
        (define-values (next-opener pending-backup) (peek-at-opener* in))
        (define mode (if next-opener 'open 'initial))
        (ret 'at (string-append opener "@") 'at #f start-pos end-pos
             (in-at mode (in-at-comment? status) #t next-opener (in-escaped 'initial (struct-copy in-at status [mode 'inside])) '())
             #:pending-backup (if next-opener 1 pending-backup))])]
    ;; 'close mode handles the final `}` of a `{}`
    [(close)
     (define closer (at-opener->closer (in-at-opener status)))
     (define-values (start-pos end-pos eof?) (get-expected #\} closer))
     (cond
       [eof? (ret-eof start-pos end-pos)]
       [else
        (define sub-status (in-at-shrubbery-status status))
        ;; might continue with another immediate opener:
        (define-values (next-opener pending-backup) (peek-at-opener* in))
        (ret 'at-closer (string-append "}" closer) 'parenthesis '|}| start-pos end-pos
             (if next-opener
                 (in-at 'open (in-at-comment? status) #t next-opener sub-status '())
                 (if (in-escaped? sub-status)
                     (in-escaped-at-status sub-status)
                     sub-status))
             #:pending-backup (if next-opener
                                  1
                                  pending-backup))])]
    [else (error "unknown at-exp state")]))
;; Like `peek-at-opener*`, but returns only the opener (or #f),
;; discarding the pending-backup amount.
(define (peek-at-opener in #:opener [opener #f])
  (let-values ([(found-opener _pending-backup)
                (peek-at-opener* in #:opener opener)])
    found-opener))
;; returns opener or #f, plus a pending-backup amount;
;; the pending-backup amount can be > 1 if opener is #f, and
;; it represents the number of characters that need to be consumed
;; to get past the point where the content is a known non-opener
(define (peek-at-opener* in #:opener [opener #f])
  (cond
    [opener
     ;; look for another instance of the current opener
     (values (peek-at-prefixed #\{ in #:opener opener)
             1)]
    [else
     ;; look for a fresh opener
     (define ch (peek-char in))
     (cond
       [(eqv? ch #\{) (values "" 1)]
       [(eqv? ch #\|)
        ;; `|` may start a multi-character opener such as `|<<{`,
        ;; built from ASCII symbolic/punctuation characters
        (let loop ([chars '(#\|)] [offset 1])
          (define ch (peek-char in offset))
          (cond
            [(eof-object? ch) (values #f (add1 offset))]
            [(eqv? ch #\{) (values (list->string (reverse chars))
                                   1)]
            [(and ((char->integer ch) . < . 128)
                  (or (char-symbolic? ch)
                      (char-punctuation? ch)))
             (loop (cons ch chars) (add1 offset))]
            [else (values #f (add1 offset))]))]
       [else (values #f 1)])]))
;; Peek (without consuming) whether the upcoming input is `opener`
;; followed by `ch`; returns `opener` on a match, #f otherwise.
(define (peek-at-prefixed ch in #:opener opener)
  (define len (string-length opener))
  (and (for/and ([i (in-range len)])
         (eqv? (peek-char in i) (string-ref opener i)))
       (if (eqv? ch (peek-char in len))
           opener
           #f)))
;; Peek (without consuming) whether the upcoming input is `}` followed by
;; the reversed, bracket-mirrored form of `opener`; returns `opener` on a
;; match, #f otherwise.
(define (peek-at-closer in #:opener [opener #f])
  (define ch (peek-char in))
  (cond
    [(eqv? ch #\})
     (let loop ([offset 0])
       (cond
         [(= offset (string-length opener)) opener]
         ;; compare against the opener from its end, mirroring brackets
         [(eqv? (peek-char in (add1 offset))
                (flip-at-bracket (string-ref opener (- (string-length opener) offset 1))))
          (loop (add1 offset))]
         [else #f]))]
    [else #f]))
;; If the next two characters are `//`, consume them and return "//";
;; otherwise return #f and consume nothing.
(define (read-at-comment in)
  (cond
    [(and (eqv? #\/ (peek-char in))
          (eqv? #\/ (peek-char in 1)))
     (read-char in)
     (read-char in)
     "//"]
    [else #f]))
;; Mirror a single bracket character (`<`↔`>`, `[`↔`]`, `(`↔`)`);
;; any other character is returned unchanged.
(define flip-at-bracket
  (let ([flips '((#\< . #\>) (#\> . #\<)
                 (#\[ . #\]) (#\] . #\[)
                 (#\( . #\)) (#\) . #\())])
    (lambda (ch)
      (cond
        [(assv ch flips) => cdr]
        [else ch]))))
;; The closer that matches `opener`: the reverse of `opener` with each
;; bracket character mirrored; "" maps to "".
(define (at-opener->closer opener)
  (define n (string-length opener))
  (if (zero? n)
      ""
      (build-string n
                    (lambda (i)
                      (flip-at-bracket (string-ref opener (- n i 1)))))))
;; Capture the port's current location as a parser-tools `position`.
(define (next-location-as-pos in)
  (define-values (line col pos) (port-next-location in))
  (position pos line col))
;; After a bad character, decide whether to report just `lexeme` as the
;; error token or to extend it through a following chunk.
;; NOTE(review): `next` can be a non-character special value here, and
;; `char-whitespace?` would then raise — TODO confirm specials cannot
;; reach this path. Also, `get-chunk` matches only whitespace, while the
;; extending branch is taken when `next` is NOT whitespace — TODO confirm
;; the intended chunk pattern.
(define (extend-error lexeme start end in)
  (define next (peek-char-or-special in))
  (if (or (memq next
                `(special
                  #\" #\, #\' #\` #\( #\) #\[ #\] #\{ #\} #\;
                  ,eof))
          (char-whitespace? next))
      (ret 'fail lexeme 'error #f start end 'bad)
      (let-values (((rest end-pos) (get-chunk in)))
        (ret 'fail (string-append lexeme rest) 'error #f start end-pos 'bad))))
;; Consume a run of whitespace, returning it with its end position
;; (used by `extend-error` to extend a failed token).
(define get-chunk
  (lexer
   [(:+ whitespace) (values lexeme end-pos)]))
;; Convert a number lexeme to a Racket number: a "0x..." prefix selects
;; base 16 (parsing the part after "0x"); `_` digit separators are
;; stripped before conversion.
(define (parse-number s)
  (define (strip-separators str)
    (regexp-replace* #rx"_" str ""))
  (define hex?
    (and (> (string-length s) 2)
         (eqv? (string-ref s 1) #\x)))
  (if hex?
      (string->number (strip-separators (substring s 2)) 16)
      (string->number (strip-separators s))))
;; Convert a string-literal lexeme to its string value, relying on the
;; Racket reader for escape-sequence handling.
(define (parse-string s)
  (define in (open-input-string s))
  (read in))
;; Convert a byte-string-literal lexeme to its bytes value via the
;; Racket reader.
(define (parse-byte-string s)
  (define in (open-input-string s))
  (read in))
;; Convert a character-literal lexeme (quote, contents, quote) to a
;; character by re-wrapping its contents as a string literal and letting
;; the Racket reader decode any escape sequence.
(define (parse-char s)
  (define contents (substring s 1 (sub1 (string-length s))))
  (define as-string (read (open-input-string (format "\"~a\"" contents))))
  (string-ref as-string 0))
;; argument string matches `number`; check whether adding "." to the end
;; could make sense: true when everything after an optional sign is
;; digits and `_` separators (i.e., the lexeme is a plain integer)
(define (decimal-integer? s)
  (define start
    (if (memv (string-ref s 0) '(#\+ #\-)) 1 0))
  (for/and ([ch (in-string s start)])
    (or (char-numeric? ch)
        (eqv? ch #\_))))
;; Matches a leading operator token and reports whether it spans more
;; than one character; any other input (or EOF) yields #f.
(define operator-lexer
  (lexer
   [operator ((string-length lexeme) . > . 1)]
   [(eof) #f]
   [any-char #f]))
;; Without consuming input, check whether the upcoming characters lex as
;; a multi-character operator (used to decide whether a trailing `.`
;; belongs to a number or starts an operator).
(define (peek-multi-char-operator? input-port)
  (call-with-peeking-port
   input-port
   (lambda (p)
     (operator-lexer p))))
;; After lexing a number, check for a trailing `.` that is not the start
;; of a multi-character operator; if found, consume it and extend the
;; lexeme and end position. Returns (values consumed-dot? lexeme end-pos
;; pending-backup).
(define (maybe-consume-trailing-dot input-port lexeme end-pos)
  (define ch (peek-char input-port))
  (cond
    [(eqv? ch #\.)
     (cond
       [(peek-multi-char-operator? input-port)
        ;; the `.` begins an operator such as `.=`; leave it unconsumed
        (values #f lexeme end-pos 1)]
       [else
        (read-char input-port)
        (define new-lexeme (string-append lexeme "."))
        (define new-end-pos (struct-copy position end-pos
                                         [offset (add1 (position-offset end-pos))]
                                         [col (let ([c (position-col end-pos)])
                                                (and c (add1 c)))]))
        (values #t new-lexeme new-end-pos 1)])]
    [else (values #f lexeme end-pos 0)]))
;; A lexed token: `name` is the token kind (e.g. 'identifier, 'opener),
;; `value` is a syntax object carrying the datum and location.
(struct token (name value))
;; A token with an explicit srcloc used instead of the syntax object's.
(struct located-token token (srcloc))
;; The datum of a token's syntax-object value.
(define (token-e t)
  (syntax-e (token-value t)))
;; The token's line: from its explicit srcloc when it is a
;; `located-token`, otherwise from its syntax object.
(define (token-line t)
  (if (located-token? t)
      (srcloc-line (located-token-srcloc t))
      (syntax-line (token-value t))))
;; The token's column. A bar operator's column is nudged by +0.5 —
;; presumably so `|` compares as strictly between adjacent columns in
;; indentation-sensitive parsing; TODO confirm against the parser.
(define (token-column t)
  (let ([c (if (located-token? t)
               (srcloc-column (located-token-srcloc t))
               (syntax-column (token-value t)))])
    (if (and c (eq? (token-name t) 'bar-operator))
        (+ c 0.5)
        c)))
;; The token's srcloc: explicit for a `located-token`, otherwise built
;; from the syntax object's location fields.
(define (token-srcloc t)
  (cond
    [(located-token? t)
     (located-token-srcloc t)]
    [else
     (define s (token-value t))
     (srcloc (syntax-source s)
             (syntax-line s)
             (syntax-column s)
             (syntax-position s)
             (syntax-span s))]))
;; Functionally replace a token's name, keeping its value (and srcloc).
(define (token-rename t name)
  (struct-copy token t [name name]))
;; Wrap syntax object `s` as a token named `name`; when `srcloc` is
;; supplied, attach it as the token's explicit location.
(define (syntax->token name s [srcloc #f])
  (cond
    [srcloc (located-token name s srcloc)]
    [else (token name s)]))
;; Runs `lex/status` in a loop, but switches to `finish-s-exp`
;; for an S-expression escape:
;; Returns a list of tokens (or, with `#:keep-type?`, vectors of token,
;; type, and paren). With `#:interactive?`, stops early at a blank-line
;; boundary when at top level outside any nested status.
(define (lex-all in fail
                 #:text-mode? [text-mode? #f]
                 #:keep-type? [keep-type? #f]
                 #:source [source (object-name in)]
                 #:interactive? [interactive? #f])
  (define status (if text-mode?
                     (make-in-text-status)
                     'initial))
  (parameterize ([current-lexer-source source])
    ;; `depth` tracks opener/closer nesting; `blanks` counts consecutive
    ;; newline-ending whitespace tokens; `multi?` records whether a
    ;; top-level block/semicolon was seen (for interactive termination)
    (let loop ([status status] [depth 0] [blanks 0] [multi? #f])
      (cond
        [(eof-object? (peek-char in))
         ;; don't consume an EOF
         '()]
        [else
         (define-values (tok type paren start-pos end-pos backup new-status)
           (lex/status in 0 status #f))
         (define (wrap r)
           (if keep-type?
               (vector r type paren)
               r))
         (define name (token-name tok))
         (case name
           [(EOF) '()]
           [(fail) (fail tok "read error")]
           [(whitespace)
            (define a (wrap tok))
            (define newline? (let* ([s (syntax-e (token-value tok))]
                                    [len (string-length s)])
                               (and (positive? len)
                                    (eqv? #\newline (string-ref s (sub1 len))))))
            (cond
              [(and interactive? newline? (zero? depth)
                    (blanks . >= . (if multi? 1 0))
                    (not (lex-nested-status? status)))
               ;; interactive input ends at a blank line at top level
               (list (wrap tok))]
              [else (cons (wrap tok)
                          (loop new-status depth (+ blanks (if newline? 1 0)) multi?))])]
           [else
            (define a (case name
                        [(s-exp)
                         ;; read the `#{...}` escape with the Racket reader
                         (wrap (finish-s-exp tok in fail))]
                        [else (wrap tok)]))
            (define d (loop (case name
                              [(s-exp) (out-of-s-exp-mode new-status)]
                              [else new-status])
                            (case name
                              [(opener) (add1 depth)]
                              [(closer) (sub1 depth)]
                              [else depth])
                            0
                            (case name
                              [(block-operator semicolon-operator)
                               (or multi?
                                   (and (zero? depth) (not (lex-nested-status? status))))]
                              [else multi?])))
            (cons a d)])]))))
;; Read the S-expression following a `#{` token plus the closing `}`,
;; producing one identifier/literal token spanning the whole `#{...}`.
;; Calls `fail` on EOF, on non-whitespace before `}`, and on a pair
;; datum (which is not allowed inside `#{}`).
(define (finish-s-exp open-tok in fail)
  (define v (read-syntax (current-lexer-source) in))
  (when (eof-object? v)
    (fail open-tok "expected S-expression after `#{`"))
  ;; skip whitespace up to the `}` and record the position just past it
  (define end-pos
    (let loop ()
      (define-values (line col pos) (port-next-location in))
      (define c (read-char in))
      (cond
        [(eof-object? c)
         (fail v "expected `}` after S-expression")]
        [(eqv? c #\})
         (add1 pos)]
        [(char-whitespace? c)
         (loop)]
        [else
         (define bad (datum->syntax #f c (list (current-lexer-source)
                                               line
                                               col
                                               pos
                                               1)))
         (fail bad "expected only whitespace or `}` after S-expression")])))
  (define result
    (syntax->token (if (identifier? v) 'identifier 'literal)
                   ;; the raw text reconstructs the `#{...}` form
                   (syntax-raw-property v
                                        (format "#{~s}" (syntax->datum v)))
                   (let ([loc (token-srcloc open-tok)])
                     (struct-copy srcloc loc
                                  [span (- end-pos (srcloc-position loc))]))))
  (when (pair? (syntax-e v))
    (fail result "S-expression in `#{` and `}` must not be a pair"))
  result)
;; Does closer string `cl` match opener string `op`?
(define (closer-for? cl op)
  (define expected
    (cond
      [(equal? op "(") ")"]
      [(equal? op "[") "]"]
      [(equal? op "{") "}"]
      [(equal? op "«") "»"]
      [else #f]))
  (equal? cl expected))
;; If everything up to the next newline/return (or EOF) is whitespace,
;; consume that line and return #t; otherwise leave the port untouched
;; and return #f. The peek skip count is in bytes, hence the use of
;; `char-utf-8-length` to advance past each peeked character.
(define (consume-only-whitespace-line? in)
  (let scan ([skip 0])
    (define ch (peek-char in skip))
    (cond
      [(or (eof-object? ch)
           (eqv? ch #\newline)
           (eqv? ch #\return))
       (read-line in)
       #t]
      [(char-whitespace? ch)
       (scan (+ skip (char-utf-8-length ch)))]
      [else #f])))
| null | https://raw.githubusercontent.com/mflatt/shrubbery-rhombus-0/cfe1b1450cfaef393fc7f33bef4d0aaf5f1837fe/shrubbery/lex.rkt | racket | 'identifier
'literal
'comment
'whitespace
'operator
'block-operator
'continue-operator
'bar-operator
'opener
'closer
'comma-operator
'semicolon-operator
's-exp
'fail
For case insensitivity
does not constrain to avoid surrogates:
disallows a number that starts +, -, or "."
doesn't match digits ending with "."; that case is handled with
a follow-up peek to use "." when not part of an multi-char operator
making whitespace end at newlines is for interactive parsing
where we end at a blank line
more future tokens; for example, when parsing `@|{`, there's a peek
triggered by `@` that decides how to proceed next, and if that
peek's result changes, then we'll need to go back to the `@` token.
The `amount` of pending backup is how many characters need to be
consumed before the pending backup expires. For example, with
need to re-lex starting from `@` before operators `|` and `<<<`.
the colorer would check the token just before a change, anyway.
anything involving a peek has a pending backup
within `#{}`
go out of S-expression mode by using shrubbery lexer again
within an `@` sequence
normal mode, after a form
normal mode, at start or after an operator or whitespace
a syntax coloring lexer must not return a token that
consumes no characters, so just drop it by recurring
If we have "@/{" and we add a "/" after the existing one,
we'll need to back up more:
backup is needed if the next character is `+` or `-`; ok to conservatively back up
add `.` to end of number
count `.` as error
after reading `@`, we enter an at-exp state machine for whether
we're in the initial part, within `[]`, or within `{}`; we have to
perform some parsing here to balance openers and closers; we leave
whitespace trimming to the parser layer
'initial mode is right after `@` without immediate `{`, and we
may transition from 'initial mode to 'brackets mode at `[`
recur to parse in shrubbery mode:
to keep the term and possibly exit 'initial or 'brackets mode:
either `{`, `[`, or back to shrubbery mode
continue in-at mode
converts a token to an error token:
update the shrubbery-level status, then keep the term or error,
tracking nesting depth through the status as we continue:
'open mode is right after `@` when the next character is `{`,
or after a closing `}` when the next character is `{`
'inside mode means in `{}` and not currently escaped, and we
transition to 'escape mode on a `@`, and we transition to 'close mode
on a `}` that is not balancing a `{` within `{}`
convert a newline into a separate string input
`lex/status` will handle the case that the content is empty
`lex/status` will handle the case that the content is empty
'escape mode means in `{}`, not currently escaped, and expect `@` next
block comment
line comment
'close mode handles the final `}` of a `{}`
might continue with another immediate opener:
it represents the number of characters that need to be consumed
to get past the point where the content is a known non-opener
look for another instance of the current opener
look for a fresh opener
argument string matches `number`; check whether adding "." to the end could make sense
Runs `lex/status` in a loop, but switches to `finish-s-exp`
for an S-expression escape: | #lang racket/base
(require parser-tools/lex
(for-syntax racket/base)
(prefix-in : parser-tools/lex-sre)
"private/property.rkt"
"private/peek-port.rkt")
(provide lex/status
lex-all
token-name
' EOF
token?
token-value
token-e
token-line
token-column
token-srcloc
token-rename
syntax->token
stx-for-original-property
current-lexer-source
make-in-text-status
lex-nested-status?
lex-dont-stop-status?)
(define-lex-abbrevs
[e (char-set "eE")]
[digit (:/ "0" "9")]
[digit_ (:or digit (:: digit "_"))]
[digit16 (:/ "af" "AF" "09")]
[digit16_ (:or digit16 (:: digit16 "_"))]
[digit8 (:/ "0" "7")]
[langchar (:or (:/ "az" "AZ" "09") "+" "-" "_")]
[unicode (:or (:: "u" (:** 1 4 digit16))
(:: "U" (:** 1 6 digit16)))]
[str (:: "\"" (:* string-element ) "\"")]
[string-element (:or (:~ "\"" "\\" "\n" "\r")
(:: "\\" unicode)
string-escape)]
[byte-str (:: "#\"" (:* byte-string-element) "\"")]
[byte-string-element (:or (:- (:/ "\x00" "\xFF") "\"" "\\" "\n" "\r")
string-escape)]
[string-escape (:or "\\\""
"\\\\"
"\\a"
"\\b"
"\\t"
"\\n"
"\\v"
"\\f"
"\\r"
"\\e"
"\\'"
(:: "\\" (:** 1 3 digit8))
(:: "\\x" (:** 1 2 digit16)))]
[bad-str (:: (:? "#") "\""
(:* (:~ "\"" "\\" #\newline)
(:: "\\" (:- any-char #\newline)))
(:? "\\" "\""))]
[boolean (:or "#true" "#false")]
[void-const "#void"]
[special-number (:: "#"
(:or "inf"
"neginf"
"nan"))]
[bad-hash (:- (:or (:: "#" (:* non-delims))
"#/")
boolean
void-const
special-number)]
[exponent-marker e]
[sign (char-set "+-")]
[script (:: "#!" (:or #\space #\/) (:* (:~ #\newline) (:: #\\ #\newline)))]
[identifier (:: (:or alphabetic "_")
(:* (:or alphabetic numeric "_")))]
[opchar (:or (:- symbolic (:or "~"))
(:- punctuation (:or "," ";" "#" "\\" "_" "@" "\""
"(" ")" "[" "]" "{" "}" "«" "»")))]
[operator (:- (:or opchar
(:: (:* opchar) (:- opchar "+" "-" "." "/"))
(:+ ".")
(:+ "+")
(:+ "-"))
"|" ":"
(:: (:* any-char) (:or "//" "/*") (:* any-char)))]
[keyword (:: "~" identifier)]
[bad-keyword (:: "~")]
[number/continuing (:or decimal-number/continuing
hex-number)]
[number (:: (:? sign)
(:or decimal-number
hex-number))]
[uinteger (:: (:* digit_) digit)]
[uinteger16 (:: (:* digit16_) digit16)]
[decimal-number/continuing (:or (:: uinteger (:? number-exponent))
(:: uinteger "." (:? uinteger) number-exponent)
(:: uinteger "." uinteger))]
[decimal-number (:or decimal-number/continuing
(:: "." uinteger (:? number-exponent)))]
[number-exponent (:: exponent-marker (:? sign) uinteger)]
[hex-number (:: "0x" uinteger16)]
[bad-number/continuing (:- (:: digit (:+ non-number-delims))
identifier
number/continuing)]
[bad-number (:- (:: (:? sign) digit (:+ non-number-delims))
identifier
(:: identifier ".")
number)]
[bad-comment "*/"]
[non-number-delims (:or non-delims
(:: "." non-delims))]
[non-delims (:or alphabetic numeric "_")]
[whitespace-segment (:or (:+ (:- whitespace "\n"))
(:: (:* (:- whitespace "\n")) "\n"))])
(define-syntax (ret stx)
(syntax-case stx (quote)
[(_ (quote name) lexeme #:raw raw (quote type) more ...)
(with-syntax ([ht (hasheq 'type #'type 'rhombus-type #'name)])
#`(make-ret (quote name) lexeme #:raw raw ht more ...))]
[(_ name lexeme type more ...)
#`(ret name lexeme #:raw #f type more ...)]))
(define (make-ret name lexeme #:raw [raw #f] attribs paren start-pos end-pos status
#:pending-backup [pending-backup 0])
(define backup 0)
(values (make-token name lexeme start-pos end-pos raw)
attribs paren (position-offset start-pos) (position-offset end-pos)
backup status
pending-backup))
(define stx-for-original-property (read-syntax #f (open-input-string "original")))
(define current-lexer-source (make-parameter "input"))
;; Builds a `token` named `name` whose value is a syntax object for `e`
;; located at [start-pos, end-pos) in the current lexer source.  `e` may be a
;; list (compound tokens such as `(op ...)`); each element becomes its own
;; syntax object sharing the same srcloc, with the raw text attached only to
;; the last one.  Unless the token is a comment, raw source text is recorded
;; via `syntax-raw-property` (falling back to the datum when it is a string).
(define (make-token name e start-pos end-pos [raw #f])
  (define offset (position-offset start-pos))
  ;; Srcloc vector shared by the token and any nested syntax objects.
  (define loc (vector (current-lexer-source)
                      (position-line start-pos)
                      (position-col start-pos)
                      offset
                      (- (position-offset end-pos)
                         offset)))
  (token name (let loop ([e e] [raw raw])
                (let ([e (if (pair? e)
                             ;; Convert each element; only the final element
                             ;; of the list keeps the `raw` property.
                             (let p-loop ([e e])
                               (cond
                                 [(null? (cdr e)) (list (loop (car e) raw))]
                                 [else (cons (loop (car e) #f)
                                             (p-loop (cdr e)))]))
                             e)]
                      [raw (if (pair? e) #f raw)])
                  (define stx (datum->syntax #f
                                             e
                                             loc
                                             stx-for-original-property))
                  (if (eq? name 'comment)
                      stx
                      (syntax-raw-property stx (or raw (if (string? e) e '()))))))))
;; Finishes a line comment whose prefix `lexeme` has already been consumed:
;; collects the rest of the line (skipping non-character specials, including
;; the newline only when `consume-newline?` is true) and returns the full
;; lexer-protocol tuple (values token attribs paren start end backup status
;; pending-backup).
(define (read-line-comment name lexeme input-port start-pos
                           #:status [status 'initial]
                           #:consume-newline? [consume-newline? #f]
                           #:pending-backup [pending-backup 0])
  (let ([comment (apply string (append (string->list lexeme)
                                       (read-line/skip-over-specials input-port
                                                                     consume-newline?)))])
    (define-values (end-line end-col end-offset) (port-next-location input-port))
    (values (make-token name comment start-pos (position end-offset end-line end-col))
            'comment #f
            (position-offset start-pos)
            end-offset
            0
            status
            pending-backup)))
(define get-next-comment
(lexer
["/*" (values 1 end-pos lexeme)]
["*/" (values -1 end-pos lexeme)]
[(:or "/" "*" (:* (:~ "*" "/")))
(let-values ([(delta end-pos rest-lexeme) (get-next-comment input-port)])
(values delta end-pos (string-append lexeme rest-lexeme)))]
[(eof) (values 'eof end-pos "")]
[(special)
(get-next-comment input-port)]
[(special-comment)
(get-next-comment input-port)]))
;; Continues lexing a `/* ... */` block comment that currently has
;; `num-opens` unmatched openers; `lexeme` is the text consumed so far.
;; Block comments nest: each `/*` increments and each `*/` decrements the
;; count until it reaches zero.  Hitting eof inside the comment produces a
;; `fail` token instead.
(define (read-nested-comment num-opens start-pos lexeme input)
  (define-values (diff end next-lexeme) (get-next-comment input))
  (cond
    [(eq? 'eof diff) (ret 'fail eof 'error #f start-pos end 'initial)]
    [else
     (define all-lexeme (string-append lexeme next-lexeme))
     (define next-num-opens (+ diff num-opens))
     (cond
       [(= 0 next-num-opens) (ret 'comment all-lexeme 'comment #f start-pos end 'initial)]
       [else (read-nested-comment next-num-opens start-pos all-lexeme input)])]))
;; Current position component of port `i`'s next-location triple.
(define (get-offset i)
  (define-values (line col offset) (port-next-location i))
  offset)
;; Collects the characters of the current line from port `i`, discarding any
;; non-character "special" values.  Stops at the terminating newline, which
;; is consumed (and included in the result) only when `consume-newline?` is
;; true; eof simply ends the line.
(define (read-line/skip-over-specials i consume-newline?)
  (let gather ([acc '()])
    (define next (peek-char-or-special i))
    (cond
      [(eof-object? next) (reverse acc)]
      [(eqv? next #\newline)
       (cond
         [consume-newline?
          (read-char-or-special i)
          (reverse (cons #\newline acc))]
         [else (reverse acc)])]
      [else
       (read-char-or-special i)
       (gather (if (char? next) (cons next acc) acc))])))
;; Lexer-status structures (prefab, so tools can serialize them):
;; - `s-exp-mode`: inside a `#{...}` escape, tracking paren `depth` and the
;;   host Racket lexer's own `status`.
;; - `in-at`: inside an `@` sequence; `mode` selects the at-exp submode,
;;   `comment?` marks `@//`-commented content, `opener` holds any custom
;;   opener text (e.g. built from `|` characters), and `openers` tracks
;;   nesting.
;; - `in-escaped`: shrubbery lexing nested inside `@` text, remembering the
;;   at-exp status to return to.
(struct s-exp-mode (depth status) #:prefab)
(struct in-at (mode comment? closeable? opener shrubbery-status openers) #:prefab)
(struct in-escaped (shrubbery-status at-status) #:prefab)
;; Wraps another status with the number of characters that must be consumed
;; before peek-based lookahead no longer forces a re-lex (the "pending
;; backup" counter threaded through `lex/status`).
(struct pending-backup-mode (amount status) #:prefab)
;; Starting status for lexing directly inside at-exp text.
(define (make-in-text-status)
  (in-at 'inside #f #f "" 'initial 0))
;; Unwinds a lexer status when a `#{...}` escape finishes: removes the
;; innermost `s-exp-mode` wherever it is nested (inside pending-backup,
;; at-exp, or escaped wrappers) and pops the matching "{" opener that was
;; pushed when the escape began.  Errors if the status is not actually in
;; S-expression mode.
(define (out-of-s-exp-mode status)
  (cond
    [(pending-backup-mode? status)
     (struct-copy pending-backup-mode status
                  [status (out-of-s-exp-mode
                           (pending-backup-mode-status status))])]
    [(s-exp-mode? status) (s-exp-mode-status status)]
    [(in-at? status)
     (struct-copy in-at status
                  [shrubbery-status (out-of-s-exp-mode (in-at-shrubbery-status status))]
                  [openers (let ([openers (in-at-openers status)])
                             ;; The "{" pushed for `#{` must be on top.
                             (unless (and (pair? openers) (equal? "{" (car openers)))
                               (error 'out-of-s-exp-mode "expected opener not found"))
                             (cdr openers))])]
    [(in-escaped? status)
     (struct-copy in-escaped status
                  [shrubbery-status (out-of-s-exp-mode (in-escaped-shrubbery-status status))])]
    [else (error 'out-of-s-exp-mode "not in S-expression mode!")]))
;; A status is "nested" when lexing sits inside some compound construct
;; (at-exp text, a `#{...}` escape, etc.) rather than one of the plain
;; symbol states such as 'initial or 'continuing; pending-backup wrappers
;; are looked through.
(define (lex-nested-status? status)
  (if (pending-backup-mode? status)
      (lex-nested-status? (pending-backup-mode-status status))
      (not (or (not status) (symbol? status)))))
;; While a pending backup is outstanding, callers should keep lexing rather
;; than stop at this token boundary.
(define (lex-dont-stop-status? status)
  (pending-backup-mode? status))
(define (lex/status in pos status-in racket-lexer*/status)
(define prev-pending-backup (if (pending-backup-mode? status-in)
(pending-backup-mode-amount status-in)
0))
(define status (if (pending-backup-mode? status-in)
(pending-backup-mode-status status-in)
status-in))
(let-values ([(tok type paren start end backup status pending-backup)
(let loop ([status status])
(cond
[(s-exp-mode? status)
(unless racket-lexer*/status
(error "shouldn't be in S-expression mode without a Racket lexer"))
(define depth (s-exp-mode-depth status))
(cond
[(and (zero? depth)
(eqv? #\} (peek-char in)))
(shrubbery-lexer/status in)]
[else
(define-values (tok type paren start end backup s-exp-status action)
(racket-lexer*/status in pos (s-exp-mode-status status)))
(values tok type paren start end backup (case action
[(open)
(s-exp-mode (add1 depth) s-exp-status)]
[(close)
(s-exp-mode (sub1 depth) s-exp-status)]
[else
(s-exp-mode depth s-exp-status)])
0)])]
[(in-at? status)
(define-values (tok type paren start end backup new-status pending-backup)
(at-lexer in status (lambda (status) (loop status))))
(define new-type (if (and (in-at-comment? status)
(not (eq? type 'eof)))
(hash-set (if (hash? type) type (hash 'type type)) 'comment? #t)
type))
(values tok new-type paren start end backup new-status pending-backup)]
[(in-escaped? status)
(define-values (t type paren start end backup sub-status pending-backup)
(loop (in-escaped-shrubbery-status status)))
(values t type paren start end backup (struct-copy in-escaped status
[shrubbery-status sub-status])
pending-backup)]
[(eq? status 'continuing)
(shrubbery-lexer-continuing/status in)]
[else
(shrubbery-lexer/status in)]))])
(cond
[(and (token? tok)
(eq? (token-name tok) 'at-content)
(eqv? 0 (string-length (token-e tok))))
(lex/status in pos status racket-lexer*/status)]
[else
(define new-backup (cond
[(zero? prev-pending-backup) backup]
[#t (max 1 backup)]
[(not (token? tok)) backup]
[(eq? (token-name tok) 'at-opener) 1]
[(eq? (token-name tok) 'at-closer) 1]
[(eq? (token-name tok) 'at) 1]
[(eq? (token-name tok) 'at-comment) 3]
[(and (in-at? status) (eq? (token-name tok) 'operator)) 2]
[else backup]))
(define new-pending-backup (max pending-backup
(if (and end start)
(- prev-pending-backup (- end start))
0)))
(define status/backup (if (zero? new-pending-backup)
status
(pending-backup-mode new-pending-backup status)))
(values tok type paren start end new-backup status/backup)])))
(define-syntax-rule (make-lexer/status number bad-number)
(lexer
[whitespace-segment
(ret 'whitespace lexeme 'white-space #f start-pos end-pos 'initial)]
[str (ret 'literal (parse-string lexeme) #:raw lexeme 'string #f start-pos end-pos 'datum)]
[byte-str (ret 'literal (parse-byte-string lexeme) #:raw lexeme 'string #f start-pos end-pos 'datum)]
[bad-number
(let-values ([(dot? new-lexeme new-end-pos pending-backup) (maybe-consume-trailing-dot input-port lexeme end-pos)])
(ret 'fail new-lexeme 'error #f start-pos new-end-pos 'continuing
#:pending-backup 1))]
[number
(let-values ([(dot? new-lexeme new-end-pos pending-backup) (maybe-consume-trailing-dot input-port lexeme end-pos)])
(cond
[dot?
(cond
[(decimal-integer? lexeme)
(ret 'literal (parse-number new-lexeme) #:raw new-lexeme 'constant #f start-pos new-end-pos 'continuing
#:pending-backup pending-backup)]
[else
(ret 'fail new-lexeme 'error #f start-pos new-end-pos 'continuing
#:pending-backup pending-backup)])]
[else
(ret 'literal (parse-number lexeme) #:raw lexeme 'constant #f start-pos end-pos 'continuing
#:pending-backup pending-backup)]))]
[special-number
(let ([num (case lexeme
[("#inf") +inf.0]
[("#neginf") -inf.0]
[("#nan") +nan.0])])
(ret 'literal num #:raw lexeme 'constant #f start-pos end-pos 'continuing))]
[boolean
(ret 'literal (equal? lexeme "#true") #:raw lexeme 'constant #f start-pos end-pos 'continuing)]
[void-const
(ret 'literal (void) #:raw lexeme 'constant #f start-pos end-pos 'continuing)]
["//" (read-line-comment 'comment lexeme input-port start-pos)]
["/*" (read-nested-comment 1 start-pos lexeme input-port)]
["#//"
(ret 'group-comment lexeme 'comment #f start-pos end-pos 'initial)]
[(:: (:or "#lang " "#!")
(:or langchar
(:: langchar (:* (:or langchar "/")) langchar)))
(ret 'comment lexeme 'other #f start-pos end-pos 'initial)]
[(:: (:or "#lang " "#!") (:* (:& any-char (complement whitespace))))
(ret 'fail lexeme 'error #f start-pos end-pos 'initial)]
[script
(ret 'comment lexeme 'comment #f start-pos end-pos 'initial)]
[(:or "(" "[" "{" "«")
(ret 'opener lexeme 'parenthesis (string->symbol lexeme) start-pos end-pos 'initial)]
[(:or ")" "]" "}" "»")
(ret 'closer lexeme 'parenthesis (string->symbol lexeme) start-pos end-pos 'continuing)]
["#{"
(ret 's-exp lexeme 'parenthesis '|{| start-pos end-pos (s-exp-mode 0 #f))]
[":"
(ret 'block-operator lexeme 'block-operator #f start-pos end-pos 'initial)]
["|"
(ret 'bar-operator lexeme 'bar-operator #f start-pos end-pos 'initial)]
["\\"
(ret 'continue-operator lexeme 'continue-operator #f start-pos end-pos 'initial)]
[","
(ret 'comma-operator lexeme 'separator #f start-pos end-pos 'initial)]
[";"
(ret 'semicolon-operator lexeme 'separator #f start-pos end-pos 'initial)]
[identifier
(ret 'identifier (string->symbol lexeme) #:raw lexeme 'symbol #f start-pos end-pos 'continuing)]
[operator
(ret 'operator (list 'op (string->symbol lexeme)) #:raw lexeme 'operator #f start-pos end-pos 'initial)]
[keyword
(let ([kw (string->keyword (substring lexeme 1))])
(ret 'identifier kw #:raw lexeme 'hash-colon-keyword #f start-pos end-pos 'continuing))]
["@//"
(let ([opener (peek-at-opener input-port)])
(if opener
(let ([status (in-at 'open #t #t opener 'initial '())])
(ret 'at-comment lexeme 'comment (string->symbol lexeme) start-pos end-pos status #:pending-backup 1))
all characters up to an opener - deciding character are part of the comment , so pending - backup = 1
(read-line-comment 'at-comment lexeme input-port start-pos #:pending-backup 1)))]
["@"
(let-values ([(opener pending-backup) (peek-at-opener* input-port)])
(define mode (if opener 'open 'initial))
(ret 'at lexeme 'at #f start-pos end-pos (in-at mode #f #t opener 'initial '())
#:pending-backup (if opener 1 pending-backup)))]
[(special)
(cond
[(or (number? lexeme) (boolean? lexeme) (void? lexeme))
(ret 'literal lexeme 'constant #f start-pos end-pos 'continuing)]
[(string? lexeme)
(ret 'literal lexeme 'string #f start-pos end-pos 'continuing)]
[(keyword? lexeme)
(ret 'literal lexeme 'hash-colon-keyword #f start-pos end-pos 'continuing)]
[else
(ret 'literal lexeme 'no-color #f start-pos end-pos 'continuing)])]
[(special-comment)
(ret 'comment "" 'comment #f start-pos end-pos 'initial)]
[(eof) (ret-eof start-pos end-pos)]
[(:or bad-str bad-keyword bad-hash bad-comment)
(ret 'fail lexeme 'error #f start-pos end-pos 'bad)]
[any-char (extend-error lexeme start-pos end-pos input-port)]))
(define (ret-eof start-pos end-pos)
(values (make-token 'EOF eof start-pos end-pos) 'eof #f #f #f 0 #f 0))
(define shrubbery-lexer/status (make-lexer/status number bad-number))
(define shrubbery-lexer-continuing/status (make-lexer/status number/continuing bad-number/continuing))
(define (at-lexer in status recur)
(define in-mode (in-at-mode status))
anything that uses ` get - expected ` should trigger a non - zero backup
(define (get-expected opener/ch ch/closer)
(define (get-all-expected s)
(for ([ch (in-string s)])
(unless (eqv? ch (read-char in))
(error "inconsistent input" ch))))
(define start-pos (next-location-as-pos in))
(define eof?
(cond
[(string? opener/ch)
(get-all-expected opener/ch)
(unless (eqv? ch/closer (read-char in))
(error "inconsistent opener input" ch/closer))
#f]
[else
(define ch (read-char in))
(cond
[(eof-object? ch) #t]
[else
(unless (eqv? opener/ch ch)
(error "inconsistent closer input" opener/ch))
(get-all-expected ch/closer)
#f])]))
(define end-pos (next-location-as-pos in))
(values start-pos end-pos eof?))
(case in-mode
[(initial brackets)
(define-values (t type paren start end backup sub-status pending-backup)
(recur (in-at-shrubbery-status status)))
(define (ok status)
(define-values (next-status pending-backup)
(cond
[(and (not (s-exp-mode? sub-status))
(null? (in-at-openers status)))
(define-values (opener pending-backup) (peek-at-opener* in))
(cond
[opener
(values (in-at 'open (in-at-comment? status) #t opener sub-status '())
1)]
[else
(values
(cond
[(and (not (eq? in-mode 'brackets))
(eqv? #\[ (peek-char in)))
(in-at 'brackets (in-at-comment? status) #t #f sub-status '())]
[(in-escaped? sub-status)
(in-escaped-at-status sub-status)]
[else sub-status])
pending-backup)])]
[else
(values status 0)]))
(values t type paren start end 0 next-status pending-backup))
(define (error status)
(values (struct-copy token t [name 'fail]) 'error #f start end 0 status 0))
(let ([status (struct-copy in-at status
[shrubbery-status sub-status])])
(case (and (token? t) (token-name t))
[(opener s-exp) (ok (struct-copy in-at status
[openers (cons (if (eq? 's-exp (token-name t))
"{"
(token-e t))
(in-at-openers status))]))]
[(closer)
(cond
[(and (pair? (in-at-openers status))
(closer-for? (token-e t) (car (in-at-openers status))))
(ok (struct-copy in-at status
[openers (cdr (in-at-openers status))]))]
[else
(error status)])]
[else (ok status)]))]
[(open)
(define opener (in-at-opener status))
(define-values (start-pos end-pos eof?) (get-expected opener #\{))
(ret 'at-opener (string-append opener "{") 'parenthesis '|{| start-pos end-pos
(struct-copy in-at status [mode 'inside] [openers 0]))]
[(inside)
(define opener (in-at-opener status))
(define closeable? (in-at-closeable? status))
(define start-pos (next-location-as-pos in))
(define o (open-output-string))
(let loop ([depth (in-at-openers status)])
(define ch (peek-char in))
(cond
[(eqv? ch #\newline)
(define s (get-output-string o))
(cond
[(= 0 (string-length s))
(read-char in)
(define end-pos (next-location-as-pos in))
(ret 'at-content "\n" 'text #f start-pos end-pos
(struct-copy in-at status [mode 'inside] [openers depth]))]
[else
(define end-pos (next-location-as-pos in))
(ret 'at-content s 'text #f start-pos end-pos
(struct-copy in-at status [mode 'inside] [openers depth]))])]
[(or (eof-object? ch)
(and closeable?
(peek-at-closer in #:opener opener)))
(cond
[(or (zero? depth)
(eof-object? ch))
(define end-pos (next-location-as-pos in))
(ret 'at-content (get-output-string o) 'text #f start-pos end-pos
(struct-copy in-at status [mode 'close])
#:pending-backup 1)]
[else
(if (equal? opener "")
(write-char (read-char in) o)
(write-string (read-string (add1 (string-length opener)) in) o))
(loop (sub1 depth))])]
[(peek-at-prefixed #\@ in #:opener opener)
(define end-pos (next-location-as-pos in))
(ret 'at-content (get-output-string o) 'text #f start-pos end-pos
(struct-copy in-at status [mode 'escape] [openers depth])
#:pending-backup 1)]
[(and closeable?
(peek-at-opener in #:opener opener))
(if (equal? opener "")
(write-char (read-char in) o)
(write-string (read-string (add1 (string-length opener)) in) o))
(loop (add1 depth))]
[else
(write-char (read-char in) o)
(loop depth)]))]
[(escape)
(define opener (in-at-opener status))
(define-values (start-pos end-pos eof?) (get-expected opener #\@))
(cond
[(read-at-comment in)
=> (lambda (slashes)
(cond
[(peek-at-opener in)
=> (lambda (opener)
(define end-pos (next-location-as-pos in))
(ret 'at-comment (string-append opener "@" slashes) 'comment #f start-pos end-pos
(in-at 'open #t #t opener (in-escaped 'initial (struct-copy in-at status [mode 'inside])) '())
#:pending-backup 1))]
[else
(read-line-comment 'comment (string-append opener "@" slashes) in start-pos
#:status (struct-copy in-at status [mode 'inside])
#:consume-newline? #t
#:pending-backup 1)]))]
[else
(define-values (next-opener pending-backup) (peek-at-opener* in))
(define mode (if next-opener 'open 'initial))
(ret 'at (string-append opener "@") 'at #f start-pos end-pos
(in-at mode (in-at-comment? status) #t next-opener (in-escaped 'initial (struct-copy in-at status [mode 'inside])) '())
#:pending-backup (if next-opener 1 pending-backup))])]
[(close)
(define closer (at-opener->closer (in-at-opener status)))
(define-values (start-pos end-pos eof?) (get-expected #\} closer))
(cond
[eof? (ret-eof start-pos end-pos)]
[else
(define sub-status (in-at-shrubbery-status status))
(define-values (next-opener pending-backup) (peek-at-opener* in))
(ret 'at-closer (string-append "}" closer) 'parenthesis '|}| start-pos end-pos
(if next-opener
(in-at 'open (in-at-comment? status) #t next-opener sub-status '())
(if (in-escaped? sub-status)
(in-escaped-at-status sub-status)
sub-status))
#:pending-backup (if next-opener
1
pending-backup))])]
[else (error "unknown at-exp state")]))
(define (peek-at-opener in #:opener [opener #f])
(define-values (found-opener pending-backup)
(peek-at-opener* in #:opener opener))
found-opener)
the pending - backup amount can be > 1 if opener is # f , and
(define (peek-at-opener* in #:opener [opener #f])
(cond
[opener
(values (peek-at-prefixed #\{ in #:opener opener)
1)]
[else
(define ch (peek-char in))
(cond
[(eqv? ch #\{) (values "" 1)]
[(eqv? ch #\|)
(let loop ([chars '(#\|)] [offset 1])
(define ch (peek-char in offset))
(cond
[(eof-object? ch) (values #f (add1 offset))]
[(eqv? ch #\{) (values (list->string (reverse chars))
1)]
[(and ((char->integer ch) . < . 128)
(or (char-symbolic? ch)
(char-punctuation? ch)))
(loop (cons ch chars) (add1 offset))]
[else (values #f (add1 offset))]))]
[else (values #f 1)])]))
;; Returns `opener` when the port's upcoming characters are exactly `opener`
;; followed by `ch`, and #f otherwise.  Only peeks; consumes nothing.
(define (peek-at-prefixed ch in #:opener opener)
  (define len (string-length opener))
  (and (for/and ([i (in-range len)])
         (eqv? (peek-char in i) (string-ref opener i)))
       (eqv? ch (peek-char in len))
       opener))
(define (peek-at-closer in #:opener [opener #f])
(define ch (peek-char in))
(cond
[(eqv? ch #\})
(let loop ([offset 0])
(cond
[(= offset (string-length opener)) opener]
[(eqv? (peek-char in (add1 offset))
(flip-at-bracket (string-ref opener (- (string-length opener) offset 1))))
(loop (add1 offset))]
[else #f]))]
[else #f]))
;; Consumes a leading "//" from `in` and returns it; returns #f (consuming
;; nothing) when the next two characters are not both slashes.
(define (read-at-comment in)
  (cond
    [(and (eqv? #\/ (peek-char in))
          (eqv? #\/ (peek-char in 1)))
     (read-string 2 in)
     "//"]
    [else #f]))
;; Mirror image of an ASCII bracket character; any other character maps to
;; itself.
(define (flip-at-bracket ch)
  (cond
    [(assv ch '((#\< . #\>) (#\> . #\<)
                (#\[ . #\]) (#\] . #\[)
                (#\( . #\)) (#\) . #\()))
     => cdr]
    [else ch]))
;; Builds the closing text that matches `opener`: the opener reversed with
;; each bracket character mirrored (so "|<<" closes with ">>|").
(define (at-opener->closer opener)
  (cond
    [(eqv? 0 (string-length opener)) ""]
    [else
     (list->string (map flip-at-bracket (reverse (string->list opener))))]))
(define (next-location-as-pos in)
(define-values (line col pos) (port-next-location in))
(position pos line col))
(define (extend-error lexeme start end in)
(define next (peek-char-or-special in))
(if (or (memq next
`(special
,eof))
(char-whitespace? next))
(ret 'fail lexeme 'error #f start end 'bad)
(let-values (((rest end-pos) (get-chunk in)))
(ret 'fail (string-append lexeme rest) 'error #f start end-pos 'bad))))
(define get-chunk
(lexer
[(:+ whitespace) (values lexeme end-pos)]))
;; Parses the text of a shrubbery numeric literal into a Racket number,
;; first removing underscore digit separators.  A "0x" prefix selects base
;; 16; everything else is parsed in base 10.  Returns #f (like
;; `string->number`) when the text is not a valid number.
;;
;; The `number` lexer abbreviation allows an optional sign before a hex
;; literal, so the sign is split off first; previously "-0x10" fell through
;; to the decimal branch and produced #f.
(define (parse-number s)
  (define (strip-underscores str)
    (regexp-replace* #rx"_" str ""))
  (define-values (negate? digits)
    (cond
      [(zero? (string-length s)) (values #f s)]
      [(eqv? #\- (string-ref s 0)) (values #t (substring s 1))]
      [(eqv? #\+ (string-ref s 0)) (values #f (substring s 1))]
      [else (values #f s)]))
  (cond
    [(and ((string-length digits) . > . 2)
          (eqv? #\x (string-ref digits 1)))
     (define n (string->number (strip-underscores (substring digits 2)) 16))
     (and n (if negate? (- n) n))]
    [else
     (string->number (strip-underscores s))]))
(define (parse-string s)
(read (open-input-string s)))
(define (parse-byte-string s)
(read (open-input-string s)))
(define (parse-char s)
(define str
(read (open-input-string (string-append "\""
(substring s 1 (sub1 (string-length s)))
"\""))))
(string-ref str 0))
;; True when `s` (after an optional leading sign) consists only of decimal
;; digits and underscore separators; used to decide whether a trailing "."
;; may be folded into a number token.
(define (decimal-integer? s)
  (define start
    (if (memv (string-ref s 0) '(#\+ #\-)) 1 0))
  (for/and ([ch (in-string s start)])
    (or (char-numeric? ch) (eqv? ch #\_))))
(define operator-lexer
(lexer
[operator ((string-length lexeme) . > . 1)]
[(eof) #f]
[any-char #f]))
(define (peek-multi-char-operator? input-port)
(call-with-peeking-port
input-port
(lambda (p)
(operator-lexer p))))
;; After the digits of a number have been lexed, checks whether an
;; immediately following "." should be folded into the number token.  The
;; "." is left alone when it begins a multi-character operator.  Returns
;; (values consumed-dot? new-lexeme new-end-pos pending-backup), consuming
;; the "." from `input-port` only when it is taken.
(define (maybe-consume-trailing-dot input-port lexeme end-pos)
  (define ch (peek-char input-port))
  (cond
    [(eqv? ch #\.)
     (cond
       [(peek-multi-char-operator? input-port)
        ;; "." starts an operator: leave it, but report a pending backup
        ;; because we peeked beyond the current token.
        (values #f lexeme end-pos 1)]
       [else
        (read-char input-port)
        (define new-lexeme (string-append lexeme "."))
        ;; Advance the recorded end position past the consumed ".".
        (define new-end-pos (struct-copy position end-pos
                                         [offset (add1 (position-offset end-pos))]
                                         [col (let ([c (position-col end-pos)])
                                                (and c (add1 c)))]))
        (values #t new-lexeme new-end-pos 1)])]
    [else (values #f lexeme end-pos 0)]))
(struct token (name value))
(struct located-token token (srcloc))
(define (token-e t)
(syntax-e (token-value t)))
(define (token-line t)
(if (located-token? t)
(srcloc-line (located-token-srcloc t))
(syntax-line (token-value t))))
;; Column of a token, taken from its srcloc (located tokens) or from its
;; syntax object.  A `bar-operator` gets an extra 0.5 added.
;; NOTE(review): the 0.5 presumably makes material after a `|` compare as
;; more deeply indented than the bar itself during column-based parsing —
;; confirm against the parser.
(define (token-column t)
  (let ([c (if (located-token? t)
               (srcloc-column (located-token-srcloc t))
               (syntax-column (token-value t)))])
    (if (and c (eq? (token-name t) 'bar-operator))
        (+ c 0.5)
        c)))
(define (token-srcloc t)
(cond
[(located-token? t)
(located-token-srcloc t)]
[else
(define s (token-value t))
(srcloc (syntax-source s)
(syntax-line s)
(syntax-column s)
(syntax-position s)
(syntax-span s))]))
(define (token-rename t name)
(struct-copy token t [name name]))
(define (syntax->token name s [srcloc #f])
(if srcloc
(located-token name s srcloc)
(token name s)))
(define (lex-all in fail
#:text-mode? [text-mode? #f]
#:keep-type? [keep-type? #f]
#:source [source (object-name in)]
#:interactive? [interactive? #f])
(define status (if text-mode?
(make-in-text-status)
'initial))
(parameterize ([current-lexer-source source])
(let loop ([status status] [depth 0] [blanks 0] [multi? #f])
(cond
[(eof-object? (peek-char in))
do n't consume an EOF
'()]
[else
(define-values (tok type paren start-pos end-pos backup new-status)
(lex/status in 0 status #f))
(define (wrap r)
(if keep-type?
(vector r type paren)
r))
(define name (token-name tok))
(case name
[(EOF) '()]
[(fail) (fail tok "read error")]
[(whitespace)
(define a (wrap tok))
(define newline? (let* ([s (syntax-e (token-value tok))]
[len (string-length s)])
(and (positive? len)
(eqv? #\newline (string-ref s (sub1 len))))))
(cond
[(and interactive? newline? (zero? depth)
(blanks . >= . (if multi? 1 0))
(not (lex-nested-status? status)))
(list (wrap tok))]
[else (cons (wrap tok)
(loop new-status depth (+ blanks (if newline? 1 0)) multi?))])]
[else
(define a (case name
[(s-exp)
(wrap (finish-s-exp tok in fail))]
[else (wrap tok)]))
(define d (loop (case name
[(s-exp) (out-of-s-exp-mode new-status)]
[else new-status])
(case name
[(opener) (add1 depth)]
[(closer) (sub1 depth)]
[else depth])
0
(case name
[(block-operator semicolon-operator)
(or multi?
(and (zero? depth) (not (lex-nested-status? status))))]
[else multi?])))
(cons a d)])]))))
;; Completes a `#{...}` S-expression escape: `open-tok` is the already-lexed
;; `#{` token, and port `in` is positioned just after it.  Reads one form with
;; the host `read-syntax`, then skips whitespace up to the required closing
;; `}`.  Calls `fail` (expected to escape) on eof, on a missing `}`, or when
;; the escaped form is a pair.  Returns an `identifier` or `literal` token
;; whose srcloc spans from the `#{` through the `}`.
(define (finish-s-exp open-tok in fail)
  (define v (read-syntax (current-lexer-source) in))
  (when (eof-object? v)
    (fail open-tok "expected S-expression after `#{`"))
  ;; Consume whitespace until the closing `}`, computing the position just
  ;; after it so the token's span covers the whole escape.
  (define end-pos
    (let loop ()
      (define-values (line col pos) (port-next-location in))
      (define c (read-char in))
      (cond
        [(eof-object? c)
         (fail v "expected `}` after S-expression")]
        [(eqv? c #\})
         (add1 pos)]
        [(char-whitespace? c)
         (loop)]
        [else
         ;; Give the offending character a srcloc so `fail` can point at it.
         (define bad (datum->syntax #f c (list (current-lexer-source)
                                               line
                                               col
                                               pos
                                               1)))
         (fail bad "expected only whitespace or `}` after S-expression")])))
  (define result
    (syntax->token (if (identifier? v) 'identifier 'literal)
                   ;; Record the raw text in `#{...}` form for reconstruction.
                   (syntax-raw-property v
                                        (format "#{~s}" (syntax->datum v)))
                   (let ([loc (token-srcloc open-tok)])
                     (struct-copy srcloc loc
                                  [span (- end-pos (srcloc-position loc))]))))
  (when (pair? (syntax-e v))
    (fail result "S-expression in `#{` and `}` must not be a pair"))
  result)
;; Reports whether the closer text `cl` is the one that matches the opener
;; text `op`; unknown openers match nothing.
(define (closer-for? cl op)
  (define expected
    (cond
      [(assoc op '(("(" . ")")
                   ("[" . "]")
                   ("{" . "}")
                   ("«" . "»")))
       => cdr]
      [else #f]))
  (equal? cl expected))
;; If the next line of `in` contains only whitespace (or the port is at eof),
;; consumes through the end of that line and returns #t; otherwise leaves the
;; port untouched and returns #f.
(define (consume-only-whitespace-line? in)
  (let check ([offset 0])
    (define ch (peek-char in offset))
    (cond
      [(or (eof-object? ch)
           (eqv? ch #\newline)
           (eqv? ch #\return))
       (read-line in)
       #t]
      [(char-whitespace? ch)
       ;; `peek-char`'s skip count is in bytes, so advance by UTF-8 length.
       (check (+ offset (char-utf-8-length ch)))]
      [else #f])))
|
ed92e7e12c6a49c1b0d043df8a14cef32303c5ef22967a5d2d69e23788301b5e | noprompt/meander | epsilon.cljc | (ns meander.specs.epsilon
(:require [meander.match.syntax.epsilon :as r.match.syntax]
[meander.syntax.specs.epsilon :as r.syntax.specs]
[clojure.spec.alpha :as s]))
;; Function specs for public macros in meander.epsilon; clojure.spec checks
;; a macro's :args spec automatically when the macro is expanded.

;; `defsyntax` arguments are validated by the shared spec from
;; meander.syntax.specs.epsilon.
(s/fdef meander.epsilon/defsyntax
  :args ::r.syntax.specs/defsyntax-args)

;; `rewrite` takes a subject expression followed by match/substitution pairs.
(s/fdef meander.epsilon/rewrite
  :args (s/cat :x any?
               :clauses (s/* (s/cat :match any?
                                    :substitution any?)))
  :ret any?)

;; `rewrites` has the same argument shape as `rewrite`.
(s/fdef meander.epsilon/rewrites
  :args (s/cat :x any?
               :clauses (s/* (s/cat :match any?
                                    :substitution any?)))
  :ret any?)

;; `let` takes a binding vector of pattern/expression pairs and an optional
;; target pattern.
(s/fdef meander.epsilon/let
  :args (s/cat :binding-patterns (s/and vector? (s/cat :pattern any? :expression any?))
               :target-pattern (s/? any?))
  :ret seq?)
| null | https://raw.githubusercontent.com/noprompt/meander/8c0e9457befea5eee71a94a6d8726ed5916875dc/src/meander/specs/epsilon.cljc | clojure | (ns meander.specs.epsilon
(:require [meander.match.syntax.epsilon :as r.match.syntax]
[meander.syntax.specs.epsilon :as r.syntax.specs]
[clojure.spec.alpha :as s]))
(s/fdef meander.epsilon/defsyntax
:args ::r.syntax.specs/defsyntax-args)
(s/fdef meander.epsilon/rewrite
:args (s/cat :x any?
:clauses (s/* (s/cat :match any?
:substitution any?)))
:ret any?)
(s/fdef meander.epsilon/rewrites
:args (s/cat :x any?
:clauses (s/* (s/cat :match any?
:substitution any?)))
:ret any?)
(s/fdef meander.epsilon/let
:args (s/cat :binding-patterns (s/and vector? (s/cat :pattern any? :expression any?))
:target-pattern (s/? any?))
:ret seq?)
| |
d83e84a52a82ecaa596c87459aa251c95e8903748425dcf134c9b09d8337159f | input-output-hk/project-icarus-importer | Functions.hs | # LANGUAGE RankNTypes #
# LANGUAGE TypeFamilies #
-- | Pending transactions utils.
module Pos.Wallet.Web.Pending.Functions
( ptxPoolInfo
, isPtxActive
, isPtxInBlocks
, mkPendingTx
, isReclaimableFailure
, usingPtxCoords
) where
import Universum
import Formatting (build, sformat, (%))
import Pos.Core (HasConfiguration, protocolConstants)
import Pos.Client.Txp.History (SaveTxException (..), TxHistoryEntry)
import Pos.Core.Txp (TxAux (..), TxId)
import Pos.Slotting.Class (MonadSlots (..))
import Pos.Txp (ToilVerFailure (..))
import Pos.Util.Util (maybeThrow)
import Pos.Wallet.Web.ClientTypes (CId, Wal)
import Pos.Wallet.Web.Error (WalletError (RequestError))
import Pos.Wallet.Web.Pending.Types (PendingTx (..), PtxCondition (..), PtxPoolInfo)
import Pos.Wallet.Web.Pending.Util (mkPtxSubmitTiming)
import Pos.Wallet.Web.State (WalletSnapshot, getWalletMeta)
-- | Pool info carried by conditions under which the transaction is still
-- pending; 'Nothing' once the transaction has reached blocks.
ptxPoolInfo :: PtxCondition -> Maybe PtxPoolInfo
ptxPoolInfo cond = case cond of
    PtxCreating info     -> Just info
    PtxApplying info     -> Just info
    PtxWontApply _ info  -> Just info
    PtxInNewestBlocks {} -> Nothing
    PtxPersisted {}      -> Nothing
-- | Whether the transaction is considered to have been created already
-- (i.e. it is in any state other than 'PtxCreating').
isPtxActive :: PtxCondition -> Bool
isPtxActive cond = case cond of
    PtxCreating {} -> False
    _              -> True
-- | Whether the transaction has already reached the blockchain, i.e. its
-- condition no longer carries pool info.
isPtxInBlocks :: PtxCondition -> Bool
isPtxInBlocks cond = isNothing (ptxPoolInfo cond)
-- | Build a fresh 'PendingTx' in the 'PtxCreating' state for the given
-- wallet, transaction id/aux and history entry.
--
-- Throws 'RequestError' if the wallet has no stored metadata in the
-- snapshot (unknown wallet).
mkPendingTx
    :: (HasConfiguration, MonadThrow m, MonadSlots ctx m)
    => WalletSnapshot
    -> CId Wal -> TxId -> TxAux -> TxHistoryEntry -> m PendingTx
mkPendingTx ws wid _ptxTxId _ptxTxAux th = do
    -- Fail early if the wallet does not exist; only the existence check
    -- matters, the meta itself is discarded.
    void $ maybeThrow noWallet $ getWalletMeta ws wid
    -- NOTE(review): the "inaccurate" current slot is presumably sufficient
    -- here since it only seeds submission timing -- confirm.
    _ptxCreationSlot <- getCurrentSlotInaccurate
    return PendingTx
        { _ptxCond = PtxCreating th
        , _ptxWallet = wid
        , _ptxPeerAck = False
        , _ptxSubmitTiming = mkPtxSubmitTiming protocolConstants _ptxCreationSlot
        -- '..' (RecordWildCards) picks up '_ptxTxId', '_ptxTxAux' and
        -- '_ptxCreationSlot' bound above.
        , ..
        }
  where
    noWallet =
        RequestError $ sformat ("Failed to get meta of wallet "%build) wid
-- | Whether formed transaction ('TxAux') has reasonable chances to be applied
-- later after specified error.
-- | Whether a transaction that failed to be saved with the given error is
-- worth resubmitting later: 'True' for transient, chain-state-dependent
-- failures; 'False' for defects of the transaction itself.
-- NOTE(review): only 'SaveTxToilFailure' is matched; if 'SaveTxException'
-- has other constructors this match is partial -- confirm.
isReclaimableFailure :: SaveTxException -> Bool
isReclaimableFailure (SaveTxToilFailure tvf) = case tvf of
    -- We consider all cases explicitly here to prevent changing
    -- constructors set blindly
    -- Transient failures: may succeed once chain/mempool state changes.
    ToilKnown -> True
    ToilTipsMismatch{} -> True
    ToilSlotUnknown -> True
    ToilOverwhelmed{} -> True
    -- Permanent failures: the transaction itself is invalid or conflicting.
    ToilNotUnspent{} -> False
    ToilOutGreaterThanIn{} -> False
    ToilInconsistentTxAux{} -> False
    ToilInvalidOutput{} -> False
    ToilUnknownInput{} -> False
    ToilWitnessDoesntMatch{} -> False
    ToilInvalidWitness{} -> False
    ToilTooLargeTx{} -> False
    ToilInvalidMinFee{} -> False
    ToilInsufficientFee{} -> False
    ToilUnknownAttributes{} -> False
    ToilNonBootstrapDistr{} -> False
    ToilRepeatedInput{} -> False
-- | Apply a function to the coordinates (wallet id and transaction id)
-- identifying a pending transaction.
usingPtxCoords :: (CId Wal -> TxId -> a) -> PendingTx -> a
usingPtxCoords f ptx = f (_ptxWallet ptx) (_ptxTxId ptx)
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/wallet/src/Pos/Wallet/Web/Pending/Functions.hs | haskell | | Pending transactions utils.
| Whether transaction is claimed to be once created.
later after specified error.
We consider all cases explicitly here to prevent changing
constructors set blindly | # LANGUAGE RankNTypes #
# LANGUAGE TypeFamilies #
module Pos.Wallet.Web.Pending.Functions
( ptxPoolInfo
, isPtxActive
, isPtxInBlocks
, mkPendingTx
, isReclaimableFailure
, usingPtxCoords
) where
import Universum
import Formatting (build, sformat, (%))
import Pos.Core (HasConfiguration, protocolConstants)
import Pos.Client.Txp.History (SaveTxException (..), TxHistoryEntry)
import Pos.Core.Txp (TxAux (..), TxId)
import Pos.Slotting.Class (MonadSlots (..))
import Pos.Txp (ToilVerFailure (..))
import Pos.Util.Util (maybeThrow)
import Pos.Wallet.Web.ClientTypes (CId, Wal)
import Pos.Wallet.Web.Error (WalletError (RequestError))
import Pos.Wallet.Web.Pending.Types (PendingTx (..), PtxCondition (..), PtxPoolInfo)
import Pos.Wallet.Web.Pending.Util (mkPtxSubmitTiming)
import Pos.Wallet.Web.State (WalletSnapshot, getWalletMeta)
-- | Pool info carried by conditions under which the transaction is still
-- pending; 'Nothing' once the transaction has reached blocks.
ptxPoolInfo :: PtxCondition -> Maybe PtxPoolInfo
ptxPoolInfo cond = case cond of
    PtxCreating info     -> Just info
    PtxApplying info     -> Just info
    PtxWontApply _ info  -> Just info
    PtxInNewestBlocks {} -> Nothing
    PtxPersisted {}      -> Nothing
-- | Whether the transaction is considered to have been created already
-- (i.e. it is in any state other than 'PtxCreating').
isPtxActive :: PtxCondition -> Bool
isPtxActive cond = case cond of
    PtxCreating {} -> False
    _              -> True
-- | Whether the transaction has already reached the blockchain, i.e. its
-- condition no longer carries pool info.
isPtxInBlocks :: PtxCondition -> Bool
isPtxInBlocks cond = isNothing (ptxPoolInfo cond)
-- | Build a fresh 'PendingTx' in the 'PtxCreating' state for the given
-- wallet, transaction id/aux and history entry.
--
-- Throws 'RequestError' if the wallet has no stored metadata in the
-- snapshot (unknown wallet).
mkPendingTx
    :: (HasConfiguration, MonadThrow m, MonadSlots ctx m)
    => WalletSnapshot
    -> CId Wal -> TxId -> TxAux -> TxHistoryEntry -> m PendingTx
mkPendingTx ws wid _ptxTxId _ptxTxAux th = do
    -- Fail early if the wallet does not exist; only the existence check
    -- matters, the meta itself is discarded.
    void $ maybeThrow noWallet $ getWalletMeta ws wid
    -- NOTE(review): the "inaccurate" current slot is presumably sufficient
    -- here since it only seeds submission timing -- confirm.
    _ptxCreationSlot <- getCurrentSlotInaccurate
    return PendingTx
        { _ptxCond = PtxCreating th
        , _ptxWallet = wid
        , _ptxPeerAck = False
        , _ptxSubmitTiming = mkPtxSubmitTiming protocolConstants _ptxCreationSlot
        -- '..' (RecordWildCards) picks up '_ptxTxId', '_ptxTxAux' and
        -- '_ptxCreationSlot' bound above.
        , ..
        }
  where
    noWallet =
        RequestError $ sformat ("Failed to get meta of wallet "%build) wid
-- | Whether formed transaction ('TxAux') has reasonable chances to be applied
-- later after specified error.
-- | Whether a transaction that failed to be saved with the given error is
-- worth resubmitting later: 'True' for transient, chain-state-dependent
-- failures; 'False' for defects of the transaction itself.
-- NOTE(review): only 'SaveTxToilFailure' is matched; if 'SaveTxException'
-- has other constructors this match is partial -- confirm.
isReclaimableFailure :: SaveTxException -> Bool
isReclaimableFailure (SaveTxToilFailure tvf) = case tvf of
    -- All constructors are listed explicitly so that extending
    -- 'ToilVerFailure' forces a conscious decision here.
    -- Transient failures: may succeed once chain/mempool state changes.
    ToilKnown -> True
    ToilTipsMismatch{} -> True
    ToilSlotUnknown -> True
    ToilOverwhelmed{} -> True
    -- Permanent failures: the transaction itself is invalid or conflicting.
    ToilNotUnspent{} -> False
    ToilOutGreaterThanIn{} -> False
    ToilInconsistentTxAux{} -> False
    ToilInvalidOutput{} -> False
    ToilUnknownInput{} -> False
    ToilWitnessDoesntMatch{} -> False
    ToilInvalidWitness{} -> False
    ToilTooLargeTx{} -> False
    ToilInvalidMinFee{} -> False
    ToilInsufficientFee{} -> False
    ToilUnknownAttributes{} -> False
    ToilNonBootstrapDistr{} -> False
    ToilRepeatedInput{} -> False
-- | Apply a function to the coordinates (wallet id and transaction id)
-- identifying a pending transaction.
usingPtxCoords :: (CId Wal -> TxId -> a) -> PendingTx -> a
usingPtxCoords f ptx = f (_ptxWallet ptx) (_ptxTxId ptx)
|
a7da61ebdbd02126c129650f0b5fa854e02c8e0ef7721bb4a2a5277f1ee0c8a0 | gorillalabs/sparkling | validation_test.clj | (ns sparkling.ml.validation-test
(:require [clojure.test :as t]
[sparkling.conf :as conf]
[sparkling.api :as s]
[sparkling.ml.core :as mlc]
[clojure.java.io :as io]
[sparkling.ml.classification :as cl]
[sparkling.ml.validation :as v])
(:import [org.apache.spark.api.java JavaSparkContext]
[org.apache.spark.sql SQLContext]
[org.apache.spark.ml.tuning ParamGridBuilder CrossValidator CrossValidatorModel
TrainValidationSplit TrainValidationSplitModel]
[org.apache.spark.ml.classification NaiveBayes LogisticRegression
DecisionTreeClassifier RandomForestClassifier GBTClassifier ]
[org.apache.spark.ml.evaluation BinaryClassificationEvaluator MulticlassClassificationEvaluator]
[java.io File]))
;; Spark configuration shared by every test in this namespace: sparkling's
;; Kryo registrator, relaxed Kryo registration, and a local master using
;; all available cores.
(def cconf (-> (conf/spark-conf)
               (conf/set-sparkling-registrator)
               (conf/set "spark.kryo.registrationRequired" "false")
               (conf/master "local[*]")
               (conf/app-name "classifier-test")))
;; Exercises the validation namespace end-to-end inside a local Spark
;; context: construction of evaluators/validators, mandatory-parameter
;; assertions, and parameter plumbing.
(t/deftest validation-test
  (s/with-context c cconf
    ;; NOTE(review): sqc is bound but never used below -- presumably kept
    ;; to force SQLContext creation as a side effect; confirm.
    (let [sqc (mlc/sql-context c)]
      ;; Evaluator constructors return the expected Spark ML classes.
      (t/testing
          "valid classes created "
        (t/is (= (class (v/binary-classification-evaluator)) BinaryClassificationEvaluator))
        (t/is (= (class (v/multiclass-classification-evaluator)) MulticlassClassificationEvaluator)))
      ;; Validators must reject maps missing the required :estimator value.
      (t/testing
          "mandatory params passed"
        (t/is (thrown? AssertionError (v/cross-validator {})))
        (t/is (thrown? AssertionError (v/cross-validator {:estimator nil})))
        (t/is (thrown? AssertionError (v/train-val-split-validator {})))
        (t/is (thrown? AssertionError (v/train-val-split-validator {:estimator nil}))))
      ;; With a full estimator/evaluator map the right validator type is built.
      (t/testing
          "type of validator created "
        (let [estmap {:estimator (cl/logistic-regression)
                      :evaluator (v/binary-classification-evaluator)}]
          (t/is (= (class (v/cross-validator estmap)) CrossValidator))
          (t/is (= (class (v/train-val-split-validator estmap)) TrainValidationSplit ))))
      ;; :metric-name option is forwarded to the underlying evaluator.
      (t/testing
          "valid params created "
        (t/is (= (.getMetricName (v/binary-classification-evaluator {:metric-name "areaUnderPR"})) "areaUnderPR" ))
        (t/is (= (.getMetricName (v/multiclass-classification-evaluator {:metric-name "f1"})) "f1" ))))))
| null | https://raw.githubusercontent.com/gorillalabs/sparkling/ffedcc70fd46bf1b48405be8b1f5a1e1c4f9f578/test/sparkling/ml/validation_test.clj | clojure | (ns sparkling.ml.validation-test
(:require [clojure.test :as t]
[sparkling.conf :as conf]
[sparkling.api :as s]
[sparkling.ml.core :as mlc]
[clojure.java.io :as io]
[sparkling.ml.classification :as cl]
[sparkling.ml.validation :as v])
(:import [org.apache.spark.api.java JavaSparkContext]
[org.apache.spark.sql SQLContext]
[org.apache.spark.ml.tuning ParamGridBuilder CrossValidator CrossValidatorModel
TrainValidationSplit TrainValidationSplitModel]
[org.apache.spark.ml.classification NaiveBayes LogisticRegression
DecisionTreeClassifier RandomForestClassifier GBTClassifier ]
[org.apache.spark.ml.evaluation BinaryClassificationEvaluator MulticlassClassificationEvaluator]
[java.io File]))
;; Spark configuration shared by every test in this namespace: sparkling's
;; Kryo registrator, relaxed Kryo registration, and a local master using
;; all available cores.
(def cconf (-> (conf/spark-conf)
               (conf/set-sparkling-registrator)
               (conf/set "spark.kryo.registrationRequired" "false")
               (conf/master "local[*]")
               (conf/app-name "classifier-test")))
;; Exercises the validation namespace end-to-end inside a local Spark
;; context: construction of evaluators/validators, mandatory-parameter
;; assertions, and parameter plumbing.
(t/deftest validation-test
  (s/with-context c cconf
    ;; NOTE(review): sqc is bound but never used below -- presumably kept
    ;; to force SQLContext creation as a side effect; confirm.
    (let [sqc (mlc/sql-context c)]
      ;; Evaluator constructors return the expected Spark ML classes.
      (t/testing
          "valid classes created "
        (t/is (= (class (v/binary-classification-evaluator)) BinaryClassificationEvaluator))
        (t/is (= (class (v/multiclass-classification-evaluator)) MulticlassClassificationEvaluator)))
      ;; Validators must reject maps missing the required :estimator value.
      (t/testing
          "mandatory params passed"
        (t/is (thrown? AssertionError (v/cross-validator {})))
        (t/is (thrown? AssertionError (v/cross-validator {:estimator nil})))
        (t/is (thrown? AssertionError (v/train-val-split-validator {})))
        (t/is (thrown? AssertionError (v/train-val-split-validator {:estimator nil}))))
      ;; With a full estimator/evaluator map the right validator type is built.
      (t/testing
          "type of validator created "
        (let [estmap {:estimator (cl/logistic-regression)
                      :evaluator (v/binary-classification-evaluator)}]
          (t/is (= (class (v/cross-validator estmap)) CrossValidator))
          (t/is (= (class (v/train-val-split-validator estmap)) TrainValidationSplit ))))
      ;; :metric-name option is forwarded to the underlying evaluator.
      (t/testing
          "valid params created "
        (t/is (= (.getMetricName (v/binary-classification-evaluator {:metric-name "areaUnderPR"})) "areaUnderPR" ))
        (t/is (= (.getMetricName (v/multiclass-classification-evaluator {:metric-name "f1"})) "f1" ))))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.