_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
6f5ea79465fd8375936e79c32ad7aeabc43754fb308146f79771b52a7b610cea | inhabitedtype/ocaml-aws | getQueryLoggingConfig.mli | open Types
(** Request payload for the Route 53 [GetQueryLoggingConfig] operation. *)
type input = GetQueryLoggingConfigRequest.t

(** Response payload for the Route 53 [GetQueryLoggingConfig] operation. *)
type output = GetQueryLoggingConfigResponse.t

(** Errors this operation may surface. *)
type error = Errors_internal.t

(** Specialise the generic AWS call interface to this operation's types. *)
include
  Aws.Call with type input := input and type output := output and type error := error
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/b6d5554c5d201202b5de8d0b0253871f7b66dab6/libraries/route53/lib/getQueryLoggingConfig.mli | ocaml | open Types
(** Request payload for the Route 53 [GetQueryLoggingConfig] operation. *)
type input = GetQueryLoggingConfigRequest.t

(** Response payload for the Route 53 [GetQueryLoggingConfig] operation. *)
type output = GetQueryLoggingConfigResponse.t

(** Errors this operation may surface. *)
type error = Errors_internal.t

(** Specialise the generic AWS call interface to this operation's types. *)
include
  Aws.Call with type input := input and type output := output and type error := error
| |
7c3bbf95a3ad24691e52270a0b55ee3356f4e8a4e5d25eb7d6fe6325dae38de3 | wavewave/hoodle | Erase.hs | {-# LANGUAGE BangPatterns #-}
module Hoodle.Web.Erase where
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO (liftIO))
import Control.Monad.State (MonadState (get))
import Data.Foldable (toList, traverse_)
import Data.List (nub, sort)
import Data.Sequence (Seq, ViewR (..), singleton, viewr, (|>))
import qualified Data.Sequence as Seq (length)
import Hoodle.Web.Default (nextevent)
import qualified Hoodle.Web.ForeignJS as J
import Hoodle.Web.Type.Coroutine (MainCoroutine)
import Hoodle.Web.Type.Event (UserEvent (..))
import Hoodle.Web.Type.State
( docstateData,
hdlstateDocState,
hdlstateSVGBox,
hdlstateWebSocket,
)
import Hoodle.Web.Util
( intersectingStrokes,
sendBinary,
transformPathFromCanvasToSVG,
)
import Lens.Micro ((<&>), (^.))
import Message
( C2SMsg (DeleteStrokes),
CommitId (..),
TextSerializable ( serialize ) ,
)
-- | Number of pointer samples to buffer before the erase path is
-- hit-tested against the document strokes (see 'erasingMode').
eraseUpdatePeriod :: Int
eraseUpdatePeriod = 10
-- | Event loop for the eraser tool.
--
-- @hstrks0@ accumulates commit ids of strokes already hit by the eraser;
-- @cxys@ buffers pointer positions (canvas coordinates) gathered since
-- the last intersection test.
--
-- On 'PointerMove': buffer the sample; once 'eraseUpdatePeriod' samples
-- have accumulated, transform the buffered path to SVG coordinates,
-- recolour every intersecting stroke, fold the hits into the
-- accumulator, and restart the buffer from the current sample.
-- On 'PointerUp': send a 'DeleteStrokes' message for all collected ids
-- over the websocket (only if anything was hit) and leave erasing mode.
-- Any other event recurses with the state unchanged.
--
-- NOTE(review): when @cxys@ is empty, a 'PointerMove' falls through to
-- @pure ()@ and silently exits erasing mode — presumably callers always
-- start with a non-empty buffer; confirm.
erasingMode :: [CommitId] -> Seq (Double, Double) -> MainCoroutine ()
erasingMode hstrks0 cxys = do
  ev <- nextevent
  case ev of
    PointerMove cxy -> do
      s <- get
      let svg = s ^. hdlstateSVGBox
          strks = s ^. hdlstateDocState . docstateData
      case viewr cxys of
        _ :> _ ->
          if Seq.length cxys >= eraseUpdatePeriod
            then do
              -- Convert the buffered canvas path to SVG coordinates
              -- before hit-testing against the document strokes.
              xys <- liftIO $ transformPathFromCanvasToSVG svg (toList cxys)
              let hstrks = intersectingStrokes xys strks
              -- Visually mark each hit stroke; SVG element ids are
              -- rendered as "stroke<commit id>".
              liftIO $
                traverse_ (J.strokeChangeColor svg . ("stroke" ++) . show . unCommitId) hstrks
              -- Strict, sorted, deduplicated accumulator of hits.
              let !hstrks' = nub $ sort (hstrks ++ hstrks0)
              erasingMode hstrks' (singleton cxy)
            else erasingMode hstrks0 (cxys |> cxy)
        _ -> pure ()
    PointerUp _ -> do
      sock <- get <&> (^. hdlstateWebSocket)
      -- Only notify the server when at least one stroke was erased.
      when (not . null $ hstrks0) $
        liftIO $ do
          let msg = DeleteStrokes hstrks0
          sendBinary sock msg
    _ -> erasingMode hstrks0 cxys
| null | https://raw.githubusercontent.com/wavewave/hoodle/1acd7a713697b6146bda13a38591cf868cea6685/web/client/Hoodle/Web/Erase.hs | haskell | # LANGUAGE BangPatterns # |
module Hoodle.Web.Erase where
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO (liftIO))
import Control.Monad.State (MonadState (get))
import Data.Foldable (toList, traverse_)
import Data.List (nub, sort)
import Data.Sequence (Seq, ViewR (..), singleton, viewr, (|>))
import qualified Data.Sequence as Seq (length)
import Hoodle.Web.Default (nextevent)
import qualified Hoodle.Web.ForeignJS as J
import Hoodle.Web.Type.Coroutine (MainCoroutine)
import Hoodle.Web.Type.Event (UserEvent (..))
import Hoodle.Web.Type.State
( docstateData,
hdlstateDocState,
hdlstateSVGBox,
hdlstateWebSocket,
)
import Hoodle.Web.Util
( intersectingStrokes,
sendBinary,
transformPathFromCanvasToSVG,
)
import Lens.Micro ((<&>), (^.))
import Message
( C2SMsg (DeleteStrokes),
CommitId (..),
TextSerializable ( serialize ) ,
)
-- | Number of pointer samples to buffer before the erase path is
-- hit-tested against the document strokes (see 'erasingMode').
eraseUpdatePeriod :: Int
eraseUpdatePeriod = 10
-- | Event loop for the eraser tool.
--
-- @hstrks0@ accumulates commit ids of strokes already hit by the eraser;
-- @cxys@ buffers pointer positions (canvas coordinates) gathered since
-- the last intersection test.
--
-- On 'PointerMove': buffer the sample; once 'eraseUpdatePeriod' samples
-- have accumulated, transform the buffered path to SVG coordinates,
-- recolour every intersecting stroke, fold the hits into the
-- accumulator, and restart the buffer from the current sample.
-- On 'PointerUp': send a 'DeleteStrokes' message for all collected ids
-- over the websocket (only if anything was hit) and leave erasing mode.
-- Any other event recurses with the state unchanged.
--
-- NOTE(review): when @cxys@ is empty, a 'PointerMove' falls through to
-- @pure ()@ and silently exits erasing mode — presumably callers always
-- start with a non-empty buffer; confirm.
erasingMode :: [CommitId] -> Seq (Double, Double) -> MainCoroutine ()
erasingMode hstrks0 cxys = do
  ev <- nextevent
  case ev of
    PointerMove cxy -> do
      s <- get
      let svg = s ^. hdlstateSVGBox
          strks = s ^. hdlstateDocState . docstateData
      case viewr cxys of
        _ :> _ ->
          if Seq.length cxys >= eraseUpdatePeriod
            then do
              -- Convert the buffered canvas path to SVG coordinates
              -- before hit-testing against the document strokes.
              xys <- liftIO $ transformPathFromCanvasToSVG svg (toList cxys)
              let hstrks = intersectingStrokes xys strks
              -- Visually mark each hit stroke; SVG element ids are
              -- rendered as "stroke<commit id>".
              liftIO $
                traverse_ (J.strokeChangeColor svg . ("stroke" ++) . show . unCommitId) hstrks
              -- Strict, sorted, deduplicated accumulator of hits.
              let !hstrks' = nub $ sort (hstrks ++ hstrks0)
              erasingMode hstrks' (singleton cxy)
            else erasingMode hstrks0 (cxys |> cxy)
        _ -> pure ()
    PointerUp _ -> do
      sock <- get <&> (^. hdlstateWebSocket)
      -- Only notify the server when at least one stroke was erased.
      when (not . null $ hstrks0) $
        liftIO $ do
          let msg = DeleteStrokes hstrks0
          sendBinary sock msg
    _ -> erasingMode hstrks0 cxys
|
3229f8506cca652353223f54585b0aad5f9328f9b266405d96c7020e2d2c7d15 | BinaryAnalysisPlatform/bap-plugins | cut.mli | open Bap.Std
(** A cut is a subgraph that contains a call to a sink we are
    interested in. Each cut has a unique caller (and callstring up to the
    root of the program). We define cut *groups* since a single cut may
    contain multiple sinks within the final calling sub.

    E.g., bar below will form a single cut group, which calls sink1 and sink2.
    bazz forms a second cut group.

        foo
       /   \
      /    bazz
     /       -> calls sink3 on some path
    bar
      -> calls sink1 on some path
      -> calls sink 2 on some other path
*)
(** Configuration naming the source of interest and where to anchor it.
    NOTE(review): field semantics inferred from names — confirm against
    callers. *)
type src_config =
  {src_at_root : bool; (* treat the program root as the source? *)
   src_at_nth : int;   (* which occurrence of the source to use? *)
   src : string }      (* name of the source *)

(** One cut group: all sink call sites sharing a final calling sub,
    together with the matching source side and their common root. *)
type cut_group = {
  (* Stores the blks that call a given source *)
  src_caller_blks : Blk.t seq;
  (* Blks that call a sink (by symmetry with src_caller_blks) *)
  sink_caller_blks : Blk.t seq;
  (* The callstring from src to the lca *)
  src_callstring : tid seq;
  (* The callstring from sink to the lca *)
  sink_callstring : tid seq;
  (* The sub that calls src. src_caller_blks are all contained in
     this sub *)
  src_caller_sub : Sub.t;
  (* Presumably the sub containing sink_caller_blks — confirm *)
  sink_caller_sub : Sub.t;
  lca_sub : Sub.t; (* root: lowest common ancestor of src and sink callers *)
  lca_name : string;
  depth: int; (* max depth we would have to inline to hit this *)
  id: int; (* group id, for output *)
}

(** Pretty-print a cut group (see also [output_cut_group]). *)
val print_cut_group : cut_group -> unit

(** Emit a cut group; destination (file vs stdout) is not visible from
    this interface — NOTE(review): confirm in the implementation. *)
val output_cut_group : cut_group -> unit

(** Compute the cut groups of a project for the given source
    configuration and sink name, using the supplied call graph. *)
val cuts : project -> Graphs.Callgraph.t -> src_config -> string -> cut_group seq
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap-plugins/2e9aa5c7c24ef494d0e7db1b43c5ceedcb4196a8/minos/cut.mli | ocaml | Stores the blks that call a given source
The callstring from src to the lca
The sub that calls src. src_caller_blks are all contained in
this sub
root
max depth we would have to inline to hit this
group id, for output | open Bap.Std
(** A cut is a subgraph that contains a call to a sink we are
    interested in. Each cut has a unique caller (and callstring up to the
    root of the program). We define cut *groups* since a single cut may
    contain multiple sinks within the final calling sub.

    E.g., bar below will form a single cut group, which calls sink1 and sink2.
    bazz forms a second cut group.

        foo
       /   \
      /    bazz
     /       -> calls sink3 on some path
    bar
      -> calls sink1 on some path
      -> calls sink 2 on some other path
*)
type src_config =
{src_at_root : bool;
src_at_nth : int;
src : string }
type cut_group = {
src_caller_blks : Blk.t seq;
sink_caller_blks : Blk.t seq;
src_callstring : tid seq;
sink_callstring : tid seq;
src_caller_sub : Sub.t;
sink_caller_sub : Sub.t;
lca_name : string;
}
val print_cut_group : cut_group -> unit
val output_cut_group : cut_group -> unit
val cuts : project -> Graphs.Callgraph.t -> src_config -> string -> cut_group seq
|
558e55518f080f4c65389f859abc47f3c561ee0d4a71bedbd94eda21c30ffbd1 | chrisdone/prana | IO.hs | # LANGUAGE Trustworthy #
{-# LANGUAGE CPP, NoImplicitPrelude, CApiFFI #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  System.IO
-- Copyright   :  (c) The University of Glasgow 2001
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability   :  stable
-- Portability :  portable
--
-- The standard IO library.
--
-----------------------------------------------------------------------------
module System.IO (
* The IO monad
IO,
fixIO,
-- * Files and handles
FilePath,
Handle, -- abstract, instance of: Eq, Show.
| GHC note : a ' Handle ' will be automatically closed when the garbage
-- collector detects that it has become unreferenced by the program.
-- However, relying on this behaviour is not generally recommended:
-- the garbage collector is unpredictable. If possible, use
an explicit ' ' to close ' Handle 's when they are no longer
required . GHC does not currently attempt to free up file
-- descriptors when they have run out, it is your responsibility to
-- ensure that this doesn't happen.
-- ** Standard handles
-- | Three handles are allocated during program initialisation,
-- and are initially open.
stdin, stdout, stderr,
-- * Opening and closing files
-- ** Opening files
withFile,
openFile,
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
-- ** Closing files
hClose,
-- ** Special cases
| These functions are also exported by the " Prelude " .
readFile,
writeFile,
appendFile,
-- ** File locking
-- $locking
-- * Operations on handles
-- ** Determining and changing the size of a file
hFileSize,
hSetFileSize,
-- ** Detecting the end of input
hIsEOF,
isEOF,
-- ** Buffering operations
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
hSetBuffering,
hGetBuffering,
hFlush,
-- ** Repositioning handles
hGetPosn,
hSetPosn,
HandlePosn, -- abstract, instance of: Eq, Show.
hSeek,
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
hTell,
-- ** Handle properties
hIsOpen, hIsClosed,
hIsReadable, hIsWritable,
hIsSeekable,
* * Terminal operations ( not portable : GHC only )
hIsTerminalDevice,
hSetEcho,
hGetEcho,
* * Showing handle state ( not portable : GHC only )
hShow,
-- * Text input and output
-- ** Text input
hWaitForInput,
hReady,
hGetChar,
hGetLine,
hLookAhead,
hGetContents,
-- ** Text output
hPutChar,
hPutStr,
hPutStrLn,
hPrint,
-- ** Special cases for standard input and output
| These functions are also exported by the " Prelude " .
interact,
putChar,
putStr,
putStrLn,
print,
getChar,
getLine,
getContents,
readIO,
readLn,
-- * Binary input and output
withBinaryFile,
openBinaryFile,
hSetBinaryMode,
hPutBuf,
hGetBuf,
hGetBufSome,
hPutBufNonBlocking,
hGetBufNonBlocking,
-- * Temporary files
openTempFile,
openBinaryTempFile,
openTempFileWithDefaultPermissions,
openBinaryTempFileWithDefaultPermissions,
-- * Unicode encoding\/decoding
-- | A text-mode 'Handle' has an associated 'TextEncoding', which
is used to decode bytes into Unicode characters when reading ,
and encode Unicode characters into bytes when writing .
--
-- The default 'TextEncoding' is the same as the default encoding
-- on your system, which is also available as 'localeEncoding'.
( GHC note : on Windows , we currently do not support double - byte
encodings ; if the console\ 's code page is unsupported , then
-- 'localeEncoding' will be 'latin1'.)
--
-- Encoding and decoding errors are always detected and reported,
except during lazy I / O ( ' hGetContents ' , ' ' , and
-- 'readFile'), where a decoding error merely results in
-- termination of the character stream, as with other I/O errors.
hSetEncoding,
hGetEncoding,
-- ** Unicode encodings
TextEncoding,
latin1,
utf8, utf8_bom,
utf16, utf16le, utf16be,
utf32, utf32le, utf32be,
localeEncoding,
char8,
mkTextEncoding,
-- * Newline conversion
| In Haskell , a newline is always represented by the character
-- '\n'. However, in files and external character streams, a
-- newline may be represented by another character sequence, such
-- as '\r\n'.
--
A text - mode ' Handle ' has an associated ' NewlineMode ' that
-- specifies how to transate newline characters. The
' NewlineMode ' specifies the input and output translation
-- separately, so that for instance you can translate '\r\n'
-- to '\n' on input, but leave newlines as '\n' on output.
--
The default ' NewlineMode ' for a ' Handle ' is
-- 'nativeNewlineMode', which does no translation on Unix systems,
but translates ' \r\n ' to ' \n ' and back on Windows .
--
-- Binary-mode 'Handle's do no newline translation at all.
--
hSetNewlineMode,
Newline(..), nativeNewline,
NewlineMode(..),
noNewlineTranslation, universalNewlineMode, nativeNewlineMode,
) where
import Control.Exception.Base
import Data.Bits
import Data.Maybe
import Foreign.C.Error
#if defined(mingw32_HOST_OS)
import Foreign.C.String
import Foreign.Ptr
import Foreign.Marshal.Alloc
import Foreign.Storable
#endif
import Foreign.C.Types
import System.Posix.Internals
import System.Posix.Types
import GHC.Base
import GHC.List
#ifndef mingw32_HOST_OS
import GHC.IORef
#endif
import GHC.Num
import GHC.IO hiding ( bracket, onException )
import GHC.IO.IOMode
import GHC.IO.Handle.FD
import qualified GHC.IO.FD as FD
import GHC.IO.Handle
import GHC.IO.Handle.Text ( hGetBufSome, hPutStrLn )
import GHC.IO.Exception ( userError )
import GHC.IO.Encoding
import Text.Read
import GHC.Show
import GHC.MVar
-- -----------------------------------------------------------------------------
-- Standard IO
-- | Write a character to the standard output device
-- (same as 'hPutChar' 'stdout').
putChar :: Char -> IO ()
putChar c = hPutChar stdout c
-- | Write a string to the standard output device
-- (same as 'hPutStr' 'stdout').
putStr :: String -> IO ()
putStr s = hPutStr stdout s
-- | The same as 'putStr', but adds a newline character.
putStrLn :: String -> IO ()
putStrLn s = hPutStrLn stdout s
-- | The 'print' function outputs a value of any printable type to the
-- standard output device.
-- Printable types are those that are instances of class 'Show'; 'print'
-- converts values to strings for output using the 'show' operation and
-- adds a newline.
--
-- For example, a program to print the first 20 integers and their
-- powers of 2 could be written as:
--
-- > main = print ([(n, 2^n) | n <- [0..19]])
print :: Show a => a -> IO ()
print x = putStrLn (show x)
-- | Read a character from the standard input device
-- (same as 'hGetChar' 'stdin').
getChar :: IO Char
getChar = hGetChar stdin
-- | Read a line from the standard input device
-- (same as 'hGetLine' 'stdin').
getLine :: IO String
getLine = hGetLine stdin
-- | The 'getContents' operation returns all user input as a single string,
-- which is read lazily as it is needed
-- (same as 'hGetContents' 'stdin').
getContents :: IO String
getContents = hGetContents stdin
-- | The 'interact' function takes a function of type @String->String@
-- as its argument. The entire input from the standard input device is
-- passed to this function as its argument, and the resulting string is
-- output on the standard output device.
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
-- | The 'readFile' function reads a file and
-- returns the contents of the file as a string.
-- The file is read lazily, on demand, as with 'getContents'.
readFile :: FilePath -> IO String
readFile name = openFile name ReadMode >>= hGetContents
-- | The computation 'writeFile' @file str@ function writes the string @str@,
-- to the file @file@.
writeFile :: FilePath -> String -> IO ()
writeFile f txt = withFile f WriteMode (\ hdl -> hPutStr hdl txt)
-- | The computation 'appendFile' @file str@ function appends the string @str@,
-- to the file @file@.
--
-- Note that 'writeFile' and 'appendFile' write a literal string
-- to a file.  To write a value of any printable type, as with 'print',
-- use the 'show' function to convert the value to a string first.
--
-- > main = appendFile "squares" (show [(x,x*x) | x <- [0,0.1..2]])
appendFile :: FilePath -> String -> IO ()
appendFile f txt = withFile f AppendMode (\ hdl -> hPutStr hdl txt)
-- | The 'readLn' function combines 'getLine' and 'readIO'.
readLn :: Read a => IO a
readLn = getLine >>= readIO
-- | The 'readIO' function is similar to 'read' except that it signals
-- parse failure to the 'IO' monad instead of terminating the program.
readIO :: Read a => String -> IO a
readIO s = case (do { (x,t) <- reads s ;
("","") <- lex t ;
return x }) of
[x] -> return x
[] -> ioError (userError "Prelude.readIO: no parse")
_ -> ioError (userError "Prelude.readIO: ambiguous parse")
-- | The Unicode encoding of the current locale
--
-- This is the initial locale encoding: if it has been subsequently changed by
-- 'GHC.IO.Encoding.setLocaleEncoding' this value will not reflect that change.
localeEncoding :: TextEncoding
localeEncoding = initLocaleEncoding
-- | Computation 'hReady' @hdl@ indicates whether at least one item is
-- available for input from handle @hdl@.
--
-- This operation may fail with:
--
--  * 'System.IO.Error.isEOFError' if the end of file has been reached.
hReady :: Handle -> IO Bool
hReady h = hWaitForInput h 0
-- | Computation 'hPrint' @hdl t@ writes the string representation of @t@
-- given by the 'shows' function to the file or channel managed by @hdl@
-- and appends a newline.
--
-- This operation may fail with:
--
--  * 'System.IO.Error.isFullError' if the device is full; or
--
--  * 'System.IO.Error.isPermissionError' if another system resource limit would be exceeded.
hPrint :: Show a => Handle -> a -> IO ()
hPrint hdl = hPutStrLn hdl . show
-- | @'withFile' name mode act@ opens a file using 'openFile' and passes
-- the resulting handle to the computation @act@.  The handle will be
-- closed on exit from 'withFile', whether by normal termination or by
-- raising an exception.  If closing the handle raises an exception, then
-- this exception will be raised by 'withFile' rather than any exception
-- raised by 'act'.
withFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withFile name mode = bracket (openFile name mode) hClose
-- | @'withBinaryFile' name mode act@ opens a file using 'openBinaryFile'
-- and passes the resulting handle to the computation @act@.  The handle
-- will be closed on exit from 'withBinaryFile', whether by normal
-- termination or by raising an exception.
withBinaryFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withBinaryFile name mode = bracket (openBinaryFile name mode) hClose
-- ---------------------------------------------------------------------------
-- fixIO
-- | The implementation of 'mfix' for 'IO'. If the function passed
-- to 'fixIO' inspects its argument, the resulting action will throw
-- 'FixIOException'.
fixIO :: (a -> IO a) -> IO a
fixIO k = do
    m <- newEmptyMVar
    -- Lazily read the eventual result back; if the knot is forced before
    -- the result is written, the blocked read is turned into
    -- FixIOException rather than a deadlock.
    ans <- unsafeDupableInterleaveIO
             (readMVar m `catch` \BlockedIndefinitelyOnMVar ->
                 throwIO FixIOException)
    result <- k ans
    putMVar m result
    return result
-- NOTE: we do our own explicit black holing here, because GHC's lazy
-- blackholing isn't enough.  In an infinite loop, GHC may run the IO
-- computation a few times before it notices the loop, which is wrong.
--
-- NOTE2: the explicit black-holing with an IORef ran into trouble
-- with multiple threads (see #5421), so now we use an MVar.  We used
-- to use takeMVar with unsafeInterleaveIO. This, however, uses noDuplicate#,
-- which is not particularly cheap. Better to use readMVar, which can be
-- performed in multiple threads safely, and to use unsafeDupableInterleaveIO
-- to avoid the noDuplicate cost.
--
-- What we'd ideally want is probably an IVar, but we don't quite have those.
-- STM TVars look like an option at first, but I don't think they are:
-- we'd need to be able to write to the variable in an IO context, which can
-- only be done using 'atomically', and 'atomically' is not allowed within
-- unsafePerformIO.  We can't know if someone will try to use the result
-- of fixIO with unsafePerformIO!
--
-- See also System.IO.Unsafe.unsafeFixIO.
--
-- | The function creates a temporary file in ReadWrite mode.
-- The created file isn\'t deleted automatically, so you need to delete it manually.
--
-- The file is created with permissions such that only the current
-- user can read\/write it.
--
-- With some exceptions (see below), the file will be created securely
-- in the sense that an attacker should not be able to cause
-- openTempFile to overwrite another file on the filesystem using your
-- credentials, by putting symbolic links (on Unix) in the place where
-- the temporary file is to be created.  On Unix the @O_CREAT@ and
-- @O_EXCL@ flags are used to prevent this attack, but note that
-- @O_EXCL@ is sometimes not supported on NFS filesystems, so if you
-- rely on this behaviour it is best to use local filesystems only.
--
openTempFile :: FilePath   -- ^ Directory in which to create the file
             -> String     -- ^ File name template. If the template is \"foo.ext\" then
                           -- the created file will be \"fooXXX.ext\" where XXX is some
                           -- random number. Note that this should not contain any path
                           -- separator characters.
             -> IO (FilePath, Handle)
openTempFile tmp_dir template
    = openTempFile' "openTempFile" tmp_dir template False 0o600
-- | Like 'openTempFile', but opens the file in binary mode. See 'openBinaryFile' for more comments.
openBinaryTempFile :: FilePath -> String -> IO (FilePath, Handle)
openBinaryTempFile tmp_dir template
= openTempFile' "openBinaryTempFile" tmp_dir template True 0o600
-- | Like 'openTempFile', but uses the default file permissions
openTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openTempFileWithDefaultPermissions" tmp_dir template False 0o666
-- | Like 'openBinaryTempFile', but uses the default file permissions
openBinaryTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openBinaryTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openBinaryTempFileWithDefaultPermissions" tmp_dir template True 0o666
openTempFile' :: String -> FilePath -> String -> Bool -> CMode
-> IO (FilePath, Handle)
openTempFile' loc tmp_dir template binary mode
| pathSeparator template
= fail $ "openTempFile': Template string must not contain path separator characters: "++template
| otherwise = findTempName
where
-- We split off the last extension, so we can use .foo.ext files
-- for temporary files (hidden on Unix OSes). Unfortunately we're
-- below filepath in the hierarchy here.
(prefix, suffix) =
case break (== '.') $ reverse template of
First case : template contains no ' . 's . Just re - reverse it .
(rev_suffix, "") -> (reverse rev_suffix, "")
Second case : template contains at least one ' . ' . Strip the
-- dot from the prefix and prepend it to the suffix (if we don't
-- do this, the unique number will get added after the '.' and
-- thus be part of the extension, which is wrong.)
(rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix)
-- Otherwise, something is wrong, because (break (== '.')) should
-- always return a pair with either the empty string or a string
beginning with ' . ' as the second component .
_ -> errorWithoutStackTrace "bug in System.IO.openTempFile"
#if defined(mingw32_HOST_OS)
findTempName = do
let label = if null prefix then "ghc" else prefix
withCWString tmp_dir $ \c_tmp_dir ->
withCWString label $ \c_template ->
withCWString suffix $ \c_suffix ->
-- NOTE: revisit this when new I/O manager in place and use a UUID
-- based one when we are no longer MAX_PATH bound.
allocaBytes (sizeOf (undefined :: CWchar) * 260) $ \c_str -> do
res <- c_getTempFileNameErrorNo c_tmp_dir c_template c_suffix 0
c_str
if not res
then do errno <- getErrno
ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
else do filename <- peekCWString c_str
handleResults filename
handleResults filename = do
let oflags1 = rw_flags .|. o_EXCL
binary_flags
| binary = o_BINARY
| otherwise = 0
oflags = oflags1 .|. binary_flags
fd <- withFilePath filename $ \ f -> c_open f oflags mode
case fd < 0 of
True -> do errno <- getErrno
ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
False ->
do (fD,fd_type) <- FD.mkFD fd ReadWriteMode Nothing{-no stat-}
False{-is_socket-}
is_nonblock
enc <- getLocaleEncoding
h <- mkHandleFromFD fD fd_type filename ReadWriteMode
False{-set non-block-} (Just enc)
return (filename, h)
foreign import ccall "getTempFileNameErrorNo" c_getTempFileNameErrorNo
:: CWString -> CWString -> CWString -> CUInt -> Ptr CWchar -> IO Bool
pathSeparator :: String -> Bool
pathSeparator template = any (\x-> x == '/' || x == '\\') template
output_flags = std_flags
#else /* else mingw32_HOST_OS */
findTempName = do
rs <- rand_string
let filename = prefix ++ rs ++ suffix
filepath = tmp_dir `combine` filename
r <- openNewFile filepath binary mode
case r of
FileExists -> findTempName
OpenNewError errno -> ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
NewFileCreated fd -> do
(fD,fd_type) <- FD.mkFD fd ReadWriteMode Nothing{-no stat-}
False{-is_socket-}
is_nonblock
enc <- getLocaleEncoding
h <- mkHandleFromFD fD fd_type filepath ReadWriteMode False{-set non-block-} (Just enc)
return (filepath, h)
where
XXX bits copied from System . FilePath , since that 's not available here
combine a b
| null b = a
| null a = b
| pathSeparator [last a] = a ++ b
| otherwise = a ++ [pathSeparatorChar] ++ b
tempCounter :: IORef Int
tempCounter = unsafePerformIO $ newIORef 0
# NOINLINE tempCounter #
-- build large digit-alike number
rand_string :: IO String
rand_string = do
r1 <- c_getpid
r2 <- atomicModifyIORef tempCounter (\n -> (n+1, n))
return $ show r1 ++ "-" ++ show r2
data OpenNewFileResult
= NewFileCreated CInt
| FileExists
| OpenNewError Errno
openNewFile :: FilePath -> Bool -> CMode -> IO OpenNewFileResult
openNewFile filepath binary mode = do
let oflags1 = rw_flags .|. o_EXCL
binary_flags
| binary = o_BINARY
| otherwise = 0
oflags = oflags1 .|. binary_flags
fd <- withFilePath filepath $ \ f ->
c_open f oflags mode
if fd < 0
then do
errno <- getErrno
case errno of
_ | errno == eEXIST -> return FileExists
_ -> return (OpenNewError errno)
else return (NewFileCreated fd)
-- XXX Should use filepath library
pathSeparatorChar :: Char
pathSeparatorChar = '/'
pathSeparator :: String -> Bool
pathSeparator template = pathSeparatorChar `elem` template
output_flags = std_flags .|. o_CREAT
#endif /* mingw32_HOST_OS */
XXX Copied from GHC.Handle
std_flags, output_flags, rw_flags :: CInt
std_flags = o_NONBLOCK .|. o_NOCTTY
rw_flags = output_flags .|. o_RDWR
-- $locking
-- Implementations should enforce as far as possible, at least locally to the
-- Haskell process, multiple-reader single-writer locking on files.
-- That is, /there may either be many handles on the same file which manage input, or just one handle on the file which manages output/.  If any
-- open or semi-closed handle is managing a file for output, no new
-- handle can be allocated for that file.  If any open or semi-closed
-- handle is managing a file for input, new handles can only be allocated
-- if they do not manage output.  Whether two files are the same is
-- implementation-dependent, but they should normally be the same if they
-- have the same absolute path name and neither has been renamed, for
-- example.
--
-- /Warning/: the 'readFile' operation holds a semi-closed handle on
-- the file until the entire contents of the file have been consumed.
-- It follows that an attempt to write to a file (using 'writeFile', for
-- example) that was earlier opened by 'readFile' will usually result in
-- failure with 'System.IO.Error.isAlreadyInUseError'.
| null | https://raw.githubusercontent.com/chrisdone/prana/f2e45538937d326aff562b6d49296eaedd015662/prana-boot/packages/base-4.11.1.0/System/IO.hs | haskell | ---------------------------------------------------------------------------
|
Module : System.IO
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
---------------------------------------------------------------------------
* Files and handles
abstract, instance of: Eq, Show.
collector detects that it has become unreferenced by the program.
However, relying on this behaviour is not generally recommended:
the garbage collector is unpredictable. If possible, use
descriptors when they have run out, it is your responsibility to
ensure that this doesn't happen.
** Standard handles
| Three handles are allocated during program initialisation,
and are initially open.
* Opening and closing files
** Opening files
** Closing files
** Special cases
** File locking
$locking
* Operations on handles
** Determining and changing the size of a file
** Detecting the end of input
** Buffering operations
** Repositioning handles
abstract, instance of: Eq, Show.
** Handle properties
* Text input and output
** Text input
** Text output
** Special cases for standard input and output
* Binary input and output
* Temporary files
* Unicode encoding\/decoding
| A text-mode 'Handle' has an associated 'TextEncoding', which
The default 'TextEncoding' is the same as the default encoding
on your system, which is also available as 'localeEncoding'.
'localeEncoding' will be 'latin1'.)
Encoding and decoding errors are always detected and reported,
'readFile'), where a decoding error merely results in
termination of the character stream, as with other I/O errors.
** Unicode encodings
* Newline conversion
'\n'. However, in files and external character streams, a
newline may be represented by another character sequence, such
as '\r\n'.
specifies how to transate newline characters. The
separately, so that for instance you can translate '\r\n'
to '\n' on input, but leave newlines as '\n' on output.
'nativeNewlineMode', which does no translation on Unix systems,
Binary-mode 'Handle's do no newline translation at all.
-----------------------------------------------------------------------------
Standard IO
| Write a character to the standard output device
(same as 'hPutChar' 'stdout').
| Write a string to the standard output device
(same as 'hPutStr' 'stdout').
| The same as 'putStr', but adds a newline character.
| The 'print' function outputs a value of any printable type to the
standard output device.
Printable types are those that are instances of class 'Show'; 'print'
converts values to strings for output using the 'show' operation and
adds a newline.
| Read a character from the standard input device
| Read a line from the standard input device
| The 'getContents' operation returns all user input as a single string,
which is read lazily as it is needed
| The 'interact' function takes a function of type @String->String@
as its argument. The entire input from the standard input device is
passed to this function as its argument, and the resulting string is
output on the standard output device.
| The 'readFile' function reads a file and
returns the contents of the file as a string.
| The computation 'writeFile' @file str@ function writes the string @str@,
to the file @file@.
| The computation 'appendFile' @file str@ function appends the string @str@,
to the file @file@.
Note that 'writeFile' and 'appendFile' write a literal string
to a file. To write a value of any printable type, as with 'print',
| The 'readLn' function combines 'getLine' and 'readIO'.
| The 'readIO' function is similar to 'read' except that it signals
parse failure to the 'IO' monad instead of terminating the program.
This is the initial locale encoding: if it has been subsequently changed by
'GHC.IO.Encoding.setLocaleEncoding' this value will not reflect that change.
available for input from handle @hdl@.
This operation may fail with:
* 'System.IO.Error.isEOFError' if the end of file has been reached.
| Computation 'hPrint' @hdl t@ writes the string representation of @t@
and appends a newline.
This operation may fail with:
* 'System.IO.Error.isPermissionError' if another system resource limit would be exceeded.
| @'withFile' name mode act@ opens a file using 'openFile' and passes
closed on exit from 'withFile', whether by normal termination or by
raising an exception. If closing the handle raises an exception, then
this exception will be raised by 'withFile' rather than any exception
raised by 'act'.
| @'withBinaryFile' name mode act@ opens a file using 'openBinaryFile'
will be closed on exit from 'withBinaryFile', whether by normal
termination or by raising an exception.
---------------------------------------------------------------------------
fixIO
| The implementation of 'mfix' for 'IO'. If the function passed
to 'fixIO' inspects its argument, the resulting action will throw
computation a few times before it notices the loop, which is wrong.
performed in multiple threads safely, and to use unsafeDupableInterleaveIO
we'd need to be able to write to the variable in an IO context, which can
only be done using 'atomically', and 'atomically' is not allowed within
unsafePerformIO. We can't know if someone will try to use the result
See also System.IO.Unsafe.unsafeFixIO.
The created file isn\'t deleted automatically, so you need to delete it manually.
The file is created with permissions such that only the current
With some exceptions (see below), the file will be created securely
in the sense that an attacker should not be able to cause
openTempFile to overwrite another file on the filesystem using your
credentials, by putting symbolic links (on Unix) in the place where
rely on this behaviour it is best to use local filesystems only.
^ Directory in which to create the file
^ File name template. If the template is \"foo.ext\" then
the created file will be \"fooXXX.ext\" where XXX is some
random number. Note that this should not contain any path
separator characters.
| Like 'openTempFile', but opens the file in binary mode. See 'openBinaryFile' for more comments.
| Like 'openTempFile', but uses the default file permissions
| Like 'openBinaryTempFile', but uses the default file permissions
We split off the last extension, so we can use .foo.ext files
for temporary files (hidden on Unix OSes). Unfortunately we're
below filepath in the hierarchy here.
dot from the prefix and prepend it to the suffix (if we don't
do this, the unique number will get added after the '.' and
thus be part of the extension, which is wrong.)
Otherwise, something is wrong, because (break (== '.')) should
always return a pair with either the empty string or a string
NOTE: revisit this when new I/O manager in place and use a UUID
based one when we are no longer MAX_PATH bound.
no stat
is_socket
set non-block
no stat
is_socket
set non-block
build large digit-alike number
XXX Should use filepath library
$locking
Implementations should enforce as far as possible, at least locally to the
That is, /there may either be many handles on the same file which manage input, or just one handle on the file which manages output/. If any
open or semi-closed handle is managing a file for output, no new
handle can be allocated for that file. If any open or semi-closed
handle is managing a file for input, new handles can only be allocated
implementation-dependent, but they should normally be the same if they
have the same absolute path name and neither has been renamed, for
example.
/Warning/: the 'readFile' operation holds a semi-closed handle on
the file until the entire contents of the file have been consumed.
It follows that an attempt to write to a file (using 'writeFile', for
example) that was earlier opened by 'readFile' will usually result in | # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude , CApiFFI #
Copyright : ( c ) The University of Glasgow 2001
The standard IO library .
module System.IO (
* The IO monad
IO,
fixIO,
FilePath,
| GHC note : a ' Handle ' will be automatically closed when the garbage
an explicit ' ' to close ' Handle 's when they are no longer
required . GHC does not currently attempt to free up file
stdin, stdout, stderr,
withFile,
openFile,
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
hClose,
| These functions are also exported by the " Prelude " .
readFile,
writeFile,
appendFile,
hFileSize,
hSetFileSize,
hIsEOF,
isEOF,
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
hSetBuffering,
hGetBuffering,
hFlush,
hGetPosn,
hSetPosn,
hSeek,
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
hTell,
hIsOpen, hIsClosed,
hIsReadable, hIsWritable,
hIsSeekable,
* * Terminal operations ( not portable : GHC only )
hIsTerminalDevice,
hSetEcho,
hGetEcho,
* * Showing handle state ( not portable : GHC only )
hShow,
hWaitForInput,
hReady,
hGetChar,
hGetLine,
hLookAhead,
hGetContents,
hPutChar,
hPutStr,
hPutStrLn,
hPrint,
| These functions are also exported by the " Prelude " .
interact,
putChar,
putStr,
putStrLn,
print,
getChar,
getLine,
getContents,
readIO,
readLn,
withBinaryFile,
openBinaryFile,
hSetBinaryMode,
hPutBuf,
hGetBuf,
hGetBufSome,
hPutBufNonBlocking,
hGetBufNonBlocking,
openTempFile,
openBinaryTempFile,
openTempFileWithDefaultPermissions,
openBinaryTempFileWithDefaultPermissions,
is used to decode bytes into Unicode characters when reading ,
and encode Unicode characters into bytes when writing .
( GHC note : on Windows , we currently do not support double - byte
encodings ; if the console\ 's code page is unsupported , then
except during lazy I / O ( ' hGetContents ' , ' ' , and
hSetEncoding,
hGetEncoding,
TextEncoding,
latin1,
utf8, utf8_bom,
utf16, utf16le, utf16be,
utf32, utf32le, utf32be,
localeEncoding,
char8,
mkTextEncoding,
| In Haskell , a newline is always represented by the character
A text - mode ' Handle ' has an associated ' NewlineMode ' that
' NewlineMode ' specifies the input and output translation
The default ' NewlineMode ' for a ' Handle ' is
but translates ' \r\n ' to ' \n ' and back on Windows .
hSetNewlineMode,
Newline(..), nativeNewline,
NewlineMode(..),
noNewlineTranslation, universalNewlineMode, nativeNewlineMode,
) where
import Control.Exception.Base
import Data.Bits
import Data.Maybe
import Foreign.C.Error
#if defined(mingw32_HOST_OS)
import Foreign.C.String
import Foreign.Ptr
import Foreign.Marshal.Alloc
import Foreign.Storable
#endif
import Foreign.C.Types
import System.Posix.Internals
import System.Posix.Types
import GHC.Base
import GHC.List
#ifndef mingw32_HOST_OS
import GHC.IORef
#endif
import GHC.Num
import GHC.IO hiding ( bracket, onException )
import GHC.IO.IOMode
import GHC.IO.Handle.FD
import qualified GHC.IO.FD as FD
import GHC.IO.Handle
import GHC.IO.Handle.Text ( hGetBufSome, hPutStrLn )
import GHC.IO.Exception ( userError )
import GHC.IO.Encoding
import Text.Read
import GHC.Show
import GHC.MVar
putChar :: Char -> IO ()
putChar c = hPutChar stdout c
putStr :: String -> IO ()
putStr s = hPutStr stdout s
putStrLn :: String -> IO ()
putStrLn s = hPutStrLn stdout s
For example , a program to print the first 20 integers and their
powers of 2 could be written as :
> main = print ( [ ( n , 2^n ) | n < - [ 0 .. 19 ] ] )
print :: Show a => a -> IO ()
print x = putStrLn (show x)
( same as ' hGetChar ' ' stdin ' ) .
getChar :: IO Char
getChar = hGetChar stdin
( same as ' hGetLine ' ' stdin ' ) .
getLine :: IO String
getLine = hGetLine stdin
( same as ' hGetContents ' ' stdin ' ) .
getContents :: IO String
getContents = hGetContents stdin
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
The file is read lazily , on demand , as with ' ' .
readFile :: FilePath -> IO String
readFile name = openFile name ReadMode >>= hGetContents
writeFile :: FilePath -> String -> IO ()
writeFile f txt = withFile f WriteMode (\ hdl -> hPutStr hdl txt)
use the ' show ' function to convert the value to a string first .
> main = appendFile " squares " ( show [ ( x , x*x ) | x < - [ 0,0.1 .. 2 ] ] )
appendFile :: FilePath -> String -> IO ()
appendFile f txt = withFile f AppendMode (\ hdl -> hPutStr hdl txt)
readLn :: Read a => IO a
readLn = do l <- getLine
r <- readIO l
return r
readIO :: Read a => String -> IO a
readIO s = case (do { (x,t) <- reads s ;
("","") <- lex t ;
return x }) of
[x] -> return x
[] -> ioError (userError "Prelude.readIO: no parse")
_ -> ioError (userError "Prelude.readIO: ambiguous parse")
| The Unicode encoding of the current locale
localeEncoding :: TextEncoding
localeEncoding = initLocaleEncoding
| Computation ' hReady ' @hdl@ indicates whether at least one item is
hReady :: Handle -> IO Bool
hReady h = hWaitForInput h 0
given by the ' shows ' function to the file or channel managed by @hdl@
* ' System . ' if the device is full ; or
hPrint :: Show a => Handle -> a -> IO ()
hPrint hdl = hPutStrLn hdl . show
the resulting handle to the computation @act@. The handle will be
withFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withFile name mode = bracket (openFile name mode) hClose
and passes the resulting handle to the computation @act@. The handle
withBinaryFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withBinaryFile name mode = bracket (openBinaryFile name mode) hClose
' FixIOException ' .
fixIO :: (a -> IO a) -> IO a
fixIO k = do
m <- newEmptyMVar
ans <- unsafeDupableInterleaveIO
(readMVar m `catch` \BlockedIndefinitelyOnMVar ->
throwIO FixIOException)
result <- k ans
putMVar m result
return result
NOTE : we do our own explicit black holing here , because GHC 's lazy
blackholing is n't enough . In an infinite loop , GHC may run the IO
NOTE2 : the explicit black - holing with an IORef ran into trouble
with multiple threads ( see # 5421 ) , so now we use an MVar . We used
to use takeMVar with unsafeInterleaveIO . This , however , uses noDuplicate # ,
which is not particularly cheap . Better to use readMVar , which can be
to avoid the noDuplicate cost .
What we 'd ideally want is probably an IVar , but we do n't quite have those .
STM TVars look like an option at first , but I do n't think they are :
of fixIO with unsafePerformIO !
| The function creates a temporary file in ReadWrite mode .
user can it .
the temporary file is to be created . On Unix the @O_CREAT@ and
@O_EXCL@ flags are used to prevent this attack , but note that
@O_EXCL@ is sometimes not supported on NFS filesystems , so if you
-> IO (FilePath, Handle)
openTempFile tmp_dir template
= openTempFile' "openTempFile" tmp_dir template False 0o600
openBinaryTempFile :: FilePath -> String -> IO (FilePath, Handle)
openBinaryTempFile tmp_dir template
= openTempFile' "openBinaryTempFile" tmp_dir template True 0o600
openTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openTempFileWithDefaultPermissions" tmp_dir template False 0o666
openBinaryTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openBinaryTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openBinaryTempFileWithDefaultPermissions" tmp_dir template True 0o666
openTempFile' :: String -> FilePath -> String -> Bool -> CMode
-> IO (FilePath, Handle)
openTempFile' loc tmp_dir template binary mode
| pathSeparator template
= fail $ "openTempFile': Template string must not contain path separator characters: "++template
| otherwise = findTempName
where
(prefix, suffix) =
case break (== '.') $ reverse template of
First case : template contains no ' . 's . Just re - reverse it .
(rev_suffix, "") -> (reverse rev_suffix, "")
Second case : template contains at least one ' . ' . Strip the
(rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix)
beginning with ' . ' as the second component .
_ -> errorWithoutStackTrace "bug in System.IO.openTempFile"
#if defined(mingw32_HOST_OS)
findTempName = do
let label = if null prefix then "ghc" else prefix
withCWString tmp_dir $ \c_tmp_dir ->
withCWString label $ \c_template ->
withCWString suffix $ \c_suffix ->
allocaBytes (sizeOf (undefined :: CWchar) * 260) $ \c_str -> do
res <- c_getTempFileNameErrorNo c_tmp_dir c_template c_suffix 0
c_str
if not res
then do errno <- getErrno
ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
else do filename <- peekCWString c_str
handleResults filename
handleResults filename = do
let oflags1 = rw_flags .|. o_EXCL
binary_flags
| binary = o_BINARY
| otherwise = 0
oflags = oflags1 .|. binary_flags
fd <- withFilePath filename $ \ f -> c_open f oflags mode
case fd < 0 of
True -> do errno <- getErrno
ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
False ->
is_nonblock
enc <- getLocaleEncoding
h <- mkHandleFromFD fD fd_type filename ReadWriteMode
return (filename, h)
foreign import ccall "getTempFileNameErrorNo" c_getTempFileNameErrorNo
:: CWString -> CWString -> CWString -> CUInt -> Ptr CWchar -> IO Bool
pathSeparator :: String -> Bool
pathSeparator template = any (\x-> x == '/' || x == '\\') template
output_flags = std_flags
#else /* else mingw32_HOST_OS */
findTempName = do
rs <- rand_string
let filename = prefix ++ rs ++ suffix
filepath = tmp_dir `combine` filename
r <- openNewFile filepath binary mode
case r of
FileExists -> findTempName
OpenNewError errno -> ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
NewFileCreated fd -> do
is_nonblock
enc <- getLocaleEncoding
return (filepath, h)
where
XXX bits copied from System . FilePath , since that 's not available here
combine a b
| null b = a
| null a = b
| pathSeparator [last a] = a ++ b
| otherwise = a ++ [pathSeparatorChar] ++ b
tempCounter :: IORef Int
tempCounter = unsafePerformIO $ newIORef 0
# NOINLINE tempCounter #
rand_string :: IO String
rand_string = do
r1 <- c_getpid
r2 <- atomicModifyIORef tempCounter (\n -> (n+1, n))
return $ show r1 ++ "-" ++ show r2
data OpenNewFileResult
= NewFileCreated CInt
| FileExists
| OpenNewError Errno
openNewFile :: FilePath -> Bool -> CMode -> IO OpenNewFileResult
openNewFile filepath binary mode = do
let oflags1 = rw_flags .|. o_EXCL
binary_flags
| binary = o_BINARY
| otherwise = 0
oflags = oflags1 .|. binary_flags
fd <- withFilePath filepath $ \ f ->
c_open f oflags mode
if fd < 0
then do
errno <- getErrno
case errno of
_ | errno == eEXIST -> return FileExists
_ -> return (OpenNewError errno)
else return (NewFileCreated fd)
pathSeparatorChar :: Char
pathSeparatorChar = '/'
pathSeparator :: String -> Bool
pathSeparator template = pathSeparatorChar `elem` template
output_flags = std_flags .|. o_CREAT
#endif /* mingw32_HOST_OS */
XXX Copied from GHC.Handle
std_flags, output_flags, rw_flags :: CInt
std_flags = o_NONBLOCK .|. o_NOCTTY
rw_flags = output_flags .|. o_RDWR
Haskell process , multiple - reader single - writer locking on files .
if they do not manage output . Whether two files are the same is
failure with ' System . IO.Error.isAlreadyInUseError ' .
|
7ba1b61549b67cb75cc7104721fc85c91658b1cc3e55ffa16e28bfb845f51f08 | ghcjs/jsaddle-dom | ScriptProcessorNode.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.ScriptProcessorNode
(audioProcess, getBufferSize, ScriptProcessorNode(..),
gTypeScriptProcessorNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/ScriptProcessorNode.onaudioprocess Mozilla ScriptProcessorNode.onaudioprocess documentation >
audioProcess :: EventName ScriptProcessorNode AudioProcessingEvent
audioProcess = unsafeEventName (toJSString "audioprocess")
| < -US/docs/Web/API/ScriptProcessorNode.bufferSize Mozilla ScriptProcessorNode.bufferSize documentation >
getBufferSize :: (MonadDOM m) => ScriptProcessorNode -> m Int
getBufferSize self
= liftDOM (round <$> ((self ^. js "bufferSize") >>= valToNumber))
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/ScriptProcessorNode.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.ScriptProcessorNode
(audioProcess, getBufferSize, ScriptProcessorNode(..),
gTypeScriptProcessorNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/ScriptProcessorNode.onaudioprocess Mozilla ScriptProcessorNode.onaudioprocess documentation >
audioProcess :: EventName ScriptProcessorNode AudioProcessingEvent
audioProcess = unsafeEventName (toJSString "audioprocess")
| < -US/docs/Web/API/ScriptProcessorNode.bufferSize Mozilla ScriptProcessorNode.bufferSize documentation >
getBufferSize :: (MonadDOM m) => ScriptProcessorNode -> m Int
getBufferSize self
= liftDOM (round <$> ((self ^. js "bufferSize") >>= valToNumber))
|
67d7640e40e31995867e9b63c68ffae87e29a9470eab89de7b9b51190367958f | racket/rhombus-prototype | class-together.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre)
"provide.rkt"
"name-root.rkt"
"definition.rkt"
"expression.rkt"
"space.rkt"
(submod "class.rkt" for-together)
"interface.rkt"
(submod "interface.rkt" for-together)
(only-in "class-together-parse.rkt"
rhombus-together)
"forwarding-sequence.rkt"
"parse.rkt"
"parens.rkt")
(provide (for-spaces (rhombus/namespace
rhombus/space
#f)
class))
(define-space-syntax class
(space-syntax rhombus/class))
(define-name-root class
#:fields
(together))
(define-syntax class class-transformer)
(define-syntax together
(definition-transformer
(lambda (stx)
(syntax-parse stx
[(_ (_::block defn ...))
(with-syntax ([(defn ...)
(for/list ([defn (in-list (syntax->list #'(defn ...)))])
(syntax-parse defn
#:datum-literals (group block)
[((~and tag group) id . rest)
#:when (free-identifier=? #'id #'class)
#`(tag #,(datum->syntax #'here 'class_for_together #'id #'id) . rest)]
[((~and tag group) id . rest)
#:when (free-identifier=? #'id #'interface)
#`(tag #,(datum->syntax #'here 'interface_for_together #'id #'id) . rest)]
[_
(raise-syntax-error #f
"not a class or interface form"
stx
defn)]))])
#'((rhombus-mixed-forwarding-sequence (together-finish) rhombus-together
(rhombus-definition defn) ...)))]))))
(define-syntax (together-finish stx)
(syntax-parse stx
#:literals (begin)
[(_ (_ (begin defn ... last-defn)) ...)
#`(begin
defn ... ...
last-defn ...)]))
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/4e66c1361bdde51c2df9332644800baead49e86f/rhombus/private/class-together.rkt | racket | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre)
"provide.rkt"
"name-root.rkt"
"definition.rkt"
"expression.rkt"
"space.rkt"
(submod "class.rkt" for-together)
"interface.rkt"
(submod "interface.rkt" for-together)
(only-in "class-together-parse.rkt"
rhombus-together)
"forwarding-sequence.rkt"
"parse.rkt"
"parens.rkt")
(provide (for-spaces (rhombus/namespace
rhombus/space
#f)
class))
(define-space-syntax class
(space-syntax rhombus/class))
(define-name-root class
#:fields
(together))
(define-syntax class class-transformer)
(define-syntax together
(definition-transformer
(lambda (stx)
(syntax-parse stx
[(_ (_::block defn ...))
(with-syntax ([(defn ...)
(for/list ([defn (in-list (syntax->list #'(defn ...)))])
(syntax-parse defn
#:datum-literals (group block)
[((~and tag group) id . rest)
#:when (free-identifier=? #'id #'class)
#`(tag #,(datum->syntax #'here 'class_for_together #'id #'id) . rest)]
[((~and tag group) id . rest)
#:when (free-identifier=? #'id #'interface)
#`(tag #,(datum->syntax #'here 'interface_for_together #'id #'id) . rest)]
[_
(raise-syntax-error #f
"not a class or interface form"
stx
defn)]))])
#'((rhombus-mixed-forwarding-sequence (together-finish) rhombus-together
(rhombus-definition defn) ...)))]))))
(define-syntax (together-finish stx)
(syntax-parse stx
#:literals (begin)
[(_ (_ (begin defn ... last-defn)) ...)
#`(begin
defn ... ...
last-defn ...)]))
| |
49a7200d517dbad16e4ab4b69521d8dd8e23b81500f17ec4f2d304b8e00e8ec3 | jiangpengnju/htdp2e | recursion-that-ignores-structure.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname recursion-that-ignores-structure) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
; [List-of Number] -> [List-of Number]
; creates a list of numbers with the same numbers as alon,
; sorted in ascending order
(check-expect (quick-sort '(3 2 1)) '(1 2 3))
(check-expect (quick-sort '(11 8 14 7)) '(7 8 11 14))
(check-expect (quick-sort '(11 9 2 18 12 14 4 1))
'(1 2 4 9 11 12 14 18))
(define (quick-sort alon)
(cond
[(empty? alon) '()]
[else (local ((define pivot (first alon))
(define smaller-items (filter (lambda (x) (< x pivot)) alon))
(define larger-items (filter (lambda (x) (> x pivot)) alon)))
(append (quick-sort smaller-items)
(list pivot)
(quick-sort larger-items)))])) | null | https://raw.githubusercontent.com/jiangpengnju/htdp2e/d41555519fbb378330f75c88141f72b00a9ab1d3/generative-recursion/non-stand-recursion/recursion-that-ignores-structure.rkt | racket | about the language level of this file in a form that our tools can easily process.
[List-of Number] -> [List-of Number]
creates a list of numbers with the same numbers as alon,
sorted in ascending order | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname recursion-that-ignores-structure) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(check-expect (quick-sort '(3 2 1)) '(1 2 3))
(check-expect (quick-sort '(11 8 14 7)) '(7 8 11 14))
(check-expect (quick-sort '(11 9 2 18 12 14 4 1))
'(1 2 4 9 11 12 14 18))
(define (quick-sort alon)
(cond
[(empty? alon) '()]
[else (local ((define pivot (first alon))
(define smaller-items (filter (lambda (x) (< x pivot)) alon))
(define larger-items (filter (lambda (x) (> x pivot)) alon)))
(append (quick-sort smaller-items)
(list pivot)
(quick-sort larger-items)))])) |
bdfd16fa2da70a2e3e3261fded61b373157d22ddf37e9f2ec8d6d8c592d90eb2 | atlas-engineer/nyxt | package.lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(nyxt:define-package :nyxt/tests
(:use :lisp-unit2))
| null | https://raw.githubusercontent.com/atlas-engineer/nyxt/aa27fc47314046062d6f4e7ef5d8c95a62c2858f/tests/package.lisp | lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(nyxt:define-package :nyxt/tests
(:use :lisp-unit2))
| |
6e218ac12d66871269d82da608c847e305c1194928619f215234fada70999dba | kepler16/gx.cljc | errors.cljc | (ns k16.gx.beta.errors)
(defrecord ErrorContext [error-type node-key
node-contents signal-key
causes])
(def ^:dynamic *err-ctx*
"Error context is used for creating/throwing exceptions with contextual data"
(map->ErrorContext {:error-type :general :causes []}))
(defn gather-error-messages
[ex]
#?(:clj (->> ex
(iterate ex-cause)
(take-while some?)
(mapv ex-message)
(interpose "; ")
(apply str))
:cljs (cond
(instance? cljs.core/ExceptionInfo ex)
(ex-message ex)
(instance? js/Error ex)
(ex-message ex)
:else ex)))
(defn add-err-cause
"Adds cause to error context, evaluates to nil"
[cause]
(set! *err-ctx* (update *err-ctx* :causes conj cause))
nil)
(defn gx-err-data
([internal-data]
(gx-err-data nil internal-data))
([message internal-data]
(gx-err-data message internal-data nil))
([message internal-data cause]
(cond-> {}
:always (into (filter (fn [[_ v]] v) *err-ctx*))
message (assoc :message message)
internal-data (assoc :internal-data internal-data)
cause (update :causes conj {:title (ex-message cause)
:data (ex-data cause)
:exception cause}))))
(defn throw-gx-err
([message]
(throw-gx-err message nil nil))
([message internal-data]
(throw-gx-err message internal-data nil))
([message internal-data cause]
(throw (ex-info message (gx-err-data message internal-data cause)))))
(defn ex->gx-err-data
[ex]
(->> (ex-data ex)
(merge *err-ctx*)
(filter (fn [[_ v]] v))
(into {:message (ex-message ex)})))
(defn- stringify
[token]
(cond
(string? token) token
(nil? token) nil
:else (pr-str token)))
(defn- tokenize
[& token-pairs]
(assert (even? (count token-pairs))
"tokenize accepts only even number of forms")
(apply str (transduce (comp
(map stringify)
(partition-all 2)
(filter (comp seq second))
(map (fn [[a b]] [a (str "'" b "'")]))
(interpose ", "))
(completing conj flatten)
token-pairs)))
(defn- cause->str
[{:keys [data exception]}]
(str "cause" (when data (str "(data = " data ")")) ": "
(gather-error-messages exception)))
(defn humanize-error
[{:keys [node-key signal-key message causes]} & rest-of-error]
(let [rest-of-error (filter seq rest-of-error)]
(apply str (concat [(or message "Error") ": "
(tokenize "node = " node-key
"signal = " signal-key)]
(when (seq rest-of-error)
(conj (interpose "\n\t• " rest-of-error)
"\n\t• "))
(when (seq causes)
(conj (interpose "\n\t• " (map cause->str causes))
"\n\t• "))))))
(defmulti humanize :error-type)
(defn humanize-all
[errors]
(->> errors
(map humanize)
(interpose "\n")
(apply str)))
(defmethod humanize :general
[error]
(humanize-error error))
(defmethod humanize :context
[{:keys [internal-data] :as error}]
(apply humanize-error error (:errors internal-data)))
(comment
(println
(humanize-all [{:internal-data
{:errors
(list {:foo ["disallowed key"]} "circular :gx/start -> :gx/stop -> :gx/start")},
:message "GX Context failure",
:error-type :context}])))
(defmethod humanize :normalize-node
[{:keys [internal-data] :as error}]
(humanize-error error (tokenize
"form = " (:form-def internal-data)
"token = " (:token internal-data))))
(comment
(println
(humanize
{:error-type :normalize-node,
:node-key :d,
:node-contents '(throw (ex-info "foo" (gx/ref :a))),
:signal-key nil,
:message "Special forms are not supported",
:internal-data
{:form-def '(throw (ex-info "foo" (gx/ref :a))), :token 'throw}})))
(defmethod humanize :deps-sort
[{:keys [internal-data] :as error}]
(apply humanize-error error (:errors internal-data)))
(comment
(println
(humanize-all
[{:internal-data {:errors ["circular :a -> :b -> :a" "circular :c -> :c"]},
:message "Dependency errors",
:error-type :deps-sort,
:signal-key :gx/start}
{:internal-data
{:errors '("circular :a -> :b -> :a")},
:message "Dependency errors",
:error-type :deps-sort,
:signal-key :gx/start}])))
(defmethod humanize :node-signal
[{:keys [internal-data] :as error}]
(humanize-error
error (when-let [{:keys [ex-message dep-node-keys]} internal-data]
(tokenize "error = " ex-message
"deps-nodes = " dep-node-keys))))
(comment
(println
(humanize-all
[{:internal-data {:dep-node-keys '(:c)},
:message "Failure in dependencies",
:error-type :node-signal,
:node-key :d,
:node-contents '(gx/ref :c),
:signal-key :gx/start}
{:internal-data {:dep-node-keys '(:b)},
:message "Failure in dependencies",
:error-type :node-signal,
:node-key :c,
:node-contents '(gx/ref :b),
:signal-key :gx/start}
{:internal-data
{:ex-message "Divide by zero",
:args {:props {:a 1}, :value nil}},
:message "Signal processor error",
:error-type :node-signal,
:node-key :b,
:node-contents '(/ (gx/ref :a) 0),
:signal-key :gx/start}])))
(defmethod humanize :props-validation
[{:keys [internal-data] :as error}]
(humanize-error
error (when-let [{:keys [schema-error]} internal-data]
(tokenize "schema-error = " schema-error))))
(comment
(println
(humanize {:error-type :props-validation,
:message "Props validation error",
:node-key :comp,
:node-contents
#:gx{:component 'k16.gx.beta.core-test/props-validation-component,
:start #:gx{:props-fn 'k16.gx.beta.core-test/my-props-fn}},
:signal-key :gx/start,
:internal-data
{:props-value {:name "John",
:last-name "Doe",
:full-name "John Doe"},
:props-schema [:map [:foo string?]],
:schema-error {:foo ["missing required key"]}}})))
(defmethod humanize :normalize-node-component
[{:keys [internal-data] :as error}]
(humanize-error
error (tokenize "schema-error = " (:schema-error internal-data)
"node-contents = " (:node-contents error))))
(comment
(println
(humanize {:message "Component schema error",
:error-type :normalize-node-component,
:node-key :c,
:node-contents
#:gx{:component 'k16.gx.beta.core-test/invalid-component-2},
:internal-data
{:component #:gx{:start #:gx{:processor "non callable val"}}
:component-schema [:map-of keyword?]
:schema-error
#{[:gx/start
#:gx{:processor ["should be an fn" "should be a keyword"]}]}}}))
(println
(humanize {:message "Component could not be resolved",
:error-type :normalize-node-component,
:node-key :z,
:node-contents #:gx{:component 'non.existend/component},
:internal-data {:component 'non.existend/component}})))
| null | https://raw.githubusercontent.com/kepler16/gx.cljc/623f563c557403943f11d63198534d4969362d4a/src/k16/gx/beta/errors.cljc | clojure | (ns k16.gx.beta.errors)
(defrecord ErrorContext [error-type node-key
node-contents signal-key
causes])
(def ^:dynamic *err-ctx*
"Error context is used for creating/throwing exceptions with contextual data"
(map->ErrorContext {:error-type :general :causes []}))
(defn gather-error-messages
[ex]
#?(:clj (->> ex
(iterate ex-cause)
(take-while some?)
(mapv ex-message)
(interpose "; ")
(apply str))
:cljs (cond
(instance? cljs.core/ExceptionInfo ex)
(ex-message ex)
(instance? js/Error ex)
(ex-message ex)
:else ex)))
(defn add-err-cause
"Adds cause to error context, evaluates to nil"
[cause]
(set! *err-ctx* (update *err-ctx* :causes conj cause))
nil)
(defn gx-err-data
([internal-data]
(gx-err-data nil internal-data))
([message internal-data]
(gx-err-data message internal-data nil))
([message internal-data cause]
(cond-> {}
:always (into (filter (fn [[_ v]] v) *err-ctx*))
message (assoc :message message)
internal-data (assoc :internal-data internal-data)
cause (update :causes conj {:title (ex-message cause)
:data (ex-data cause)
:exception cause}))))
(defn throw-gx-err
([message]
(throw-gx-err message nil nil))
([message internal-data]
(throw-gx-err message internal-data nil))
([message internal-data cause]
(throw (ex-info message (gx-err-data message internal-data cause)))))
(defn ex->gx-err-data
[ex]
(->> (ex-data ex)
(merge *err-ctx*)
(filter (fn [[_ v]] v))
(into {:message (ex-message ex)})))
(defn- stringify
[token]
(cond
(string? token) token
(nil? token) nil
:else (pr-str token)))
(defn- tokenize
[& token-pairs]
(assert (even? (count token-pairs))
"tokenize accepts only even number of forms")
(apply str (transduce (comp
(map stringify)
(partition-all 2)
(filter (comp seq second))
(map (fn [[a b]] [a (str "'" b "'")]))
(interpose ", "))
(completing conj flatten)
token-pairs)))
(defn- cause->str
[{:keys [data exception]}]
(str "cause" (when data (str "(data = " data ")")) ": "
(gather-error-messages exception)))
(defn humanize-error
[{:keys [node-key signal-key message causes]} & rest-of-error]
(let [rest-of-error (filter seq rest-of-error)]
(apply str (concat [(or message "Error") ": "
(tokenize "node = " node-key
"signal = " signal-key)]
(when (seq rest-of-error)
(conj (interpose "\n\t• " rest-of-error)
"\n\t• "))
(when (seq causes)
(conj (interpose "\n\t• " (map cause->str causes))
"\n\t• "))))))
(defmulti humanize :error-type)
(defn humanize-all
[errors]
(->> errors
(map humanize)
(interpose "\n")
(apply str)))
(defmethod humanize :general
[error]
(humanize-error error))
(defmethod humanize :context
[{:keys [internal-data] :as error}]
(apply humanize-error error (:errors internal-data)))
(comment
(println
(humanize-all [{:internal-data
{:errors
(list {:foo ["disallowed key"]} "circular :gx/start -> :gx/stop -> :gx/start")},
:message "GX Context failure",
:error-type :context}])))
(defmethod humanize :normalize-node
[{:keys [internal-data] :as error}]
(humanize-error error (tokenize
"form = " (:form-def internal-data)
"token = " (:token internal-data))))
(comment
(println
(humanize
{:error-type :normalize-node,
:node-key :d,
:node-contents '(throw (ex-info "foo" (gx/ref :a))),
:signal-key nil,
:message "Special forms are not supported",
:internal-data
{:form-def '(throw (ex-info "foo" (gx/ref :a))), :token 'throw}})))
(defmethod humanize :deps-sort
[{:keys [internal-data] :as error}]
(apply humanize-error error (:errors internal-data)))
(comment
(println
(humanize-all
[{:internal-data {:errors ["circular :a -> :b -> :a" "circular :c -> :c"]},
:message "Dependency errors",
:error-type :deps-sort,
:signal-key :gx/start}
{:internal-data
{:errors '("circular :a -> :b -> :a")},
:message "Dependency errors",
:error-type :deps-sort,
:signal-key :gx/start}])))
(defmethod humanize :node-signal
[{:keys [internal-data] :as error}]
(humanize-error
error (when-let [{:keys [ex-message dep-node-keys]} internal-data]
(tokenize "error = " ex-message
"deps-nodes = " dep-node-keys))))
(comment
(println
(humanize-all
[{:internal-data {:dep-node-keys '(:c)},
:message "Failure in dependencies",
:error-type :node-signal,
:node-key :d,
:node-contents '(gx/ref :c),
:signal-key :gx/start}
{:internal-data {:dep-node-keys '(:b)},
:message "Failure in dependencies",
:error-type :node-signal,
:node-key :c,
:node-contents '(gx/ref :b),
:signal-key :gx/start}
{:internal-data
{:ex-message "Divide by zero",
:args {:props {:a 1}, :value nil}},
:message "Signal processor error",
:error-type :node-signal,
:node-key :b,
:node-contents '(/ (gx/ref :a) 0),
:signal-key :gx/start}])))
(defmethod humanize :props-validation
[{:keys [internal-data] :as error}]
(humanize-error
error (when-let [{:keys [schema-error]} internal-data]
(tokenize "schema-error = " schema-error))))
(comment
(println
(humanize {:error-type :props-validation,
:message "Props validation error",
:node-key :comp,
:node-contents
#:gx{:component 'k16.gx.beta.core-test/props-validation-component,
:start #:gx{:props-fn 'k16.gx.beta.core-test/my-props-fn}},
:signal-key :gx/start,
:internal-data
{:props-value {:name "John",
:last-name "Doe",
:full-name "John Doe"},
:props-schema [:map [:foo string?]],
:schema-error {:foo ["missing required key"]}}})))
(defmethod humanize :normalize-node-component
[{:keys [internal-data] :as error}]
(humanize-error
error (tokenize "schema-error = " (:schema-error internal-data)
"node-contents = " (:node-contents error))))
(comment
(println
(humanize {:message "Component schema error",
:error-type :normalize-node-component,
:node-key :c,
:node-contents
#:gx{:component 'k16.gx.beta.core-test/invalid-component-2},
:internal-data
{:component #:gx{:start #:gx{:processor "non callable val"}}
:component-schema [:map-of keyword?]
:schema-error
#{[:gx/start
#:gx{:processor ["should be an fn" "should be a keyword"]}]}}}))
(println
(humanize {:message "Component could not be resolved",
:error-type :normalize-node-component,
:node-key :z,
:node-contents #:gx{:component 'non.existend/component},
:internal-data {:component 'non.existend/component}})))
| |
9369034c63775a4c7921596079ac14944176f871a07c00c4809be2931047fbb1 | keera-studios/haskell-game-programming | Connect.hs | import System.CWiid
main :: IO ()
main = do
putStrLn "Initializing WiiMote. Please press 1+2 to connect."
wm <- cwiidOpen
case wm of
Just _aWiimote -> putStrLn "Connected"
Nothing -> putStrLn "Could not connect"
| null | https://raw.githubusercontent.com/keera-studios/haskell-game-programming/d4998decb4664a978c87199347f53b420d763b1e/tutorials/hardware/wiimote/tutorial1/Connect.hs | haskell | import System.CWiid
main :: IO ()
main = do
putStrLn "Initializing WiiMote. Please press 1+2 to connect."
wm <- cwiidOpen
case wm of
Just _aWiimote -> putStrLn "Connected"
Nothing -> putStrLn "Could not connect"
| |
06ca6673f820c76bed4124a7ecc1779affe496505a9d76dd0cc09f862378c29e | krestenkrab/hanoidb | hanoidb_tests.erl | %% ----------------------------------------------------------------------------
%%
hanoidb : LSM - trees ( Log - Structured Merge Trees ) Indexed Storage
%%
Copyright 2011 - 2012 ( c ) .
/
%%
Copyright 2012 ( c ) Basho Technologies , Inc. All Rights Reserved .
%% /
%%
This file is provided to you under the Apache License , Version 2.0 ( the
%% "License"); you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
%% License for the specific language governing permissions and limitations
%% under the License.
%%
%% ----------------------------------------------------------------------------
-module(hanoidb_tests).
-include("include/hanoidb.hrl").
-include("src/hanoidb.hrl").
-ifdef(TEST).
-ifdef(TRIQ).
-include_lib("triq/include/triq.hrl").
-include_lib("triq/include/triq_statem.hrl").
-else.
-include_lib("proper/include/proper.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-ifdef(PROPER).
-behaviour(proper_statem).
-endif.
-compile(export_all).
-export([command/1, initial_state/0,
next_state/3, postcondition/3,
precondition/2]).
-ifdef(pre18).
-define(OTP_DICT, dict()).
-else.
-define(OTP_DICT, dict:dict()).
-endif.
-record(tree, { elements = dict:new() :: ?OTP_DICT }).
-record(state, { open = dict:new() :: ?OTP_DICT,
closed = dict:new() :: ?OTP_DICT}).
-define(SERVER, hanoidb_drv).
full_test_() ->
{setup, spawn, fun () -> ok end, fun (_) -> ok end,
[
?_test(test_tree_simple_1()),
?_test(test_tree_simple_2()),
?_test(test_tree_simple_4()),
?_test(test_tree_simple_5())
]}.
longer_tree_test_() ->
{setup,
spawn,
fun () -> ok end,
fun (_) -> ok end,
[
{timeout, 300, ?_test(test_tree())}
]}.
longer_qc_test_() ->
{setup,
spawn,
fun () -> ok end,
fun (_) -> ok end,
[
{timeout, 120, ?_test(test_qc())}
]}.
-ifdef(TRIQ).
test_qc() ->
[?assertEqual(true, triq:module(?MODULE))].
-else.
qc_opts() -> [{numtests, 800}].
test_qc() ->
[?assertEqual([], proper:module(?MODULE, qc_opts()))].
-endif.
%% Generators
%% ----------------------------------------------------------------------
-define(NUM_TREES, 10).
%% Generate a name for a btree
g_btree_name() ->
?LET(I, choose(1,?NUM_TREES),
btree_name(I)).
%% Generate a key for the Tree
g_key() ->
binary().
%% Generate a value for the Tree
g_value() ->
binary().
g_fail_key() ->
?LET(T, choose(1,999999999999),
term_to_binary(T)).
g_open_tree(Open) ->
oneof(dict:fetch_keys(Open)).
%% Pick a name of a non-empty Btree
g_non_empty_btree(Open) ->
?LET(TreesWithKeys, dict:filter(fun(_K, #tree { elements = D}) ->
dict:size(D) > 0
end,
Open),
oneof(dict:fetch_keys(TreesWithKeys))).
g_existing_key(Name, Open) ->
#tree { elements = Elems } = dict:fetch(Name, Open),
oneof(dict:fetch_keys(Elems)).
g_non_existing_key(Name, Open) ->
?SUCHTHAT(Key, g_fail_key(),
begin
#tree { elements = D } = dict:fetch(Name, Open),
not dict:is_key(Key, D)
end).
g_fold_operation() ->
oneof([{fun (K, V, Acc) -> [{K, V} | Acc] end, []}]).
btree_name(I) ->
"Btree_" ++ integer_to_list(I).
test
%% ----------------------------------------------------------------------
initial_state() ->
ClosedBTrees = lists:foldl(fun(N, Closed) ->
dict:store(btree_name(N),
#tree { },
Closed)
end,
dict:new(),
lists:seq(1,?NUM_TREES)),
#state { closed=ClosedBTrees }.
command(#state { open = Open, closed = Closed } = S) ->
frequency(
[ {20, {call, ?SERVER, open, [oneof(dict:fetch_keys(Closed))]}}
|| closed_dicts(S)]
++ [ {20, {call, ?SERVER, close, [oneof(dict:fetch_keys(Open))]}}
|| open_dicts(S)]
++ [ {2000, {call, ?SERVER, put, cmd_put_args(S)}}
|| open_dicts(S)]
++ [ {1500, {call, ?SERVER, get_fail, cmd_get_fail_args(S)}}
|| open_dicts(S)]
++ [ {1500, {call, ?SERVER, get_exist, cmd_get_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
++ [ {500, {call, ?SERVER, delete_exist, cmd_delete_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
++ [ {125, {call, ?SERVER, fold_range, cmd_sync_fold_range_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
).
%% Precondition (abstract)
precondition(S, {call, ?SERVER, fold_range, [_Tree, _F, _A0, Range]}) ->
is_valid_range(Range) andalso open_dicts(S) andalso open_dicts_with_keys(S);
precondition(S, {call, ?SERVER, delete_exist, [_Name, _K]}) ->
open_dicts(S) andalso open_dicts_with_keys(S);
precondition(S, {call, ?SERVER, get_fail, [_Name, _K]}) ->
open_dicts(S);
precondition(S, {call, ?SERVER, get_exist, [_Name, _K]}) ->
open_dicts(S) andalso open_dicts_with_keys(S);
precondition(#state { open = Open }, {call, ?SERVER, put, [Name, _K, _V]}) ->
dict:is_key(Name, Open);
precondition(#state { open = Open, closed = Closed },
{call, ?SERVER, open, [Name]}) ->
(not (dict:is_key(Name, Open))) and (dict:is_key(Name, Closed));
precondition(#state { open = Open, closed = Closed },
{call, ?SERVER, close, [Name]}) ->
(dict:is_key(Name, Open)) and (not dict:is_key(Name, Closed)).
is_valid_range(#key_range{ from_key=FromKey, from_inclusive=FromIncl,
to_key=ToKey, to_inclusive=ToIncl,
limit=Limit })
when
(Limit == undefined) orelse (Limit > 0),
is_binary(FromKey),
(ToKey == undefined) orelse is_binary(ToKey),
FromKey =< ToKey,
is_boolean(FromIncl),
is_boolean(ToIncl)
->
if (FromKey == ToKey) ->
(FromIncl == true) and (ToIncl == true);
true ->
true
end;
is_valid_range(_) ->
false.
Next state manipulation ( abstract / concrete )
next_state(S, _Res, {call, ?SERVER, fold_range, [_Tree, _F, _A0, _Range]}) ->
S;
next_state(S, _Res, {call, ?SERVER, get_fail, [_Name, _Key]}) ->
S;
next_state(S, _Res, {call, ?SERVER, get_exist, [_Name, _Key]}) ->
S;
next_state(#state { open = Open} = S, _Res,
{call, ?SERVER, delete_exist, [Name, Key]}) ->
S#state { open = dict:update(Name,
fun(#tree { elements = Dict}) ->
#tree { elements =
dict:erase(Key, Dict)}
end,
Open)};
next_state(#state { open = Open} = S, _Res,
{call, ?SERVER, put, [Name, Key, Value]}) ->
S#state { open = dict:update(
Name,
fun(#tree { elements = Dict}) ->
#tree { elements =
dict:store(Key, Value, Dict) }
end,
Open)};
next_state(#state { open = Open, closed=Closed} = S,
_Res, {call, ?SERVER, open, [Name]}) ->
S#state { open = dict:store(Name, dict:fetch(Name, Closed) , Open),
closed = dict:erase(Name, Closed) };
next_state(#state { open = Open, closed=Closed} = S, _Res,
{call, ?SERVER, close, [Name]}) ->
S#state { closed = dict:store(Name, dict:fetch(Name, Open) , Closed),
open = dict:erase(Name, Open) }.
%% Postcondition check (concrete)
postcondition(#state { open = Open},
{call, ?SERVER, fold_range, [Tree, F, A0, Range]}, Result) ->
#tree { elements = TDict } = dict:fetch(Tree, Open),
DictResult = lists:sort(dict_range_query(TDict, F, A0, Range)),
CallResult = lists:sort(Result),
DictResult == CallResult;
postcondition(_S,
{call, ?SERVER, get_fail, [_Name, _Key]}, not_found) ->
true;
postcondition(#state { open = Open },
{call, ?SERVER, get_exist, [Name, Key]}, {ok, Value}) ->
#tree { elements = Elems } = dict:fetch(Name, Open),
dict:fetch(Key, Elems) == Value;
postcondition(_S, {call, ?SERVER, delete_exist, [_Name, _Key]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, put, [_Name, _Key, _Value]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, open, [_Name]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, close, [_Name]}, ok) ->
true;
postcondition(_State, _Call, _Result) ->
: error_report([{not_matching_any_postcondition , _ State , _ Call , _ Result } ] ) ,
false.
%% Main property. Running a random set of commands is in agreement
%% with a dict.
prop_dict_agree() ->
?FORALL(Cmds, commands(?MODULE),
?TRAPEXIT(
begin
hanoidb_drv:start_link(),
{History,State,Result} = run_commands(?MODULE, Cmds),
hanoidb_drv:stop(),
cleanup_test_trees(State),
?WHENFAIL(io:format("History: ~w\nState: ~w\nResult: ~w\n",
[History,State,Result]),
Result =:= ok)
end)).
%% UNIT TESTS
%% ----------------------------------------------------------------------
test_tree_simple_1() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<>>, <<"data", 77:128>>),
{ok, <<"data", 77:128>>} = hanoidb:get(Tree, <<>>),
ok = hanoidb:close(Tree).
test_tree_simple_2() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<"ã">>, <<"µ">>),
{ok, <<"µ">>} = hanoidb:get(Tree, <<"ã">>),
ok = hanoidb:delete(Tree, <<"ã">>),
not_found = hanoidb:get(Tree, <<"ã">>),
ok = hanoidb:close(Tree).
test_tree_simple_4() ->
Key = <<56,11,62,42,35,163,16,100,9,224,8,228,130,94,198,2,126,117,243,
1,122,175,79,159,212,177,30,153,71,91,85,233,41,199,190,58,3,
173,220,9>>,
Value = <<212,167,12,6,105,152,17,80,243>>,
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, Key, Value),
?assertEqual({ok, Value}, hanoidb:get(Tree, Key)),
ok = hanoidb:close(Tree).
test_tree_simple_5() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<"foo">>, <<"bar">>, 2),
{ok, <<"bar">>} = hanoidb:get(Tree, <<"foo">>),
ok = timer:sleep(3000),
not_found = hanoidb:get(Tree, <<"foo">>),
ok = hanoidb:close(Tree).
test_tree() ->
{ok, Tree} = hanoidb:open("simple2"),
lists:foldl(fun(N,_) ->
ok = hanoidb:put(Tree, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(2,10000,1)),
io : , " INSERT DONE 1 ~ n " , [ ] ) ,
lists:foldl(fun(N,_) ->
ok = hanoidb:put(Tree, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(4000,6000,1)),
io : , " INSERT DONE 2 ~ n " , [ ] ) ,
hanoidb:delete(Tree, <<1500:128>>),
io : , " DELETE DONE 3 ~ n " , [ ] ) ,
{Time1,{ok,Count1}} = timer:tc(?MODULE, run_fold, [Tree,1000,2000,9]),
error_logger : info_msg("time to fold : ~p / sec ( time=~p , count=~p)~n " , [ 1000000/(Time1 / Count1 ) , Time1/1000000 , Count1 ] ) ,
{Time2,{ok,Count2}} = timer:tc(?MODULE, run_fold, [Tree,1000,2000,1000]),
% error_logger:info_msg("time to fold: ~p/sec (time=~p, count=~p)~n", [1000000/(Time2/Count2), Time2/1000000, Count2]),
ok = hanoidb:close(Tree).
run_fold(Tree,From,To,Limit) ->
F = fun(<<N:128>>, _Value, {N, C}) ->
{N + 1, C + 1};
(<<1501:128>>, _Value, {1500, C}) ->
{1502, C + 1}
end,
{_, Count} = hanoidb:fold_range(Tree, F,
{From, 0},
#key_range{from_key= <<From:128>>, to_key= <<(To+1):128>>, limit=Limit}),
{ok, Count}.
%% Command processing
%% ----------------------------------------------------------------------
cmd_close_args(#state { open = Open }) ->
oneof(dict:fetch_keys(Open)).
cmd_put_args(#state { open = Open }) ->
?LET({Name, Key, Value},
{oneof(dict:fetch_keys(Open)), g_key(), g_value()},
[Name, Key, Value]).
cmd_get_fail_args(#state { open = Open}) ->
?LET(Name, g_open_tree(Open),
?LET(Key, g_non_existing_key(Name, Open),
[Name, Key])).
cmd_get_args(#state { open = Open}) ->
?LET(Name, g_non_empty_btree(Open),
?LET(Key, g_existing_key(Name, Open),
[Name, Key])).
cmd_delete_args(#state { open = Open}) ->
?LET(Name, g_non_empty_btree(Open),
?LET(Key, g_existing_key(Name, Open),
[Name, Key])).
cmd_sync_range_args(#state { open = Open }) ->
?LET(Tree, g_non_empty_btree(Open),
?LET({K1, K2}, {g_existing_key(Tree, Open),
g_existing_key(Tree, Open)},
[Tree, #key_range{from_key=K1, to_key=K2}])).
cmd_sync_fold_range_args(State) ->
?LET([Tree, Range], cmd_sync_range_args(State),
?LET({F, Acc0}, g_fold_operation(),
[Tree, F, Acc0, Range])).
%% Context management
%% ----------------------------------------------------------------------
cleanup_test_trees(#state { open = Open, closed = Closed }) ->
[cleanup_tree(N) || N <- dict:fetch_keys(Open)],
[cleanup_tree(N) || N <- dict:fetch_keys(Closed)].
cleanup_tree(Tree) ->
case file:list_dir(Tree) of
{error, enoent} ->
ok;
{ok, FileNames} ->
[ok = file:delete(filename:join([Tree, Fname]))
|| Fname <- FileNames],
file:del_dir(Tree)
end.
%% Various Helper routines
%% ----------------------------------------------------------------------
open_dicts_with_keys(#state { open = Open}) ->
lists:any(fun({_, #tree { elements = D}}) ->
dict:size(D) > 0
end,
dict:to_list(Open)).
open_dicts(#state { open = Open}) ->
dict:size(Open) > 0.
closed_dicts(#state { closed = Closed}) ->
dict:size(Closed) > 0.
dict_range_query(Dict, Fun, Acc0, Range) ->
KVs = dict_range_query(Dict, Range),
lists:foldl(fun({K, V}, Acc) ->
Fun(K, V, Acc)
end,
Acc0,
KVs).
dict_range_query(Dict, Range) ->
[{K, V} || {K, V} <- dict:to_list(Dict),
?KEY_IN_RANGE(K, Range)].
| null | https://raw.githubusercontent.com/krestenkrab/hanoidb/68333fa51a6fdf27834fc84f42d4421f9627e3b7/test/hanoidb_tests.erl | erlang | ----------------------------------------------------------------------------
/
"License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
----------------------------------------------------------------------------
Generators
----------------------------------------------------------------------
Generate a name for a btree
Generate a key for the Tree
Generate a value for the Tree
Pick a name of a non-empty Btree
----------------------------------------------------------------------
Precondition (abstract)
Postcondition check (concrete)
Main property. Running a random set of commands is in agreement
with a dict.
UNIT TESTS
----------------------------------------------------------------------
error_logger:info_msg("time to fold: ~p/sec (time=~p, count=~p)~n", [1000000/(Time2/Count2), Time2/1000000, Count2]),
Command processing
----------------------------------------------------------------------
Context management
----------------------------------------------------------------------
Various Helper routines
---------------------------------------------------------------------- | hanoidb : LSM - trees ( Log - Structured Merge Trees ) Indexed Storage
Copyright 2011 - 2012 ( c ) .
/
Copyright 2012 ( c ) Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License , Version 2.0 ( the
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(hanoidb_tests).
-include("include/hanoidb.hrl").
-include("src/hanoidb.hrl").
-ifdef(TEST).
-ifdef(TRIQ).
-include_lib("triq/include/triq.hrl").
-include_lib("triq/include/triq_statem.hrl").
-else.
-include_lib("proper/include/proper.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-ifdef(PROPER).
-behaviour(proper_statem).
-endif.
-compile(export_all).
-export([command/1, initial_state/0,
next_state/3, postcondition/3,
precondition/2]).
-ifdef(pre18).
-define(OTP_DICT, dict()).
-else.
-define(OTP_DICT, dict:dict()).
-endif.
-record(tree, { elements = dict:new() :: ?OTP_DICT }).
-record(state, { open = dict:new() :: ?OTP_DICT,
closed = dict:new() :: ?OTP_DICT}).
-define(SERVER, hanoidb_drv).
full_test_() ->
{setup, spawn, fun () -> ok end, fun (_) -> ok end,
[
?_test(test_tree_simple_1()),
?_test(test_tree_simple_2()),
?_test(test_tree_simple_4()),
?_test(test_tree_simple_5())
]}.
longer_tree_test_() ->
{setup,
spawn,
fun () -> ok end,
fun (_) -> ok end,
[
{timeout, 300, ?_test(test_tree())}
]}.
longer_qc_test_() ->
{setup,
spawn,
fun () -> ok end,
fun (_) -> ok end,
[
{timeout, 120, ?_test(test_qc())}
]}.
-ifdef(TRIQ).
test_qc() ->
[?assertEqual(true, triq:module(?MODULE))].
-else.
qc_opts() -> [{numtests, 800}].
test_qc() ->
[?assertEqual([], proper:module(?MODULE, qc_opts()))].
-endif.
-define(NUM_TREES, 10).
g_btree_name() ->
?LET(I, choose(1,?NUM_TREES),
btree_name(I)).
g_key() ->
binary().
g_value() ->
binary().
g_fail_key() ->
?LET(T, choose(1,999999999999),
term_to_binary(T)).
g_open_tree(Open) ->
oneof(dict:fetch_keys(Open)).
g_non_empty_btree(Open) ->
?LET(TreesWithKeys, dict:filter(fun(_K, #tree { elements = D}) ->
dict:size(D) > 0
end,
Open),
oneof(dict:fetch_keys(TreesWithKeys))).
g_existing_key(Name, Open) ->
#tree { elements = Elems } = dict:fetch(Name, Open),
oneof(dict:fetch_keys(Elems)).
g_non_existing_key(Name, Open) ->
?SUCHTHAT(Key, g_fail_key(),
begin
#tree { elements = D } = dict:fetch(Name, Open),
not dict:is_key(Key, D)
end).
g_fold_operation() ->
oneof([{fun (K, V, Acc) -> [{K, V} | Acc] end, []}]).
btree_name(I) ->
"Btree_" ++ integer_to_list(I).
test
initial_state() ->
ClosedBTrees = lists:foldl(fun(N, Closed) ->
dict:store(btree_name(N),
#tree { },
Closed)
end,
dict:new(),
lists:seq(1,?NUM_TREES)),
#state { closed=ClosedBTrees }.
command(#state { open = Open, closed = Closed } = S) ->
frequency(
[ {20, {call, ?SERVER, open, [oneof(dict:fetch_keys(Closed))]}}
|| closed_dicts(S)]
++ [ {20, {call, ?SERVER, close, [oneof(dict:fetch_keys(Open))]}}
|| open_dicts(S)]
++ [ {2000, {call, ?SERVER, put, cmd_put_args(S)}}
|| open_dicts(S)]
++ [ {1500, {call, ?SERVER, get_fail, cmd_get_fail_args(S)}}
|| open_dicts(S)]
++ [ {1500, {call, ?SERVER, get_exist, cmd_get_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
++ [ {500, {call, ?SERVER, delete_exist, cmd_delete_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
++ [ {125, {call, ?SERVER, fold_range, cmd_sync_fold_range_args(S)}}
|| open_dicts(S), open_dicts_with_keys(S)]
).
precondition(S, {call, ?SERVER, fold_range, [_Tree, _F, _A0, Range]}) ->
is_valid_range(Range) andalso open_dicts(S) andalso open_dicts_with_keys(S);
precondition(S, {call, ?SERVER, delete_exist, [_Name, _K]}) ->
open_dicts(S) andalso open_dicts_with_keys(S);
precondition(S, {call, ?SERVER, get_fail, [_Name, _K]}) ->
open_dicts(S);
precondition(S, {call, ?SERVER, get_exist, [_Name, _K]}) ->
open_dicts(S) andalso open_dicts_with_keys(S);
precondition(#state { open = Open }, {call, ?SERVER, put, [Name, _K, _V]}) ->
dict:is_key(Name, Open);
precondition(#state { open = Open, closed = Closed },
{call, ?SERVER, open, [Name]}) ->
(not (dict:is_key(Name, Open))) and (dict:is_key(Name, Closed));
precondition(#state { open = Open, closed = Closed },
{call, ?SERVER, close, [Name]}) ->
(dict:is_key(Name, Open)) and (not dict:is_key(Name, Closed)).
is_valid_range(#key_range{ from_key=FromKey, from_inclusive=FromIncl,
to_key=ToKey, to_inclusive=ToIncl,
limit=Limit })
when
(Limit == undefined) orelse (Limit > 0),
is_binary(FromKey),
(ToKey == undefined) orelse is_binary(ToKey),
FromKey =< ToKey,
is_boolean(FromIncl),
is_boolean(ToIncl)
->
if (FromKey == ToKey) ->
(FromIncl == true) and (ToIncl == true);
true ->
true
end;
is_valid_range(_) ->
false.
Next state manipulation ( abstract / concrete )
next_state(S, _Res, {call, ?SERVER, fold_range, [_Tree, _F, _A0, _Range]}) ->
S;
next_state(S, _Res, {call, ?SERVER, get_fail, [_Name, _Key]}) ->
S;
next_state(S, _Res, {call, ?SERVER, get_exist, [_Name, _Key]}) ->
S;
next_state(#state { open = Open} = S, _Res,
{call, ?SERVER, delete_exist, [Name, Key]}) ->
S#state { open = dict:update(Name,
fun(#tree { elements = Dict}) ->
#tree { elements =
dict:erase(Key, Dict)}
end,
Open)};
next_state(#state { open = Open} = S, _Res,
{call, ?SERVER, put, [Name, Key, Value]}) ->
S#state { open = dict:update(
Name,
fun(#tree { elements = Dict}) ->
#tree { elements =
dict:store(Key, Value, Dict) }
end,
Open)};
next_state(#state { open = Open, closed=Closed} = S,
_Res, {call, ?SERVER, open, [Name]}) ->
S#state { open = dict:store(Name, dict:fetch(Name, Closed) , Open),
closed = dict:erase(Name, Closed) };
next_state(#state { open = Open, closed=Closed} = S, _Res,
{call, ?SERVER, close, [Name]}) ->
S#state { closed = dict:store(Name, dict:fetch(Name, Open) , Closed),
open = dict:erase(Name, Open) }.
postcondition(#state { open = Open},
{call, ?SERVER, fold_range, [Tree, F, A0, Range]}, Result) ->
#tree { elements = TDict } = dict:fetch(Tree, Open),
DictResult = lists:sort(dict_range_query(TDict, F, A0, Range)),
CallResult = lists:sort(Result),
DictResult == CallResult;
postcondition(_S,
{call, ?SERVER, get_fail, [_Name, _Key]}, not_found) ->
true;
postcondition(#state { open = Open },
{call, ?SERVER, get_exist, [Name, Key]}, {ok, Value}) ->
#tree { elements = Elems } = dict:fetch(Name, Open),
dict:fetch(Key, Elems) == Value;
postcondition(_S, {call, ?SERVER, delete_exist, [_Name, _Key]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, put, [_Name, _Key, _Value]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, open, [_Name]}, ok) ->
true;
postcondition(_S, {call, ?SERVER, close, [_Name]}, ok) ->
true;
postcondition(_State, _Call, _Result) ->
: error_report([{not_matching_any_postcondition , _ State , _ Call , _ Result } ] ) ,
false.
prop_dict_agree() ->
?FORALL(Cmds, commands(?MODULE),
?TRAPEXIT(
begin
hanoidb_drv:start_link(),
{History,State,Result} = run_commands(?MODULE, Cmds),
hanoidb_drv:stop(),
cleanup_test_trees(State),
?WHENFAIL(io:format("History: ~w\nState: ~w\nResult: ~w\n",
[History,State,Result]),
Result =:= ok)
end)).
test_tree_simple_1() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<>>, <<"data", 77:128>>),
{ok, <<"data", 77:128>>} = hanoidb:get(Tree, <<>>),
ok = hanoidb:close(Tree).
test_tree_simple_2() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<"ã">>, <<"µ">>),
{ok, <<"µ">>} = hanoidb:get(Tree, <<"ã">>),
ok = hanoidb:delete(Tree, <<"ã">>),
not_found = hanoidb:get(Tree, <<"ã">>),
ok = hanoidb:close(Tree).
test_tree_simple_4() ->
Key = <<56,11,62,42,35,163,16,100,9,224,8,228,130,94,198,2,126,117,243,
1,122,175,79,159,212,177,30,153,71,91,85,233,41,199,190,58,3,
173,220,9>>,
Value = <<212,167,12,6,105,152,17,80,243>>,
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, Key, Value),
?assertEqual({ok, Value}, hanoidb:get(Tree, Key)),
ok = hanoidb:close(Tree).
test_tree_simple_5() ->
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<"foo">>, <<"bar">>, 2),
{ok, <<"bar">>} = hanoidb:get(Tree, <<"foo">>),
ok = timer:sleep(3000),
not_found = hanoidb:get(Tree, <<"foo">>),
ok = hanoidb:close(Tree).
test_tree() ->
{ok, Tree} = hanoidb:open("simple2"),
lists:foldl(fun(N,_) ->
ok = hanoidb:put(Tree, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(2,10000,1)),
io : , " INSERT DONE 1 ~ n " , [ ] ) ,
lists:foldl(fun(N,_) ->
ok = hanoidb:put(Tree, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(4000,6000,1)),
io : , " INSERT DONE 2 ~ n " , [ ] ) ,
hanoidb:delete(Tree, <<1500:128>>),
io : , " DELETE DONE 3 ~ n " , [ ] ) ,
{Time1,{ok,Count1}} = timer:tc(?MODULE, run_fold, [Tree,1000,2000,9]),
error_logger : info_msg("time to fold : ~p / sec ( time=~p , count=~p)~n " , [ 1000000/(Time1 / Count1 ) , Time1/1000000 , Count1 ] ) ,
{Time2,{ok,Count2}} = timer:tc(?MODULE, run_fold, [Tree,1000,2000,1000]),
ok = hanoidb:close(Tree).
run_fold(Tree,From,To,Limit) ->
F = fun(<<N:128>>, _Value, {N, C}) ->
{N + 1, C + 1};
(<<1501:128>>, _Value, {1500, C}) ->
{1502, C + 1}
end,
{_, Count} = hanoidb:fold_range(Tree, F,
{From, 0},
#key_range{from_key= <<From:128>>, to_key= <<(To+1):128>>, limit=Limit}),
{ok, Count}.
cmd_close_args(#state { open = Open }) ->
oneof(dict:fetch_keys(Open)).
cmd_put_args(#state { open = Open }) ->
?LET({Name, Key, Value},
{oneof(dict:fetch_keys(Open)), g_key(), g_value()},
[Name, Key, Value]).
cmd_get_fail_args(#state { open = Open}) ->
?LET(Name, g_open_tree(Open),
?LET(Key, g_non_existing_key(Name, Open),
[Name, Key])).
cmd_get_args(#state { open = Open}) ->
?LET(Name, g_non_empty_btree(Open),
?LET(Key, g_existing_key(Name, Open),
[Name, Key])).
cmd_delete_args(#state { open = Open}) ->
?LET(Name, g_non_empty_btree(Open),
?LET(Key, g_existing_key(Name, Open),
[Name, Key])).
cmd_sync_range_args(#state { open = Open }) ->
?LET(Tree, g_non_empty_btree(Open),
?LET({K1, K2}, {g_existing_key(Tree, Open),
g_existing_key(Tree, Open)},
[Tree, #key_range{from_key=K1, to_key=K2}])).
cmd_sync_fold_range_args(State) ->
?LET([Tree, Range], cmd_sync_range_args(State),
?LET({F, Acc0}, g_fold_operation(),
[Tree, F, Acc0, Range])).
cleanup_test_trees(#state { open = Open, closed = Closed }) ->
[cleanup_tree(N) || N <- dict:fetch_keys(Open)],
[cleanup_tree(N) || N <- dict:fetch_keys(Closed)].
cleanup_tree(Tree) ->
case file:list_dir(Tree) of
{error, enoent} ->
ok;
{ok, FileNames} ->
[ok = file:delete(filename:join([Tree, Fname]))
|| Fname <- FileNames],
file:del_dir(Tree)
end.
open_dicts_with_keys(#state { open = Open}) ->
lists:any(fun({_, #tree { elements = D}}) ->
dict:size(D) > 0
end,
dict:to_list(Open)).
open_dicts(#state { open = Open}) ->
dict:size(Open) > 0.
closed_dicts(#state { closed = Closed}) ->
dict:size(Closed) > 0.
dict_range_query(Dict, Fun, Acc0, Range) ->
KVs = dict_range_query(Dict, Range),
lists:foldl(fun({K, V}, Acc) ->
Fun(K, V, Acc)
end,
Acc0,
KVs).
dict_range_query(Dict, Range) ->
[{K, V} || {K, V} <- dict:to_list(Dict),
?KEY_IN_RANGE(K, Range)].
|
8abb38c29ce8d0a8b6d3f2988615492b23ca6982cd2ab7ca90ce297cc0e8c105 | clojure-interop/aws-api | AmazonTextractAsync.clj | (ns com.amazonaws.services.textract.AmazonTextractAsync
"Interface for accessing Amazon Textract asynchronously. Each asynchronous method will return a Java Future object
overloads which accept an AsyncHandler can be used to receive
notification when an asynchronous operation completes.
Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
AbstractAmazonTextractAsync instead.
Amazon Textract detects and analyzes text in documents and converts it into machine-readable text. This is the API
reference documentation for Amazon Textract."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.textract AmazonTextractAsync]))
(defn analyze-document-async
"Analyzes an input document for relationships between detected items.
The types of information returned are as follows:
Words and lines that are related to nearby lines and words. The related information is returned in two
Block objects each of type KEY_VALUE_SET: a KEY Block object and a VALUE Block object. For
example, Name: Ana Silva Carolina contains a key and value. Name: is the key. Ana Silva
Carolina is the value.
Table and table cell data. A TABLE Block object contains information about a detected table. A CELL Block object
is returned for each cell in a table.
Selectable elements such as checkboxes and radio buttons. A SELECTION_ELEMENT Block object contains information
about a selectable element.
Lines and words of text. A LINE Block object contains one or more WORD Block objects.
You can choose which type of analysis to perform by specifying the FeatureTypes list.
The output is returned in a list of BLOCK objects.
AnalyzeDocument is a synchronous operation. To analyze documents asynchronously, use
StartDocumentAnalysis.
For more information, see Document Text Analysis.
analyze-document-request - `com.amazonaws.services.textract.model.AnalyzeDocumentRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the AnalyzeDocument operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.AnalyzeDocumentResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.AnalyzeDocumentRequest analyze-document-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.analyzeDocumentAsync analyze-document-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.AnalyzeDocumentRequest analyze-document-request]
(-> this (.analyzeDocumentAsync analyze-document-request))))
(defn detect-document-text-async
"Detects text in the input document. Amazon Textract can detect lines of text and the words that make up a line of
text. The input document must be an image in JPG or PNG format. DetectDocumentText returns the
detected text in an array of Block objects.
Each document page has as an associated Block of type PAGE. Each PAGE Block object is
the parent of LINE Block objects that represent the lines of detected text on a page. A LINE
Block object is a parent for each word that makes up the line. Words are represented by
Block objects of type WORD.
DetectDocumentText is a synchronous operation. To analyze documents asynchronously, use
StartDocumentTextDetection.
For more information, see Document Text Detection.
detect-document-text-request - `com.amazonaws.services.textract.model.DetectDocumentTextRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DetectDocumentText operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.DetectDocumentTextResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.DetectDocumentTextRequest detect-document-text-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.detectDocumentTextAsync detect-document-text-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.DetectDocumentTextRequest detect-document-text-request]
(-> this (.detectDocumentTextAsync detect-document-text-request))))
(defn get-document-analysis-async
"Gets the results for an Amazon Textract asynchronous operation that analyzes text in a document.
You start asynchronous text analysis by calling StartDocumentAnalysis, which returns a job identifier (
JobId). When the text analysis operation finishes, Amazon Textract publishes a completion status to
the Amazon Simple Notification Service (Amazon SNS) topic that's registered in the initial call to
StartDocumentAnalysis. To get the results of the text-detection operation, first check that the
status value published to the Amazon SNS topic is SUCCEEDED. If so, call
GetDocumentAnalysis, and pass the job identifier (JobId) from the initial call to
StartDocumentAnalysis.
GetDocumentAnalysis returns an array of Block objects. The following types of information are
returned:
Words and lines that are related to nearby lines and words. The related information is returned in two
Block objects each of type KEY_VALUE_SET: a KEY Block object and a VALUE Block object. For
example, Name: Ana Silva Carolina contains a key and value. Name: is the key. Ana Silva
Carolina is the value.
Table and table cell data. A TABLE Block object contains information about a detected table. A CELL Block object
is returned for each cell in a table.
Selectable elements such as checkboxes and radio buttons. A SELECTION_ELEMENT Block object contains information
about a selectable element.
Lines and words of text. A LINE Block object contains one or more WORD Block objects.
Use the MaxResults parameter to limit the number of blocks returned. If there are more results than
specified in MaxResults, the value of NextToken in the operation response contains a
pagination token for getting the next set of results. To get the next page of results, call
GetDocumentAnalysis, and populate the NextToken request parameter with the token value
that's returned from the previous call to GetDocumentAnalysis.
For more information, see Document Text Analysis.
get-document-analysis-request - `com.amazonaws.services.textract.model.GetDocumentAnalysisRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the GetDocumentAnalysis operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.GetDocumentAnalysisResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentAnalysisRequest get-document-analysis-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.getDocumentAnalysisAsync get-document-analysis-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentAnalysisRequest get-document-analysis-request]
(-> this (.getDocumentAnalysisAsync get-document-analysis-request))))
(defn get-document-text-detection-async
"Gets the results for an Amazon Textract asynchronous operation that detects text in a document. Amazon Textract
can detect lines of text and the words that make up a line of text.
You start asynchronous text detection by calling StartDocumentTextDetection, which returns a job
identifier (JobId). When the text detection operation finishes, Amazon Textract publishes a
completion status to the Amazon Simple Notification Service (Amazon SNS) topic that's registered in the initial
call to StartDocumentTextDetection. To get the results of the text-detection operation, first check
that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call
GetDocumentTextDetection, and pass the job identifier (JobId) from the initial call to
StartDocumentTextDetection.
GetDocumentTextDetection returns an array of Block objects.
Each document page has as an associated Block of type PAGE. Each PAGE Block object is
the parent of LINE Block objects that represent the lines of detected text on a page. A LINE
Block object is a parent for each word that makes up the line. Words are represented by
Block objects of type WORD.
Use the MaxResults parameter to limit the number of blocks that are returned. If there are more results than
specified in MaxResults, the value of NextToken in the operation response contains a
pagination token for getting the next set of results. To get the next page of results, call
GetDocumentTextDetection, and populate the NextToken request parameter with the token
value that's returned from the previous call to GetDocumentTextDetection.
For more information, see Document Text Detection.
get-document-text-detection-request - `com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the GetDocumentTextDetection operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.GetDocumentTextDetectionResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest get-document-text-detection-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.getDocumentTextDetectionAsync get-document-text-detection-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest get-document-text-detection-request]
(-> this (.getDocumentTextDetectionAsync get-document-text-detection-request))))
(defn start-document-analysis-async
"Starts asynchronous analysis of an input document for relationships between detected items such as key and value
pairs, tables, and selection elements.
StartDocumentAnalysis can analyze text in documents that are in JPG, PNG, and PDF format. The
documents are stored in an Amazon S3 bucket. Use DocumentLocation to specify the bucket name and file name
of the document.
StartDocumentAnalysis returns a job identifier (JobId) that you use to get the results
of the operation. When text analysis is finished, Amazon Textract publishes a completion status to the Amazon
Simple Notification Service (Amazon SNS) topic that you specify in NotificationChannel. To get the
results of the text analysis operation, first check that the status value published to the Amazon SNS topic is
SUCCEEDED. If so, call GetDocumentAnalysis, and pass the job identifier (JobId)
from the initial call to StartDocumentAnalysis.
For more information, see Document Text Analysis.
start-document-analysis-request - `com.amazonaws.services.textract.model.StartDocumentAnalysisRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the StartDocumentAnalysis operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.StartDocumentAnalysisResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentAnalysisRequest start-document-analysis-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.startDocumentAnalysisAsync start-document-analysis-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentAnalysisRequest start-document-analysis-request]
(-> this (.startDocumentAnalysisAsync start-document-analysis-request))))
(defn start-document-text-detection-async
"Starts the asynchronous detection of text in a document. Amazon Textract can detect lines of text and the words
that make up a line of text.
StartDocumentTextDetection can analyze text in documents that are in JPG, PNG, and PDF format. The
documents are stored in an Amazon S3 bucket. Use DocumentLocation to specify the bucket name and file name
of the document.
StartTextDetection returns a job identifier (JobId) that you use to get the results of
the operation. When text detection is finished, Amazon Textract publishes a completion status to the Amazon
Simple Notification Service (Amazon SNS) topic that you specify in NotificationChannel. To get the
results of the text detection operation, first check that the status value published to the Amazon SNS topic is
SUCCEEDED. If so, call GetDocumentTextDetection, and pass the job identifier (
JobId) from the initial call to StartDocumentTextDetection.
For more information, see Document Text Detection.
start-document-text-detection-request - `com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the StartDocumentTextDetection operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.StartDocumentTextDetectionResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest start-document-text-detection-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.startDocumentTextDetectionAsync start-document-text-detection-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest start-document-text-detection-request]
(-> this (.startDocumentTextDetectionAsync start-document-text-detection-request))))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.textract/src/com/amazonaws/services/textract/AmazonTextractAsync.clj | clojure | (ns com.amazonaws.services.textract.AmazonTextractAsync
"Interface for accessing Amazon Textract asynchronously. Each asynchronous method will return a Java Future object
overloads which accept an AsyncHandler can be used to receive
notification when an asynchronous operation completes.
Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
AbstractAmazonTextractAsync instead.
Amazon Textract detects and analyzes text in documents and converts it into machine-readable text. This is the API
reference documentation for Amazon Textract."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.textract AmazonTextractAsync]))
(defn analyze-document-async
"Analyzes an input document for relationships between detected items.
The types of information returned are as follows:
Words and lines that are related to nearby lines and words. The related information is returned in two
Block objects each of type KEY_VALUE_SET: a KEY Block object and a VALUE Block object. For
example, Name: Ana Silva Carolina contains a key and value. Name: is the key. Ana Silva
Carolina is the value.
Table and table cell data. A TABLE Block object contains information about a detected table. A CELL Block object
is returned for each cell in a table.
Selectable elements such as checkboxes and radio buttons. A SELECTION_ELEMENT Block object contains information
about a selectable element.
Lines and words of text. A LINE Block object contains one or more WORD Block objects.
You can choose which type of analysis to perform by specifying the FeatureTypes list.
The output is returned in a list of BLOCK objects.
AnalyzeDocument is a synchronous operation. To analyze documents asynchronously, use
StartDocumentAnalysis.
For more information, see Document Text Analysis.
analyze-document-request - `com.amazonaws.services.textract.model.AnalyzeDocumentRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the AnalyzeDocument operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.AnalyzeDocumentResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.AnalyzeDocumentRequest analyze-document-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.analyzeDocumentAsync analyze-document-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.AnalyzeDocumentRequest analyze-document-request]
(-> this (.analyzeDocumentAsync analyze-document-request))))
(defn detect-document-text-async
"Detects text in the input document. Amazon Textract can detect lines of text and the words that make up a line of
text. The input document must be an image in JPG or PNG format. DetectDocumentText returns the
detected text in an array of Block objects.
Each document page has as an associated Block of type PAGE. Each PAGE Block object is
the parent of LINE Block objects that represent the lines of detected text on a page. A LINE
Block object is a parent for each word that makes up the line. Words are represented by
Block objects of type WORD.
DetectDocumentText is a synchronous operation. To analyze documents asynchronously, use
StartDocumentTextDetection.
For more information, see Document Text Detection.
detect-document-text-request - `com.amazonaws.services.textract.model.DetectDocumentTextRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DetectDocumentText operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.DetectDocumentTextResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.DetectDocumentTextRequest detect-document-text-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.detectDocumentTextAsync detect-document-text-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.DetectDocumentTextRequest detect-document-text-request]
(-> this (.detectDocumentTextAsync detect-document-text-request))))
(defn get-document-analysis-async
"Gets the results for an Amazon Textract asynchronous operation that analyzes text in a document.
You start asynchronous text analysis by calling StartDocumentAnalysis, which returns a job identifier (
JobId). When the text analysis operation finishes, Amazon Textract publishes a completion status to
the Amazon Simple Notification Service (Amazon SNS) topic that's registered in the initial call to
StartDocumentAnalysis. To get the results of the text-detection operation, first check that the
status value published to the Amazon SNS topic is SUCCEEDED. If so, call
GetDocumentAnalysis, and pass the job identifier (JobId) from the initial call to
StartDocumentAnalysis.
GetDocumentAnalysis returns an array of Block objects. The following types of information are
returned:
Words and lines that are related to nearby lines and words. The related information is returned in two
Block objects each of type KEY_VALUE_SET: a KEY Block object and a VALUE Block object. For
example, Name: Ana Silva Carolina contains a key and value. Name: is the key. Ana Silva
Carolina is the value.
Table and table cell data. A TABLE Block object contains information about a detected table. A CELL Block object
is returned for each cell in a table.
Selectable elements such as checkboxes and radio buttons. A SELECTION_ELEMENT Block object contains information
about a selectable element.
Lines and words of text. A LINE Block object contains one or more WORD Block objects.
Use the MaxResults parameter to limit the number of blocks returned. If there are more results than
specified in MaxResults, the value of NextToken in the operation response contains a
pagination token for getting the next set of results. To get the next page of results, call
GetDocumentAnalysis, and populate the NextToken request parameter with the token value
that's returned from the previous call to GetDocumentAnalysis.
For more information, see Document Text Analysis.
get-document-analysis-request - `com.amazonaws.services.textract.model.GetDocumentAnalysisRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the GetDocumentAnalysis operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.GetDocumentAnalysisResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentAnalysisRequest get-document-analysis-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.getDocumentAnalysisAsync get-document-analysis-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentAnalysisRequest get-document-analysis-request]
(-> this (.getDocumentAnalysisAsync get-document-analysis-request))))
(defn get-document-text-detection-async
"Gets the results for an Amazon Textract asynchronous operation that detects text in a document. Amazon Textract
can detect lines of text and the words that make up a line of text.
You start asynchronous text detection by calling StartDocumentTextDetection, which returns a job
identifier (JobId). When the text detection operation finishes, Amazon Textract publishes a
completion status to the Amazon Simple Notification Service (Amazon SNS) topic that's registered in the initial
call to StartDocumentTextDetection. To get the results of the text-detection operation, first check
that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call
GetDocumentTextDetection, and pass the job identifier (JobId) from the initial call to
StartDocumentTextDetection.
GetDocumentTextDetection returns an array of Block objects.
Each document page has as an associated Block of type PAGE. Each PAGE Block object is
the parent of LINE Block objects that represent the lines of detected text on a page. A LINE
Block object is a parent for each word that makes up the line. Words are represented by
Block objects of type WORD.
Use the MaxResults parameter to limit the number of blocks that are returned. If there are more results than
specified in MaxResults, the value of NextToken in the operation response contains a
pagination token for getting the next set of results. To get the next page of results, call
GetDocumentTextDetection, and populate the NextToken request parameter with the token
value that's returned from the previous call to GetDocumentTextDetection.
For more information, see Document Text Detection.
get-document-text-detection-request - `com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the GetDocumentTextDetection operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.GetDocumentTextDetectionResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest get-document-text-detection-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.getDocumentTextDetectionAsync get-document-text-detection-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.GetDocumentTextDetectionRequest get-document-text-detection-request]
(-> this (.getDocumentTextDetectionAsync get-document-text-detection-request))))
(defn start-document-analysis-async
"Starts asynchronous analysis of an input document for relationships between detected items such as key and value
pairs, tables, and selection elements.
StartDocumentAnalysis can analyze text in documents that are in JPG, PNG, and PDF format. The
documents are stored in an Amazon S3 bucket. Use DocumentLocation to specify the bucket name and file name
of the document.
StartDocumentAnalysis returns a job identifier (JobId) that you use to get the results
of the operation. When text analysis is finished, Amazon Textract publishes a completion status to the Amazon
Simple Notification Service (Amazon SNS) topic that you specify in NotificationChannel. To get the
results of the text analysis operation, first check that the status value published to the Amazon SNS topic is
SUCCEEDED. If so, call GetDocumentAnalysis, and pass the job identifier (JobId)
from the initial call to StartDocumentAnalysis.
For more information, see Document Text Analysis.
start-document-analysis-request - `com.amazonaws.services.textract.model.StartDocumentAnalysisRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the StartDocumentAnalysis operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.StartDocumentAnalysisResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentAnalysisRequest start-document-analysis-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.startDocumentAnalysisAsync start-document-analysis-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentAnalysisRequest start-document-analysis-request]
(-> this (.startDocumentAnalysisAsync start-document-analysis-request))))
(defn start-document-text-detection-async
"Starts the asynchronous detection of text in a document. Amazon Textract can detect lines of text and the words
that make up a line of text.
StartDocumentTextDetection can analyze text in documents that are in JPG, PNG, and PDF format. The
documents are stored in an Amazon S3 bucket. Use DocumentLocation to specify the bucket name and file name
of the document.
StartTextDetection returns a job identifier (JobId) that you use to get the results of
the operation. When text detection is finished, Amazon Textract publishes a completion status to the Amazon
Simple Notification Service (Amazon SNS) topic that you specify in NotificationChannel. To get the
results of the text detection operation, first check that the status value published to the Amazon SNS topic is
SUCCEEDED. If so, call GetDocumentTextDetection, and pass the job identifier (
JobId) from the initial call to StartDocumentTextDetection.
For more information, see Document Text Detection.
start-document-text-detection-request - `com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest`
async-handler - Asynchronous callback handler for events in the lifecycle of the request. Users can provide an implementation of the callback methods in this interface to receive notification of successful or unsuccessful completion of the operation. - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the StartDocumentTextDetection operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.textract.model.StartDocumentTextDetectionResult>`"
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest start-document-text-detection-request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.startDocumentTextDetectionAsync start-document-text-detection-request async-handler)))
(^java.util.concurrent.Future [^AmazonTextractAsync this ^com.amazonaws.services.textract.model.StartDocumentTextDetectionRequest start-document-text-detection-request]
(-> this (.startDocumentTextDetectionAsync start-document-text-detection-request))))
| |
778e1351ebb00fa2b886aff6126e030967ba79c7fe3824f331247fde09ee108c | spechub/Hets | Sublogics.hs | {-# LANGUAGE DeriveDataTypeable #-}
|
Module : ./CASL_DL / Sublogics.hs
Description : sublogic analysis for CASL_DL
Copyright : ( c ) 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : experimental
Portability : portable
Sublogic analysis for CASL_DL
This module provides the sublogic functions ( as required by Logic.hs )
for CASL_DL . The functions allow to compute the minimal sublogics needed
by a given element , to check whether an item is part of a given
sublogic , and to project an element into a given sublogic .
Module : ./CASL_DL/Sublogics.hs
Description : sublogic analysis for CASL_DL
Copyright : (c) Dominik Luecke 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : experimental
Portability : portable
Sublogic analysis for CASL_DL
This module provides the sublogic functions (as required by Logic.hs)
for CASL_DL. The functions allow to compute the minimal sublogics needed
by a given element, to check whether an item is part of a given
sublogic, and to project an element into a given sublogic.
-}
module CASL_DL.Sublogics where
import Data.Data
data CASL_DL_SL = SROIQ deriving (Eq, Ord, Typeable, Data)
instance Show CASL_DL_SL where
show SROIQ = "SROIQ"
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/CASL_DL/Sublogics.hs | haskell | # LANGUAGE DeriveDataTypeable # | |
Module : ./CASL_DL / Sublogics.hs
Description : sublogic analysis for CASL_DL
Copyright : ( c ) 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : experimental
Portability : portable
Sublogic analysis for CASL_DL
This module provides the sublogic functions ( as required by Logic.hs )
for CASL_DL . The functions allow to compute the minimal sublogics needed
by a given element , to check whether an item is part of a given
sublogic , and to project an element into a given sublogic .
Module : ./CASL_DL/Sublogics.hs
Description : sublogic analysis for CASL_DL
Copyright : (c) Dominik Luecke 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : experimental
Portability : portable
Sublogic analysis for CASL_DL
This module provides the sublogic functions (as required by Logic.hs)
for CASL_DL. The functions allow to compute the minimal sublogics needed
by a given element, to check whether an item is part of a given
sublogic, and to project an element into a given sublogic.
-}
module CASL_DL.Sublogics where
import Data.Data
data CASL_DL_SL = SROIQ deriving (Eq, Ord, Typeable, Data)
instance Show CASL_DL_SL where
show SROIQ = "SROIQ"
|
d8f7be068840bab441e9e811b1d3912c836322f1d2767377e220723cab1042c3 | gsakkas/rite | 0089.ml | CaseG VarG [(WildPatG,Nothing,VarG),(TuplePatG (fromList [EmptyPatG]),Nothing,AppG [EmptyG,EmptyG])]
match x with
| (h1 , h2) -> ((h1 + h2) / 10) :: (((h1 + h2) mod 10) :: a)
| _ -> a
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14/clusters/0089.ml | ocaml | CaseG VarG [(WildPatG,Nothing,VarG),(TuplePatG (fromList [EmptyPatG]),Nothing,AppG [EmptyG,EmptyG])]
match x with
| (h1 , h2) -> ((h1 + h2) / 10) :: (((h1 + h2) mod 10) :: a)
| _ -> a
| |
2504d0f6b422617d9116efadcee3f11153a8467daee0e521d9a224ee3af1924e | vouillon/osm | projection.mli | OSM tools
* Copyright ( C ) 2013
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
val filter : ?o:Column.spec -> Column.t -> int -> Column.t
val filter_pred : ?o:Column.spec -> Column.t -> (int -> bool) -> Column.t
val filter_pred_2 :
?o:Column.spec -> Column.t -> Column.t -> (int -> int -> bool) -> Column.t
val project : ?o:Column.spec -> Column.t -> Column.t -> Column.t
(*
[project index input]
The index must be sorted. There can be duplicated entries in the index.
*)
val project_unique : Column.t - > Column.t - > Column.output_stream
( *
[ project_unique index input ]
Both the index and the input table must be sorted .
val project_unique : Column.t -> Column.t -> Column.output_stream
(*
[project_unique index input]
Both the index and the input table must be sorted.
*)
*)
val inter : ?o:Column.spec -> Column.t -> Column.t -> Column.t
val diff : ?o:Column.spec -> Column.t -> Column.t -> Column.t
| null | https://raw.githubusercontent.com/vouillon/osm/a42d1bcc82a4ad73c26c81ac7a75f9f1c7470344/database/projection.mli | ocaml |
[project index input]
The index must be sorted. There can be duplicated entries in the index.
[project_unique index input]
Both the index and the input table must be sorted.
| OSM tools
* Copyright ( C ) 2013
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
val filter : ?o:Column.spec -> Column.t -> int -> Column.t
val filter_pred : ?o:Column.spec -> Column.t -> (int -> bool) -> Column.t
val filter_pred_2 :
?o:Column.spec -> Column.t -> Column.t -> (int -> int -> bool) -> Column.t
val project : ?o:Column.spec -> Column.t -> Column.t -> Column.t
val project_unique : Column.t - > Column.t - > Column.output_stream
( *
[ project_unique index input ]
Both the index and the input table must be sorted .
val project_unique : Column.t -> Column.t -> Column.output_stream
*)
val inter : ?o:Column.spec -> Column.t -> Column.t -> Column.t
val diff : ?o:Column.spec -> Column.t -> Column.t -> Column.t
|
e0735bfc7158c42b6f8672c3f382fda6e7a4c7f74196c350e69c29520d607abe | hypernumbers/hypernumbers | mochiweb_request.erl | @author < >
2007 Mochi Media , Inc.
%% @doc MochiWeb HTTP Request abstraction.
-module(mochiweb_request, [Socket, Method, RawPath, Version, Headers]).
-author('').
-include_lib("kernel/include/file.hrl").
-define(QUIP, "Heave awa lads I'm no deid yet!").
-define(READ_SIZE, 8192).
-export([get_header_value/1, get_primary_header_value/1, get/1, dump/0]).
-export([send/1, recv/1, recv/2, recv_body/0, recv_body/1, stream_body/3]).
-export([start_response/1, start_response_length/1, start_raw_response/1]).
-export([respond/1, ok/1]).
-export([not_found/0, not_found/1]).
-export([parse_post/0, parse_qs/0]).
-export([should_close/0, cleanup/0]).
-export([parse_cookie/0, get_cookie_value/1]).
-export([serve_file/2, serve_file/3]).
-export([test/0]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_RECV, mochiweb_request_recv).
-define(SAVE_BODY, mochiweb_request_body).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
-define(SAVE_FORCE_CLOSE, mochiweb_request_force_close).
%% @type iolist() = [iolist() | binary() | char()].
%% @type iodata() = binary() | iolist().
%% @type key() = atom() | string() | binary()
%% @type value() = atom() | string() | binary() | integer()
%% @type headers(). A mochiweb_headers structure.
%% @type response(). A mochiweb_response parameterized module instance.
%% @type ioheaders() = headers() | [{key(), value()}].
10 second default idle timeout
-define(IDLE_TIMEOUT, 10000).
Maximum recv_body ( ) length of 1 MB
-define(MAX_RECV_BODY, (1024*1024)).
get_header_value(K ) - > undefined | Value
%% @doc Get the value of a given request header.
get_header_value(K) ->
mochiweb_headers:get_value(K, Headers).
get_primary_header_value(K) ->
mochiweb_headers:get_primary_value(K, Headers).
%% @type field() = socket | method | raw_path | version | headers | peer | path | body_length | range
( ) ) - > term ( )
%% @doc Return the internal representation of the given field.
get(socket) ->
Socket;
get(method) ->
Method;
get(raw_path) ->
RawPath;
get(version) ->
Version;
get(headers) ->
Headers;
get(peer) ->
case inet:peername(Socket) of
{ok, {Addr={10, _, _, _}, _Port}} ->
case get_header_value("x-forwarded-for") of
undefined ->
inet_parse:ntoa(Addr);
Hosts ->
string:strip(lists:last(string:tokens(Hosts, ",")))
end;
{ok, {{127, 0, 0, 1}, _Port}} ->
case get_header_value("x-forwarded-for") of
undefined ->
"127.0.0.1";
Hosts ->
string:strip(lists:last(string:tokens(Hosts, ",")))
end;
{ok, {Addr, _Port}} ->
inet_parse:ntoa(Addr)
end;
get(path) ->
case erlang:get(?SAVE_PATH) of
undefined ->
{Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
Path = mochiweb_util:unquote(Path0),
put(?SAVE_PATH, Path),
Path;
Cached ->
Cached
end;
get(body_length) ->
erlang:get(?SAVE_BODY_LENGTH);
get(range) ->
case get_header_value(range) of
undefined ->
undefined;
RawRange ->
parse_range_request(RawRange)
end.
%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
%% @doc Dump the internal representation to a "human readable" set of terms
%%      for debugging/inspection purposes.
dump() ->
    {?MODULE, [{method, Method},
               {version, Version},
               {raw_path, RawPath},
               {headers, mochiweb_headers:to_list(Headers)}]}.

%% @spec send(iodata()) -> ok
%% @doc Send data over the socket.  On any send error the process exits;
%%      callers never see an error tuple.
send(Data) ->
    case gen_tcp:send(Socket, Data) of
        ok ->
            ok;
        _ ->
            exit(normal)
    end.

%% @spec recv(integer()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the default
%%      idle timeout.
recv(Length) ->
    recv(Length, ?IDLE_TIMEOUT).

%% @spec recv(integer(), integer()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the given
%%      Timeout in msec.  Exits the process on error or timeout.
recv(Length, Timeout) ->
    case gen_tcp:recv(Socket, Length, Timeout) of
        {ok, Data} ->
            %% remember that body data was consumed; see should_close/0
            put(?SAVE_RECV, true),
            Data;
        _ ->
            exit(normal)
    end.
%% @spec body_length() -> undefined | chunked | unknown_transfer_encoding | integer()
%% @doc Infer body length from transfer-encoding and content-length headers.
body_length() ->
    case get_header_value("transfer-encoding") of
        undefined ->
            case get_header_value("content-length") of
                undefined ->
                    undefined;
                Length ->
                    list_to_integer(Length)
            end;
        "chunked" ->
            chunked;
        Unknown ->
            {unknown_transfer_encoding, Unknown}
    end.

%% @spec recv_body() -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will only receive up to the default max-body length of 1MB.
recv_body() ->
    recv_body(?MAX_RECV_BODY).

%% @spec recv_body(integer()) -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will receive up to MaxBody bytes; exits with body_too_large beyond
%%      that.  The result is memoized under ?SAVE_BODY.
recv_body(MaxBody) ->
    %% we could use a saner constant for the maximum chunk size
    Body = stream_body(?MAX_RECV_BODY, fun
        ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
            %% end of input: assemble the accumulated chunks
            iolist_to_binary(lists:reverse(BinAcc));
        ({Length, Bin}, {LengthAcc, BinAcc}) ->
            NewLength = Length + LengthAcc,
            if NewLength > MaxBody ->
                    exit({body_too_large, chunked});
               true ->
                    {NewLength, [Bin | BinAcc]}
            end
        end, {0, []}, MaxBody),
    put(?SAVE_BODY, Body),
    Body.
%% @doc Stream the request body through ChunkFun/2 starting from FunState.
%%      ChunkFun receives {BytesRead, Binary} per chunk and a terminal
%%      {0, Footers} (chunked) or {0, <<>>} (identity) call.
stream_body(MaxChunkSize, ChunkFun, FunState) ->
    stream_body(MaxChunkSize, ChunkFun, FunState, undefined).

stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength) ->
    %% an "Expect: 100-continue" client waits for our go-ahead before
    %% sending the body
    Expect = case get_header_value("expect") of
                 undefined ->
                     undefined;
                 Value when is_list(Value) ->
                     string:to_lower(Value)
             end,
    case Expect of
        "100-continue" ->
            start_raw_response({100, gb_trees:empty()});
        _Else ->
            ok
    end,
    case body_length() of
        undefined ->
            undefined;
        {unknown_transfer_encoding, Unknown} ->
            exit({unknown_transfer_encoding, Unknown});
        chunked ->
            %% In this case the MaxBody is actually used to
            %% determine the maximum allowed size of a single
            %% chunk.
            stream_chunked_body(MaxChunkSize, ChunkFun, FunState);
        0 ->
            <<>>;
        Length when is_integer(Length) ->
            case MaxBodyLength of
                MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
                    exit({body_too_large, content_length});
                _ ->
                    stream_unchunked_body(Length, ChunkFun, FunState)
            end;
        Length ->
            exit({length_not_integer, Length})
    end.
%% @spec start_response({integer(), ioheaders()}) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders. The server will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
start_response({Code, ResponseHeaders}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:default_from_list(server_headers(),
                                                    HResponse),
    start_raw_response({Code, HResponse1}).

%% @spec start_raw_response({integer(), headers()}) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders exactly as given (no header defaults added).
start_raw_response({Code, ResponseHeaders}) ->
    F = fun ({K, V}, Acc) ->
                [make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
        end,
    End = lists:foldl(F, [<<"\r\n">>],
                      mochiweb_headers:to_list(ResponseHeaders)),
    send([make_version(Version), make_code(Code), <<"\r\n">> | End]),
    %% THIS is the implicit self-reference of this parameterized module
    mochiweb:new_response({THIS, Code, ResponseHeaders}).

%% @spec start_response_length({integer(), ioheaders(), integer()}) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders including a Content-Length of Length. The server
%%      will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
start_response_length({Code, ResponseHeaders, Length}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    start_response({Code, HResponse1}).
%% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}) -> response()
%% @doc Start the HTTP response with start_response, and send Body to the
%%      client (if the get(method) /= 'HEAD'). The Content-Length header
%%      will be set by the Body length, and the server will insert header
%%      defaults.
respond({Code, ResponseHeaders, {file, IoDevice}}) ->
    Length = iodevice_size(IoDevice),
    Response = start_response_length({Code, ResponseHeaders, Length}),
    case Method of
        'HEAD' ->
            %% HEAD responses carry headers only, never a body
            ok;
        _ ->
            iodevice_stream(IoDevice)
    end,
    Response;
respond({Code, ResponseHeaders, chunked}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = case Method of
                     'HEAD' ->
                         %% This is what Google does, http://www.google.com/
                         %% is chunked but HEAD gets Content-Length: 0.
                         %% The RFC is ambiguous so emulating Google is smart.
                         mochiweb_headers:enter("Content-Length", "0",
                                                HResponse);
                     _ when Version >= {1, 1} ->
                         %% Only use chunked encoding for HTTP/1.1
                         mochiweb_headers:enter("Transfer-Encoding", "chunked",
                                                HResponse);
                     _ ->
                         %% For pre-1.1 clients we send the data as-is
                         %% without a Content-Length header and without
                         %% chunk delimiters. Since the end of the document
                         %% is now ambiguous we must force a close.
                         put(?SAVE_FORCE_CLOSE, true),
                         HResponse
                 end,
    start_response({Code, HResponse1});
respond({Code, ResponseHeaders, Body}) ->
    Response = start_response_length({Code, ResponseHeaders, iolist_size(Body)}),
    case Method of
        'HEAD' ->
            ok;
        _ ->
            send(Body)
    end,
    Response.
%% @spec not_found() -> response()
%% @doc Alias for not_found([]).
not_found() ->
    not_found([]).

%% @spec not_found(ExtraHeaders) -> response()
%% @doc Alias for respond({404, [{"Content-Type", "text/plain"}
%%      | ExtraHeaders], <<"Not found.">>}).
not_found(ExtraHeaders) ->
    respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
             <<"Not found.">>}).

%% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}) ->
%%           response()
%% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}),
%%      honouring a Range request header when one was sent.
ok({ContentType, Body}) ->
    ok({ContentType, [], Body});
ok({ContentType, ResponseHeaders, Body}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    case THIS:get(range) of
        X when X =:= undefined; X =:= fail ->
            %% no Range header (or an unparseable one): plain 200
            HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
            respond({200, HResponse1, Body});
        Ranges ->
            {PartList, Size} = range_parts(Body, Ranges),
            case PartList of
                [] -> %% no valid ranges
                    HResponse1 = mochiweb_headers:enter("Content-Type",
                                                        ContentType,
                                                        HResponse),
                    %% could be 416, for now we'll just return 200
                    respond({200, HResponse1, Body});
                PartList ->
                    {RangeHeaders, RangeBody} =
                        parts_to_body(PartList, ContentType, Size),
                    HResponse1 = mochiweb_headers:enter_from_list(
                                   [{"Accept-Ranges", "bytes"} |
                                    RangeHeaders],
                                   HResponse),
                    respond({206, HResponse1, RangeBody})
            end
    end.
%% @spec should_close() -> bool()
%% @doc Return true if the connection must be closed. If false, using
%%      Keep-Alive should be safe.
should_close() ->
    %% Use the same ?SAVE_* macros as recv/2 and cleanup/0 instead of
    %% repeating the raw process-dictionary keys; the macros expand to the
    %% identical atoms, so behaviour is unchanged.
    ForceClose = erlang:get(?SAVE_FORCE_CLOSE) =/= undefined,
    DidNotRecv = erlang:get(?SAVE_RECV) =:= undefined,
    ForceClose orelse Version < {1, 0}
        %% Connection: close
        orelse get_header_value("connection") =:= "close"
        %% HTTP 1.0 requires Connection: Keep-Alive
        orelse (Version =:= {1, 0}
                andalso get_header_value("connection") =/= "Keep-Alive")
        %% unread data left on the socket, can't safely continue
        orelse (DidNotRecv
                andalso get_header_value("content-length") =/= undefined
                andalso list_to_integer(get_header_value("content-length")) > 0)
        %% unread chunked body, can't safely continue either
        orelse (DidNotRecv
                andalso get_header_value("transfer-encoding") =:= "chunked").
%% @spec cleanup() -> ok
%% @doc Clean up any junk in the process dictionary, required before continuing
%%      a Keep-Alive request.
cleanup() ->
    Keys = [?SAVE_QS,
            ?SAVE_PATH,
            ?SAVE_RECV,
            ?SAVE_BODY,
            ?SAVE_POST,
            ?SAVE_COOKIE,
            ?SAVE_FORCE_CLOSE],
    lists:foreach(fun (Key) -> erlang:erase(Key) end, Keys),
    ok.
%% @spec parse_qs() -> [{Key::string(), Value::string()}]
%% @doc Parse the query string of the URL.  Memoized in the process
%%      dictionary, so repeated calls are cheap.
parse_qs() ->
    case erlang:get(?SAVE_QS) of
        undefined ->
            {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
            Parsed = mochiweb_util:parse_qs(QueryString),
            put(?SAVE_QS, Parsed),
            Parsed;
        Cached ->
            Cached
    end.

%% @spec get_cookie_value(Key::string()) -> string() | undefined
%% @doc Get the value of the given cookie.
get_cookie_value(Key) ->
    proplists:get_value(Key, parse_cookie()).

%% @spec parse_cookie() -> [{Key::string(), Value::string()}]
%% @doc Parse the cookie header.  Memoized in the process dictionary.
parse_cookie() ->
    case erlang:get(?SAVE_COOKIE) of
        undefined ->
            Cookies = case get_header_value("cookie") of
                          undefined ->
                              [];
                          Value ->
                              mochiweb_cookies:parse_cookie(Value)
                      end,
            put(?SAVE_COOKIE, Cookies),
            Cookies;
        Cached ->
            Cached
    end.

%% @spec parse_post() -> [{Key::string(), Value::string()}]
%% @doc Parse an application/x-www-form-urlencoded form POST. This
%%      has the side-effect of calling recv_body().  Memoized.
parse_post() ->
    case erlang:get(?SAVE_POST) of
        undefined ->
            Parsed = case recv_body() of
                         undefined ->
                             [];
                         Binary ->
                             case get_primary_header_value("content-type") of
                                 "application/x-www-form-urlencoded" ++ _ ->
                                     mochiweb_util:parse_qs(Binary);
                                 _ ->
                                     %% not a form POST; ignore the body
                                     []
                             end
                     end,
            put(?SAVE_POST, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @spec stream_chunked_body(integer(), fun(), term()) -> term()
%% @doc The function is called for each chunk.
%%      Used internally by read_chunked_body.
stream_chunked_body(MaxChunkSize, Fun, FunState) ->
    case read_chunk_length() of
        0 ->
            %% final chunk: pass the trailing footers to the fun
            Fun({0, read_chunk(0)}, FunState);
        Length when Length > MaxChunkSize ->
            %% oversized chunk: deliver it in MaxChunkSize slices
            NewState = read_sub_chunks(Length, MaxChunkSize, Fun, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState);
        Length ->
            NewState = Fun({Length, read_chunk(Length)}, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState)
    end.

%% @doc Stream a Content-Length delimited body through Fun/2, terminating
%%      with a {0, <<>>} call once Length bytes have been delivered.
stream_unchunked_body(0, Fun, FunState) ->
    Fun({0, <<>>}, FunState);
stream_unchunked_body(Length, Fun, FunState) when Length > 0 ->
    Bin = recv(0),
    BinSize = byte_size(Bin),
    if BinSize > Length ->
            %% read past the end of this request's body: push the surplus
            %% back onto the socket for the next request
            <<OurBody:Length/binary, Extra/binary>> = Bin,
            gen_tcp:unrecv(Socket, Extra),
            NewState = Fun({Length, OurBody}, FunState),
            stream_unchunked_body(0, Fun, NewState);
       true ->
            NewState = Fun({BinSize, Bin}, FunState),
            stream_unchunked_body(Length - BinSize, Fun, NewState)
    end.
%% @spec read_chunk_length() -> integer()
%% @doc Read the length of the next HTTP chunk.
read_chunk_length() ->
    inet:setopts(Socket, [{packet, line}]),
    case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
        {ok, Header} ->
            inet:setopts(Socket, [{packet, raw}]),
            %% the chunk-size line is hex digits optionally followed by a
            %% chunk extension; stop at the first CR, LF or space.  $\s is
            %% used instead of a literal "$ " so that trailing-whitespace
            %% stripping cannot silently turn it into a newline literal.
            Splitter = fun (C) ->
                               C =/= $\r andalso C =/= $\n andalso C =/= $\s
                       end,
            {Hex, _Rest} = lists:splitwith(Splitter, binary_to_list(Header)),
            mochihex:to_int(Hex);
        _ ->
            exit(normal)
    end.
%% @spec read_chunk(integer()) -> Chunk::binary() | [Footer::binary()]
%% @doc Read in a HTTP chunk of the given length. If Length is 0, then read the
%%      HTTP footers (as a list of binaries, since they're nominal).
read_chunk(0) ->
    inet:setopts(Socket, [{packet, line}]),
    F = fun (F1, Acc) ->
                case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
                    {ok, <<"\r\n">>} ->
                        %% blank line terminates the footers
                        Acc;
                    {ok, Footer} ->
                        F1(F1, [Footer | Acc]);
                    _ ->
                        exit(normal)
                end
        end,
    Footers = F(F, []),
    inet:setopts(Socket, [{packet, raw}]),
    Footers;
read_chunk(Length) ->
    %% 2 extra bytes for the CRLF that trails the chunk data
    case gen_tcp:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
        {ok, <<Chunk:Length/binary, "\r\n">>} ->
            Chunk;
        _ ->
            exit(normal)
    end.

%% @doc Deliver an oversized chunk to Fun in MaxChunkSize slices; the final
%%      slice is read with read_chunk/1 so the trailing CRLF is consumed.
read_sub_chunks(Length, MaxChunkSize, Fun, FunState) when Length > MaxChunkSize ->
    Bin = recv(MaxChunkSize),
    NewState = Fun({size(Bin), Bin}, FunState),
    read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState);
read_sub_chunks(Length, _MaxChunkSize, Fun, FunState) ->
    Fun({Length, read_chunk(Length)}, FunState).
%% @spec serve_file(Path, DocRoot) -> Response
%% @doc Serve a file relative to DocRoot.
serve_file(Path, DocRoot) ->
    serve_file(Path, DocRoot, []).

%% @spec serve_file(Path, DocRoot, ExtraHeaders) -> Response
%% @doc Serve a file relative to DocRoot; paths that would escape DocRoot
%%      are answered with a 404.
serve_file(Path, DocRoot, ExtraHeaders) ->
    case mochiweb_util:safe_relative_path(Path) of
        undefined ->
            %% path contained ".." or similar; refuse to serve it
            not_found(ExtraHeaders);
        RelPath ->
            FullPath = filename:join([DocRoot, RelPath]),
            case filelib:is_dir(FullPath) of
                true ->
                    maybe_redirect(RelPath, FullPath, ExtraHeaders);
                false ->
                    maybe_serve_file(FullPath, ExtraHeaders)
            end
    end.

%% Internal API

%% This has the same effect as the DirectoryIndex directive in httpd
directory_index(FullPath) ->
    filename:join([FullPath, "index.html"]).

%% @doc For a directory request: serve its index.html when the URL already
%%      ends in "/", otherwise 301-redirect to the slash-terminated URL.
maybe_redirect([], FullPath, ExtraHeaders) ->
    maybe_serve_file(directory_index(FullPath), ExtraHeaders);
maybe_redirect(RelPath, FullPath, ExtraHeaders) ->
    case string:right(RelPath, 1) of
        "/" ->
            maybe_serve_file(directory_index(FullPath), ExtraHeaders);
        _ ->
            %% NOTE(review): the scheme is hard-coded to http://, which is
            %% wrong behind TLS -- confirm against the deployment setup
            Host = mochiweb_headers:get_value("host", Headers),
            Location = "http://" ++ Host ++ "/" ++ RelPath ++ "/",
            LocationBin = list_to_binary(Location),
            MoreHeaders = [{"Location", Location},
                           {"Content-Type", "text/html"} | ExtraHeaders],
            Top = <<"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">"
                    "<html><head>"
                    "<title>301 Moved Permanently</title>"
                    "</head><body>"
                    "<h1>Moved Permanently</h1>"
                    "<p>The document has moved <a href=\"">>,
            Bottom = <<">here</a>.</p></body></html>\n">>,
            Body = <<Top/binary, LocationBin/binary, Bottom/binary>>,
            respond({301, MoreHeaders, Body})
    end.
%% @doc Serve File with a Last-Modified header, answering 304 when the
%%      client's If-Modified-Since matches exactly; 404 when unreadable.
maybe_serve_file(File, ExtraHeaders) ->
    case file:read_file_info(File) of
        {ok, FileInfo} ->
            LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
            case get_header_value("if-modified-since") of
                LastModified ->
                    %% exact string match against our own date format
                    respond({304, ExtraHeaders, ""});
                _ ->
                    case file:open(File, [raw, binary]) of
                        {ok, IoDevice} ->
                            ContentType = mochiweb_util:guess_mime(File),
                            Res = ok({ContentType,
                                      [{"last-modified", LastModified}
                                       | ExtraHeaders],
                                      {file, IoDevice}}),
                            %% the whole file has been streamed by ok/1 at
                            %% this point, so the device can be closed
                            file:close(IoDevice),
                            Res;
                        _ ->
                            not_found(ExtraHeaders)
                    end
            end;
        {error, _} ->
            not_found(ExtraHeaders)
    end.

%% @doc Default response headers merged in by start_response/1.
server_headers() ->
    [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
     {"Date", httpd_util:rfc1123_date()}].
%% @doc Coerce an atom or integer into printable iodata; lists and
%%      binaries pass through unchanged.
make_io(Io) when is_list(Io); is_binary(Io) ->
    Io;
make_io(Atom) when is_atom(Atom) ->
    atom_to_list(Atom);
make_io(Int) when is_integer(Int) ->
    integer_to_list(Int).

%% @doc Render the status-code part of the status line: an integer code
%%      becomes "Code Reason-Phrase", anything else passes through as-is.
make_code(Code) when is_integer(Code) ->
    [integer_to_list(Code), [" " | httpd_util:reason_phrase(Code)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
    Io.

%% @doc Render the HTTP-Version prefix of the status line.  Anything
%%      other than {1,0} is reported as HTTP/1.1.
make_version(Vsn) ->
    case Vsn of
        {1, 0} ->
            <<"HTTP/1.0 ">>;
        _ ->
            <<"HTTP/1.1 ">>
    end.
%% @doc Copy IoDevice to the socket in ?READ_SIZE pieces until eof.
iodevice_stream(IoDevice) ->
    case file:read(IoDevice, ?READ_SIZE) of
        eof ->
            ok;
        {ok, Data} ->
            ok = send(Data),
            iodevice_stream(IoDevice)
    end.

%% @doc Build {Headers, Body} for a 206 response from the satisfiable
%%      range parts.
parts_to_body([{Start, End, Body}], ContentType, Size) ->
    %% return body for a range reponse with a single body
    HeaderList = [{"Content-Type", ContentType},
                  {"Content-Range",
                   ["bytes ",
                    make_io(Start), "-", make_io(End),
                    "/", make_io(Size)]}],
    {HeaderList, Body};
parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
    %% return
    %% header Content-Type: multipart/byteranges; boundary=...
    %% and multipart body
    %% NOTE(review): crypto:rand_bytes/1 was removed in newer OTP releases
    %% (crypto:strong_rand_bytes/1 replaces it) -- confirm the target OTP.
    Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
    HeaderList = [{"Content-Type",
                   ["multipart/byteranges; ",
                    "boundary=", Boundary]}],
    MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
    {HeaderList, MultiPartBody}.

%% @doc Render the multipart/byteranges body for the given parts; the list
%%      ends with the closing boundary marker.
multipart_body([], _ContentType, Boundary, _Size) ->
    ["--", Boundary, "--\r\n"];
multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
    ["--", Boundary, "\r\n",
     "Content-Type: ", ContentType, "\r\n",
     "Content-Range: ",
     "bytes ", make_io(Start), "-", make_io(End),
     "/", make_io(Size), "\r\n\r\n",
     Body, "\r\n"
     | multipart_body(BodyList, ContentType, Boundary, Size)].

%% @doc Size of an opened file in bytes; rewinds the device to the start.
iodevice_size(IoDevice) ->
    {ok, Size} = file:position(IoDevice, eof),
    {ok, 0} = file:position(IoDevice, bof),
    Size.
%% @doc Resolve parsed Range specs against the response body (an open file
%%      or in-memory iodata).  Returns {Parts, Size} where each part is
%%      {Start, End, Bytes}; unsatisfiable specs are silently dropped.
range_parts({file, IoDevice}, Ranges) ->
    Size = iodevice_size(IoDevice),
    F = fun (Spec, Acc) ->
                case range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    V ->
                        [V | Acc]
                end
        end,
    %% foldr keeps the parts in the order the client requested them
    LocNums = lists:foldr(F, [], Ranges),
    {ok, Data} = file:pread(IoDevice, LocNums),
    Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
                                   {Skip, Skip + Length - 1, PartialBody}
                           end,
                           LocNums, Data),
    {Bodies, Size};
range_parts(Body0, Ranges) ->
    Body = iolist_to_binary(Body0),
    Size = size(Body),
    F = fun(Spec, Acc) ->
                case range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    {Skip, Length} ->
                        <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
                        [{Skip, Skip + Length - 1, PartialBody} | Acc]
                end
        end,
    {lists:foldr(F, [], Ranges), Size}.
%% @doc Convert one parsed Range spec into {Skip, Length} relative to a
%%      body of Size bytes, or invalid_range if it cannot be satisfied.
%%      An out-of-range suffix spec ({none, N}) yields the whole body,
%%      matching the behaviour exercised by test_range/0.
range_skip_length({none, Suffix}, Size) when Suffix >= 0, Suffix =< Size ->
    %% suffix range: the last Suffix bytes
    {Size - Suffix, Suffix};
range_skip_length({none, _OutOfRange}, Size) ->
    {0, Size};
range_skip_length({Start, none}, Size) when Start >= 0, Start < Size ->
    %% open-ended range: from Start to the end of the body
    {Start, Size - Start};
range_skip_length({_OutOfRange, none}, _Size) ->
    invalid_range;
range_skip_length({Start, End}, Size) when 0 =< Start, Start =< End, End < Size ->
    %% closed range: Start-End inclusive
    {Start, End - Start + 1};
range_skip_length({_Start, _End}, _Size) ->
    invalid_range.
%% @doc Parse a "bytes=..." Range header value into a list of
%%      {Start, End} | {Start, none} | {none, Suffix} tuples, or the atom
%%      fail when the header cannot be understood.
parse_range_request(RawRange) when is_list(RawRange) ->
    try
        "bytes=" ++ RangeSpec = RawRange,
        [range_spec(Token) || Token <- string:tokens(RangeSpec, ",")]
    catch
        _:_ ->
            %% any malformed token (or a missing "bytes=" prefix) makes
            %% the whole header unparseable
            fail
    end.

%% @doc Decode a single range token: "A-B", "A-" or "-N".
range_spec("-" ++ Suffix) ->
    {none, list_to_integer(Suffix)};
range_spec(Token) ->
    case string:tokens(Token, "-") of
        [Lo, Hi] ->
            {list_to_integer(Lo), list_to_integer(Hi)};
        [Lo] ->
            {list_to_integer(Lo), none}
    end.
%% @spec test() -> ok
%% @doc Run the module's self-tests.
test() ->
    ok = test_range(),
    ok.

%% @doc Self-test for parse_range_request/1 and range_skip_length/2.
%%      The three range_skip_length assertions for "1-9", "10-19" and "30-"
%%      had been lost (only their trailing comments remained); they are
%%      restored here, and BodySize is derived from Body so the binary is
%%      actually used.
test_range() ->
    %% valid, single ranges
    io:format("Testing parse_range_request with valid single ranges~n"),
    io:format("1"),
    [{20, 30}] = parse_range_request("bytes=20-30"),
    io:format("2"),
    [{20, none}] = parse_range_request("bytes=20-"),
    io:format("3"),
    [{none, 20}] = parse_range_request("bytes=-20"),
    io:format(".. ok ~n"),
    %% invalid, single ranges
    io:format("Testing parse_range_request with invalid ranges~n"),
    io:format("1"),
    fail = parse_range_request(""),
    io:format("2"),
    fail = parse_range_request("garbage"),
    io:format("3"),
    fail = parse_range_request("bytes=-20-30"),
    io:format(".. ok ~n"),
    %% valid, multiple range
    io:format("Testing parse_range_request with valid multiple ranges~n"),
    io:format("1"),
    [{20, 30}, {50, 100}, {110, 200}] =
        parse_range_request("bytes=20-30,50-100,110-200"),
    io:format("2"),
    [{20, none}, {50, 100}, {none, 200}] =
        parse_range_request("bytes=20-,50-100,-200"),
    io:format(".. ok~n"),
    %% no ranges
    io:format("Testing out parse_range_request with no ranges~n"),
    io:format("1"),
    [] = parse_range_request("bytes="),
    io:format(".. ok~n"),
    Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
    BodySize = byte_size(Body), %% 60
    %% these values assume BodySize =:= 60
    io:format("Testing out range_skip_length on valid ranges~n"),
    io:format("1"),
    {1, 9} = range_skip_length({1, 9}, BodySize), %% 1-9
    io:format("2"),
    {10, 10} = range_skip_length({10, 19}, BodySize), %% 10-19
    io:format("3"),
    {40, 20} = range_skip_length({none, 20}, BodySize), %% -20
    io:format("4"),
    {30, 30} = range_skip_length({30, none}, BodySize), %% 30-
    io:format(".. ok ~n"),
    %% valid edge cases for range_skip_length
    io:format("Testing out range_skip_length on valid edge case ranges~n"),
    io:format("1"),
    {BodySize, 0} = range_skip_length({none, 0}, BodySize),
    io:format("2"),
    {0, BodySize} = range_skip_length({none, BodySize}, BodySize),
    io:format("3"),
    {0, BodySize} = range_skip_length({0, none}, BodySize),
    BodySizeLess1 = BodySize - 1,
    io:format("4"),
    {BodySizeLess1, 1} = range_skip_length({BodySize - 1, none}, BodySize),
    %% out of range, return whole thing
    io:format("5"),
    {0, BodySize} = range_skip_length({none, BodySize + 1}, BodySize),
    io:format("6"),
    {0, BodySize} = range_skip_length({none, -1}, BodySize),
    io:format(".. ok ~n"),
    %% invalid ranges
    io:format("Testing out range_skip_length on invalid ranges~n"),
    io:format("1"),
    invalid_range = range_skip_length({-1, 30}, BodySize),
    io:format("2"),
    invalid_range = range_skip_length({0, BodySize + 1}, BodySize),
    io:format("3"),
    invalid_range = range_skip_length({-1, BodySize + 1}, BodySize),
    io:format("4"),
    invalid_range = range_skip_length({BodySize, 40}, BodySize),
    io:format("5"),
    invalid_range = range_skip_length({-1, none}, BodySize),
    io:format("6"),
    invalid_range = range_skip_length({BodySize, none}, BodySize),
    io:format(".. ok ~n"),
    ok.
| null | https://raw.githubusercontent.com/hypernumbers/hypernumbers/281319f60c0ac60fb009ee6d1e4826f4f2d51c4e/lib/mochiweb/src/mochiweb_request.erl | erlang | @doc MochiWeb HTTP Request abstraction.
@type iolist() = [iolist() | binary() | char()].
@type iodata() = binary() | iolist().
@type key() = atom() | string() | binary()
@type value() = atom() | string() | binary() | integer()
@type headers(). A mochiweb_headers structure.
@type response(). A mochiweb_response parameterized module instance.
@type ioheaders() = headers() | [{key(), value()}].
@doc Get the value of a given request header.
@type field() = socket | method | raw_path | version | headers | peer | path | body_length | range
@doc Return the internal representation of the given field.
@doc Dump the internal representation to a "human readable" set of terms
for debugging/inspection purposes.
@doc Send data over the socket.
@doc Receive Length bytes from the client as a binary, with the default
idle timeout.
@doc Receive Length bytes from the client as a binary, with the given
@doc Infer body length from transfer-encoding and content-length headers.
@spec recv_body() -> binary()
determine the maximum allowed size of a single
chunk.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders. The server will set header defaults such as Server
and Date if not present in ResponseHeaders.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders including a Content-Length of Length. The server
will set header defaults such as Server
and Date if not present in ResponseHeaders.
@doc Start the HTTP response with start_response, and send Body to the
client (if the get(method) /= 'HEAD'). The Content-Length header
will be set by the Body length, and the server will insert header
defaults.
Only use chunked encoding for HTTP/1.1
For pre-1.1 clients we send the data as-is
without a Content-Length header and without
chunk delimiters. Since the end of the document
is now ambiguous we must force a close.
| ExtraHeaders], <<"Not found.">>})</code>.
response()
no valid ranges
@doc Return true if the connection must be closed. If false, using
Keep-Alive should be safe.
Connection: close
unread data left on the socket, can't safely continue
@doc Clean up any junk in the process dictionary, required before continuing
a Keep-Alive request.
@doc Parse the query string of the URL.
@spec get_cookie_value(Key::string) -> string() | undefined
@doc Get the value of the given cookie.
@doc Parse the cookie header.
has the side-effect of calling recv_body().
@spec stream_chunked_body(integer(), fun(), term()) -> term()
@doc The function is called for each chunk.
Used internally by read_chunked_body.
@doc Read the length of the next HTTP chunk.
@doc Read in a HTTP chunk of the given length. If Length is 0, then read the
HTTP footers (as a list of binaries, since they're nominal).
Internal API
return body for a range reponse with a single body
return
and multipart body
valid, single ranges
invalid, single ranges
valid, multiple range
no ranges
-20
valid edge cases for range_skip_length
out of range, return whole thing
invalid ranges
@author Bob Ippolito <[email protected]>
@copyright 2007 Mochi Media, Inc.
-module(mochiweb_request, [Socket, Method, RawPath, Version, Headers]).
-author('').
-include_lib("kernel/include/file.hrl").
-define(QUIP, "Heave awa lads I'm no deid yet!").
-define(READ_SIZE, 8192).
-export([get_header_value/1, get_primary_header_value/1, get/1, dump/0]).
-export([send/1, recv/1, recv/2, recv_body/0, recv_body/1, stream_body/3]).
-export([start_response/1, start_response_length/1, start_raw_response/1]).
-export([respond/1, ok/1]).
-export([not_found/0, not_found/1]).
-export([parse_post/0, parse_qs/0]).
-export([should_close/0, cleanup/0]).
-export([parse_cookie/0, get_cookie_value/1]).
-export([serve_file/2, serve_file/3]).
-export([test/0]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_RECV, mochiweb_request_recv).
-define(SAVE_BODY, mochiweb_request_body).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
-define(SAVE_FORCE_CLOSE, mochiweb_request_force_close).
%% 10 second default idle timeout
-define(IDLE_TIMEOUT, 10000).
%% Maximum recv_body() length of 1MB
-define(MAX_RECV_BODY, (1024*1024)).
%% @spec get_header_value(K) -> undefined | Value
%% @doc Get the value of a given request header.
get_header_value(K) ->
    mochiweb_headers:get_value(K, Headers).

%% @doc As get_header_value/1, but returns only the primary (first)
%%      value of a comma-separated header.
get_primary_header_value(K) ->
    mochiweb_headers:get_primary_value(K, Headers).

%% @spec get(field()) -> term()
%% @doc Return the internal representation of the given field.  The
%%      unquoted path is memoized in the process dictionary.
get(socket) ->
    Socket;
get(method) ->
    Method;
get(raw_path) ->
    RawPath;
get(version) ->
    Version;
get(headers) ->
    Headers;
get(peer) ->
    case inet:peername(Socket) of
        {ok, {Addr={10, _, _, _}, _Port}} ->
            %% private 10.x address: likely behind a proxy, prefer
            %% X-Forwarded-For when present
            case get_header_value("x-forwarded-for") of
                undefined ->
                    inet_parse:ntoa(Addr);
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {{127, 0, 0, 1}, _Port}} ->
            %% same reasoning for localhost connections
            case get_header_value("x-forwarded-for") of
                undefined ->
                    "127.0.0.1";
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {Addr, _Port}} ->
            inet_parse:ntoa(Addr)
    end;
get(path) ->
    case erlang:get(?SAVE_PATH) of
        undefined ->
            {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
            Path = mochiweb_util:unquote(Path0),
            put(?SAVE_PATH, Path),
            Path;
        Cached ->
            Cached
    end;
get(body_length) ->
    erlang:get(?SAVE_BODY_LENGTH);
get(range) ->
    case get_header_value(range) of
        undefined ->
            undefined;
        RawRange ->
            parse_range_request(RawRange)
    end.
%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
%% @doc Dump the internal representation for debugging/inspection.
dump() ->
    {?MODULE, [{method, Method},
               {version, Version},
               {raw_path, RawPath},
               {headers, mochiweb_headers:to_list(Headers)}]}.

%% @spec send(iodata()) -> ok
%% @doc Send data over the socket; exits the process on any send error.
send(Data) ->
    case gen_tcp:send(Socket, Data) of
        ok ->
            ok;
        _ ->
            exit(normal)
    end.

%% @spec recv(integer()) -> binary()
%% @doc Receive Length bytes from the client with the default idle timeout.
recv(Length) ->
    recv(Length, ?IDLE_TIMEOUT).

%% @spec recv(integer(), integer()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the given
%%      Timeout in msec; exits the process on error or timeout.
recv(Length, Timeout) ->
    case gen_tcp:recv(Socket, Length, Timeout) of
        {ok, Data} ->
            %% remember that body data was consumed; see should_close/0
            put(?SAVE_RECV, true),
            Data;
        _ ->
            exit(normal)
    end.
%% @spec body_length() -> undefined | chunked | unknown_transfer_encoding | integer()
%% @doc Infer body length from transfer-encoding and content-length headers.
body_length() ->
    case get_header_value("transfer-encoding") of
        undefined ->
            case get_header_value("content-length") of
                undefined ->
                    undefined;
                Length ->
                    list_to_integer(Length)
            end;
        "chunked" ->
            chunked;
        Unknown ->
            {unknown_transfer_encoding, Unknown}
    end.

%% @spec recv_body() -> binary()
%% @doc Receive the request body, up to the default 1MB maximum.
recv_body() ->
    recv_body(?MAX_RECV_BODY).

%% @spec recv_body(integer()) -> binary()
%% @doc Receive the request body, up to MaxBody bytes; exits with
%%      body_too_large beyond that.  Memoized under ?SAVE_BODY.
recv_body(MaxBody) ->
    %% we could use a saner constant for the maximum chunk size
    Body = stream_body(?MAX_RECV_BODY, fun
        ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
            iolist_to_binary(lists:reverse(BinAcc));
        ({Length, Bin}, {LengthAcc, BinAcc}) ->
            NewLength = Length + LengthAcc,
            if NewLength > MaxBody ->
                    exit({body_too_large, chunked});
               true ->
                    {NewLength, [Bin | BinAcc]}
            end
        end, {0, []}, MaxBody),
    put(?SAVE_BODY, Body),
    Body.

%% @doc Stream the request body through ChunkFun/2 starting from FunState;
%%      see the 4-arity variant for the MaxBodyLength limit.
stream_body(MaxChunkSize, ChunkFun, FunState) ->
    stream_body(MaxChunkSize, ChunkFun, FunState, undefined).

stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength) ->
    %% honour "Expect: 100-continue" before the client sends the body
    Expect = case get_header_value("expect") of
                 undefined ->
                     undefined;
                 Value when is_list(Value) ->
                     string:to_lower(Value)
             end,
    case Expect of
        "100-continue" ->
            start_raw_response({100, gb_trees:empty()});
        _Else ->
            ok
    end,
    case body_length() of
        undefined ->
            undefined;
        {unknown_transfer_encoding, Unknown} ->
            exit({unknown_transfer_encoding, Unknown});
        chunked ->
            %% In this case the MaxBody is actually used to
            %% determine the maximum allowed size of a single
            %% chunk.
            stream_chunked_body(MaxChunkSize, ChunkFun, FunState);
        0 ->
            <<>>;
        Length when is_integer(Length) ->
            case MaxBodyLength of
                MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
                    exit({body_too_large, content_length});
                _ ->
                    stream_unchunked_body(Length, ChunkFun, FunState)
            end;
        Length ->
            exit({length_not_integer, Length})
    end.
%% @spec start_response({integer(), ioheaders()}) -> response()
%% @doc Send the status line and headers, adding Server/Date defaults.
start_response({Code, ResponseHeaders}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:default_from_list(server_headers(),
                                                    HResponse),
    start_raw_response({Code, HResponse1}).

%% @spec start_raw_response({integer(), headers()}) -> response()
%% @doc Send the status line and headers exactly as given.
start_raw_response({Code, ResponseHeaders}) ->
    F = fun ({K, V}, Acc) ->
                [make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
        end,
    End = lists:foldl(F, [<<"\r\n">>],
                      mochiweb_headers:to_list(ResponseHeaders)),
    send([make_version(Version), make_code(Code), <<"\r\n">> | End]),
    %% THIS is the implicit self-reference of this parameterized module
    mochiweb:new_response({THIS, Code, ResponseHeaders}).

%% @spec start_response_length({integer(), ioheaders(), integer()}) -> response()
%% @doc As start_response/1, but with an explicit Content-Length.
start_response_length({Code, ResponseHeaders, Length}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    start_response({Code, HResponse1}).

%% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}) -> response()
%% @doc Start the response and send Body unless the method is 'HEAD'.
respond({Code, ResponseHeaders, {file, IoDevice}}) ->
    Length = iodevice_size(IoDevice),
    Response = start_response_length({Code, ResponseHeaders, Length}),
    case Method of
        'HEAD' ->
            ok;
        _ ->
            iodevice_stream(IoDevice)
    end,
    Response;
respond({Code, ResponseHeaders, chunked}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = case Method of
                     'HEAD' ->
                         %% This is what Google does, http://www.google.com/
                         %% is chunked but HEAD gets Content-Length: 0.
                         %% The RFC is ambiguous so emulating Google is smart.
                         mochiweb_headers:enter("Content-Length", "0",
                                                HResponse);
                     _ when Version >= {1, 1} ->
                         %% Only use chunked encoding for HTTP/1.1
                         mochiweb_headers:enter("Transfer-Encoding", "chunked",
                                                HResponse);
                     _ ->
                         %% For pre-1.1 clients we send the data as-is
                         %% without a Content-Length header and without
                         %% chunk delimiters. Since the end of the document
                         %% is now ambiguous we must force a close.
                         put(?SAVE_FORCE_CLOSE, true),
                         HResponse
                 end,
    start_response({Code, HResponse1});
respond({Code, ResponseHeaders, Body}) ->
    Response = start_response_length({Code, ResponseHeaders, iolist_size(Body)}),
    case Method of
        'HEAD' ->
            ok;
        _ ->
            send(Body)
    end,
    Response.
( ) - > response ( )
@doc for < code > not_found([])</code > .
not_found() ->
not_found([]).
not_found(ExtraHeaders ) - > response ( )
@doc for < code > respond({404 , [ { " Content - Type " , " text / plain " }
not_found(ExtraHeaders) ->
respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
<<"Not found.">>}).
@spec ok({value ( ) , ( ) } | { value ( ) , ioheaders ( ) , ( ) | { file , IoDevice } } ) - >
@doc respond({200 , [ { " Content - Type " , ContentType } | Headers ] , Body } ) .
ok({ContentType, Body}) ->
ok({ContentType, [], Body});
ok({ContentType, ResponseHeaders, Body}) ->
HResponse = mochiweb_headers:make(ResponseHeaders),
case THIS:get(range) of
X when X =:= undefined; X =:= fail ->
HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
respond({200, HResponse1, Body});
Ranges ->
{PartList, Size} = range_parts(Body, Ranges),
case PartList of
HResponse1 = mochiweb_headers:enter("Content-Type",
ContentType,
HResponse),
could be 416 , for now we 'll just return 200
respond({200, HResponse1, Body});
PartList ->
{RangeHeaders, RangeBody} =
parts_to_body(PartList, ContentType, Size),
HResponse1 = mochiweb_headers:enter_from_list(
[{"Accept-Ranges", "bytes"} |
RangeHeaders],
HResponse),
respond({206, HResponse1, RangeBody})
end
end.
should_close ( ) - > bool ( )
should_close() ->
ForceClose = erlang:get(mochiweb_request_force_close) =/= undefined,
DidNotRecv = erlang:get(mochiweb_request_recv) =:= undefined,
ForceClose orelse Version < {1, 0}
orelse get_header_value("connection") =:= "close"
HTTP 1.0 requires Connection : Keep - Alive
orelse (Version =:= {1, 0}
andalso get_header_value("connection") =/= "Keep-Alive")
orelse (DidNotRecv
andalso get_header_value("content-length") =/= undefined
andalso list_to_integer(get_header_value("content-length")) > 0)
orelse (DidNotRecv
andalso get_header_value("transfer-encoding") =:= "chunked").
%% @spec cleanup() -> ok
%% @doc Erase all cached request state from the process dictionary so this
%%      process can handle the next Keep-Alive request from scratch.
cleanup() ->
    [erase(K) || K <- [?SAVE_QS,
                       ?SAVE_PATH,
                       ?SAVE_RECV,
                       ?SAVE_BODY,
                       ?SAVE_POST,
                       ?SAVE_COOKIE,
                       ?SAVE_FORCE_CLOSE]],
    ok.
%% @spec parse_qs() -> [{Key::string(), Value::string()}]
%% @doc Parse the query string of the request URL, caching the parsed
%%      proplist in the process dictionary.
parse_qs() ->
    case erlang:get(?SAVE_QS) of
        undefined ->
            {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
            Parsed = mochiweb_util:parse_qs(QueryString),
            put(?SAVE_QS, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% Fetch one cookie value by key from the parsed cookie header
%% (undefined if the cookie is absent).
get_cookie_value(Key) ->
    proplists:get_value(Key, parse_cookie()).
%% @spec parse_cookie() -> [{Key::string(), Value::string()}]
%% @doc Parse the request's cookie header, caching the result in the
%%      process dictionary.
parse_cookie() ->
    case erlang:get(?SAVE_COOKIE) of
        undefined ->
            Cookies = case get_header_value("cookie") of
                          undefined ->
                              [];
                          Value ->
                              mochiweb_cookies:parse_cookie(Value)
                      end,
            put(?SAVE_COOKIE, Cookies),
            Cookies;
        Cached ->
            Cached
    end.
%% @spec parse_post() -> [{Key::string(), Value::string()}]
%% @doc Parse an application/x-www-form-urlencoded form POST. This has the
%%      side-effect of calling recv_body(); the result is cached in the
%%      process dictionary. Non-form content types yield [].
parse_post() ->
    case erlang:get(?SAVE_POST) of
        undefined ->
            Parsed = case recv_body() of
                         undefined ->
                             [];
                         Binary ->
                             case get_primary_header_value("content-type") of
                                 "application/x-www-form-urlencoded" ++ _ ->
                                     mochiweb_util:parse_qs(Binary);
                                 _ ->
                                     []
                             end
                     end,
            put(?SAVE_POST, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% Fold Fun over a chunked transfer-encoded body. Chunks larger than
%% MaxChunkSize are delivered in slices via read_sub_chunks/4; the final
%% callback for the terminating chunk is {0, Footers}.
stream_chunked_body(MaxChunkSize, Fun, FunState) ->
    case read_chunk_length() of
        0 ->
            %% last chunk: read_chunk(0) consumes any trailing footers
            Fun({0, read_chunk(0)}, FunState);
        Length when Length > MaxChunkSize ->
            NewState = read_sub_chunks(Length, MaxChunkSize, Fun, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState);
        Length ->
            NewState = Fun({Length, read_chunk(Length)}, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState)
    end.
%% Fold Fun over a body of exactly Length bytes (no chunked encoding).
%% If a recv returns more bytes than remain, the excess is pushed back on
%% the socket with gen_tcp:unrecv/2. The final callback is {0, <<>>}.
stream_unchunked_body(0, Fun, FunState) ->
    Fun({0, <<>>}, FunState);
stream_unchunked_body(Length, Fun, FunState) when Length > 0 ->
    Bin = recv(0),
    BinSize = byte_size(Bin),
    if BinSize > Length ->
        <<OurBody:Length/binary, Extra/binary>> = Bin,
        gen_tcp:unrecv(Socket, Extra),
        NewState = Fun({Length, OurBody}, FunState),
        stream_unchunked_body(0, Fun, NewState);
    true ->
        NewState = Fun({BinSize, Bin}, FunState),
        stream_unchunked_body(Length - BinSize, Fun, NewState)
    end.
%% @spec read_chunk_length() -> integer()
%% @doc Read the hex length line of the next chunk, ignoring any chunk
%%      extensions after the size. Exits normally if the socket dies.
read_chunk_length() ->
    inet:setopts(Socket, [{packet, line}]),
    case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
        {ok, Header} ->
            inet:setopts(Socket, [{packet, raw}]),
            %% keep chars up to CR, LF or space ($ followed by a space
            %% below is the space character literal)
            Splitter = fun (C) ->
                               C =/= $\r andalso C =/= $\n andalso C =/= $ 
                       end,
            {Hex, _Rest} = lists:splitwith(Splitter, binary_to_list(Header)),
            mochihex:to_int(Hex);
        _ ->
            exit(normal)
    end.
%% @spec read_chunk(integer()) -> Chunk::binary() | [Footer::binary()]
%% @doc Read in a whole chunk of the given size (plus its trailing CRLF).
%%      A length of 0 instead reads the trailing footer lines up to the
%%      blank line that ends the chunked body.
read_chunk(0) ->
    inet:setopts(Socket, [{packet, line}]),
    F = fun (F1, Acc) ->
                case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
                    {ok, <<"\r\n">>} ->
                        Acc;
                    {ok, Footer} ->
                        F1(F1, [Footer | Acc]);
                    _ ->
                        exit(normal)
                end
        end,
    Footers = F(F, []),
    inet:setopts(Socket, [{packet, raw}]),
    Footers;
read_chunk(Length) ->
    case gen_tcp:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
        {ok, <<Chunk:Length/binary, "\r\n">>} ->
            Chunk;
        _ ->
            exit(normal)
    end.
%% Deliver one over-sized chunk to Fun in MaxChunkSize slices; the final
%% slice (<= MaxChunkSize) goes through read_chunk/1 so the chunk's
%% trailing CRLF is consumed as well.
read_sub_chunks(Length, MaxChunkSize, Fun, FunState) when Length > MaxChunkSize ->
    Bin = recv(MaxChunkSize),
    NewState = Fun({size(Bin), Bin}, FunState),
    read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState);
read_sub_chunks(Length, _MaxChunkSize, Fun, FunState) ->
    Fun({Length, read_chunk(Length)}, FunState).
%% @spec serve_file(Path, DocRoot) -> Response
%% @doc Serve a file relative to DocRoot, with no extra headers.
serve_file(Path, DocRoot) ->
    serve_file(Path, DocRoot, []).
%% @spec serve_file(Path, DocRoot, ExtraHeaders) -> Response
%% @doc Serve a file relative to DocRoot. Paths that escape DocRoot
%%      (rejected by safe_relative_path) get a 404; directories are
%%      redirected or get their index served.
serve_file(Path, DocRoot, ExtraHeaders) ->
    case mochiweb_util:safe_relative_path(Path) of
        undefined ->
            not_found(ExtraHeaders);
        RelPath ->
            FullPath = filename:join([DocRoot, RelPath]),
            case filelib:is_dir(FullPath) of
                true ->
                    maybe_redirect(RelPath, FullPath, ExtraHeaders);
                false ->
                    maybe_serve_file(FullPath, ExtraHeaders)
            end
    end.
%% This has the same effect as the DirectoryIndex directive in httpd:
%% the index file served for a directory request.
directory_index(FullPath) ->
    filename:join([FullPath, "index.html"]).
%% Directory handling: the document root ([] RelPath) and paths already
%% ending in "/" get their index served directly; any other directory path
%% gets a 301 redirect adding the trailing slash.
%% NOTE(review): Headers is a parameterized-module binding -- confirm
%% against the module head.
maybe_redirect([], FullPath, ExtraHeaders) ->
    maybe_serve_file(directory_index(FullPath), ExtraHeaders);
maybe_redirect(RelPath, FullPath, ExtraHeaders) ->
    case string:right(RelPath, 1) of
        "/" ->
            maybe_serve_file(directory_index(FullPath), ExtraHeaders);
        _   ->
            Host = mochiweb_headers:get_value("host", Headers),
            Location = "http://" ++ Host ++ "/" ++ RelPath ++ "/",
            LocationBin = list_to_binary(Location),
            MoreHeaders = [{"Location", Location},
                           {"Content-Type", "text/html"} | ExtraHeaders],
            Top = <<"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">"
                    "<html><head>"
                    "<title>301 Moved Permanently</title>"
                    "</head><body>"
                    "<h1>Moved Permanently</h1>"
                    "<p>The document has moved <a href=\"">>,
            Bottom = <<">here</a>.</p></body></html>\n">>,
            Body = <<Top/binary, LocationBin/binary, Bottom/binary>>,
            respond({301, MoreHeaders, Body})
    end.
%% Serve File if it exists: answers 304 when If-Modified-Since matches the
%% file's mtime exactly (string comparison of RFC 1123 dates), otherwise
%% streams the file with a guessed MIME type; missing/unopenable files 404.
maybe_serve_file(File, ExtraHeaders) ->
    case file:read_file_info(File) of
        {ok, FileInfo} ->
            LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
            case get_header_value("if-modified-since") of
                LastModified ->
                    respond({304, ExtraHeaders, ""});
                _ ->
                    case file:open(File, [raw, binary]) of
                        {ok, IoDevice} ->
                            ContentType = mochiweb_util:guess_mime(File),
                            Res = ok({ContentType,
                                      [{"last-modified", LastModified}
                                       | ExtraHeaders],
                                      {file, IoDevice}}),
                            file:close(IoDevice),
                            Res;
                        _ ->
                            not_found(ExtraHeaders)
                    end
            end;
        {error, _} ->
            not_found(ExtraHeaders)
    end.
%% Default response headers: server identification plus the current date.
server_headers() ->
    [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
     {"Date", httpd_util:rfc1123_date()}].
%% Normalise a value to iodata: atoms and integers are converted to their
%% string form; lists and binaries pass through unchanged.
make_io(A) when is_atom(A) ->
    atom_to_list(A);
make_io(N) when is_integer(N) ->
    integer_to_list(N);
make_io(IoData) when is_list(IoData); is_binary(IoData) ->
    IoData.
%% Status portion of the response line: an integer code gets a space and
%% its standard reason phrase appended; iodata passes through unchanged.
make_code(X) when is_integer(X) ->
    [integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
    Io.
%% HTTP-version prefix for the status line; anything but {1,0} is
%% reported as HTTP/1.1.
make_version(Version) ->
    case Version of
        {1, 0} -> <<"HTTP/1.0 ">>;
        _      -> <<"HTTP/1.1 ">>
    end.
%% Copy an opened file to the client in ?READ_SIZE chunks until eof.
iodevice_stream(IoDevice) ->
    case file:read(IoDevice, ?READ_SIZE) of
        eof ->
            ok;
        {ok, Data} ->
            ok = send(Data),
            iodevice_stream(IoDevice)
    end.
%% Build {Headers, Body} for a ranged response: a single {Start, End, Body}
%% part becomes a plain body with a Content-Range header; multiple parts
%% become a multipart/byteranges payload with a random boundary.
parts_to_body([{Start, End, Body}], ContentType, Size) ->
    HeaderList = [{"Content-Type", ContentType},
                  {"Content-Range",
                   ["bytes ",
                    make_io(Start), "-", make_io(End),
                    "/", make_io(Size)]}],
    {HeaderList, Body};
parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
    %% header Content-Type: multipart/byteranges; boundary=<hex>
    %% NOTE(review): crypto:rand_bytes/1 was removed in newer OTP releases;
    %% crypto:strong_rand_bytes/1 is the replacement -- confirm OTP target.
    Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
    HeaderList = [{"Content-Type",
                   ["multipart/byteranges; ",
                    "boundary=", Boundary]}],
    MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
    {HeaderList, MultiPartBody}.
%% Render the multipart/byteranges body for a list of {Start, End, Body}
%% parts, ending with the closing boundary marker.
multipart_body([], _ContentType, Boundary, _Size) ->
    ["--", Boundary, "--\r\n"];
multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
    ["--", Boundary, "\r\n",
     "Content-Type: ", ContentType, "\r\n",
     "Content-Range: ",
     "bytes ", make_io(Start), "-", make_io(End),
     "/", make_io(Size), "\r\n\r\n",
     Body, "\r\n"
     | multipart_body(BodyList, ContentType, Boundary, Size)].
%% Byte size of an opened file: seek to eof for the count, then rewind
%% to bof so the caller can still read from the start.
iodevice_size(IoDevice) ->
    {ok, Size} = file:position(IoDevice, eof),
    {ok, 0} = file:position(IoDevice, bof),
    Size.
%% Resolve parsed Range specs against the response body (either an opened
%% file or iodata). Returns {[{Skip, Skip+Length-1, PartialBody}], Size};
%% specs rejected by range_skip_length/2 are silently dropped.
range_parts({file, IoDevice}, Ranges) ->
    Size = iodevice_size(IoDevice),
    F = fun (Spec, Acc) ->
            case range_skip_length(Spec, Size) of
                invalid_range ->
                    Acc;
                V ->
                    [V | Acc]
            end
        end,
    LocNums = lists:foldr(F, [], Ranges),
    {ok, Data} = file:pread(IoDevice, LocNums),
    Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
                               {Skip, Skip + Length - 1, PartialBody}
                           end,
                           LocNums, Data),
    {Bodies, Size};
range_parts(Body0, Ranges) ->
    Body = iolist_to_binary(Body0),
    Size = size(Body),
    F = fun(Spec, Acc) ->
            case range_skip_length(Spec, Size) of
                invalid_range ->
                    Acc;
                {Skip, Length} ->
                    %% bit-syntax slice of the in-memory body
                    <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
                    [{Skip, Skip + Length - 1, PartialBody} | Acc]
            end
        end,
    {lists:foldr(F, [], Ranges), Size}.
%% Map one parsed Range spec onto {Skip, Length} within a body of Size
%% bytes, or invalid_range when it cannot be satisfied:
%%   {none, R}    suffix range: the final R bytes (whole body if R > Size)
%%   {R, none}    open range: from offset R to the end
%%   {Start, End} closed, inclusive range
range_skip_length({none, R}, Size) when R =< Size, R >= 0 ->
    {Size - R, R};
range_skip_length({none, _OutOfRange}, Size) ->
    {0, Size};
range_skip_length({R, none}, Size) when R >= 0, R < Size ->
    {R, Size - R};
range_skip_length({_OutOfRange, none}, _Size) ->
    invalid_range;
range_skip_length({Start, End}, Size) when 0 =< Start, Start =< End, End < Size ->
    {Start, End - Start + 1};
range_skip_length({_OutOfRange, _End}, _Size) ->
    invalid_range.
%% Parse an HTTP Range header value such as "bytes=20-30,50-,-200" into a
%% list of {Start, End} specs, where a missing bound is the atom none.
%% Any malformed input yields the atom fail.
parse_range_request(RawRange) when is_list(RawRange) ->
    try
        "bytes=" ++ RangeString = RawRange,
        [parse_range_spec(Tok) || Tok <- string:tokens(RangeString, ",")]
    catch
        _:_ ->
            fail
    end.

%% One range token: "-N" (suffix), "S-E" (closed) or "S-" (open).
parse_range_spec("-" ++ V) ->
    {none, list_to_integer(V)};
parse_range_spec(R) ->
    case string:tokens(R, "-") of
        [S1, S2] ->
            {list_to_integer(S1), list_to_integer(S2)};
        [S] ->
            {list_to_integer(S), none}
    end.
%% Entry point for this module's self-tests.
test() ->
    ok = test_range(),
    ok.
%% Self-test for Range parsing and resolution. The four range_skip_length
%% assertions for ordinary valid ranges had been lost (their io:format
%% progress markers were left dangling); they are restored below.
test_range() ->
    io:format("Testing parse_range_request with valid single ranges~n"),
    io:format("1"),
    [{20, 30}] = parse_range_request("bytes=20-30"),
    io:format("2"),
    [{20, none}] = parse_range_request("bytes=20-"),
    io:format("3"),
    [{none, 20}] = parse_range_request("bytes=-20"),
    io:format(".. ok ~n"),
    io:format("Testing parse_range_request with invalid ranges~n"),
    io:format("1"),
    fail = parse_range_request(""),
    io:format("2"),
    fail = parse_range_request("garbage"),
    io:format("3"),
    fail = parse_range_request("bytes=-20-30"),
    io:format(".. ok ~n"),
    io:format("Testing parse_range_request with valid multiple ranges~n"),
    io:format("1"),
    [{20, 30}, {50, 100}, {110, 200}] =
        parse_range_request("bytes=20-30,50-100,110-200"),
    io:format("2"),
    [{20, none}, {50, 100}, {none, 200}] =
        parse_range_request("bytes=20-,50-100,-200"),
    io:format(".. ok~n"),
    io:format("Testing out parse_range_request with no ranges~n"),
    io:format("1"),
    [] = parse_range_request("bytes="),
    io:format(".. ok~n"),
    Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
    %% 60
    BodySize = 60,
    %% these values assume BodySize =:= 60
    io:format("Testing out range_skip_length on valid ranges~n"),
    io:format("1"),
    {1, 9} = range_skip_length({1, 9}, BodySize), %% 1-9
    io:format("2"),
    {10, 10} = range_skip_length({10, 19}, BodySize), %% 10-19
    io:format("3"),
    {40, 20} = range_skip_length({none, 20}, BodySize), %% -20
    io:format("4"),
    {30, 30} = range_skip_length({30, none}, BodySize), %% 30-
    io:format(".. ok ~n"),
    io:format("Testing out range_skip_length on valid edge case ranges~n"),
    io:format("1"),
    {BodySize, 0} = range_skip_length({none, 0}, BodySize),
    io:format("2"),
    {0, BodySize} = range_skip_length({none, BodySize}, BodySize),
    io:format("3"),
    {0, BodySize} = range_skip_length({0, none}, BodySize),
    BodySizeLess1 = BodySize - 1,
    io:format("4"),
    {BodySizeLess1, 1} = range_skip_length({BodySize - 1, none}, BodySize),
    io:format("5"),
    {0, BodySize} = range_skip_length({none, BodySize + 1}, BodySize),
    io:format("6"),
    {0, BodySize} = range_skip_length({none, -1}, BodySize),
    io:format(".. ok ~n"),
    io:format("Testing out range_skip_length on invalid ranges~n"),
    io:format("1"),
    invalid_range = range_skip_length({-1, 30}, BodySize),
    io:format("2"),
    invalid_range = range_skip_length({0, BodySize + 1}, BodySize),
    io:format("3"),
    invalid_range = range_skip_length({-1, BodySize + 1}, BodySize),
    io:format("4"),
    invalid_range = range_skip_length({BodySize, 40}, BodySize),
    io:format("5"),
    invalid_range = range_skip_length({-1, none}, BodySize),
    io:format("6"),
    invalid_range = range_skip_length({BodySize, none}, BodySize),
    io:format(".. ok ~n"),
    ok.
|
6eb0373cf895616734dc7d872095887de24873f63f1ce0f7f8027fadc31ae289 | shaneutt/erlang_notebook | erlang_notebook_mnesia_db.erl | -module(erlang_notebook_mnesia_db).
-export([init/0, get_item/0, set_item/0]).
-export_type([erlang_notebook_item/0]).
-define(TABLENAME, erlang_notebook_config).
-define(INTERVAL, 300000).
%%====================================================================
%% API functions for setup and initialization
%%====================================================================
-type erlang_notebook_item() :: binary().
-spec init() -> ok.
%% Join the configured mnesia cluster, create or copy the shared RAM table,
%% and schedule the first periodic random write.
init() ->
    Nodes = application:get_env(erlang_notebook, nodes, [node()]),
    % add our Nodes to the mnesia database over the network
    mnesia:change_config(extra_db_nodes, Nodes),
    % check to see if the table we're looking for exists already in mnesia across all Nodes
    TableExists = lists:member(erlang_notebook_config, mnesia:system_info(tables)),
    if
        % if the table already exists in the cluster, one of our Nodes has already created it and
        % we can simply add a copy of the existing table to this node()
        TableExists -> mnesia:add_table_copy(erlang_notebook_config, node(), ram_copies);
        % if the table doesn't exist then we are the first node() here and we should create the table
        not TableExists -> mnesia:create_table(erlang_notebook_config, [{ram_copies, Nodes}])
    end,
    % ensure that the schemas sync properly, and wait for them to sync
    ok = mnesia:wait_for_tables([schema, erlang_notebook_config], 10000),
    % for this test app we're just going to add new random data every once in a while
    timer:apply_after(?INTERVAL, erlang_notebook_mnesia_db, set_item, []),
    ok.
%%====================================================================
%% API functions for setting and retrieving data
%%====================================================================
-spec get_item() -> {ok, erlang_notebook_item()} | {error, noitem}.
%% Read the single record stored under the key `item' in a transaction.
get_item() ->
    % mnesia queries are anonymous functions that use mnesia's query functions
    % inside a transaction.
    Query = fun() ->
        case mnesia:read(?TABLENAME, item) of
            % in this case we should have a unique record, so just expect one result, get the
            % Item by the ?TABLENAME and term(), and if it matches capture it and return success
            [{?TABLENAME, item, Item}] -> {ok, Item};
            % otherwise error
            [] -> {error, noitem}
        end
    end,
    % run the Query in a transaction
    mnesia:activity(transaction, Query).
%%====================================================================
Internal functions
%%====================================================================
-spec set_item() -> {ok, erlang_notebook_item()}.
%% Store a fresh 32-byte random binary under the key `item' and return it.
%% NOTE(review): crypto:rand_bytes/1 was removed in newer OTP releases;
%% crypto:strong_rand_bytes/1 is the replacement -- confirm OTP target.
set_item() ->
    Query = fun() ->
        % generate this node's random payload
        Item = crypto:rand_bytes(32),
        ok = mnesia:write({?TABLENAME, item, Item}),
        {ok, Item}
    end,
    mnesia:activity(transaction, Query).
| null | https://raw.githubusercontent.com/shaneutt/erlang_notebook/b4f291db2d1b2711569e275fed09446fcc1ffb66/apps/erlang_notebook_mnesia_db/src/erlang_notebook_mnesia_db.erl | erlang | ====================================================================
API functions for setup and initialization
====================================================================
add our Nodes to the mnesia database over the network
check to see if the table we're looking for exists already in mnesia across all Nodes
we can simply add a copy of the existing table to this node()
ensure that the schemas sync properly, and wait for them to sync
for this test app we're just going to add new random data every once in a while
====================================================================
API functions for setting and retrieving data
====================================================================
inside a transaction.
Item by the ?TABLENAME and term(), and if it matches capture it and return success
otherwise error
====================================================================
====================================================================
| -module(erlang_notebook_mnesia_db).
-export([init/0, get_item/0, set_item/0]).
-export_type([erlang_notebook_item/0]).
-define(TABLENAME, erlang_notebook_config).
-define(INTERVAL, 300000).
-type erlang_notebook_item() :: binary().
-spec init() -> ok.
init() ->
Nodes = application:get_env(erlang_notebook, nodes, [node()]),
mnesia:change_config(extra_db_nodes, Nodes),
TableExists = lists:member(erlang_notebook_config, mnesia:system_info(tables)),
if
if the table already exists in the cluster , one of our Nodes has already created it and
TableExists -> mnesia:add_table_copy(erlang_notebook_config, node(), ram_copies);
if the table does n't exist then we are the first node ( ) here and we should create the table
not TableExists -> mnesia:create_table(erlang_notebook_config, [{ram_copies, Nodes}])
end,
ok = mnesia:wait_for_tables([schema, erlang_notebook_config], 10000),
timer:apply_after(?INTERVAL, erlang_notebook_mnesia_db, set_item, []),
ok.
-spec get_item() -> {ok, erlang_notebook_item()} | {error, noitem}.
get_item() ->
mnesia queries are anonymous functions that use 's query functions
Query = fun() ->
case mnesia:read(?TABLENAME, item) of
in this case we should have a unique record , so just expect one result , get the
[{?TABLENAME, item, Item}] -> {ok, Item};
[] -> {error, noitem}
end
end,
run the Query in a transaction
mnesia:activity(transaction, Query).
Internal functions
-spec set_item() -> {ok, erlang_notebook_item()}.
set_item() ->
Query = fun() ->
Item = crypto:rand_bytes(32),
ok = mnesia:write({?TABLENAME, item, Item}),
{ok, Item}
end,
mnesia:activity(transaction, Query).
|
71d0637ecc013eeeb3d429c594a079526bfd8af24ddacf91d49dd7b21b49de6e | RBornat/jape | forcedef.ml |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Listfuns
open Miscellaneous
open Optionfuns
open Sml
open Sequent
open Stringfuns
open Symbol
open Symboltype
open Termfuns
open Termstring
open Termparse
(* local shorthand for the global term type *)
type term = Termtype.term

(* A forcing (semantics) definition as written in FORCEDEF paragraphs:
   always/never-forced, a primitive forcing term, boolean combinations,
   modal EVERYWHERE/NOWHERE, and the binders ALL/SOME carrying
   (pattern, binding variables, body). *)
type forcedef = ForceAlways
              | ForceNever
              | ForcePrim of term
              | ForceBoth of (forcedef * forcedef)
              | ForceEither of (forcedef * forcedef)
              | ForceIf of (forcedef * forcedef)
              | ForceEverywhere of forcedef
              | ForceNowhere of forcedef
              | ForceAll of (term * term list * forcedef)
              | ForceSome of (term * term list * forcedef)
(* the primitive term of a forcing definition, if it has one *)
let term_of_forcedef = function
  | ForcePrim t -> Some t
  | _           -> None
(* Render a forcedef in cat-elimination style: prepend its textual form,
   as a list of string fragments, onto ss. Subterms are parenthesised so
   the output re-parses with parseForceDef. *)
let rec catelim_string_of_forcedef f ss =
  match f with
    ForceAlways -> "ALWAYS" :: ss
  | ForceNever -> "NEVER" :: ss
  | ForcePrim t -> "FORCE " :: catelim_string_of_termarg t ss
  | ForceBoth (f1, f2) -> "BOTH (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
  | ForceEither (f1, f2) -> "EITHER (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
  | ForceIf (f1, f2) -> "IF (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
  | ForceEverywhere f -> "EVERYWHERE (" :: catelim_string_of_forcedef f (")"::ss)
  | ForceNowhere f -> "NOWHERE (" :: catelim_string_of_forcedef f (")"::ss)
  | ForceAll (t, vs, f) -> "ALL (" :: catelim_string_of_term t (") (" :: catelim_string_of_forcedef f (")"::ss))
  | ForceSome (t, vs, f) -> "SOME (" :: catelim_string_of_term t (") (" :: catelim_string_of_forcedef f (")"::ss))
(* flatten the cat-elimination rendering into a single string *)
let rec string_of_forcedef f = implode (catelim_string_of_forcedef f [])
(* for some reason this goes exponential when the body is a function (i.e. f fd |~~ etc).
   Didn't understand vii/01; still don't understand x/14. RB
 *)
(* Optional rewrite of a forcedef, top-down: if f matches a node its
   replacement is used wholesale; otherwise the children are rewritten.
   None means "nothing changed anywhere". *)
let rec option_mapforcedef f fd =
  let omff = option_mapforcedef f in
  let ompair = option_rewrite2 omff omff in
  (* binder nodes: pattern and vars are left alone, only the body is mapped *)
  let omtvsfd = option_rewrite3 (fun v -> None) (fun v -> None) omff in
  match f fd with
  | Some _ as result -> result
  | None ->
      match fd with
      | ForceAlways
      | ForceNever
      | ForcePrim _ -> None
      | ForceBoth pair -> ompair pair &~~ (_Some <.> (fun v->ForceBoth v))
      | ForceEither pair -> ompair pair &~~ (_Some <.> (fun v->ForceEither v))
      | ForceIf pair -> ompair pair &~~ (_Some <.> (fun v->ForceIf v))
      | ForceEverywhere fd -> omff fd &~~ (_Some <.> (fun v->ForceEverywhere v))
      | ForceNowhere fd -> omff fd &~~ (_Some <.> (fun v->ForceNowhere v))
      | ForceAll tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceAll v))
      | ForceSome tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceSome v))

(* total version: unchanged input comes back unchanged *)
let mapforcedef f = anyway (option_mapforcedef f)
(* Optional rewrite of every term embedded in a forcedef (ForcePrim terms
   and the pattern/vars of ALL/SOME binders), driven by option_mapterm f.
   None means "no term changed". *)
let rec option_mapforcedefterms f fd =
  let omt = option_mapterm f in
  let omff = option_mapforcedefterms f in
  let omtvsfd = option_rewrite3 omt (option_rewritelist omt) omff in
  let fdf fd =
    match fd with
    | ForcePrim t -> omt t &~~ (_Some <.> (fun v->ForcePrim v))
    | ForceAll tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceAll v))
    | ForceSome tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceSome v))
    | _ -> None
  in
  option_mapforcedef fdf fd

(* total version: unchanged input comes back unchanged *)
let rec mapforcedefterms f = anyway (option_mapforcedefterms f)
(* Search the terms inside a forcedef, left to right, for one on which f
   yields Some; |~~ takes the first successful alternative. *)
let rec findinforcedef f fd =
  let rec findinpair (fd1, fd2) =
    (findinforcedef f fd1 |~~ (fun _ -> findinforcedef f fd2))
  in
  match fd with
    ForceAlways -> None
  | ForceNever -> None
  | ForcePrim t -> findterm f t
  | ForceBoth pair -> findinpair pair
  | ForceEither pair -> findinpair pair
  | ForceIf pair -> findinpair pair
  | ForceEverywhere fd -> findinforcedef f fd
  | ForceNowhere fd -> findinforcedef f fd
  | ForceAll (t, _, fd) ->
      (* try the binder's pattern term first, then its body *)
      (findterm f t |~~ (fun _ -> findinforcedef f fd))
  | ForceSome (t, _, fd) ->
      (findterm f t |~~ (fun _ -> findinforcedef f fd))
(* true iff some term inside the forcedef satisfies the predicate f *)
let existsinforcedef f fd =
  bool_of_opt (findinforcedef (fun t -> if f t then Some true else None) fd)
(* this ran into trouble when it emerged that it was using ALL as a reserved word, which had
   been previously used in LAYOUT tactics to mean 'display all subtrees'. On the principle
   that reserved words ought not to be easily confused, this was a problem. Also, it conflicted
   with the principle that we ought to be able to load old proofs.
   First I tried to fix it by making LAYOUT use another word (ALLL instead of ALL) but ugh!.
   Second I tried to fix it by parsing it as a term and then analysing. This worked up to a
   point, but it fell apart when it saw FORCE P(i), which parses as (but obviously doesn't mean)
   (FORCE P) i.
   But I stuck with it, demanding only that you bracket the argument of FORCE.
 *)
(* I should have liked BOTH and EITHER to work as infix, but it makes life too
   complicated ... so Lisp-style FORCEDEFs rule. Anyway, it avoids stupidities
   about priorities!
 *)
(* Parse a FORCEDEF: read one term (tactic-style, up to EOF) and translate
   its application structure into a forcedef. Raises ParseError_ on any
   shape that isn't one of the recognised head words. *)
let rec parseForceDef () =
  (* there is no operator priority in forcedefs ... *)
  let decodeApp t =
    (* split an application into (head-as-string, argument list) *)
    match explodeApp false t with
      (f, args) -> (string_of_term f, args)
  in
  let rec tranForceDef t =
    match decodeApp t with
      ("ALWAYS" , [ ]) -> ForceAlways
    | ("NEVER" , [ ]) -> ForceNever
    | ("FORCE" , [t]) -> ForcePrim (tranPrim t)
    | ("EVERYWHERE" , [f]) -> ForceEverywhere (tranForceDef f)
    | ("NOWHERE" , [f]) -> ForceNowhere (tranForceDef f)
    | ("ALL" , [pat; f]) -> ForceAll (tranForceDefBinder pat f)
    | ("SOME" , [pat; f]) -> ForceSome (tranForceDefBinder pat f)
    | ("BOTH" , [f1; f2]) -> ForceBoth(tranForceDef f1, tranForceDef f2)
    | ("EITHER" , [f1; f2]) -> ForceEither(tranForceDef f1, tranForceDef f2)
    | ("IF" , [f1; f2]) -> ForceIf(tranForceDef f1, tranForceDef f2)
    | _ -> raise (ParseError_
                    ["FORCE t, EVERYWHERE f, NOWHERE f, ALL pat f, SOME pat f, BOTH f f, EITHER f f ";
                     "or IF f f expected in FORCEDEF; found "; string_of_term t
                    ])
  and tranPrim t =
    (* a primitive forcing term must be a valid tactic term; strip brackets *)
    try checkTacticTerm t; debracket t
    with Tacastrophe_ ss ->
      raise (ParseError_ ("FORCE " :: string_of_term t :: " contains " :: ss))
  and tranForceDefBinder pat f =
    (* the binder's bound variables are the variables of its pattern *)
    let vs = isVariable <| termvars pat in
    if List.exists (not <.> isextensibleId) vs then
      raise (ParseError_ ["ALL and SOME must use CLASS VARIABLE identifiers to describe individuals"])
    else (pat,vs,tranForceDef f)
  in
  tranForceDef (asTactic parseTerm EOF)
(* now also includes the disproof universe bit of shared proofs *)
(* grid position of a world in the disproof pane *)
type coordinate = Coord of (int * int)
(* a world: its position, its children's positions, and its labelling terms *)
type world = World of (coordinate * coordinate list * term list)
(* a model is just its list of worlds *)
type model = Model of world list
(* Parse the optional SEMANTICS section of a saved proof:
     SEMANTICS seq { WORLD (x,y) [CHILDREN coords] [LABELS terms] }+
   Returns Some (sequent, model), or None when the next symbol isn't
   SEMANTICS. Raises ParseError_ on malformed input. *)
let rec parsemodel () =
  let rec parseCoord () =
    (* one "(x,y)" coordinate, with optionally signed integers *)
    match currsymb () with
      BRA "(" ->
        scansymb ();
        let rec parseInt () =
          match currsymb () with
            NUM n -> (scansymb (); atoi n)
          | sy ->
              match string_of_symbol sy with
                "-" -> (scansymb (); - parseUnsignedInt "-")
              | "~" -> (scansymb (); - parseUnsignedInt "~")
              | s -> bang [s]
        and bang ss =
          raise
            (ParseError_ ("number expected in coordinate; found " :: ss))
        and parseUnsignedInt s1 =
          match currsymb () with
            NUM n -> (scansymb (); atoi n)
          | s2 -> bang [s1; " followed by "; string_of_symbol s2]
        in
        let x = parseInt () in
        let y =
          if currsymb () = commasymbol then
            (scansymb (); parseInt ())
          else
            raise
              (ParseError_
                 ["comma expected after x value in world coordinate"])
        in
        begin match currsymb () with
          KET ")" -> (scansymb (); Coord (x, y))
        | sy ->
            raise
              (ParseError_
                 ["right paren expected after coordinate; found ";
                  string_of_symbol sy])
        end
    | sy ->
        raise
          (ParseError_
             ["coordinate expected, starting with left paren; found ";
              string_of_symbol sy])
  in
  let rec parseWorlds () =
    (* zero or more WORLD entries, each with optional CHILDREN and LABELS *)
    match currsymb () with
      SHYID "WORLD" ->
        scansymb ();
        let c = parseCoord () in
        let chs =
          match currsymb () with
            SHYID "CHILDREN" ->
              (scansymb ();
               parseList
                 (function BRA "(" -> true | _ -> false)
                 (fun _ -> parseCoord ()) commasymbol)
          | _ -> []
        in
        let ts =
          match currsymb () with
            SHYID "LABELS" ->
              (scansymb (); parseList canstartTerm parseTerm commasymbol)
          | _ -> []
        in
        World (c, chs, ts) :: parseWorlds ()
    | _ -> []
  in
  match currsymb () with
    SHYID "SEMANTICS" ->
      (scansymb ();
       let seq = parseSeq () in
       (match parseWorlds () with
          [] -> raise (ParseError_ ["empty disproof description"])
        | worlds -> Some (seq, Model worlds)))
  | _ -> None
(* Inverse of parsemodel, in cat-elimination style: render an optional
   (sequent, model) pair as a SEMANTICS section (worlds one per line),
   prepending the fragments onto ss. None renders as nothing. *)
let rec catelim_string_of_model a1 a2 =
  match a1, a2 with
    None, ss -> ss
  | Some (seq, Model worlds), ss ->
      let sep = "\n" in
      let rec catelim_string_of_int i ss =
        (string_of_int : int -> string) i :: ss
      in
      let rec catelim_string_of_coord =
        fun (Coord c) ->
          catelim_string_of_pair catelim_string_of_int catelim_string_of_int "," c
      in
      let rec catelim_string_of_world =
        fun (World (c, chs, ts)) ss ->
          let sep2 = sep ^ " " in
          (* CHILDREN and LABELS clauses are omitted when empty *)
          let rec catelim_string_of_children chs ss =
            match chs with
              [] -> ss
            | _ ->
                sep2 :: "CHILDREN" :: " " ::
                  catelim_string_of_list catelim_string_of_coord ", " chs ss
          in
          let rec catelim_string_of_labels ts ss =
            match ts with
              [] -> ss
            | _ ->
                sep2 :: "LABELS" :: " " ::
                  catelim_string_of_list catelim_string_of_term ", " ts ss
          in
          "WORLD" :: " " ::
            catelim_string_of_coord c
              (catelim_string_of_children chs (catelim_string_of_labels ts ss))
      in
      "SEMANTICS" :: sep ::
        catelim_string_of_seq seq
          (sep ::
             catelim_string_of_list catelim_string_of_world sep worlds
               ("\n" :: ss))
there is no operator priority in forcedefs ...
now also includes the disproof universe bit of shared proofs |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Listfuns
open Miscellaneous
open Optionfuns
open Sml
open Sequent
open Stringfuns
open Symbol
open Symboltype
open Termfuns
open Termstring
open Termparse
type term = Termtype.term
type forcedef = ForceAlways
| ForceNever
| ForcePrim of term
| ForceBoth of (forcedef * forcedef)
| ForceEither of (forcedef * forcedef)
| ForceIf of (forcedef * forcedef)
| ForceEverywhere of forcedef
| ForceNowhere of forcedef
| ForceAll of (term * term list * forcedef)
| ForceSome of (term * term list * forcedef)
let term_of_forcedef fd =
match fd with
| ForcePrim t -> Some t
| _ -> None
let rec catelim_string_of_forcedef f ss =
match f with
ForceAlways -> "ALWAYS" :: ss
| ForceNever -> "NEVER" :: ss
| ForcePrim t -> "FORCE " :: catelim_string_of_termarg t ss
| ForceBoth (f1, f2) -> "BOTH (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
| ForceEither (f1, f2) -> "EITHER (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
| ForceIf (f1, f2) -> "IF (" :: catelim_string_of_forcedef f1 (") (" :: catelim_string_of_forcedef f2 (")"::ss))
| ForceEverywhere f -> "EVERYWHERE (" :: catelim_string_of_forcedef f (")"::ss)
| ForceNowhere f -> "NOWHERE (" :: catelim_string_of_forcedef f (")"::ss)
| ForceAll (t, vs, f) -> "ALL (" :: catelim_string_of_term t (") (" :: catelim_string_of_forcedef f (")"::ss))
| ForceSome (t, vs, f) -> "SOME (" :: catelim_string_of_term t (") (" :: catelim_string_of_forcedef f (")"::ss))
let rec string_of_forcedef f = implode (catelim_string_of_forcedef f [])
for some reason this goes exponential when the body is a function ( i.e. f fd |~~ etc ) .
Did n't understand vii/01 ; still do n't understand x/14 . RB
Didn't understand vii/01; still don't understand x/14. RB
*)
let rec option_mapforcedef f fd =
let omff = option_mapforcedef f in
let ompair = option_rewrite2 omff omff in
let omtvsfd = option_rewrite3 (fun v -> None) (fun v -> None) omff in
match f fd with
| Some _ as result -> result
| None ->
match fd with
| ForceAlways
| ForceNever
| ForcePrim _ -> None
| ForceBoth pair -> ompair pair &~~ (_Some <.> (fun v->ForceBoth v))
| ForceEither pair -> ompair pair &~~ (_Some <.> (fun v->ForceEither v))
| ForceIf pair -> ompair pair &~~ (_Some <.> (fun v->ForceIf v))
| ForceEverywhere fd -> omff fd &~~ (_Some <.> (fun v->ForceEverywhere v))
| ForceNowhere fd -> omff fd &~~ (_Some <.> (fun v->ForceNowhere v))
| ForceAll tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceAll v))
| ForceSome tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceSome v))
let mapforcedef f = anyway (option_mapforcedef f)
let rec option_mapforcedefterms f fd =
let omt = option_mapterm f in
let omff = option_mapforcedefterms f in
let omtvsfd = option_rewrite3 omt (option_rewritelist omt) omff in
let fdf fd =
match fd with
| ForcePrim t -> omt t &~~ (_Some <.> (fun v->ForcePrim v))
| ForceAll tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceAll v))
| ForceSome tvsfd -> omtvsfd tvsfd &~~ (_Some <.> (fun v->ForceSome v))
| _ -> None
in
option_mapforcedef fdf fd
let rec mapforcedefterms f = anyway (option_mapforcedefterms f)
let rec findinforcedef f fd =
let rec findinpair (fd1, fd2) =
(findinforcedef f fd1 |~~ (fun _ -> findinforcedef f fd2))
in
match fd with
ForceAlways -> None
| ForceNever -> None
| ForcePrim t -> findterm f t
| ForceBoth pair -> findinpair pair
| ForceEither pair -> findinpair pair
| ForceIf pair -> findinpair pair
| ForceEverywhere fd -> findinforcedef f fd
| ForceNowhere fd -> findinforcedef f fd
| ForceAll (t, _, fd) ->
(findterm f t |~~ (fun _ -> findinforcedef f fd))
| ForceSome (t, _, fd) ->
(findterm f t |~~ (fun _ -> findinforcedef f fd))
let rec existsinforcedef f =
bool_of_opt <.> findinforcedef (fun t -> if f t then Some true else None)
this ran into trouble when it emerged that it was using ALL as a reserved word , which had
been previously used in LAYOUT tactics to mean ' display all subtrees ' . On the principle
that reserved words ought not to be easily confused , this was a problem . Also , it conflicted
with the principle that we ought to be able to load old proofs .
First I tried to fix it by making LAYOUT use another word ( ALLL instead of ALL ) but ugh ! .
Second I tried to fix it by parsing it as a term and then analysing . This worked up to a
point , but it fell apart when it saw FORCE P(i ) , which parses as ( but obviously does n't mean )
( FORCE P ) i.
But I stuck with it , demanding only that you bracket the argument of FORCE .
been previously used in LAYOUT tactics to mean 'display all subtrees'. On the principle
that reserved words ought not to be easily confused, this was a problem. Also, it conflicted
with the principle that we ought to be able to load old proofs.
First I tried to fix it by making LAYOUT use another word (ALLL instead of ALL) but ugh!.
Second I tried to fix it by parsing it as a term and then analysing. This worked up to a
point, but it fell apart when it saw FORCE P(i), which parses as (but obviously doesn't mean)
(FORCE P) i.
But I stuck with it, demanding only that you bracket the argument of FORCE.
*)
I should have liked BOTH and EITHER to work as infix , but it makes life too
complicated ... so Lisp - style FORCEDEFs rule . Anyway , it avoids stupidities
about priorities !
complicated ... so Lisp-style FORCEDEFs rule. Anyway, it avoids stupidities
about priorities!
*)
let rec parseForceDef () =
let decodeApp t =
match explodeApp false t with
(f, args) -> (string_of_term f, args)
in
let rec tranForceDef t =
match decodeApp t with
("ALWAYS" , [ ]) -> ForceAlways
| ("NEVER" , [ ]) -> ForceNever
| ("FORCE" , [t]) -> ForcePrim (tranPrim t)
| ("EVERYWHERE" , [f]) -> ForceEverywhere (tranForceDef f)
| ("NOWHERE" , [f]) -> ForceNowhere (tranForceDef f)
| ("ALL" , [pat; f]) -> ForceAll (tranForceDefBinder pat f)
| ("SOME" , [pat; f]) -> ForceSome (tranForceDefBinder pat f)
| ("BOTH" , [f1; f2]) -> ForceBoth(tranForceDef f1, tranForceDef f2)
| ("EITHER" , [f1; f2]) -> ForceEither(tranForceDef f1, tranForceDef f2)
| ("IF" , [f1; f2]) -> ForceIf(tranForceDef f1, tranForceDef f2)
| _ -> raise (ParseError_
["FORCE t, EVERYWHERE f, NOWHERE f, ALL pat f, SOME pat f, BOTH f f, EITHER f f ";
"or IF f f expected in FORCEDEF; found "; string_of_term t
])
and tranPrim t =
try checkTacticTerm t; debracket t
with Tacastrophe_ ss ->
raise (ParseError_ ("FORCE " :: string_of_term t :: " contains " :: ss))
and tranForceDefBinder pat f =
let vs = isVariable <| termvars pat in
if List.exists (not <.> isextensibleId) vs then
raise (ParseError_ ["ALL and SOME must use CLASS VARIABLE identifiers to describe individuals"])
else (pat,vs,tranForceDef f)
in
tranForceDef (asTactic parseTerm EOF)
type coordinate = Coord of (int * int)
type world = World of (coordinate * coordinate list * term list)
type model = Model of world list
let rec parsemodel () =
let rec parseCoord () =
match currsymb () with
BRA "(" ->
scansymb ();
let rec parseInt () =
match currsymb () with
NUM n -> (scansymb (); atoi n)
| sy ->
match string_of_symbol sy with
"-" -> (scansymb (); - parseUnsignedInt "-")
| "~" -> (scansymb (); - parseUnsignedInt "~")
| s -> bang [s]
and bang ss =
raise
(ParseError_ ("number expected in coordinate; found " :: ss))
and parseUnsignedInt s1 =
match currsymb () with
NUM n -> (scansymb (); atoi n)
| s2 -> bang [s1; " followed by "; string_of_symbol s2]
in
let x = parseInt () in
let y =
if currsymb () = commasymbol then
(scansymb (); parseInt ())
else
raise
(ParseError_
["comma expected after x value in world coordinate"])
in
begin match currsymb () with
KET ")" -> (scansymb (); Coord (x, y))
| sy ->
raise
(ParseError_
["right paren expected after coordinate; found ";
string_of_symbol sy])
end
| sy ->
raise
(ParseError_
["coordinate expected, starting with left paren; found ";
string_of_symbol sy])
in
let rec parseWorlds () =
match currsymb () with
SHYID "WORLD" ->
scansymb ();
let c = parseCoord () in
let chs =
match currsymb () with
SHYID "CHILDREN" ->
(scansymb ();
parseList
(function BRA "(" -> true | _ -> false)
(fun _ -> parseCoord ()) commasymbol)
| _ -> []
in
let ts =
match currsymb () with
SHYID "LABELS" ->
(scansymb (); parseList canstartTerm parseTerm commasymbol)
| _ -> []
in
World (c, chs, ts) :: parseWorlds ()
| _ -> []
in
match currsymb () with
SHYID "SEMANTICS" ->
(scansymb ();
let seq = parseSeq () in
(match parseWorlds () with
[] -> raise (ParseError_ ["empty disproof description"])
| worlds -> Some (seq, Model worlds)))
| _ -> None
let rec catelim_string_of_model a1 a2 =
match a1, a2 with
None, ss -> ss
| Some (seq, Model worlds), ss ->
let sep = "\n" in
let rec catelim_string_of_int i ss =
(string_of_int : int -> string) i :: ss
in
let rec catelim_string_of_coord =
fun (Coord c) ->
catelim_string_of_pair catelim_string_of_int catelim_string_of_int "," c
in
let rec catelim_string_of_world =
fun (World (c, chs, ts)) ss ->
let sep2 = sep ^ " " in
let rec catelim_string_of_children chs ss =
match chs with
[] -> ss
| _ ->
sep2 :: "CHILDREN" :: " " ::
catelim_string_of_list catelim_string_of_coord ", " chs ss
in
let rec catelim_string_of_labels ts ss =
match ts with
[] -> ss
| _ ->
sep2 :: "LABELS" :: " " ::
catelim_string_of_list catelim_string_of_term ", " ts ss
in
"WORLD" :: " " ::
catelim_string_of_coord c
(catelim_string_of_children chs (catelim_string_of_labels ts ss))
in
"SEMANTICS" :: sep ::
catelim_string_of_seq seq
(sep ::
catelim_string_of_list catelim_string_of_world sep worlds
("\n" :: ss))
|
c2f7eb0b023203c737b747d50063cdd0f42e969ac694246bd9b0d2c233add956 | ekmett/transformers | Strict.hs | -----------------------------------------------------------------------------
-- |
Module : Control . . Trans . State . Strict
Copyright : ( c ) 2001 ,
( c ) Oregon Graduate Institute of Science and Technology , 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Strict state monads, passing an updateable state through a computation.
-- See below for examples.
--
-- In this version, sequencing of computations is strict in the state.
For a lazy version , see " Control . . Trans . Writer . Lazy " , which
-- has the same interface.
--
-- Some computations may not require the full power of state transformers:
--
* For a read - only state , see " Control . . Trans . Reader " .
--
-- * To accumulate a value without using it on the way, see
" Control . . Trans . Writer " .
-----------------------------------------------------------------------------
module Control.Monad.Trans.State.Strict (
* The State monad
State,
state,
runState,
evalState,
execState,
mapState,
withState,
-- * The StateT monad transformer
StateT(..),
evalStateT,
execStateT,
mapStateT,
withStateT,
-- * State operations
get,
put,
modify,
gets,
-- * Lifting other operations
liftCallCC,
liftCallCC',
liftCatch,
liftListen,
liftPass,
-- * Examples
-- ** State monads
-- $examples
-- ** Counting
-- $counting
-- ** Labelling trees
-- $labelling
) where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Functor.Identity
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
-- ---------------------------------------------------------------------------
-- | A state monad parameterized by the type @s@ of the state to carry.
--
The ' return ' function leaves the state unchanged , while uses
the final state of the first computation as the initial state of
the second .
type State s = StateT s Identity
-- | Construct a state monad computation from a function.
( The inverse of ' runState ' . )
state :: (s -> (a, s)) -- ^pure state transformer
-> State s a -- ^equivalent state-passing computation
state f = StateT (Identity . f)
-- | Unwrap a state monad computation as a function.
-- (The inverse of 'state'.)
runState :: State s a -- ^state-passing computation to execute
-> s -- ^initial state
-> (a, s) -- ^return value and final state
runState m = runIdentity . runStateT m
-- | Evaluate a state computation with the given initial state
-- and return the final value, discarding the final state.
--
* @'evalState ' m s = ' fst ' ( ' runState ' m s)@
evalState :: State s a -- ^state-passing computation to execute
-> s -- ^initial value
-> a -- ^return value of the state computation
evalState m s = fst (runState m s)
-- | Evaluate a state computation with the given initial state
-- and return the final state, discarding the final value.
--
* ' m s = ' snd ' ( ' runState ' m s)@
execState :: State s a -- ^state-passing computation to execute
-> s -- ^initial value
-> s -- ^final state
execState m s = snd (runState m s)
-- | Map both the return value and final state of a computation using
-- the given function.
--
* @'runState ' ( ' mapState ' f m ) = f . ' runState ' m@
mapState :: ((a, s) -> (b, s)) -> State s a -> State s b
mapState f = mapStateT (Identity . f . runIdentity)
| @'withState ' f m@ executes action @m@ on a state modified by
applying @f@.
--
* @'withState ' f m = ' modify ' f > > m@
withState :: (s -> s) -> State s a -> State s a
withState = withStateT
-- ---------------------------------------------------------------------------
-- | A state transformer monad parameterized by:
--
-- * @s@ - The state.
--
-- * @m@ - The inner monad.
--
The ' return ' function leaves the state unchanged , while uses
the final state of the first computation as the initial state of
the second .
newtype StateT s m a = StateT { runStateT :: s -> m (a,s) }
-- | Evaluate a state computation with the given initial state
-- and return the final value, discarding the final state.
--
-- * @'evalStateT' m s = 'liftM' 'fst' ('runStateT' m s)@
evalStateT :: (Monad m) => StateT s m a -> s -> m a
evalStateT m s = do
(a, _) <- runStateT m s
return a
-- | Evaluate a state computation with the given initial state
-- and return the final state, discarding the final value.
--
* @'execStateT ' m s = ' liftM ' ' snd ' ( ' runStateT ' m s)@
execStateT :: (Monad m) => StateT s m a -> s -> m s
execStateT m s = do
(_, s') <- runStateT m s
return s'
-- | Map both the return value and final state of a computation using
-- the given function.
--
-- * @'runStateT' ('mapStateT' f m) = f . 'runStateT' m@
mapStateT :: (m (a, s) -> n (b, s)) -> StateT s m a -> StateT s n b
mapStateT f m = StateT $ f . runStateT m
-- | @'withStateT' f m@ executes action @m@ on a state modified by
applying @f@.
--
-- * @'withStateT' f m = 'modify' f >> m@
withStateT :: (s -> s) -> StateT s m a -> StateT s m a
withStateT f m = StateT $ runStateT m . f
instance (Functor m) => Functor (StateT s m) where
fmap f m = StateT $ \ s ->
fmap (\ (a, s') -> (f a, s')) $ runStateT m s
instance (Functor m, Monad m) => Applicative (StateT s m) where
pure = return
(<*>) = ap
instance (Functor m, MonadPlus m) => Alternative (StateT s m) where
empty = mzero
(<|>) = mplus
instance (Monad m) => Monad (StateT s m) where
return a = StateT $ \s -> return (a, s)
m >>= k = StateT $ \s -> do
(a, s') <- runStateT m s
runStateT (k a) s'
fail str = StateT $ \_ -> fail str
instance (MonadPlus m) => MonadPlus (StateT s m) where
mzero = StateT $ \_ -> mzero
m `mplus` n = StateT $ \s -> runStateT m s `mplus` runStateT n s
instance (MonadFix m) => MonadFix (StateT s m) where
mfix f = StateT $ \s -> mfix $ \ ~(a, _) -> runStateT (f a) s
instance MonadTrans (StateT s) where
lift m = StateT $ \s -> do
a <- m
return (a, s)
instance (MonadIO m) => MonadIO (StateT s m) where
liftIO = lift . liftIO
-- | Fetch the current value of the state within the monad.
get :: (Monad m) => StateT s m s
get = StateT $ \s -> return (s, s)
-- | @'put' s@ sets the state within the monad to @s@.
put :: (Monad m) => s -> StateT s m ()
put s = StateT $ \_ -> return ((), s)
-- | @'modify' f@ is an action that updates the state to the result of
applying @f@ to the current state .
modify :: (Monad m) => (s -> s) -> StateT s m ()
modify f = do
s <- get
put (f s)
-- | Get a specific component of the state, using a projection function
-- supplied.
--
-- * @'gets' f = 'liftM' f 'get'@
gets :: (Monad m) => (s -> a) -> StateT s m a
gets f = do
s <- get
return (f s)
-- | Uniform lifting of a @callCC@ operation to the new monad.
-- This version rolls back to the original state on entering the
-- continuation.
liftCallCC :: ((((a,s) -> m (b,s)) -> m (a,s)) -> m (a,s)) ->
((a -> StateT s m b) -> StateT s m a) -> StateT s m a
liftCallCC callCC f = StateT $ \s ->
callCC $ \c ->
runStateT (f (\a -> StateT $ \ _ -> c (a, s))) s
-- | In-situ lifting of a @callCC@ operation to the new monad.
-- This version uses the current state on entering the continuation.
-- It does not satisfy the laws of a monad transformer.
liftCallCC' :: ((((a,s) -> m (b,s)) -> m (a,s)) -> m (a,s)) ->
((a -> StateT s m b) -> StateT s m a) -> StateT s m a
liftCallCC' callCC f = StateT $ \s ->
callCC $ \c ->
runStateT (f (\a -> StateT $ \s' -> c (a, s'))) s
-- | Lift a @catchError@ operation to the new monad.
liftCatch :: (m (a,s) -> (e -> m (a,s)) -> m (a,s)) ->
StateT s m a -> (e -> StateT s m a) -> StateT s m a
liftCatch catchError m h =
StateT $ \s -> runStateT m s `catchError` \e -> runStateT (h e) s
-- | Lift a @listen@ operation to the new monad.
liftListen :: Monad m =>
(m (a,s) -> m ((a,s),w)) -> StateT s m a -> StateT s m (a,w)
liftListen listen m = StateT $ \s -> do
((a, s'), w) <- listen (runStateT m s)
return ((a, w), s')
| Lift a @pass@ operation to the new monad .
liftPass :: Monad m =>
(m ((a,s),b) -> m (a,s)) -> StateT s m (a,b) -> StateT s m a
liftPass pass m = StateT $ \s -> pass $ do
((a, f), s') <- runStateT m s
return ((a, s'), f)
$ examples
from ParseLib with Hugs :
> type a = StateT String [ ] a
> = = > StateT ( String - > [ ( a , String ) ] )
For example , item can be written as :
> item = do ( x : xs ) < - get
> put xs
> return x
>
> type BoringState s a = StateT s Identity a
> = = > StateT ( s - > Identity ( a , s ) )
>
> type StateWithIO s a = StateT s IO a
> = = > StateT ( s - > IO ( a , s ) )
>
> type StateWithErr s a = StateT s Maybe a
> = = > StateT ( s - > Maybe ( a , s ) )
Parser from ParseLib with Hugs:
> type Parser a = StateT String [] a
> ==> StateT (String -> [(a,String)])
For example, item can be written as:
> item = do (x:xs) <- get
> put xs
> return x
>
> type BoringState s a = StateT s Identity a
> ==> StateT (s -> Identity (a,s))
>
> type StateWithIO s a = StateT s IO a
> ==> StateT (s -> IO (a,s))
>
> type StateWithErr s a = StateT s Maybe a
> ==> StateT (s -> Maybe (a,s))
-}
$ counting
A function to increment a counter . Taken from the paper
/Generalising Monads to , ( < /~rjmh/ > ) , November 1998 :
> tick : : State Int Int
> tick = do n < - get
> put ( n+1 )
> return n
Add one to the given number using the state monad :
> plusOne : : Int - > Int
> plusOne n = execState tick n
A contrived addition example . Works only with positive numbers :
> plus : : Int - > Int - > Int
> plus n x = execState ( sequence $ replicate n tick ) x
A function to increment a counter. Taken from the paper
/Generalising Monads to Arrows/, John
Hughes (</~rjmh/>), November 1998:
> tick :: State Int Int
> tick = do n <- get
> put (n+1)
> return n
Add one to the given number using the state monad:
> plusOne :: Int -> Int
> plusOne n = execState tick n
A contrived addition example. Works only with positive numbers:
> plus :: Int -> Int -> Int
> plus n x = execState (sequence $ replicate n tick) x
-}
$ labelling
An example from /The Craft of Functional Programming/ , Simon
Thompson ( < / > ) ,
Addison - Wesley 1999 : \"Given an arbitrary tree , transform it to a
tree of integers in which the original elements are replaced by
natural numbers , starting from 0 . The same element has to be
replaced by the same number at every occurrence , and when we meet
an as - yet - unvisited element we have to find a \'new\ ' number to match
it with:\ "
> data Tree a = Nil | Node a ( Tree a ) ( Tree a ) deriving ( Show , Eq )
> type Table a = [ a ]
> numberTree : : Eq a = > Tree a - > State ( Table a ) ( Tree Int )
> return Nil
> numberTree ( Node x t1 t2 )
> = do x
> nt1 < - numberTree t1
> nt2 < - numberTree t2
> return ( Node num nt1 nt2 )
> where
> numberNode : : Eq a = > a - > State ( Table a ) Int
> numberNode x
> = do table < - get
> ( , newPos ) < - return ( nNode x table )
> put
> return newPos
> : : ( Eq a ) = > a - > Table a - > ( Table a , Int )
> x table
> = case ( findIndexInList (= = x ) table ) of
> Nothing - > ( table + + [ x ] , length table )
> Just i - > ( table , i )
> findIndexInList : : ( a - > Bool ) - > [ a ] - > Maybe Int
> findIndexInList = findIndexInListHelp 0
> findIndexInListHelp _ _ [ ] = Nothing
> findIndexInListHelp count f ( h : t )
> = if ( f h )
> then Just count
> else findIndexInListHelp ( count+1 ) f t
numTree applies numberTree with an initial state :
> : : ( Eq a ) = > Tree a - > Tree Int
> numTree t = evalState ( numberTree t ) [ ]
> testTree = Node " Zero " ( Node " One " ( Node " Two " Nil Nil ) ( Node " One " ( Node " Zero " Nil Nil ) Nil ) ) Nil
> > Node 0 ( Node 1 ( Node 2 ) ( Node 1 ( Node 0 ) Nil ) ) Nil
sumTree is a little helper function that does not use the State monad :
> sumTree : : ( a ) = > Tree a - > a
> 0
> sumTree ( Node e t1 t2 ) = e + ( sumTree t1 ) + ( sumTree t2 )
An example from /The Craft of Functional Programming/, Simon
Thompson (</>),
Addison-Wesley 1999: \"Given an arbitrary tree, transform it to a
tree of integers in which the original elements are replaced by
natural numbers, starting from 0. The same element has to be
replaced by the same number at every occurrence, and when we meet
an as-yet-unvisited element we have to find a \'new\' number to match
it with:\"
> data Tree a = Nil | Node a (Tree a) (Tree a) deriving (Show, Eq)
> type Table a = [a]
> numberTree :: Eq a => Tree a -> State (Table a) (Tree Int)
> numberTree Nil = return Nil
> numberTree (Node x t1 t2)
> = do num <- numberNode x
> nt1 <- numberTree t1
> nt2 <- numberTree t2
> return (Node num nt1 nt2)
> where
> numberNode :: Eq a => a -> State (Table a) Int
> numberNode x
> = do table <- get
> (newTable, newPos) <- return (nNode x table)
> put newTable
> return newPos
> nNode:: (Eq a) => a -> Table a -> (Table a, Int)
> nNode x table
> = case (findIndexInList (== x) table) of
> Nothing -> (table ++ [x], length table)
> Just i -> (table, i)
> findIndexInList :: (a -> Bool) -> [a] -> Maybe Int
> findIndexInList = findIndexInListHelp 0
> findIndexInListHelp _ _ [] = Nothing
> findIndexInListHelp count f (h:t)
> = if (f h)
> then Just count
> else findIndexInListHelp (count+1) f t
numTree applies numberTree with an initial state:
> numTree :: (Eq a) => Tree a -> Tree Int
> numTree t = evalState (numberTree t) []
> testTree = Node "Zero" (Node "One" (Node "Two" Nil Nil) (Node "One" (Node "Zero" Nil Nil) Nil)) Nil
> numTree testTree => Node 0 (Node 1 (Node 2 Nil Nil) (Node 1 (Node 0 Nil Nil) Nil)) Nil
sumTree is a little helper function that does not use the State monad:
> sumTree :: (Num a) => Tree a -> a
> sumTree Nil = 0
> sumTree (Node e t1 t2) = e + (sumTree t1) + (sumTree t2)
-}
| null | https://raw.githubusercontent.com/ekmett/transformers/eb2f3223114f67f68c8970458ca863e231bdb576/Control/Monad/Trans/State/Strict.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : portable
Strict state monads, passing an updateable state through a computation.
See below for examples.
In this version, sequencing of computations is strict in the state.
has the same interface.
Some computations may not require the full power of state transformers:
* To accumulate a value without using it on the way, see
---------------------------------------------------------------------------
* The StateT monad transformer
* State operations
* Lifting other operations
* Examples
** State monads
$examples
** Counting
$counting
** Labelling trees
$labelling
---------------------------------------------------------------------------
| A state monad parameterized by the type @s@ of the state to carry.
| Construct a state monad computation from a function.
^pure state transformer
^equivalent state-passing computation
| Unwrap a state monad computation as a function.
(The inverse of 'state'.)
^state-passing computation to execute
^initial state
^return value and final state
| Evaluate a state computation with the given initial state
and return the final value, discarding the final state.
^state-passing computation to execute
^initial value
^return value of the state computation
| Evaluate a state computation with the given initial state
and return the final state, discarding the final value.
^state-passing computation to execute
^initial value
^final state
| Map both the return value and final state of a computation using
the given function.
---------------------------------------------------------------------------
| A state transformer monad parameterized by:
* @s@ - The state.
* @m@ - The inner monad.
| Evaluate a state computation with the given initial state
and return the final value, discarding the final state.
* @'evalStateT' m s = 'liftM' 'fst' ('runStateT' m s)@
| Evaluate a state computation with the given initial state
and return the final state, discarding the final value.
| Map both the return value and final state of a computation using
the given function.
* @'runStateT' ('mapStateT' f m) = f . 'runStateT' m@
| @'withStateT' f m@ executes action @m@ on a state modified by
* @'withStateT' f m = 'modify' f >> m@
| Fetch the current value of the state within the monad.
| @'put' s@ sets the state within the monad to @s@.
| @'modify' f@ is an action that updates the state to the result of
| Get a specific component of the state, using a projection function
supplied.
* @'gets' f = 'liftM' f 'get'@
| Uniform lifting of a @callCC@ operation to the new monad.
This version rolls back to the original state on entering the
continuation.
| In-situ lifting of a @callCC@ operation to the new monad.
This version uses the current state on entering the continuation.
It does not satisfy the laws of a monad transformer.
| Lift a @catchError@ operation to the new monad.
| Lift a @listen@ operation to the new monad. | Module : Control . . Trans . State . Strict
Copyright : ( c ) 2001 ,
( c ) Oregon Graduate Institute of Science and Technology , 2001
For a lazy version , see " Control . . Trans . Writer . Lazy " , which
* For a read - only state , see " Control . . Trans . Reader " .
" Control . . Trans . Writer " .
module Control.Monad.Trans.State.Strict (
* The State monad
State,
state,
runState,
evalState,
execState,
mapState,
withState,
StateT(..),
evalStateT,
execStateT,
mapStateT,
withStateT,
get,
put,
modify,
gets,
liftCallCC,
liftCallCC',
liftCatch,
liftListen,
liftPass,
) where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Functor.Identity
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
The ' return ' function leaves the state unchanged , while uses
the final state of the first computation as the initial state of
the second .
type State s = StateT s Identity
( The inverse of ' runState ' . )
state f = StateT (Identity . f)
runState m = runIdentity . runStateT m
* @'evalState ' m s = ' fst ' ( ' runState ' m s)@
evalState m s = fst (runState m s)
* ' m s = ' snd ' ( ' runState ' m s)@
execState m s = snd (runState m s)
* @'runState ' ( ' mapState ' f m ) = f . ' runState ' m@
mapState :: ((a, s) -> (b, s)) -> State s a -> State s b
mapState f = mapStateT (Identity . f . runIdentity)
| @'withState ' f m@ executes action @m@ on a state modified by
applying @f@.
* @'withState ' f m = ' modify ' f > > m@
withState :: (s -> s) -> State s a -> State s a
withState = withStateT
The ' return ' function leaves the state unchanged , while uses
the final state of the first computation as the initial state of
the second .
newtype StateT s m a = StateT { runStateT :: s -> m (a,s) }
evalStateT :: (Monad m) => StateT s m a -> s -> m a
evalStateT m s = do
(a, _) <- runStateT m s
return a
* @'execStateT ' m s = ' liftM ' ' snd ' ( ' runStateT ' m s)@
execStateT :: (Monad m) => StateT s m a -> s -> m s
execStateT m s = do
(_, s') <- runStateT m s
return s'
mapStateT :: (m (a, s) -> n (b, s)) -> StateT s m a -> StateT s n b
mapStateT f m = StateT $ f . runStateT m
applying @f@.
withStateT :: (s -> s) -> StateT s m a -> StateT s m a
withStateT f m = StateT $ runStateT m . f
instance (Functor m) => Functor (StateT s m) where
fmap f m = StateT $ \ s ->
fmap (\ (a, s') -> (f a, s')) $ runStateT m s
instance (Functor m, Monad m) => Applicative (StateT s m) where
pure = return
(<*>) = ap
instance (Functor m, MonadPlus m) => Alternative (StateT s m) where
empty = mzero
(<|>) = mplus
instance (Monad m) => Monad (StateT s m) where
return a = StateT $ \s -> return (a, s)
m >>= k = StateT $ \s -> do
(a, s') <- runStateT m s
runStateT (k a) s'
fail str = StateT $ \_ -> fail str
instance (MonadPlus m) => MonadPlus (StateT s m) where
mzero = StateT $ \_ -> mzero
m `mplus` n = StateT $ \s -> runStateT m s `mplus` runStateT n s
instance (MonadFix m) => MonadFix (StateT s m) where
mfix f = StateT $ \s -> mfix $ \ ~(a, _) -> runStateT (f a) s
instance MonadTrans (StateT s) where
lift m = StateT $ \s -> do
a <- m
return (a, s)
instance (MonadIO m) => MonadIO (StateT s m) where
liftIO = lift . liftIO
get :: (Monad m) => StateT s m s
get = StateT $ \s -> return (s, s)
put :: (Monad m) => s -> StateT s m ()
put s = StateT $ \_ -> return ((), s)
applying @f@ to the current state .
modify :: (Monad m) => (s -> s) -> StateT s m ()
modify f = do
s <- get
put (f s)
gets :: (Monad m) => (s -> a) -> StateT s m a
gets f = do
s <- get
return (f s)
liftCallCC :: ((((a,s) -> m (b,s)) -> m (a,s)) -> m (a,s)) ->
((a -> StateT s m b) -> StateT s m a) -> StateT s m a
liftCallCC callCC f = StateT $ \s ->
callCC $ \c ->
runStateT (f (\a -> StateT $ \ _ -> c (a, s))) s
liftCallCC' :: ((((a,s) -> m (b,s)) -> m (a,s)) -> m (a,s)) ->
((a -> StateT s m b) -> StateT s m a) -> StateT s m a
liftCallCC' callCC f = StateT $ \s ->
callCC $ \c ->
runStateT (f (\a -> StateT $ \s' -> c (a, s'))) s
liftCatch :: (m (a,s) -> (e -> m (a,s)) -> m (a,s)) ->
StateT s m a -> (e -> StateT s m a) -> StateT s m a
liftCatch catchError m h =
StateT $ \s -> runStateT m s `catchError` \e -> runStateT (h e) s
liftListen :: Monad m =>
(m (a,s) -> m ((a,s),w)) -> StateT s m a -> StateT s m (a,w)
liftListen listen m = StateT $ \s -> do
((a, s'), w) <- listen (runStateT m s)
return ((a, w), s')
| Lift a @pass@ operation to the new monad .
liftPass :: Monad m =>
(m ((a,s),b) -> m (a,s)) -> StateT s m (a,b) -> StateT s m a
liftPass pass m = StateT $ \s -> pass $ do
((a, f), s') <- runStateT m s
return ((a, s'), f)
$ examples
from ParseLib with Hugs :
> type a = StateT String [ ] a
> = = > StateT ( String - > [ ( a , String ) ] )
For example , item can be written as :
> item = do ( x : xs ) < - get
> put xs
> return x
>
> type BoringState s a = StateT s Identity a
> = = > StateT ( s - > Identity ( a , s ) )
>
> type StateWithIO s a = StateT s IO a
> = = > StateT ( s - > IO ( a , s ) )
>
> type StateWithErr s a = StateT s Maybe a
> = = > StateT ( s - > Maybe ( a , s ) )
Parser from ParseLib with Hugs:
> type Parser a = StateT String [] a
> ==> StateT (String -> [(a,String)])
For example, item can be written as:
> item = do (x:xs) <- get
> put xs
> return x
>
> type BoringState s a = StateT s Identity a
> ==> StateT (s -> Identity (a,s))
>
> type StateWithIO s a = StateT s IO a
> ==> StateT (s -> IO (a,s))
>
> type StateWithErr s a = StateT s Maybe a
> ==> StateT (s -> Maybe (a,s))
-}
$ counting
A function to increment a counter . Taken from the paper
/Generalising Monads to , ( < /~rjmh/ > ) , November 1998 :
> tick : : State Int Int
> tick = do n < - get
> put ( n+1 )
> return n
Add one to the given number using the state monad :
> plusOne : : Int - > Int
> plusOne n = execState tick n
A contrived addition example . Works only with positive numbers :
> plus : : Int - > Int - > Int
> plus n x = execState ( sequence $ replicate n tick ) x
A function to increment a counter. Taken from the paper
/Generalising Monads to Arrows/, John
Hughes (</~rjmh/>), November 1998:
> tick :: State Int Int
> tick = do n <- get
> put (n+1)
> return n
Add one to the given number using the state monad:
> plusOne :: Int -> Int
> plusOne n = execState tick n
A contrived addition example. Works only with positive numbers:
> plus :: Int -> Int -> Int
> plus n x = execState (sequence $ replicate n tick) x
-}
$ labelling
An example from /The Craft of Functional Programming/ , Simon
Thompson ( < / > ) ,
Addison - Wesley 1999 : \"Given an arbitrary tree , transform it to a
tree of integers in which the original elements are replaced by
natural numbers , starting from 0 . The same element has to be
replaced by the same number at every occurrence , and when we meet
an as - yet - unvisited element we have to find a \'new\ ' number to match
it with:\ "
> data Tree a = Nil | Node a ( Tree a ) ( Tree a ) deriving ( Show , Eq )
> type Table a = [ a ]
> numberTree : : Eq a = > Tree a - > State ( Table a ) ( Tree Int )
> return Nil
> numberTree ( Node x t1 t2 )
> = do x
> nt1 < - numberTree t1
> nt2 < - numberTree t2
> return ( Node num nt1 nt2 )
> where
> numberNode : : Eq a = > a - > State ( Table a ) Int
> numberNode x
> = do table < - get
> ( , newPos ) < - return ( nNode x table )
> put
> return newPos
> : : ( Eq a ) = > a - > Table a - > ( Table a , Int )
> x table
> = case ( findIndexInList (= = x ) table ) of
> Nothing - > ( table + + [ x ] , length table )
> Just i - > ( table , i )
> findIndexInList : : ( a - > Bool ) - > [ a ] - > Maybe Int
> findIndexInList = findIndexInListHelp 0
> findIndexInListHelp _ _ [ ] = Nothing
> findIndexInListHelp count f ( h : t )
> = if ( f h )
> then Just count
> else findIndexInListHelp ( count+1 ) f t
numTree applies numberTree with an initial state :
> : : ( Eq a ) = > Tree a - > Tree Int
> numTree t = evalState ( numberTree t ) [ ]
> testTree = Node " Zero " ( Node " One " ( Node " Two " Nil Nil ) ( Node " One " ( Node " Zero " Nil Nil ) Nil ) ) Nil
> > Node 0 ( Node 1 ( Node 2 ) ( Node 1 ( Node 0 ) Nil ) ) Nil
sumTree is a little helper function that does not use the State monad :
> sumTree : : ( a ) = > Tree a - > a
> 0
> sumTree ( Node e t1 t2 ) = e + ( sumTree t1 ) + ( sumTree t2 )
An example from /The Craft of Functional Programming/, Simon
Thompson (</>),
Addison-Wesley 1999: \"Given an arbitrary tree, transform it to a
tree of integers in which the original elements are replaced by
natural numbers, starting from 0. The same element has to be
replaced by the same number at every occurrence, and when we meet
an as-yet-unvisited element we have to find a \'new\' number to match
it with:\"
> data Tree a = Nil | Node a (Tree a) (Tree a) deriving (Show, Eq)
> type Table a = [a]
> numberTree :: Eq a => Tree a -> State (Table a) (Tree Int)
> numberTree Nil = return Nil
> numberTree (Node x t1 t2)
> = do num <- numberNode x
> nt1 <- numberTree t1
> nt2 <- numberTree t2
> return (Node num nt1 nt2)
> where
> numberNode :: Eq a => a -> State (Table a) Int
> numberNode x
> = do table <- get
> (newTable, newPos) <- return (nNode x table)
> put newTable
> return newPos
> nNode:: (Eq a) => a -> Table a -> (Table a, Int)
> nNode x table
> = case (findIndexInList (== x) table) of
> Nothing -> (table ++ [x], length table)
> Just i -> (table, i)
> findIndexInList :: (a -> Bool) -> [a] -> Maybe Int
> findIndexInList = findIndexInListHelp 0
> findIndexInListHelp _ _ [] = Nothing
> findIndexInListHelp count f (h:t)
> = if (f h)
> then Just count
> else findIndexInListHelp (count+1) f t
numTree applies numberTree with an initial state:
> numTree :: (Eq a) => Tree a -> Tree Int
> numTree t = evalState (numberTree t) []
> testTree = Node "Zero" (Node "One" (Node "Two" Nil Nil) (Node "One" (Node "Zero" Nil Nil) Nil)) Nil
> numTree testTree => Node 0 (Node 1 (Node 2 Nil Nil) (Node 1 (Node 0 Nil Nil) Nil)) Nil
sumTree is a little helper function that does not use the State monad:
> sumTree :: (Num a) => Tree a -> a
> sumTree Nil = 0
> sumTree (Node e t1 t2) = e + (sumTree t1) + (sumTree t2)
-}
|
28acad5db35b7ab9aaac48d90cf9d6be96e05de72c9cd5e1291927827b4c6b76 | andy128k/cl-gobject-introspection | package.lisp |
(in-package cl-user)
(cl:defpackage flood-game-example
(:use #:cl))
| null | https://raw.githubusercontent.com/andy128k/cl-gobject-introspection/d0136c8d9ade2560123af1fc55bbf70d2e3db539/examples/flood-game/src/package.lisp | lisp |
(in-package cl-user)
(cl:defpackage flood-game-example
(:use #:cl))
| |
267f1b44f58f7bc675c1be15f0353615fb9f81694de33640024f88ae00f32bfd | kadena-io/chainweb-node | Paging.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveTraversable #-}
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
-- |
Module : Chainweb . Utils . Paging
Copyright : Copyright © 2018 Kadena LLC .
License : MIT
Maintainer : < >
-- Stability: experimental
--
-- Tools for paging HTTP responses
--
module Chainweb.Utils.Paging
(
-- * Limit
Limit(..)
-- * Page
, Page(..)
, pageLimit
, pageItems
, pageNext
-- * Next Item
, NextItem(..)
, _getNextItem
, getNextItem
, isExclusive
, isInclusive
, nextItemToText
, nextItemFromText
-- * End-Of-Stream
, Eos(..)
, isEos
, atEos
-- * Tools for creating pages from streams
, finitePrefixOfInfiniteStreamToPage
, finiteStreamToPage
, seekFiniteStreamToPage
) where
import Control.Lens (Getter, to)
import Control.Lens.TH
import Control.Monad.Catch
import Data.Aeson
import Data.Functor.Of
import Data.Hashable
import Data.Maybe
import qualified Data.Text as T
import GHC.Generics (Generic)
import Numeric.Natural
import qualified Streaming.Prelude as S
-- internal modules
import Chainweb.Utils hiding ((==>))
-- -------------------------------------------------------------------------- --
-- Limit
-- | Limit the result of a query to a maximum number of items
--
newtype Limit = Limit { _getLimit :: Natural }
deriving stock (Eq, Show, Generic)
deriving anyclass (Hashable)
deriving newtype (Num, Real, Integral, Enum, Ord)
-- -------------------------------------------------------------------------- --
-- Page
data Page k a = Page
{ _pageLimit :: !Limit
-- ^ The number of items in the page
, _pageItems :: ![a]
-- ^ The items of the page
, _pageNext :: !(Maybe k)
-- ^ A cursor for querying the next page, if there is any. The value
-- is given the next parameter of the respective query interface.
}
deriving (Show, Eq, Ord, Generic, Functor, Foldable)
makeLenses ''Page
pageProperties
:: HasTextRepresentation k
=> ToJSON k
=> ToJSON a
=> KeyValue kv
=> Page k a
-> [kv]
pageProperties p =
[ "limit" .= _getLimit (_pageLimit p)
, "items" .= _pageItems p
, "next" .= _pageNext p
]
# INLINE pageProperties #
instance (HasTextRepresentation k, ToJSON k, ToJSON a) => ToJSON (Page k a) where
toJSON = object . pageProperties
toEncoding = pairs . mconcat . pageProperties
# INLINE toJSON #
# INLINE toEncoding #
instance (HasTextRepresentation k, FromJSON k, FromJSON a) => FromJSON (Page k a) where
parseJSON = withObject "page" $ \o -> Page
<$> (Limit <$> (o .: "limit"))
<*> o .: "items"
<*> o .: "next"
# INLINE parseJSON #
-- -------------------------------------------------------------------------- --
-- Next Item
-- | When seeking a position in a stream, define if the given position
-- is inclusive or exclusive.
--
-- Inclusive: return all items of the stream starting with the given key.
-- Exclusive: return all items of the stream starting immidiately after the given key.
--
data NextItem k
= Inclusive k
| Exclusive k
deriving stock (Eq, Show, Ord, Functor, Foldable, Traversable)
_getNextItem :: NextItem k -> k
_getNextItem (Inclusive k) = k
_getNextItem (Exclusive k) = k
{-# INLINE _getNextItem #-}
getNextItem :: Getter (NextItem k) k
getNextItem = to _getNextItem
# INLINE getNextItem #
isInclusive :: NextItem k -> Bool
isInclusive Inclusive{} = True
isInclusive _ = False
isExclusive :: NextItem k -> Bool
isExclusive Exclusive{} = True
isExclusive _ = False
nextItemToText :: HasTextRepresentation k => NextItem k -> T.Text
nextItemToText (Inclusive k) = "inclusive:" <> toText k
nextItemToText (Exclusive k) = "exclusive:" <> toText k
nextItemFromText :: MonadThrow m => HasTextRepresentation k => T.Text -> m (NextItem k)
nextItemFromText t = case T.break (== ':') t of
(a, b)
| a == "inclusive" -> Inclusive <$> fromText (T.drop 1 b)
| a == "exclusive" -> Exclusive <$> fromText (T.drop 1 b)
| T.null b -> throwM . TextFormatException $ "missing ':' in next item: \"" <> t <> "\"."
| otherwise -> throwM $ TextFormatException $ "unrecognized next item: \"" <> t <> "\"."
instance HasTextRepresentation k => HasTextRepresentation (NextItem k) where
toText = nextItemToText
# INLINE toText #
fromText = nextItemFromText
{-# INLINE fromText #-}
instance HasTextRepresentation k => ToJSON (NextItem k) where
toJSON = toJSON . toText
toEncoding = toEncoding . toText
# INLINE toJSON #
# INLINE toEncoding #
instance HasTextRepresentation k => FromJSON (NextItem k) where
parseJSON = parseJsonFromText "NextItem"
# INLINE parseJSON #
-- -------------------------------------------------------------------------- --
-- End-Of-Stream
-- | Data type to indicate end of stream
--
newtype Eos = Eos { _getEos :: Bool }
deriving stock (Eq, Show, Ord, Generic)
deriving newtype (Enum, Bounded, FromJSON, ToJSON)
isEos :: Eos -> Bool
isEos = _getEos
atEos :: Monad m => S.Stream (Of a) m () -> m Eos
atEos = fmap (Eos . isNothing) . S.head_
-- -------------------------------------------------------------------------- --
-- Tools for turning streams into pages
-- | Create page from a non-empty stream that is a non-blocking finite prefix of
-- a possibly blocking infinite stream.
--
-- If the given stream contains more items than requested by the caller an
-- 'Inclusive' cursor is added to the page. Otherwise the last item of the
-- stream is added as 'Exclusive' cursor.
--
-- If the input stream is empty we assume that it is because of a limiting
-- filter that results in a query for a finite stream. No cursor is returned.
--
-- For an empty input we can't return a next cursor. We can't return just
-- 'Nothing' because that is used in a 'Page' to signal the end of the stream,
-- which contradicts the assumption that the input stream is the prefix of an
-- infinite stream. So, when we see an empty stream we assume that it's empty
-- because of some filter and return 'Nothing'
--
finitePrefixOfInfiniteStreamToPage
:: MonadThrow m
=> (a -> k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
finitePrefixOfInfiniteStreamToPage k limit s = do
(items' :> limit' :> lastKey :> tailStream) <- S.toList
. S.length
. S.copy
. S.last
. S.copy
. maybe (mempty <$) (\n -> S.splitAt (int $ _getLimit n)) limit
$ s
maybeNext <- fmap k <$> S.head_ tailStream
return $ Page (int limit') items' $ case maybeNext of
Nothing -> case lastKey of
Nothing -> Nothing
Just l -> Just (Exclusive $ k l)
Just next -> Just (Inclusive next)
-- | Create 'Page' from a (possibly empty) prefix of a non-blocking finite
-- stream. If the input stream has more than the requested number of items
-- an 'Inclusive' cursor is added. Otherwise it is assumed that the stream
-- has ended and 'Nothing' is returned as cursor.
--
finiteStreamToPage
:: Monad m
=> (a -> k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
finiteStreamToPage k limit s = do
(items' :> limit' :> tailStream) <- S.toList
. S.length
. S.copy
. maybe (mempty <$) (\n -> S.splitAt (int $ _getLimit n)) limit
$ s
next <- fmap (Inclusive . k) <$> S.head_ tailStream
return $ Page (int limit') items' next
| Quick and dirty pagin implementation . Usage should be avoided .
--
seekFiniteStreamToPage
:: Monad m
=> Eq k
=> (a -> k)
-> Maybe (NextItem k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
seekFiniteStreamToPage k next limit = finiteStreamToPage k limit
. case next of
Nothing -> id
Just (Exclusive n) -> S.drop 1 . S.dropWhile (\x -> k x /= n)
Just (Inclusive n) -> S.dropWhile (\x -> k x /= n)
| null | https://raw.githubusercontent.com/kadena-io/chainweb-node/62e5eeccd1ae4a5e4ca56452f7c85d07cdb483c4/src/Chainweb/Utils/Paging.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveTraversable #
# LANGUAGE OverloadedStrings #
|
Stability: experimental
Tools for paging HTTP responses
* Limit
* Page
* Next Item
* End-Of-Stream
* Tools for creating pages from streams
internal modules
-------------------------------------------------------------------------- --
Limit
| Limit the result of a query to a maximum number of items
-------------------------------------------------------------------------- --
Page
^ The number of items in the page
^ The items of the page
^ A cursor for querying the next page, if there is any. The value
is given the next parameter of the respective query interface.
-------------------------------------------------------------------------- --
Next Item
| When seeking a position in a stream, define if the given position
is inclusive or exclusive.
Inclusive: return all items of the stream starting with the given key.
Exclusive: return all items of the stream starting immidiately after the given key.
# INLINE _getNextItem #
# INLINE fromText #
-------------------------------------------------------------------------- --
End-Of-Stream
| Data type to indicate end of stream
-------------------------------------------------------------------------- --
Tools for turning streams into pages
| Create page from a non-empty stream that is a non-blocking finite prefix of
a possibly blocking infinite stream.
If the given stream contains more items than requested by the caller an
'Inclusive' cursor is added to the page. Otherwise the last item of the
stream is added as 'Exclusive' cursor.
If the input stream is empty we assume that it is because of a limiting
filter that results in a query for a finite stream. No cursor is returned.
For an empty input we can't return a next cursor. We can't return just
'Nothing' because that is used in a 'Page' to signal the end of the stream,
which contradicts the assumption that the input stream is the prefix of an
infinite stream. So, when we see an empty stream we assume that it's empty
because of some filter and return 'Nothing'
| Create 'Page' from a (possibly empty) prefix of a non-blocking finite
stream. If the input stream has more than the requested number of items
an 'Inclusive' cursor is added. Otherwise it is assumed that the stream
has ended and 'Nothing' is returned as cursor.
| # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
Module : Chainweb . Utils . Paging
Copyright : Copyright © 2018 Kadena LLC .
License : MIT
Maintainer : < >
module Chainweb.Utils.Paging
(
Limit(..)
, Page(..)
, pageLimit
, pageItems
, pageNext
, NextItem(..)
, _getNextItem
, getNextItem
, isExclusive
, isInclusive
, nextItemToText
, nextItemFromText
, Eos(..)
, isEos
, atEos
, finitePrefixOfInfiniteStreamToPage
, finiteStreamToPage
, seekFiniteStreamToPage
) where
import Control.Lens (Getter, to)
import Control.Lens.TH
import Control.Monad.Catch
import Data.Aeson
import Data.Functor.Of
import Data.Hashable
import Data.Maybe
import qualified Data.Text as T
import GHC.Generics (Generic)
import Numeric.Natural
import qualified Streaming.Prelude as S
import Chainweb.Utils hiding ((==>))
newtype Limit = Limit { _getLimit :: Natural }
deriving stock (Eq, Show, Generic)
deriving anyclass (Hashable)
deriving newtype (Num, Real, Integral, Enum, Ord)
data Page k a = Page
{ _pageLimit :: !Limit
, _pageItems :: ![a]
, _pageNext :: !(Maybe k)
}
deriving (Show, Eq, Ord, Generic, Functor, Foldable)
makeLenses ''Page
pageProperties
:: HasTextRepresentation k
=> ToJSON k
=> ToJSON a
=> KeyValue kv
=> Page k a
-> [kv]
pageProperties p =
[ "limit" .= _getLimit (_pageLimit p)
, "items" .= _pageItems p
, "next" .= _pageNext p
]
# INLINE pageProperties #
instance (HasTextRepresentation k, ToJSON k, ToJSON a) => ToJSON (Page k a) where
toJSON = object . pageProperties
toEncoding = pairs . mconcat . pageProperties
# INLINE toJSON #
# INLINE toEncoding #
instance (HasTextRepresentation k, FromJSON k, FromJSON a) => FromJSON (Page k a) where
parseJSON = withObject "page" $ \o -> Page
<$> (Limit <$> (o .: "limit"))
<*> o .: "items"
<*> o .: "next"
# INLINE parseJSON #
data NextItem k
= Inclusive k
| Exclusive k
deriving stock (Eq, Show, Ord, Functor, Foldable, Traversable)
_getNextItem :: NextItem k -> k
_getNextItem (Inclusive k) = k
_getNextItem (Exclusive k) = k
getNextItem :: Getter (NextItem k) k
getNextItem = to _getNextItem
# INLINE getNextItem #
isInclusive :: NextItem k -> Bool
isInclusive Inclusive{} = True
isInclusive _ = False
isExclusive :: NextItem k -> Bool
isExclusive Exclusive{} = True
isExclusive _ = False
nextItemToText :: HasTextRepresentation k => NextItem k -> T.Text
nextItemToText (Inclusive k) = "inclusive:" <> toText k
nextItemToText (Exclusive k) = "exclusive:" <> toText k
nextItemFromText :: MonadThrow m => HasTextRepresentation k => T.Text -> m (NextItem k)
nextItemFromText t = case T.break (== ':') t of
(a, b)
| a == "inclusive" -> Inclusive <$> fromText (T.drop 1 b)
| a == "exclusive" -> Exclusive <$> fromText (T.drop 1 b)
| T.null b -> throwM . TextFormatException $ "missing ':' in next item: \"" <> t <> "\"."
| otherwise -> throwM $ TextFormatException $ "unrecognized next item: \"" <> t <> "\"."
instance HasTextRepresentation k => HasTextRepresentation (NextItem k) where
toText = nextItemToText
# INLINE toText #
fromText = nextItemFromText
instance HasTextRepresentation k => ToJSON (NextItem k) where
toJSON = toJSON . toText
toEncoding = toEncoding . toText
# INLINE toJSON #
# INLINE toEncoding #
instance HasTextRepresentation k => FromJSON (NextItem k) where
parseJSON = parseJsonFromText "NextItem"
# INLINE parseJSON #
newtype Eos = Eos { _getEos :: Bool }
deriving stock (Eq, Show, Ord, Generic)
deriving newtype (Enum, Bounded, FromJSON, ToJSON)
isEos :: Eos -> Bool
isEos = _getEos
atEos :: Monad m => S.Stream (Of a) m () -> m Eos
atEos = fmap (Eos . isNothing) . S.head_
finitePrefixOfInfiniteStreamToPage
:: MonadThrow m
=> (a -> k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
finitePrefixOfInfiniteStreamToPage k limit s = do
(items' :> limit' :> lastKey :> tailStream) <- S.toList
. S.length
. S.copy
. S.last
. S.copy
. maybe (mempty <$) (\n -> S.splitAt (int $ _getLimit n)) limit
$ s
maybeNext <- fmap k <$> S.head_ tailStream
return $ Page (int limit') items' $ case maybeNext of
Nothing -> case lastKey of
Nothing -> Nothing
Just l -> Just (Exclusive $ k l)
Just next -> Just (Inclusive next)
finiteStreamToPage
:: Monad m
=> (a -> k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
finiteStreamToPage k limit s = do
(items' :> limit' :> tailStream) <- S.toList
. S.length
. S.copy
. maybe (mempty <$) (\n -> S.splitAt (int $ _getLimit n)) limit
$ s
next <- fmap (Inclusive . k) <$> S.head_ tailStream
return $ Page (int limit') items' next
| Quick and dirty pagin implementation . Usage should be avoided .
seekFiniteStreamToPage
:: Monad m
=> Eq k
=> (a -> k)
-> Maybe (NextItem k)
-> Maybe Limit
-> S.Stream (Of a) m ()
-> m (Page (NextItem k) a)
seekFiniteStreamToPage k next limit = finiteStreamToPage k limit
. case next of
Nothing -> id
Just (Exclusive n) -> S.drop 1 . S.dropWhile (\x -> k x /= n)
Just (Inclusive n) -> S.dropWhile (\x -> k x /= n)
|
53de37480d912113f07c880b06db3b65e267b2b15ffc2d339e4ffb9792076d3e | YoshikuniJujo/test_haskell | Lib.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE PatternSynonyms #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module Lib where
import Foreign.Storable
import Foreign.C.Enum
enum "Foo" ''Int [''Show, ''Read, ''Storable] [
("FooBar", 123),
("FooBaz", 456) ]
| null | https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/87b3078eab3276e5c0df0d7407f1e20afe6dc2f0/features/ffi/c_enum_structure/try-c-enum/src/Lib.hs | haskell | # LANGUAGE TemplateHaskell #
# LANGUAGE PatternSynonyms #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module Lib where
import Foreign.Storable
import Foreign.C.Enum
enum "Foo" ''Int [''Show, ''Read, ''Storable] [
("FooBar", 123),
("FooBaz", 456) ]
| |
5e1d417eb0c253a5114cb05c5e9089f648f6a3c6e11a145ecdd8efe2a92ded23 | vii/teepeedee2 | css.lisp | (in-package #:tpd2.ml)
From -CSS2/propidx.html
; if you want more just use "strings"
(defvar *css-properties* '(
:azimuth
:background
:background-color
:background-image
:background-repeat
:background-attachment
:background-position
:background-attachment
:background-color
:background-image
:background-position
:background-repeat
:border
:border-width
:border-style
:border-collapse
:border-color
:border-spacing
:border-style
:border-top
:border-right
:border-bottom
:border-left
:border-top-width
:border-style
:border-top-color
:border-right-color
:border-bottom-color
:border-left-color
:border-top-style
:border-right-style
:border-bottom-style
:border-left-style
:border-top-width
:border-right-width
:border-bottom-width
:border-left-width
:border-width
:bottom
:caption-side
:clear
:clip
:color
:content
:counter-increment
:counter-reset
:cue
:cue-before
:cue-after
:cursor
:direction
:display
:elevation
:empty-cells
:float
:font
:font-style
:font-variant
:font-weight
:font-size
:font-family
:font-family
:font-size
:font-size-adjust
:font-stretch
:font-style
:font-variant
:font-weight
:height
:left
:letter-spacing
:line-height
:list-style
:list-style-type
:list-style-position
:list-style-image
:list-style-position
:list-style-type
:margin
:margin-top
:margin-right
:margin-bottom
:margin-left
:marker-offset
:marks
:max-height
:max-width
:min-height
:min-width
:orphans
:outline
:outline-color
:outline-style
:outline-color
:outline-style
:outline-width
:overflow
:padding
:padding-top
:padding-right
:padding-bottom
:padding-left
:page
:page-break-after
:page-break-before
:page-break-inside
:pause
:pause-after
:pause-before
:pitch
:pitch-range
:play-during
:position
:quotes
:richness
:right
:size
:speak
:speak-header
:speak-numeral
:speak-punctuation
:speech-rate
:stress
:table-layout
:text-align
:text-decoration
:text-indent
:text-shadow
:text-transform
:top
:unicode-bidi
:vertical-align
:visibility
:voice-family
:volume
:white-space
:widows
:width
:word-spacing
:z-index
:x-opacity
:x-column-width
:x-column-gap
:x-border-radius))
;; Write CSS like this: (("p.asdfsaf" "p + p") :property "value" :property "value")
(defun validate-properties (properties)
(loop for (property) on properties by #'cddr
when (keywordp property) do
(assert (member property *css-properties*) (property))))
(defun css-output-properties (properties)
(append (list " {")
(css-output-properties-form properties)
(list "}" #\Newline)))
(defgeneric css-output-selector-form (selector properties))
(defmethod css-output-selector-form ((str string) properties)
(append (list str)
(css-output-properties properties)))
(defun css-selector-form-to-string (form)
(cond ((symbolp form)
(assert (and (eql #\< (char (symbol-name form) 0)) (fboundp form)) (form) "Misspelled? ~A" form)
(subseq (symbol-name form) 1))
(t form)))
(defmethod css-output-selector-form ((sym symbol) properties)
(css-output-selector-form (css-selector-form-to-string sym) properties))
(defmethod css-output-selector-form ((l list) properties)
(case (first l)
(quote
(append
(rest l)
(css-output-properties properties)))
(:possibly-unsupported-selectors
(loop for form in (rest l)
append (css-output-selector-form form properties)))
(t
(append
(loop for once = t then nil
for form in l
unless once collect ","
collect (css-selector-form-to-string form))
(css-output-properties properties)))))
(defgeneric css-output-property-form (property value))
(defun css-output-property-value-form (value)
(loop for v in (force-list value) for once = t then nil unless once collect " " collect v))
(defmethod css-output-property-form (property value)
(list* (if (keywordp property)
(string-downcase (symbol-name property))
property)
": "
(css-output-property-value-form value)))
(defun css-output-property-under-different-names (names value)
(loop for p in names
for once = nil then t
append
(css-output-property-form p value)
unless once collect ";"))
(defmethod css-output-property-form ((property (eql :x-opacity)) value)
(check-type value (real 0 1))
(append
(css-output-property-under-different-names '("opacity" "-moz-opacity") value)
(list ";")
(css-output-property-form "filter" (strcat "alpha(opacity=" (floor (* 100 value)) ")"))))
(defmethod css-output-property-form ((property (eql :x-column-gap)) value)
(css-output-property-under-different-names '("-moz-column-gap" "column-gap") value))
(defmethod css-output-property-form ((property (eql :x-column-width)) value)
(css-output-property-under-different-names '("-moz-column-width" "column-width") value))
(defmethod css-output-property-form ((property (eql :x-border-radius)) value)
(css-output-property-under-different-names '("-moz-border-radius" "-webkit-border-radius" "border-radius") value))
(defun css-output-properties-form (properties)
(loop for (property value) on properties by #'cddr
append (css-output-property-form property value)
collect ";"))
(defmacro css-html-style (&body selector-properties)
(flet ((validate (selector properties)
(declare (ignore selector))
(validate-properties properties)))
`(tpd2.ml.html:<style :type "text/css"
(output-ml-comment
#\Newline
,@(loop for sp in selector-properties
for selector = (first sp)
for properties = (rest sp)
do
(validate selector properties)
append (css-output-selector-form selector properties))))))
(defmacro css-attrib (&rest properties)
(validate-properties properties)
`(sendbuf-to-byte-vector
(with-sendbuf ()
,@(css-output-properties-form properties))))
| null | https://raw.githubusercontent.com/vii/teepeedee2/a2ed78c51d782993591c3284562daeed3aba3d40/src/ml/css.lisp | lisp | if you want more just use "strings"
Write CSS like this: (("p.asdfsaf" "p + p") :property "value" :property "value") | (in-package #:tpd2.ml)
From -CSS2/propidx.html
(defvar *css-properties* '(
:azimuth
:background
:background-color
:background-image
:background-repeat
:background-attachment
:background-position
:background-attachment
:background-color
:background-image
:background-position
:background-repeat
:border
:border-width
:border-style
:border-collapse
:border-color
:border-spacing
:border-style
:border-top
:border-right
:border-bottom
:border-left
:border-top-width
:border-style
:border-top-color
:border-right-color
:border-bottom-color
:border-left-color
:border-top-style
:border-right-style
:border-bottom-style
:border-left-style
:border-top-width
:border-right-width
:border-bottom-width
:border-left-width
:border-width
:bottom
:caption-side
:clear
:clip
:color
:content
:counter-increment
:counter-reset
:cue
:cue-before
:cue-after
:cursor
:direction
:display
:elevation
:empty-cells
:float
:font
:font-style
:font-variant
:font-weight
:font-size
:font-family
:font-family
:font-size
:font-size-adjust
:font-stretch
:font-style
:font-variant
:font-weight
:height
:left
:letter-spacing
:line-height
:list-style
:list-style-type
:list-style-position
:list-style-image
:list-style-position
:list-style-type
:margin
:margin-top
:margin-right
:margin-bottom
:margin-left
:marker-offset
:marks
:max-height
:max-width
:min-height
:min-width
:orphans
:outline
:outline-color
:outline-style
:outline-color
:outline-style
:outline-width
:overflow
:padding
:padding-top
:padding-right
:padding-bottom
:padding-left
:page
:page-break-after
:page-break-before
:page-break-inside
:pause
:pause-after
:pause-before
:pitch
:pitch-range
:play-during
:position
:quotes
:richness
:right
:size
:speak
:speak-header
:speak-numeral
:speak-punctuation
:speech-rate
:stress
:table-layout
:text-align
:text-decoration
:text-indent
:text-shadow
:text-transform
:top
:unicode-bidi
:vertical-align
:visibility
:voice-family
:volume
:white-space
:widows
:width
:word-spacing
:z-index
:x-opacity
:x-column-width
:x-column-gap
:x-border-radius))
(defun validate-properties (properties)
(loop for (property) on properties by #'cddr
when (keywordp property) do
(assert (member property *css-properties*) (property))))
(defun css-output-properties (properties)
(append (list " {")
(css-output-properties-form properties)
(list "}" #\Newline)))
(defgeneric css-output-selector-form (selector properties))
(defmethod css-output-selector-form ((str string) properties)
(append (list str)
(css-output-properties properties)))
(defun css-selector-form-to-string (form)
(cond ((symbolp form)
(assert (and (eql #\< (char (symbol-name form) 0)) (fboundp form)) (form) "Misspelled? ~A" form)
(subseq (symbol-name form) 1))
(t form)))
(defmethod css-output-selector-form ((sym symbol) properties)
(css-output-selector-form (css-selector-form-to-string sym) properties))
(defmethod css-output-selector-form ((l list) properties)
(case (first l)
(quote
(append
(rest l)
(css-output-properties properties)))
(:possibly-unsupported-selectors
(loop for form in (rest l)
append (css-output-selector-form form properties)))
(t
(append
(loop for once = t then nil
for form in l
unless once collect ","
collect (css-selector-form-to-string form))
(css-output-properties properties)))))
(defgeneric css-output-property-form (property value))
(defun css-output-property-value-form (value)
(loop for v in (force-list value) for once = t then nil unless once collect " " collect v))
(defmethod css-output-property-form (property value)
(list* (if (keywordp property)
(string-downcase (symbol-name property))
property)
": "
(css-output-property-value-form value)))
(defun css-output-property-under-different-names (names value)
(loop for p in names
for once = nil then t
append
(css-output-property-form p value)
unless once collect ";"))
(defmethod css-output-property-form ((property (eql :x-opacity)) value)
(check-type value (real 0 1))
(append
(css-output-property-under-different-names '("opacity" "-moz-opacity") value)
(list ";")
(css-output-property-form "filter" (strcat "alpha(opacity=" (floor (* 100 value)) ")"))))
(defmethod css-output-property-form ((property (eql :x-column-gap)) value)
(css-output-property-under-different-names '("-moz-column-gap" "column-gap") value))
(defmethod css-output-property-form ((property (eql :x-column-width)) value)
(css-output-property-under-different-names '("-moz-column-width" "column-width") value))
(defmethod css-output-property-form ((property (eql :x-border-radius)) value)
(css-output-property-under-different-names '("-moz-border-radius" "-webkit-border-radius" "border-radius") value))
(defun css-output-properties-form (properties)
(loop for (property value) on properties by #'cddr
append (css-output-property-form property value)
collect ";"))
(defmacro css-html-style (&body selector-properties)
(flet ((validate (selector properties)
(declare (ignore selector))
(validate-properties properties)))
`(tpd2.ml.html:<style :type "text/css"
(output-ml-comment
#\Newline
,@(loop for sp in selector-properties
for selector = (first sp)
for properties = (rest sp)
do
(validate selector properties)
append (css-output-selector-form selector properties))))))
(defmacro css-attrib (&rest properties)
(validate-properties properties)
`(sendbuf-to-byte-vector
(with-sendbuf ()
,@(css-output-properties-form properties))))
|
b166e142dcecaa615a1c3d275d13431b240610608575d49211ca983009ff6929 | 0xYUANTI/stdlib2 | s2_fs.erl | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% @doc Filesystem.
%%% @end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%_* Module declaration ===============================================
-module(s2_fs).
%%%_* Exports ==========================================================
-export([ read/1
, write/2
]).
-export([ with_fd/2
, with_fds/2
]).
-export([ with_temp_fd/1
, with_temp_fd/2
, with_temp_fds/2
]).
-export([ with_temp_file/1
, with_temp_file/2
, with_temp_files/2
]).
-export([ with_temp_dir/1
, with_temp_dir/2
, with_temp_dirs/2
]).
%%%_* Includes =========================================================
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib2/include/prelude.hrl").
%%%_* Code =============================================================
%%%_ * read/write ------------------------------------------------------
-spec read(file()) -> _ | undefined.
read(File) -> case file:read_file(File) of
{ok, Bin} -> ?b2t(Bin);
{error, enoent} -> undefined
end.
-spec write(file(), _) -> _.
write(File, Term) -> ok = file:write_file(File, ?t2b(Term)),
Term.
read_write_test() ->
with_temp_file(fun(F) ->
foo = write(F, foo),
foo = read(F)
end),
undefined = read("nosuchfile").
%%%_ * with_fd ---------------------------------------------------------
-spec with_fd(file(), fun((fd()) -> A)) -> A.
%% @doc
with_fd(File, F) ->
{ok, FD} = file:open(File, [read, write]),
try F(FD)
after file:close(FD)
end.
-spec with_fds([file()], fun(([fd()]) -> A)) -> A.
%% @doc
with_fds(Files, F) -> s2_funs:unwind_with(fun with_fd/2, Files, F).
with_fds_ok_test() ->
with_temp_files(2, fun([F1, F2]) ->
ok =
with_fds([F1, F2], fun([FD1, FD2]) ->
file:write(FD1, <<"foo">>),
file:write(FD2, <<"bar">>)
end),
[{ok, "foo"}, {ok, "bar"}] =
with_fds([F1, F2], fun([FD1, FD2]) ->
[ file:read(FD1, 3)
, file:read(FD2, 3)
]
end)
end).
with_fds_error_test() ->
with_temp_files(2, fun([F1, F2]) ->
ok = file:change_mode(F2, 8#00000),
{'EXIT', _} = (catch with_fds([F1, F2], fun(_) -> ok end))
end).
%%%_ * with_temp_fd ----------------------------------------------------
-spec with_temp_fd(fun(({file(), fd()}) -> A)) -> A.
%% @doc
with_temp_fd(F) ->
with_temp_fd("with_temp_fd", F).
with_temp_fd(Prefix, F) ->
File = s2_sh:mktemp_u(Prefix),
{ok, FD} = file:open(File, [read, write, exclusive]),
try F({File, FD})
after file:close(FD), file:delete(File)
end.
with_temp_fd_test() ->
ok = with_temp_fd(fun({_, FD}) ->
file:write(FD, "foo")
end).
-spec with_temp_fds(pos_integer() | [file()],
fun(([{file(), fd()}]) -> A)) -> A.
%% @doc
with_temp_fds(N, F) when is_integer(N) ->
with_temp_fds(lists:duplicate(N, "with_temp_fds"), F);
with_temp_fds(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_fd/2, Prefixes, F).
with_temp_fds_ok_test() ->
with_temp_fds(2, fun([{_, FD1}, {_, FD2}]) ->
ok = file:write(FD1, "foo"),
ok = file:write(FD2, "bar")
end).
with_temp_fds_error_test() ->
{F1, F2} =
(catch with_temp_fds(2, fun([{F1, _}, {F2, _}]) ->
throw({F1, F2})
end)),
false = filelib:is_file(F1),
false = filelib:is_file(F2).
%%%_ * with_temp_file --------------------------------------------------
-spec with_temp_file(fun((file()) -> A)) -> A.
%% @doc
with_temp_file(F) ->
with_temp_file("with_temp_file", F).
with_temp_file(Prefix, F) ->
File = s2_sh:mktemp(Prefix),
try F(File)
after file:delete(File)
end.
-spec with_temp_files([file()], fun(([file()]) -> A)) -> A.
%% @doc
with_temp_files(N, F) when is_integer(N) ->
with_temp_files(lists:duplicate(N, "with_temp_files"), F);
with_temp_files(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_file/2, Prefixes, F).
with_temp_files_ok_test() ->
with_temp_files(2, fun([F1, F2]) ->
{ok, _} = file:open(F1, [read]),
{ok, _} = file:open(F2, [read])
end).
with_temp_files_error_test() ->
{F1, F2} =
(catch with_temp_files(2, fun([F1, F2]) -> throw({F1, F2}) end)),
false = filelib:is_file(F1),
false = filelib:is_file(F2).
%%%_ * with_temp_dir ---------------------------------------------------
-spec with_temp_dir(fun((file()) -> A)) -> A.
%% @doc
with_temp_dir(F) ->
with_temp_dir("with_temp_dir", F).
with_temp_dir(Prefix, F) ->
File = s2_sh:mktemp_d(Prefix),
try F(File)
after s2_sh:rm_rf(File)
end.
-spec with_temp_dirs([file()], fun(([file()]) -> A)) -> A.
%% @doc
with_temp_dirs(N, F) when is_integer(N) ->
with_temp_dirs(lists:duplicate(N, "with_temp_dirs"), F);
with_temp_dirs(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_dir/2, Prefixes, F).
with_temp_dirs_ok_test() ->
with_temp_dirs(2, fun([F1, F2]) ->
true = filelib:is_dir(F1),
true = filelib:is_dir(F2)
end).
with_temp_dirs_error_test() ->
{F1, F2} =
(catch with_temp_dirs(2, fun([F1, F2]) -> throw({F1, F2}) end)),
false = filelib:is_dir(F1),
false = filelib:is_dir(F2).
%%%_* Emacs ============================================================
%%% Local Variables:
%%% allout-layout: t
erlang - indent - level : 2
%%% End:
| null | https://raw.githubusercontent.com/0xYUANTI/stdlib2/0c334200fd9c7ddd79f6dcc3a63c0aa5de5d3a33/src/s2_fs.erl | erlang |
@doc Filesystem.
@end
_* Module declaration ===============================================
_* Exports ==========================================================
_* Includes =========================================================
_* Code =============================================================
_ * read/write ------------------------------------------------------
_ * with_fd ---------------------------------------------------------
@doc
@doc
_ * with_temp_fd ----------------------------------------------------
@doc
@doc
_ * with_temp_file --------------------------------------------------
@doc
@doc
_ * with_temp_dir ---------------------------------------------------
@doc
@doc
_* Emacs ============================================================
Local Variables:
allout-layout: t
End: |
-module(s2_fs).
-export([ read/1
, write/2
]).
-export([ with_fd/2
, with_fds/2
]).
-export([ with_temp_fd/1
, with_temp_fd/2
, with_temp_fds/2
]).
-export([ with_temp_file/1
, with_temp_file/2
, with_temp_files/2
]).
-export([ with_temp_dir/1
, with_temp_dir/2
, with_temp_dirs/2
]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib2/include/prelude.hrl").
-spec read(file()) -> _ | undefined.
read(File) -> case file:read_file(File) of
{ok, Bin} -> ?b2t(Bin);
{error, enoent} -> undefined
end.
-spec write(file(), _) -> _.
write(File, Term) -> ok = file:write_file(File, ?t2b(Term)),
Term.
read_write_test() ->
with_temp_file(fun(F) ->
foo = write(F, foo),
foo = read(F)
end),
undefined = read("nosuchfile").
-spec with_fd(file(), fun((fd()) -> A)) -> A.
with_fd(File, F) ->
{ok, FD} = file:open(File, [read, write]),
try F(FD)
after file:close(FD)
end.
-spec with_fds([file()], fun(([fd()]) -> A)) -> A.
with_fds(Files, F) -> s2_funs:unwind_with(fun with_fd/2, Files, F).
with_fds_ok_test() ->
with_temp_files(2, fun([F1, F2]) ->
ok =
with_fds([F1, F2], fun([FD1, FD2]) ->
file:write(FD1, <<"foo">>),
file:write(FD2, <<"bar">>)
end),
[{ok, "foo"}, {ok, "bar"}] =
with_fds([F1, F2], fun([FD1, FD2]) ->
[ file:read(FD1, 3)
, file:read(FD2, 3)
]
end)
end).
with_fds_error_test() ->
with_temp_files(2, fun([F1, F2]) ->
ok = file:change_mode(F2, 8#00000),
{'EXIT', _} = (catch with_fds([F1, F2], fun(_) -> ok end))
end).
-spec with_temp_fd(fun(({file(), fd()}) -> A)) -> A.
with_temp_fd(F) ->
with_temp_fd("with_temp_fd", F).
with_temp_fd(Prefix, F) ->
File = s2_sh:mktemp_u(Prefix),
{ok, FD} = file:open(File, [read, write, exclusive]),
try F({File, FD})
after file:close(FD), file:delete(File)
end.
with_temp_fd_test() ->
ok = with_temp_fd(fun({_, FD}) ->
file:write(FD, "foo")
end).
-spec with_temp_fds(pos_integer() | [file()],
fun(([{file(), fd()}]) -> A)) -> A.
with_temp_fds(N, F) when is_integer(N) ->
with_temp_fds(lists:duplicate(N, "with_temp_fds"), F);
with_temp_fds(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_fd/2, Prefixes, F).
with_temp_fds_ok_test() ->
with_temp_fds(2, fun([{_, FD1}, {_, FD2}]) ->
ok = file:write(FD1, "foo"),
ok = file:write(FD2, "bar")
end).
with_temp_fds_error_test() ->
{F1, F2} =
(catch with_temp_fds(2, fun([{F1, _}, {F2, _}]) ->
throw({F1, F2})
end)),
false = filelib:is_file(F1),
false = filelib:is_file(F2).
-spec with_temp_file(fun((file()) -> A)) -> A.
with_temp_file(F) ->
with_temp_file("with_temp_file", F).
with_temp_file(Prefix, F) ->
File = s2_sh:mktemp(Prefix),
try F(File)
after file:delete(File)
end.
-spec with_temp_files([file()], fun(([file()]) -> A)) -> A.
with_temp_files(N, F) when is_integer(N) ->
with_temp_files(lists:duplicate(N, "with_temp_files"), F);
with_temp_files(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_file/2, Prefixes, F).
with_temp_files_ok_test() ->
with_temp_files(2, fun([F1, F2]) ->
{ok, _} = file:open(F1, [read]),
{ok, _} = file:open(F2, [read])
end).
with_temp_files_error_test() ->
{F1, F2} =
(catch with_temp_files(2, fun([F1, F2]) -> throw({F1, F2}) end)),
false = filelib:is_file(F1),
false = filelib:is_file(F2).
-spec with_temp_dir(fun((file()) -> A)) -> A.
with_temp_dir(F) ->
with_temp_dir("with_temp_dir", F).
with_temp_dir(Prefix, F) ->
File = s2_sh:mktemp_d(Prefix),
try F(File)
after s2_sh:rm_rf(File)
end.
-spec with_temp_dirs([file()], fun(([file()]) -> A)) -> A.
with_temp_dirs(N, F) when is_integer(N) ->
with_temp_dirs(lists:duplicate(N, "with_temp_dirs"), F);
with_temp_dirs(Prefixes, F) when is_list(Prefixes) ->
s2_funs:unwind_with(fun with_temp_dir/2, Prefixes, F).
with_temp_dirs_ok_test() ->
with_temp_dirs(2, fun([F1, F2]) ->
true = filelib:is_dir(F1),
true = filelib:is_dir(F2)
end).
with_temp_dirs_error_test() ->
{F1, F2} =
(catch with_temp_dirs(2, fun([F1, F2]) -> throw({F1, F2}) end)),
false = filelib:is_dir(F1),
false = filelib:is_dir(F2).
erlang - indent - level : 2
|
301c315af3493829525951c065dd6b12c3a6f3b37ee9205a8dd42e41d0048c5f | RefactoringTools/HaRe | Declare1.hs | module FreeAndDeclared.Declare1 where
import qualified Data.Generics as G
toplevel :: Integer -> Integer
toplevel x = c * x
c,d :: Integer
c = 7
d = 9
-- Pattern bind
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h,t) = head $ zip [1..10] [3..15]
data D = A | B String | C
-- Retrieve the String from a B
unD (B y) = y
-- But no others.
Infix data constructor , see
data F = G | (:|) String String
unF (a :| b) = (a,b)
-- Main routine
main = do
a <- getChar
putStrLn "foo"
mkT = "no clash with Data.Generics"
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/FreeAndDeclared/Declare1.hs | haskell | Pattern bind
Retrieve the String from a B
But no others.
Main routine | module FreeAndDeclared.Declare1 where
import qualified Data.Generics as G
toplevel :: Integer -> Integer
toplevel x = c * x
c,d :: Integer
c = 7
d = 9
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h,t) = head $ zip [1..10] [3..15]
data D = A | B String | C
unD (B y) = y
Infix data constructor , see
data F = G | (:|) String String
unF (a :| b) = (a,b)
main = do
a <- getChar
putStrLn "foo"
mkT = "no clash with Data.Generics"
|
9189d39e50f03ae8d554bda61c6336b0a0cc08a074563ba5bf87436b2ba45c56 | janestreet/merlin-jst | ast_helper.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, LexiFi
(* *)
Copyright 2012 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Helpers to produce Parsetree fragments
{ b Warning } This module is unstable and part of
{ { ! Compiler_libs}compiler - libs } .
{b Warning} This module is unstable and part of
{{!Compiler_libs}compiler-libs}.
*)
open Asttypes
open Docstrings
open Parsetree
type 'a with_loc = 'a Location.loc
type loc = Location.t
type lid = Longident.t with_loc
type str = string with_loc
type str_opt = string option with_loc
type attrs = attribute list
* { 1 Default locations }
val default_loc: loc ref
(** Default value for all optional location arguments. *)
val with_default_loc: loc -> (unit -> 'a) -> 'a
(** Set the [default_loc] within the scope of the execution
of the provided function. *)
(** {1 Constants} *)
module Const : sig
val char : char -> constant
val string :
?quotation_delimiter:string -> ?loc:Location.t -> string -> constant
val integer : ?suffix:char -> string -> constant
val int : ?suffix:char -> int -> constant
val int32 : ?suffix:char -> int32 -> constant
val int64 : ?suffix:char -> int64 -> constant
val nativeint : ?suffix:char -> nativeint -> constant
val float : ?suffix:char -> string -> constant
end
(** {1 Attributes} *)
module Attr : sig
(** This should be used by all external tools (e.g., ppxs) to create
attributes. Inside the compiler, this should be used only when it is
known the attribute does not need to be tracked for misplaced attribute
warnings. Otherwise, use [Builtin_attributes.mk_internal]. *)
val mk: ?loc:loc -> str -> payload -> attribute
end
* { 1 Core language }
(** Type expressions *)
module Typ :
sig
val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type
val attr: core_type -> attribute -> core_type
val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type
val var: ?loc:loc -> ?attrs:attrs -> string -> core_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type
-> core_type
val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val object_: ?loc:loc -> ?attrs:attrs -> object_field list
-> closed_flag -> core_type
val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type
val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag
-> label list option -> core_type
val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type
val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list
-> core_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type
val force_poly: core_type -> core_type
val varify_constructors: str list -> core_type -> core_type
* [ varify_constructors newtypes te ] is type expression [ te ] , of which
any of nullary type constructor [ tc ] is replaced by type variable of
the same name , if [ tc ] 's name appears in [ newtypes ] .
Raise [ Syntaxerr . Variable_in_scope ] if any type variable inside [ te ]
appears in [ newtypes ] .
@since 4.05
any of nullary type constructor [tc] is replaced by type variable of
the same name, if [tc]'s name appears in [newtypes].
Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te]
appears in [newtypes].
@since 4.05
*)
end
(** Patterns *)
module Pat:
sig
val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern
val attr:pattern -> attribute -> pattern
val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern
val var: ?loc:loc -> ?attrs:attrs -> str -> pattern
val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern
val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern
val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern
val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val construct: ?loc:loc -> ?attrs:attrs ->
lid -> (str list * pattern) option -> pattern
val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern
val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag
-> pattern
val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern
val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern
val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern
val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val unpack: ?loc:loc -> ?attrs:attrs -> str_opt -> pattern
val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern
val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern
end
(** Expressions *)
module Exp:
sig
val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression
val attr: expression -> attribute -> expression
val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression
val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list
-> expression -> expression
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option
-> pattern -> expression -> expression
val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression
val apply: ?loc:loc -> ?attrs:attrs -> expression
-> (arg_label * expression) list -> expression
val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list
-> expression
val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression
val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option
-> expression
val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option
-> expression
val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list
-> expression option -> expression
val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
-> expression
val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression option -> expression
val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression
-> direction_flag -> expression -> expression
val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> core_type -> expression
val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type
-> expression
val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression
val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression
val setinstvar: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list
-> expression
val letmodule: ?loc:loc -> ?attrs:attrs -> str_opt -> module_expr
-> expression -> expression
val letexception:
?loc:loc -> ?attrs:attrs -> extension_constructor -> expression
-> expression
val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> expression
val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression
val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression
val open_: ?loc:loc -> ?attrs:attrs -> open_declaration -> expression
-> expression
val letop: ?loc:loc -> ?attrs:attrs -> binding_op
-> binding_op list -> expression -> expression
val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression
val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression
val case: pattern -> ?guard:expression -> expression -> case
val binding_op: str -> pattern -> expression -> loc -> binding_op
end
(** Value declarations *)
module Val:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?prim:string list -> str -> core_type -> value_description
end
(** Type declarations *)
module Type:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?params:(core_type * (variance * injectivity)) list ->
?cstrs:(core_type * core_type * loc) list ->
?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str ->
type_declaration
val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?vars:str list -> ?args:constructor_arguments -> ?res:core_type ->
str ->
constructor_declaration
val field: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?mut:mutable_flag -> str -> core_type -> label_declaration
end
(** Type extensions *)
module Te:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?params:(core_type * (variance * injectivity)) list ->
?priv:private_flag -> lid -> extension_constructor list -> type_extension
val mk_exception: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
extension_constructor -> type_exception
val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> extension_constructor_kind -> extension_constructor
val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
?vars:str list -> ?args:constructor_arguments -> ?res:core_type ->
str ->
extension_constructor
val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> lid -> extension_constructor
end
(** {1 Module language} *)
(** Module type expressions *)
module Mty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type
val attr: module_type -> attribute -> module_type
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_type -> module_type
val with_: ?loc:loc -> ?attrs:attrs -> module_type ->
with_constraint list -> module_type
val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type
end
(** Module expressions *)
module Mod:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr
val attr: module_expr -> attribute -> module_expr
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr
val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_expr -> module_expr
val apply: ?loc:loc -> ?attrs:attrs -> module_expr -> module_expr ->
module_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type ->
module_expr
val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr
end
(** Signature items *)
module Sig:
sig
val mk: ?loc:loc -> signature_item_desc -> signature_item
val value: ?loc:loc -> value_description -> signature_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item
val type_subst: ?loc:loc -> type_declaration list -> signature_item
val type_extension: ?loc:loc -> type_extension -> signature_item
val exception_: ?loc:loc -> type_exception -> signature_item
val module_: ?loc:loc -> module_declaration -> signature_item
val mod_subst: ?loc:loc -> module_substitution -> signature_item
val rec_module: ?loc:loc -> module_declaration list -> signature_item
val modtype: ?loc:loc -> module_type_declaration -> signature_item
val modtype_subst: ?loc:loc -> module_type_declaration -> signature_item
val open_: ?loc:loc -> open_description -> signature_item
val include_: ?loc:loc -> include_description -> signature_item
val class_: ?loc:loc -> class_description list -> signature_item
val class_type: ?loc:loc -> class_type_declaration list -> signature_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item
val attribute: ?loc:loc -> attribute -> signature_item
val text: text -> signature_item list
end
(** Structure items *)
module Str:
sig
val mk: ?loc:loc -> structure_item_desc -> structure_item
val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item
val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item
val primitive: ?loc:loc -> value_description -> structure_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item
val type_extension: ?loc:loc -> type_extension -> structure_item
val exception_: ?loc:loc -> type_exception -> structure_item
val module_: ?loc:loc -> module_binding -> structure_item
val rec_module: ?loc:loc -> module_binding list -> structure_item
val modtype: ?loc:loc -> module_type_declaration -> structure_item
val open_: ?loc:loc -> open_declaration -> structure_item
val class_: ?loc:loc -> class_declaration list -> structure_item
val class_type: ?loc:loc -> class_type_declaration list -> structure_item
val include_: ?loc:loc -> include_declaration -> structure_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item
val attribute: ?loc:loc -> attribute -> structure_item
val text: text -> structure_item list
end
(** Module declarations *)
module Md:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_type -> module_declaration
end
(** Module substitutions *)
module Ms:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str -> lid -> module_substitution
end
(** Module type declarations *)
module Mtd:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?typ:module_type -> str -> module_type_declaration
end
(** Module bindings *)
module Mb:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_expr -> module_binding
end
(** Opens *)
module Opn:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs ->
?override:override_flag -> 'a -> 'a open_infos
end
(** Includes *)
module Incl:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos
end
(** Value bindings *)
module Vb:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
pattern -> expression -> value_binding
end
(** {1 Class language} *)
(** Class type expressions *)
module Cty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_type_desc -> class_type
val attr: class_type -> attribute -> class_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type
val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type ->
class_type -> class_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_type
-> class_type
end
(** Class type fields *)
module Ctf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
class_type_field_desc -> class_type_field
val attr: class_type_field -> attribute -> class_type_field
val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
virtual_flag -> core_type -> class_type_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
virtual_flag -> core_type -> class_type_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_type_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field
val attribute: ?loc:loc -> attribute -> class_type_field
val text: text -> class_type_field list
end
(** Class expressions *)
module Cl:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr
val attr: class_expr -> attribute -> class_expr
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr
val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option ->
pattern -> class_expr -> class_expr
val apply: ?loc:loc -> ?attrs:attrs -> class_expr ->
(arg_label * expression) list -> class_expr
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list ->
class_expr -> class_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type ->
class_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_expr
-> class_expr
end
(** Class fields *)
module Cf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc ->
class_field
val attr: class_field -> attribute -> class_field
val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr ->
str option -> class_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
class_field_kind -> class_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
class_field_kind -> class_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_field
val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field
val attribute: ?loc:loc -> attribute -> class_field
val text: text -> class_field list
val virtual_: core_type -> class_field_kind
val concrete: override_flag -> expression -> class_field_kind
end
(** Classes *)
module Ci:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?virt:virtual_flag ->
?params:(core_type * (variance * injectivity)) list ->
str -> 'a -> 'a class_infos
end
(** Class signatures *)
module Csig:
sig
val mk: core_type -> class_type_field list -> class_signature
end
(** Class structures *)
module Cstr:
sig
val mk: pattern -> class_field list -> class_structure
end
(** Row fields *)
module Rf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> row_field_desc -> row_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> bool -> core_type list -> row_field
val inherit_: ?loc:loc -> core_type -> row_field
end
(** Object fields *)
module Of:
sig
val mk: ?loc:loc -> ?attrs:attrs ->
object_field_desc -> object_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> core_type -> object_field
val inherit_: ?loc:loc -> core_type -> object_field
end
| null | https://raw.githubusercontent.com/janestreet/merlin-jst/980b574405617fa0dfb0b79a84a66536b46cd71b/upstream/ocaml_flambda/parsing/ast_helper.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Default value for all optional location arguments.
* Set the [default_loc] within the scope of the execution
of the provided function.
* {1 Constants}
* {1 Attributes}
* This should be used by all external tools (e.g., ppxs) to create
attributes. Inside the compiler, this should be used only when it is
known the attribute does not need to be tracked for misplaced attribute
warnings. Otherwise, use [Builtin_attributes.mk_internal].
* Type expressions
* Patterns
* Expressions
* Value declarations
* Type declarations
* Type extensions
* {1 Module language}
* Module type expressions
* Module expressions
* Signature items
* Structure items
* Module declarations
* Module substitutions
* Module type declarations
* Module bindings
* Opens
* Includes
* Value bindings
* {1 Class language}
* Class type expressions
* Class type fields
* Class expressions
* Class fields
* Classes
* Class signatures
* Class structures
* Row fields
* Object fields | , LexiFi
Copyright 2012 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Helpers to produce Parsetree fragments
{ b Warning } This module is unstable and part of
{ { ! Compiler_libs}compiler - libs } .
{b Warning} This module is unstable and part of
{{!Compiler_libs}compiler-libs}.
*)
open Asttypes
open Docstrings
open Parsetree
type 'a with_loc = 'a Location.loc
type loc = Location.t
type lid = Longident.t with_loc
type str = string with_loc
type str_opt = string option with_loc
type attrs = attribute list
* { 1 Default locations }
val default_loc: loc ref
val with_default_loc: loc -> (unit -> 'a) -> 'a
module Const : sig
val char : char -> constant
val string :
?quotation_delimiter:string -> ?loc:Location.t -> string -> constant
val integer : ?suffix:char -> string -> constant
val int : ?suffix:char -> int -> constant
val int32 : ?suffix:char -> int32 -> constant
val int64 : ?suffix:char -> int64 -> constant
val nativeint : ?suffix:char -> nativeint -> constant
val float : ?suffix:char -> string -> constant
end
module Attr : sig
val mk: ?loc:loc -> str -> payload -> attribute
end
* { 1 Core language }
module Typ :
sig
val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type
val attr: core_type -> attribute -> core_type
val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type
val var: ?loc:loc -> ?attrs:attrs -> string -> core_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type
-> core_type
val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val object_: ?loc:loc -> ?attrs:attrs -> object_field list
-> closed_flag -> core_type
val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type
val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag
-> label list option -> core_type
val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type
val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list
-> core_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type
val force_poly: core_type -> core_type
val varify_constructors: str list -> core_type -> core_type
* [ varify_constructors newtypes te ] is type expression [ te ] , of which
any of nullary type constructor [ tc ] is replaced by type variable of
the same name , if [ tc ] 's name appears in [ newtypes ] .
Raise [ Syntaxerr . Variable_in_scope ] if any type variable inside [ te ]
appears in [ newtypes ] .
@since 4.05
any of nullary type constructor [tc] is replaced by type variable of
the same name, if [tc]'s name appears in [newtypes].
Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te]
appears in [newtypes].
@since 4.05
*)
end
module Pat:
sig
val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern
val attr:pattern -> attribute -> pattern
val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern
val var: ?loc:loc -> ?attrs:attrs -> str -> pattern
val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern
val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern
val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern
val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val construct: ?loc:loc -> ?attrs:attrs ->
lid -> (str list * pattern) option -> pattern
val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern
val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag
-> pattern
val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern
val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern
val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern
val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val unpack: ?loc:loc -> ?attrs:attrs -> str_opt -> pattern
val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern
val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern
end
module Exp:
sig
val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression
val attr: expression -> attribute -> expression
val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression
val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list
-> expression -> expression
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option
-> pattern -> expression -> expression
val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression
val apply: ?loc:loc -> ?attrs:attrs -> expression
-> (arg_label * expression) list -> expression
val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list
-> expression
val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression
val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option
-> expression
val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option
-> expression
val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list
-> expression option -> expression
val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
-> expression
val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression option -> expression
val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression
-> direction_flag -> expression -> expression
val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> core_type -> expression
val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type
-> expression
val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression
val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression
val setinstvar: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list
-> expression
val letmodule: ?loc:loc -> ?attrs:attrs -> str_opt -> module_expr
-> expression -> expression
val letexception:
?loc:loc -> ?attrs:attrs -> extension_constructor -> expression
-> expression
val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> expression
val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression
val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression
val open_: ?loc:loc -> ?attrs:attrs -> open_declaration -> expression
-> expression
val letop: ?loc:loc -> ?attrs:attrs -> binding_op
-> binding_op list -> expression -> expression
val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression
val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression
val case: pattern -> ?guard:expression -> expression -> case
val binding_op: str -> pattern -> expression -> loc -> binding_op
end
module Val:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?prim:string list -> str -> core_type -> value_description
end
module Type:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?params:(core_type * (variance * injectivity)) list ->
?cstrs:(core_type * core_type * loc) list ->
?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str ->
type_declaration
val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?vars:str list -> ?args:constructor_arguments -> ?res:core_type ->
str ->
constructor_declaration
val field: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?mut:mutable_flag -> str -> core_type -> label_declaration
end
module Te:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?params:(core_type * (variance * injectivity)) list ->
?priv:private_flag -> lid -> extension_constructor list -> type_extension
val mk_exception: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
extension_constructor -> type_exception
val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> extension_constructor_kind -> extension_constructor
val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
?vars:str list -> ?args:constructor_arguments -> ?res:core_type ->
str ->
extension_constructor
val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> lid -> extension_constructor
end
module Mty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type
val attr: module_type -> attribute -> module_type
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_type -> module_type
val with_: ?loc:loc -> ?attrs:attrs -> module_type ->
with_constraint list -> module_type
val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type
end
module Mod:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr
val attr: module_expr -> attribute -> module_expr
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr
val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_expr -> module_expr
val apply: ?loc:loc -> ?attrs:attrs -> module_expr -> module_expr ->
module_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type ->
module_expr
val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr
end
module Sig:
sig
val mk: ?loc:loc -> signature_item_desc -> signature_item
val value: ?loc:loc -> value_description -> signature_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item
val type_subst: ?loc:loc -> type_declaration list -> signature_item
val type_extension: ?loc:loc -> type_extension -> signature_item
val exception_: ?loc:loc -> type_exception -> signature_item
val module_: ?loc:loc -> module_declaration -> signature_item
val mod_subst: ?loc:loc -> module_substitution -> signature_item
val rec_module: ?loc:loc -> module_declaration list -> signature_item
val modtype: ?loc:loc -> module_type_declaration -> signature_item
val modtype_subst: ?loc:loc -> module_type_declaration -> signature_item
val open_: ?loc:loc -> open_description -> signature_item
val include_: ?loc:loc -> include_description -> signature_item
val class_: ?loc:loc -> class_description list -> signature_item
val class_type: ?loc:loc -> class_type_declaration list -> signature_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item
val attribute: ?loc:loc -> attribute -> signature_item
val text: text -> signature_item list
end
module Str:
sig
val mk: ?loc:loc -> structure_item_desc -> structure_item
val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item
val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item
val primitive: ?loc:loc -> value_description -> structure_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item
val type_extension: ?loc:loc -> type_extension -> structure_item
val exception_: ?loc:loc -> type_exception -> structure_item
val module_: ?loc:loc -> module_binding -> structure_item
val rec_module: ?loc:loc -> module_binding list -> structure_item
val modtype: ?loc:loc -> module_type_declaration -> structure_item
val open_: ?loc:loc -> open_declaration -> structure_item
val class_: ?loc:loc -> class_declaration list -> structure_item
val class_type: ?loc:loc -> class_type_declaration list -> structure_item
val include_: ?loc:loc -> include_declaration -> structure_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item
val attribute: ?loc:loc -> attribute -> structure_item
val text: text -> structure_item list
end
module Md:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_type -> module_declaration
end
module Ms:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str -> lid -> module_substitution
end
module Mtd:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?typ:module_type -> str -> module_type_declaration
end
module Mb:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_expr -> module_binding
end
module Opn:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs ->
?override:override_flag -> 'a -> 'a open_infos
end
module Incl:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos
end
module Vb:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
pattern -> expression -> value_binding
end
module Cty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_type_desc -> class_type
val attr: class_type -> attribute -> class_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type
val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type ->
class_type -> class_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_type
-> class_type
end
module Ctf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
class_type_field_desc -> class_type_field
val attr: class_type_field -> attribute -> class_type_field
val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
virtual_flag -> core_type -> class_type_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
virtual_flag -> core_type -> class_type_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_type_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field
val attribute: ?loc:loc -> attribute -> class_type_field
val text: text -> class_type_field list
end
module Cl:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr
val attr: class_expr -> attribute -> class_expr
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr
val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option ->
pattern -> class_expr -> class_expr
val apply: ?loc:loc -> ?attrs:attrs -> class_expr ->
(arg_label * expression) list -> class_expr
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list ->
class_expr -> class_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type ->
class_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_expr
-> class_expr
end
module Cf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc ->
class_field
val attr: class_field -> attribute -> class_field
val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr ->
str option -> class_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
class_field_kind -> class_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
class_field_kind -> class_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_field
val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field
val attribute: ?loc:loc -> attribute -> class_field
val text: text -> class_field list
val virtual_: core_type -> class_field_kind
val concrete: override_flag -> expression -> class_field_kind
end
module Ci:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?virt:virtual_flag ->
?params:(core_type * (variance * injectivity)) list ->
str -> 'a -> 'a class_infos
end
module Csig:
sig
val mk: core_type -> class_type_field list -> class_signature
end
module Cstr:
sig
val mk: pattern -> class_field list -> class_structure
end
module Rf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> row_field_desc -> row_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> bool -> core_type list -> row_field
val inherit_: ?loc:loc -> core_type -> row_field
end
module Of:
sig
val mk: ?loc:loc -> ?attrs:attrs ->
object_field_desc -> object_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> core_type -> object_field
val inherit_: ?loc:loc -> core_type -> object_field
end
|
be689bd78697fdadc06c6d029fff9f1c0211a56b208ca2a18bceafc6b9c6024b | 3b/3bil | minimal2.lisp | (in-package :avm2-compiler)
minimal sample , for new5 compiler/3b - swf / fu
(with-open-file (s "/tmp/foo.swf" :direction :output
:element-type '(unsigned-byte 8) :if-exists :supersede)
(3b-swf:write-swf
s
(append
(list
(3b-swf:file-attributes :has-metadata t :as3 t :use-network t
:use-gpu t :direct-blit t)
(3b-swf:script-limits 60 1000)
(3b-swf:background-color #x869ca7)
(3b-swf:frame-label "frame1"))
(compile-abc-tag (((nil :test-Class))
:inherit (avm2-compiler::*cl-symbol-table*
fu::%*fu-symbol-table*))
(c3* :top-level
(defclass-swf :test-class (flash:flash.display.sprite)
(text
canvas
(%app :allocation :class ))
(:fake-accessors t)
(:constructor ()
(%set-property-static :test-class %app this)
(main this)))
(defun app ()
(%get-property-static :test-class %app))
(defun main (arg)
(let ((canvas (fu:display (%new- flash:flash.display.sprite)
:parent arg))
(foo (fu:text-field :width 350
:auto-size "left"
:text-color (fu:rgb 0.2 0.9 0.2)
:word-wrap t
:background t
:background-color #x20202020
:text "Hello World"
:parent arg)))
(setf (text (app)) foo)
(setf (canvas (app)) canvas)
(frame nil)
(flash:add-event-listener canvas "click" #'frame)))
(defun frame (evt)
(let* ((canvas (canvas (app)))
(gfx (flash:.graphics canvas))
(matrix (%new- flash:flash.geom.matrix)))
(setf (flash:.opaque-background canvas) #x0d0f00)
(flash:clear gfx)
(fu:with-fill gfx (#x202600 0.5)
(flash:draw-rect gfx 0 0 400 300 ))
(flash:create-gradient-box matrix 400 300 0 0 0)
(flash:begin-gradient-fill gfx "radial"
(vector #x202600 #x0d0f00) ;; colors
(vector 1 1) ;; alpha
(vector 0 255) ;; ratios
matrix)
(flash:draw-rect gfx 0 0 400 300 )
(ftrace "1click")
(when evt
(incf (flash:.text (text (app))) " click!"))
(flash:end-fill gfx)))))
(list
(3b-swf:show-frame)))
:x-twips 400
:y-twips 300
:frame-rate 30
:compress t
:flash-version 9)
)
#+nil
(defparameter *foo-swf*
(let ((%swf::*blob-tags* (list))
(%swf::*trace-tags* (list)))
(with-open-file (s "/tmp/foo.swf"
:element-type '(unsigned-byte 8))
(%swf:read-swf s))))
| null | https://raw.githubusercontent.com/3b/3bil/c852181848bedf476373e901869ca29471f926ee/test/minimal2.lisp | lisp | colors
alpha
ratios | (in-package :avm2-compiler)
minimal sample , for new5 compiler/3b - swf / fu
(with-open-file (s "/tmp/foo.swf" :direction :output
:element-type '(unsigned-byte 8) :if-exists :supersede)
(3b-swf:write-swf
s
(append
(list
(3b-swf:file-attributes :has-metadata t :as3 t :use-network t
:use-gpu t :direct-blit t)
(3b-swf:script-limits 60 1000)
(3b-swf:background-color #x869ca7)
(3b-swf:frame-label "frame1"))
(compile-abc-tag (((nil :test-Class))
:inherit (avm2-compiler::*cl-symbol-table*
fu::%*fu-symbol-table*))
(c3* :top-level
(defclass-swf :test-class (flash:flash.display.sprite)
(text
canvas
(%app :allocation :class ))
(:fake-accessors t)
(:constructor ()
(%set-property-static :test-class %app this)
(main this)))
(defun app ()
(%get-property-static :test-class %app))
(defun main (arg)
(let ((canvas (fu:display (%new- flash:flash.display.sprite)
:parent arg))
(foo (fu:text-field :width 350
:auto-size "left"
:text-color (fu:rgb 0.2 0.9 0.2)
:word-wrap t
:background t
:background-color #x20202020
:text "Hello World"
:parent arg)))
(setf (text (app)) foo)
(setf (canvas (app)) canvas)
(frame nil)
(flash:add-event-listener canvas "click" #'frame)))
(defun frame (evt)
(let* ((canvas (canvas (app)))
(gfx (flash:.graphics canvas))
(matrix (%new- flash:flash.geom.matrix)))
(setf (flash:.opaque-background canvas) #x0d0f00)
(flash:clear gfx)
(fu:with-fill gfx (#x202600 0.5)
(flash:draw-rect gfx 0 0 400 300 ))
(flash:create-gradient-box matrix 400 300 0 0 0)
(flash:begin-gradient-fill gfx "radial"
matrix)
(flash:draw-rect gfx 0 0 400 300 )
(ftrace "1click")
(when evt
(incf (flash:.text (text (app))) " click!"))
(flash:end-fill gfx)))))
(list
(3b-swf:show-frame)))
:x-twips 400
:y-twips 300
:frame-rate 30
:compress t
:flash-version 9)
)
#+nil
(defparameter *foo-swf*
(let ((%swf::*blob-tags* (list))
(%swf::*trace-tags* (list)))
(with-open-file (s "/tmp/foo.swf"
:element-type '(unsigned-byte 8))
(%swf:read-swf s))))
|
2a04d14e13c2694cf8852f4958664f03a38bcc583449d97ea644e80ee4225abd | factisresearch/mq-demo | TestHelper.hs | module Mgw.Util.TestHelper
( withLogging, withLoggingAndLevel, withLoggingAndLevelInteractive, parseArgs )
where
----------------------------------------
-- LOCAL
----------------------------------------
import Mgw.Util.Setup
import Mgw.Util.Logging
import Mgw.Util.DynConfig
import Mgw.Util.Config
import Mgw.Util.ExitAction
----------------------------------------
-- SITE-PACKAGES
----------------------------------------
import System.Console.GetOpt
----------------------------------------
-- STDLIB
----------------------------------------
import Control.Monad
import Control.Monad.State
import Control.Monad.Identity (runIdentity)
import Data.List (isPrefixOf)
import System.Exit
import System.IO
import qualified System.Environment as Env
setup :: [String] -> LogLevel -> Bool -> IO [String]
setup args ll isInteractive =
do let logargs = filter isLoggingOpt args
otherargs = filter (not . isLoggingOpt) args
opts = logLevelOpts updateLogging
updateLogging f s = modify (\cfg -> runIdentity (f cfg s))
optNames = concat [map (('-':).(:[])) ss ++ map ("--"++) ls
| Option ss ls _ _ <- opts]
isLoggingOpt s = any (\x -> x `isPrefixOf` s) optNames
cfg <-
case getOpt RequireOrder opts logargs of
(actions, _nonOpts, []) ->
let cfg = execState (sequence_ actions) defcfg
in return cfg
(_, _, msgs) -> fail (show msgs)
_ <- setupDynConfigIfNotDone defaultDynConfig Nothing
setupLoggingWithConfig cfg
return otherargs
where
staticLogConfig =
if isInteractive
then defaultStaticLogConfig { lc_defaultTargets = [] }
else defaultStaticLogConfig
defcfg = defaultLogConfig { lc_dynamic = defaultDynamicLogConfig { lc_defaultLevel = ll }
, lc_static = staticLogConfig }
withLogging :: [String] -> ([String] -> IO a) -> IO a
withLogging args =
withLoggingAndLevel args WARN
withLoggingAndLevel :: [String] -> LogLevel -> ([String] -> IO a) -> IO a
withLoggingAndLevel args ll action =
withExitActions (setup args ll False >>= action)
withLoggingAndLevelInteractive :: [String] -> LogLevel -> ([String] -> IO a) -> IO a
withLoggingAndLevelInteractive args ll action =
withExitActions (setup args ll True >>= action)
parseArgs :: [OptDescr (StateT s IO ())]
-> s
-> [String]
-> (String -> String)
-> IO (s, [String])
parseArgs options defaultCfg args usageHeader =
do progName <- Env.getProgName
when ("-h" `elem` args || "--help" `elem` args) $ usage progName []
case getOpt RequireOrder options args of
(actions, nonOpts, []) ->
do cfg <- execStateT (sequence_ actions) defaultCfg
return (cfg, nonOpts)
(_, _, msgs) -> usage progName msgs
where
usage progName msgs =
do hPutStrLn stderr $ concat msgs ++ usageInfo (usageHeader progName) options
exitWith (ExitFailure 127)
| null | https://raw.githubusercontent.com/factisresearch/mq-demo/0efa1991ca647a86a8c22e516a7a1fb392ab4596/server/src/lib/Mgw/Util/TestHelper.hs | haskell | --------------------------------------
LOCAL
--------------------------------------
--------------------------------------
SITE-PACKAGES
--------------------------------------
--------------------------------------
STDLIB
-------------------------------------- | module Mgw.Util.TestHelper
( withLogging, withLoggingAndLevel, withLoggingAndLevelInteractive, parseArgs )
where
import Mgw.Util.Setup
import Mgw.Util.Logging
import Mgw.Util.DynConfig
import Mgw.Util.Config
import Mgw.Util.ExitAction
import System.Console.GetOpt
import Control.Monad
import Control.Monad.State
import Control.Monad.Identity (runIdentity)
import Data.List (isPrefixOf)
import System.Exit
import System.IO
import qualified System.Environment as Env
setup :: [String] -> LogLevel -> Bool -> IO [String]
setup args ll isInteractive =
do let logargs = filter isLoggingOpt args
otherargs = filter (not . isLoggingOpt) args
opts = logLevelOpts updateLogging
updateLogging f s = modify (\cfg -> runIdentity (f cfg s))
optNames = concat [map (('-':).(:[])) ss ++ map ("--"++) ls
| Option ss ls _ _ <- opts]
isLoggingOpt s = any (\x -> x `isPrefixOf` s) optNames
cfg <-
case getOpt RequireOrder opts logargs of
(actions, _nonOpts, []) ->
let cfg = execState (sequence_ actions) defcfg
in return cfg
(_, _, msgs) -> fail (show msgs)
_ <- setupDynConfigIfNotDone defaultDynConfig Nothing
setupLoggingWithConfig cfg
return otherargs
where
staticLogConfig =
if isInteractive
then defaultStaticLogConfig { lc_defaultTargets = [] }
else defaultStaticLogConfig
defcfg = defaultLogConfig { lc_dynamic = defaultDynamicLogConfig { lc_defaultLevel = ll }
, lc_static = staticLogConfig }
withLogging :: [String] -> ([String] -> IO a) -> IO a
withLogging args =
withLoggingAndLevel args WARN
withLoggingAndLevel :: [String] -> LogLevel -> ([String] -> IO a) -> IO a
withLoggingAndLevel args ll action =
withExitActions (setup args ll False >>= action)
withLoggingAndLevelInteractive :: [String] -> LogLevel -> ([String] -> IO a) -> IO a
withLoggingAndLevelInteractive args ll action =
withExitActions (setup args ll True >>= action)
parseArgs :: [OptDescr (StateT s IO ())]
-> s
-> [String]
-> (String -> String)
-> IO (s, [String])
parseArgs options defaultCfg args usageHeader =
do progName <- Env.getProgName
when ("-h" `elem` args || "--help" `elem` args) $ usage progName []
case getOpt RequireOrder options args of
(actions, nonOpts, []) ->
do cfg <- execStateT (sequence_ actions) defaultCfg
return (cfg, nonOpts)
(_, _, msgs) -> usage progName msgs
where
usage progName msgs =
do hPutStrLn stderr $ concat msgs ++ usageInfo (usageHeader progName) options
exitWith (ExitFailure 127)
|
8aa87741940119401d3c5b7e52ec89765201a8a2eda8c111cfc8c51c20a9972d | himura/lens-regex | sample.hs | # LANGUAGE QuasiQuotes #
OUTPUT :
# # # # # # # # # # # # # # # # # #
# # Target String :
target1 = " hoge00 fuga hoge01 neko
# # Example : ^ .. regex [ r|hoge[0 - 9]+| ]
[ MatchPart { _ = " hoge00 " , _ captures = [ ] } , MatchPart { _ = " hoge01 " , _ captures = [ ] } , MatchPart { _ = " hoge02 " , _ captures = [ ] } ]
# # Example : ^ .. regex [ r|hoge[0 - 9]+| ] .
[ " hoge00","hoge01","hoge02 " ]
# # Example : ^ ? regex [ r|hoge[0 - 9]+| ] . index 1 .
Just " hoge01 "
# # Example : ^ ? regex [ r|hoge[0 - 9]+| ] . index 3 .
Nothing
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] . .~ " "
" fuga HOGE neko HOGE "
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] .index 1 . .~ " "
" hoge00 fuga HOGE neko
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] .index 1 . matchedString % ~ ( \s - > " < < " + + s + + " > > " )
" hoge00 fuga < < hoge01 > > neko hoge02 "
# # # # # # # # # # # # # # # # # #
# # Target String :
target2 = " < img src=\"/image / shinku0721.jpg\ " / shinku141.jpg\ " > "
# # Example : target2 ^ .. regex [ r|<img src="([^"]+)"[^>]*>| ] . captures . traversed . index 0
[ " /image / shinku0721.jpg","/image / shinku141.jpg " ]
OUTPUT:
##################
## Target String:
target1 = "hoge00 fuga hoge01 neko hoge02"
## Example: target1 ^.. regex [r|hoge[0-9]+|]
[MatchPart {_matchedString = "hoge00", _captures = []},MatchPart {_matchedString = "hoge01", _captures = []},MatchPart {_matchedString = "hoge02", _captures = []}]
## Example: target1 ^.. regex [r|hoge[0-9]+|] . matchedString
["hoge00","hoge01","hoge02"]
## Example: target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString
Just "hoge01"
## Example: target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString
Nothing
## Example: target1 & regex [r|hoge[0-9]+|] . matchedString .~ "HOGE"
"HOGE fuga HOGE neko HOGE"
## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString .~ "HOGE"
"hoge00 fuga HOGE neko hoge02"
## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString %~ (\s -> "<<" ++ s ++ ">>")
"hoge00 fuga <<hoge01>> neko hoge02"
##################
## Target String:
target2 = "<img src=\"/image/shinku0721.jpg\" alt=\"shinku birthday\"><img src=\"/image/shinku141.jpg\">"
## Example: target2 ^.. regex [r|<img src="([^"]+)"[^>]*>|] . captures . traversed . index 0
["/image/shinku0721.jpg","/image/shinku141.jpg"]
-}
module Main where
import Control.Lens
import Text.Regex.Lens
import Text.Regex.Posix
import Text.Regex.Quote
main :: IO ()
main = do
let target1 = "hoge00 fuga hoge01 neko hoge02"
putStrLn "##################"
putStrLn "## Target String:"
putStrLn $ "target1 = " ++ show target1
putStrLn "\n## Example: target1 ^.. regex [r|hoge[0-9]+|]"
print $ target1 ^.. regex [r|hoge[0-9]+|]
putStrLn "\n## Example: target1 ^.. regex [r|hoge[0-9]+|] . matchedString"
print $ target1 ^.. regex [r|hoge[0-9]+|] . matchedString
putStrLn "\n## Example: target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString"
print $ target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString
putStrLn "\n## Example: target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString"
print $ target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] . matchedString .~ \"HOGE\""
print $ target1 & regex [r|hoge[0-9]+|] . matchedString .~ "HOGE"
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString .~ \"HOGE\""
print $ target1 & regex [r|hoge[0-9]+|] . index 1 . matchedString .~ "HOGE"
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString %~ (\\s -> \"<<\" ++ s ++ \">>\")"
print $ target1 & regex [r|hoge[0-9]+|] . index 1 . matchedString %~ (\s -> "<<" ++ s ++ ">>")
let target2 = "<img src=\"/image/shinku0721.jpg\" alt=\"shinku birthday\"><img src=\"/image/shinku141.jpg\">"
putStrLn "##################"
putStrLn "## Target String:"
putStrLn $ "target2 = " ++ show target2
putStrLn "\n## Example: target2 ^.. regex [r|<img src=\"([^\"]+)\"[^>]*>|] . captures . traversed . index 0"
print $ target2 ^.. regex [r|<img src="([^"]+)"[^>]*>|] . captures . traversed . index 0
| null | https://raw.githubusercontent.com/himura/lens-regex/1e2dc8250131744fd12c0df05404f0dc9b17f3b9/sample/sample.hs | haskell | # LANGUAGE QuasiQuotes #
OUTPUT :
# # # # # # # # # # # # # # # # # #
# # Target String :
target1 = " hoge00 fuga hoge01 neko
# # Example : ^ .. regex [ r|hoge[0 - 9]+| ]
[ MatchPart { _ = " hoge00 " , _ captures = [ ] } , MatchPart { _ = " hoge01 " , _ captures = [ ] } , MatchPart { _ = " hoge02 " , _ captures = [ ] } ]
# # Example : ^ .. regex [ r|hoge[0 - 9]+| ] .
[ " hoge00","hoge01","hoge02 " ]
# # Example : ^ ? regex [ r|hoge[0 - 9]+| ] . index 1 .
Just " hoge01 "
# # Example : ^ ? regex [ r|hoge[0 - 9]+| ] . index 3 .
Nothing
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] . .~ " "
" fuga HOGE neko HOGE "
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] .index 1 . .~ " "
" hoge00 fuga HOGE neko
# # Example : target1 & regex [ r|hoge[0 - 9]+| ] .index 1 . matchedString % ~ ( \s - > " < < " + + s + + " > > " )
" hoge00 fuga < < hoge01 > > neko hoge02 "
# # # # # # # # # # # # # # # # # #
# # Target String :
target2 = " < img src=\"/image / shinku0721.jpg\ " / shinku141.jpg\ " > "
# # Example : target2 ^ .. regex [ r|<img src="([^"]+)"[^>]*>| ] . captures . traversed . index 0
[ " /image / shinku0721.jpg","/image / shinku141.jpg " ]
OUTPUT:
##################
## Target String:
target1 = "hoge00 fuga hoge01 neko hoge02"
## Example: target1 ^.. regex [r|hoge[0-9]+|]
[MatchPart {_matchedString = "hoge00", _captures = []},MatchPart {_matchedString = "hoge01", _captures = []},MatchPart {_matchedString = "hoge02", _captures = []}]
## Example: target1 ^.. regex [r|hoge[0-9]+|] . matchedString
["hoge00","hoge01","hoge02"]
## Example: target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString
Just "hoge01"
## Example: target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString
Nothing
## Example: target1 & regex [r|hoge[0-9]+|] . matchedString .~ "HOGE"
"HOGE fuga HOGE neko HOGE"
## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString .~ "HOGE"
"hoge00 fuga HOGE neko hoge02"
## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString %~ (\s -> "<<" ++ s ++ ">>")
"hoge00 fuga <<hoge01>> neko hoge02"
##################
## Target String:
target2 = "<img src=\"/image/shinku0721.jpg\" alt=\"shinku birthday\"><img src=\"/image/shinku141.jpg\">"
## Example: target2 ^.. regex [r|<img src="([^"]+)"[^>]*>|] . captures . traversed . index 0
["/image/shinku0721.jpg","/image/shinku141.jpg"]
-}
module Main where
import Control.Lens
import Text.Regex.Lens
import Text.Regex.Posix
import Text.Regex.Quote
main :: IO ()
main = do
let target1 = "hoge00 fuga hoge01 neko hoge02"
putStrLn "##################"
putStrLn "## Target String:"
putStrLn $ "target1 = " ++ show target1
putStrLn "\n## Example: target1 ^.. regex [r|hoge[0-9]+|]"
print $ target1 ^.. regex [r|hoge[0-9]+|]
putStrLn "\n## Example: target1 ^.. regex [r|hoge[0-9]+|] . matchedString"
print $ target1 ^.. regex [r|hoge[0-9]+|] . matchedString
putStrLn "\n## Example: target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString"
print $ target1 ^? regex [r|hoge[0-9]+|] . index 1 . matchedString
putStrLn "\n## Example: target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString"
print $ target1 ^? regex [r|hoge[0-9]+|] . index 3 . matchedString
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] . matchedString .~ \"HOGE\""
print $ target1 & regex [r|hoge[0-9]+|] . matchedString .~ "HOGE"
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString .~ \"HOGE\""
print $ target1 & regex [r|hoge[0-9]+|] . index 1 . matchedString .~ "HOGE"
putStrLn "\n## Example: target1 & regex [r|hoge[0-9]+|] .index 1 . matchedString %~ (\\s -> \"<<\" ++ s ++ \">>\")"
print $ target1 & regex [r|hoge[0-9]+|] . index 1 . matchedString %~ (\s -> "<<" ++ s ++ ">>")
let target2 = "<img src=\"/image/shinku0721.jpg\" alt=\"shinku birthday\"><img src=\"/image/shinku141.jpg\">"
putStrLn "##################"
putStrLn "## Target String:"
putStrLn $ "target2 = " ++ show target2
putStrLn "\n## Example: target2 ^.. regex [r|<img src=\"([^\"]+)\"[^>]*>|] . captures . traversed . index 0"
print $ target2 ^.. regex [r|<img src="([^"]+)"[^>]*>|] . captures . traversed . index 0
| |
b2f25efe815b021cc29600218454974cf8b951fa94cc825fe595b291af0ff19f | ghc/nofib | Lzw.hs | module Main (main){-export list added by partain-} where {
-- partain: with "ghc -cpp -DSLEAZY_UNBOXING", you get (guess what)?
-- without it, you get the code as originally written.
--
-- Things done here:
-- * The obvious unboxing (e.g., Int ==> Int#).
-- * use quot/rem, not div/mod
* inline PrefixElement type into PrefixTree . PT constructor
* cvt final clause of 3 - way comparison to " otherwise "
-- * use shifts, not quot/rem (not necessary: C compiler converts
-- them just fine)
--
-- Obviously, more egregious hacking could be done:
* replace Tuple / List types that mention with specialised
-- variants
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
#define FAST_INT Int#
#define ILIT(x) (x#)
#define IBOX(x) (I# (x))
#define _ADD_ `plusInt#`
#define _SUB_ `minusInt#`
#define _MUL_ `timesInt#`
#define _DIV_ `divInt#`
#define _QUOT_ `quotInt#`
#define _REM_ `remInt#`
#define _NEG_ negateInt#
#define _EQ_ `eqInt#`
#define _LT_ `ltInt#`
#define _LE_ `leInt#`
#define _GE_ `geInt#`
#define _GT_ `gtInt#`
#define _CHR_ chr#
#define FAST_BOOL Int#
#define _TRUE_ 1#
#define _FALSE_ 0#
#define _IS_TRUE_(x) ((x) `eqInt#` 1#)
#define FAST_CHAR Char#
#define CBOX(x) (C# (x))
data FAST_TRIPLE = TRIP [Char] Int# PrefixTree;
#define _TRIP_(a,b,c) (TRIP (a) (b) (c))
#define PrefixElement FAST_CHAR FAST_INT PrefixTree
#define _PTE_(a,b,c) (a) (b) (c)
#else {- ! __GLASGOW_HASKELL__ -}
#define FAST_INT Int
#define ILIT(x) (x)
#define IBOX(x) (x)
#define _ADD_ +
#define _SUB_ -
#define _MUL_ *
#define _DIV_ `div`
#define _QUOT_ `quot`
#define _REM_ `rem`
#define _NEG_ -
#define _EQ_ ==
#define _LT_ <
#define _LE_ <=
#define _GE_ >=
#define _GT_ >
#define _CHR_ toEnum
#define FAST_BOOL Bool
#define _TRUE_ True
#define _FALSE_ False
#define _IS_TRUE_(x) (x)
#define FAST_CHAR Char
#define CBOX(x) (x)
type FAST_TRIPLE = ([Char], Int, PrefixTree);
#define _TRIP_(a,b,c) ((a), (b), (c))
data PrefixElement = PTE FAST_CHAR FAST_INT PrefixTree;
#define _PTE_(a,b,c) (PTE (a) (b) (c))
#endif {- ! __GLASGOW_HASKELL__ -}
-- end of partain
data PrefixTree = PTNil | PT PrefixElement PrefixTree PrefixTree;
: : PrefixTree ; -- partain : sig
create_code_table = create_code_table2 ILIT(0) ILIT(256);
create_code_table2 :: FAST_INT -> FAST_INT -> PrefixTree;
create_code_table2 first_code ILIT(0) = PTNil;
create_code_table2 first_code ILIT(1)
= PT _PTE_((_CHR_ first_code), first_code, PTNil) PTNil PTNil;
create_code_table2 first_code n_codes
= PT _PTE_((_CHR_ m_code), m_code, PTNil) left right
where {
left = create_code_table2 first_code (m_code _SUB_ first_code);
right = create_code_table2 m_code2 ((first_code _ADD_ n_codes) _SUB_ m_code2);
m_code = (first_code _ADD_ (first_code _ADD_ n_codes _SUB_ ILIT(1))) _QUOT_ ILIT(2);
m_code2 = m_code _ADD_ ILIT(1);
};
lzw_code_file :: [Char] -> PrefixTree -> FAST_INT -> [Int];
lzw_code_file [] code_table next_code = [];
lzw_code_file input code_table next_code
= -- partain: case-ified lazy where
case (code_string input ILIT(0) next_code code_table) of {
_TRIP_(input2,n,code_table2) ->
IBOX(n) : lzw_code_file input2 code_table2 (next_code _ADD_ ILIT(1))
};
code_string :: [Char] -> FAST_INT -> FAST_INT -> PrefixTree -> FAST_TRIPLE;
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
t )
| CBOX(c) < CBOX(k) = f1 r1 {-p-} k v t r
| CBOX(c) > CBOX(k) = f2 r2 {-p-} k v t l
| otherwise {- CBOX(c) == CBOX(k) -} = f3 r3 k v l r
#else
code_string input@(CBOX(c) : input2) old_code next_code (PT p@(PTE k v t) l r)
| CBOX(c) < CBOX(k) = f1 r1 p r
| CBOX(c) > CBOX(k) = f2 r2 p l
| otherwise {- CBOX(c) == CBOX(k) -} = f3 r3 k v l r
#endif
where {
r1 = code_string input old_code next_code l;
r2 = code_string input old_code next_code r;
r3 = code_string input2 v next_code t;
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
f1 _TRIP_(input_l,nl,l2) k v t r = _TRIP_(input_l,nl,PT k v t l2 r);
f2 _TRIP_(input_r,nr,r2) k v t l = _TRIP_(input_r,nr,PT k v t l r2);
#else
f1 _TRIP_(input_l,nl,l2) p r = _TRIP_(input_l,nl,PT p l2 r);
f2 _TRIP_(input_r,nr,r2) p l = _TRIP_(input_r,nr,PT p l r2);
#endif
f3 _TRIP_(input2,n,t2) k v l r = _TRIP_(input2, n, PT _PTE_(k, v, t2) l r);
};
code_string input@(c : input2 ) old_code next_code ( PT v t ) l r )
| c < k = ( input_l , nl , PT p l ' r )
| c > k = ( input_r , nr , PT p l r ' )
| c = = k = ( input',n , PT ( PTE k v t ' ) l r )
-- where {
( input_l , nl , l ' ) = code_string input old_code next_code l ;
( input_r , nr , r ' ) = code_string input old_code next_code r ;
-- (input',n,t') = code_string input2 v next_code t;
-- };
code_string input@(CBOX(c) : input_file2) old_code next_code PTNil
= if (next_code _GE_ ILIT(4096))
then _TRIP_(input, old_code, PTNil)
else _TRIP_(input, old_code, PT _PTE_(c, next_code, PTNil) PTNil PTNil);
code_string [] old_code next_code code_table = _TRIP_([], old_code, PTNil);
integer_list_to_char_list (IBOX(n) : l)
= CBOX(_CHR_ (n _QUOT_ ILIT(16))) : integer_list_to_char_list2 l n;
integer_list_to_char_list [] = [];
integer_list_to_char_list2 (IBOX(c) : l) n
= CBOX(_CHR_ ((n _MUL_ ILIT(16)) _ADD_ ((c _QUOT_ ILIT(256)) _REM_ ILIT(16))))
: CBOX(_CHR_ c)
: integer_list_to_char_list l;
integer_list_to_char_list2 [] n = CBOX(_CHR_ (n _MUL_ ILIT(16))) : [];
main :: IO ();
main = getContents >>= \ input_string -> main2 input_string;
main2 :: String -> IO ();
main2 input_string
= putStr output_list
where {
output_list = integer_list_to_char_list code_list;
code_list = lzw_code_file input_string create_code_table ILIT(256);
};
}
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/compress/Lzw.hs | haskell | export list added by partain
partain: with "ghc -cpp -DSLEAZY_UNBOXING", you get (guess what)?
without it, you get the code as originally written.
Things done here:
* The obvious unboxing (e.g., Int ==> Int#).
* use quot/rem, not div/mod
* use shifts, not quot/rem (not necessary: C compiler converts
them just fine)
Obviously, more egregious hacking could be done:
variants
! __GLASGOW_HASKELL__
! __GLASGOW_HASKELL__
end of partain
partain : sig
partain: case-ified lazy where
p
p
CBOX(c) == CBOX(k)
CBOX(c) == CBOX(k)
where {
(input',n,t') = code_string input2 v next_code t;
}; |
* inline PrefixElement type into PrefixTree . PT constructor
* cvt final clause of 3 - way comparison to " otherwise "
* replace Tuple / List types that mention with specialised
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
#define FAST_INT Int#
#define ILIT(x) (x#)
#define IBOX(x) (I# (x))
#define _ADD_ `plusInt#`
#define _SUB_ `minusInt#`
#define _MUL_ `timesInt#`
#define _DIV_ `divInt#`
#define _QUOT_ `quotInt#`
#define _REM_ `remInt#`
#define _NEG_ negateInt#
#define _EQ_ `eqInt#`
#define _LT_ `ltInt#`
#define _LE_ `leInt#`
#define _GE_ `geInt#`
#define _GT_ `gtInt#`
#define _CHR_ chr#
#define FAST_BOOL Int#
#define _TRUE_ 1#
#define _FALSE_ 0#
#define _IS_TRUE_(x) ((x) `eqInt#` 1#)
#define FAST_CHAR Char#
#define CBOX(x) (C# (x))
data FAST_TRIPLE = TRIP [Char] Int# PrefixTree;
#define _TRIP_(a,b,c) (TRIP (a) (b) (c))
#define PrefixElement FAST_CHAR FAST_INT PrefixTree
#define _PTE_(a,b,c) (a) (b) (c)
#define FAST_INT Int
#define ILIT(x) (x)
#define IBOX(x) (x)
#define _ADD_ +
#define _SUB_ -
#define _MUL_ *
#define _DIV_ `div`
#define _QUOT_ `quot`
#define _REM_ `rem`
#define _NEG_ -
#define _EQ_ ==
#define _LT_ <
#define _LE_ <=
#define _GE_ >=
#define _GT_ >
#define _CHR_ toEnum
#define FAST_BOOL Bool
#define _TRUE_ True
#define _FALSE_ False
#define _IS_TRUE_(x) (x)
#define FAST_CHAR Char
#define CBOX(x) (x)
type FAST_TRIPLE = ([Char], Int, PrefixTree);
#define _TRIP_(a,b,c) ((a), (b), (c))
data PrefixElement = PTE FAST_CHAR FAST_INT PrefixTree;
#define _PTE_(a,b,c) (PTE (a) (b) (c))
data PrefixTree = PTNil | PT PrefixElement PrefixTree PrefixTree;
create_code_table = create_code_table2 ILIT(0) ILIT(256);
create_code_table2 :: FAST_INT -> FAST_INT -> PrefixTree;
create_code_table2 first_code ILIT(0) = PTNil;
create_code_table2 first_code ILIT(1)
= PT _PTE_((_CHR_ first_code), first_code, PTNil) PTNil PTNil;
create_code_table2 first_code n_codes
= PT _PTE_((_CHR_ m_code), m_code, PTNil) left right
where {
left = create_code_table2 first_code (m_code _SUB_ first_code);
right = create_code_table2 m_code2 ((first_code _ADD_ n_codes) _SUB_ m_code2);
m_code = (first_code _ADD_ (first_code _ADD_ n_codes _SUB_ ILIT(1))) _QUOT_ ILIT(2);
m_code2 = m_code _ADD_ ILIT(1);
};
lzw_code_file :: [Char] -> PrefixTree -> FAST_INT -> [Int];
lzw_code_file [] code_table next_code = [];
lzw_code_file input code_table next_code
case (code_string input ILIT(0) next_code code_table) of {
_TRIP_(input2,n,code_table2) ->
IBOX(n) : lzw_code_file input2 code_table2 (next_code _ADD_ ILIT(1))
};
code_string :: [Char] -> FAST_INT -> FAST_INT -> PrefixTree -> FAST_TRIPLE;
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
t )
#else
code_string input@(CBOX(c) : input2) old_code next_code (PT p@(PTE k v t) l r)
| CBOX(c) < CBOX(k) = f1 r1 p r
| CBOX(c) > CBOX(k) = f2 r2 p l
#endif
where {
r1 = code_string input old_code next_code l;
r2 = code_string input old_code next_code r;
r3 = code_string input2 v next_code t;
#if defined(__GLASGOW_HASKELL__) && defined(SLEAZY_UNBOXING)
f1 _TRIP_(input_l,nl,l2) k v t r = _TRIP_(input_l,nl,PT k v t l2 r);
f2 _TRIP_(input_r,nr,r2) k v t l = _TRIP_(input_r,nr,PT k v t l r2);
#else
f1 _TRIP_(input_l,nl,l2) p r = _TRIP_(input_l,nl,PT p l2 r);
f2 _TRIP_(input_r,nr,r2) p l = _TRIP_(input_r,nr,PT p l r2);
#endif
f3 _TRIP_(input2,n,t2) k v l r = _TRIP_(input2, n, PT _PTE_(k, v, t2) l r);
};
code_string input@(c : input2 ) old_code next_code ( PT v t ) l r )
| c < k = ( input_l , nl , PT p l ' r )
| c > k = ( input_r , nr , PT p l r ' )
| c = = k = ( input',n , PT ( PTE k v t ' ) l r )
( input_l , nl , l ' ) = code_string input old_code next_code l ;
( input_r , nr , r ' ) = code_string input old_code next_code r ;
code_string input@(CBOX(c) : input_file2) old_code next_code PTNil
= if (next_code _GE_ ILIT(4096))
then _TRIP_(input, old_code, PTNil)
else _TRIP_(input, old_code, PT _PTE_(c, next_code, PTNil) PTNil PTNil);
code_string [] old_code next_code code_table = _TRIP_([], old_code, PTNil);
integer_list_to_char_list (IBOX(n) : l)
= CBOX(_CHR_ (n _QUOT_ ILIT(16))) : integer_list_to_char_list2 l n;
integer_list_to_char_list [] = [];
integer_list_to_char_list2 (IBOX(c) : l) n
= CBOX(_CHR_ ((n _MUL_ ILIT(16)) _ADD_ ((c _QUOT_ ILIT(256)) _REM_ ILIT(16))))
: CBOX(_CHR_ c)
: integer_list_to_char_list l;
integer_list_to_char_list2 [] n = CBOX(_CHR_ (n _MUL_ ILIT(16))) : [];
main :: IO ();
main = getContents >>= \ input_string -> main2 input_string;
main2 :: String -> IO ();
main2 input_string
= putStr output_list
where {
output_list = integer_list_to_char_list code_list;
code_list = lzw_code_file input_string create_code_table ILIT(256);
};
}
|
0dd1691bf0bedd5e7b63a8a0c252cfc199679bfd80718d45c132b76147163606 | msakai/toysolver | SimplexTextbook.hs | # LANGUAGE TemplateHaskell #
module Test.SimplexTextbook (simplexTextbookTestGroup) where
import Control.Monad
import Control.Monad.State
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.List
import Data.Ratio
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.TH
import Text.Printf
import qualified ToySolver.Data.LA as LA
import ToySolver.Data.LA ((.<=.))
import ToySolver.Arith.Simplex.Textbook
import qualified ToySolver.Arith.Simplex.Textbook.LPSolver as LP
example_3_2 :: Tableau Rational
example_3_2 = IntMap.fromList
[ (4, (IntMap.fromList [(1,2), (2,1), (3,1)], 2))
, (5, (IntMap.fromList [(1,1), (2,2), (3,3)], 5))
, (6, (IntMap.fromList [(1,2), (2,2), (3,1)], 6))
, (objRowIndex, (IntMap.fromList [(1,-3), (2,-2), (3,-3)], 0))
]
case_example_3_2_simplex :: Assertion
case_example_3_2_simplex = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
assertBool "unoptimal tableau" (isOptimal OptMax result)
currentObjValue result @?= 27/5
where
ret :: Bool
result :: Tableau Rational
(ret,result) = simplex OptMax example_3_2
case_example_3_2_primalDualSimplex :: Assertion
case_example_3_2_primalDualSimplex = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
assertBool "unoptimal tableau" (isOptimal OptMax result)
currentObjValue result @?= 27/5
where
ret :: Bool
result :: Tableau Rational
(ret,result) = primalDualSimplex OptMax example_3_2
-- from /~wei/lpch5.pdf
example_5_3_phase1 :: Tableau Rational
example_5_3_phase1 = IntMap.fromList
[ (6, (IntMap.fromList [(2,-1), (3,-1), (5,1)], 1))
, (7, (IntMap.fromList [(3,1), (4,-1), (5,1)], 0))
]
case_example_5_3_phase1 :: Assertion
case_example_5_3_phase1 = do
let (ret,result) = phaseI example_5_3_phase1 (IntSet.fromList [6,7])
assertBool "phase1 failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
-- 退化して巡回の起こるKuhnの7変数3制約の例
kuhn_7_3 :: Tableau Rational
kuhn_7_3 = IntMap.fromList
[ (1, (IntMap.fromList [(4,-2), (5,-9), (6,1), (7,9)], 0))
, (2, (IntMap.fromList [(4,1/3), (5,1), (6,-1/3), (7,-2)], 0))
, (3, (IntMap.fromList [(4,2), (5,3), (6,-1), (7,-12)], 2))
, (objRowIndex, (IntMap.fromList [(4,2), (5,3), (6,-1), (7,-12)], 0))
]
case_kuhn_7_3 :: Assertion
case_kuhn_7_3 = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -2
where
ret :: Bool
result :: Tableau Rational
(ret,result) = simplex OptMin kuhn_7_3
-- case_pd_kuhn_7_3 :: Assertion
-- case_pd_kuhn_7_3 = do
assertBool " simplex failed " ret
-- assertBool "invalid tableau" (isValidTableau result)
-- currentObjValue result @?= -2
-- where
ret : :
-- result :: Tableau Rational
( ret , result ) = primalDualSimplex
-- from /~wei/lpch5.pdf
example_5_7 :: Tableau Rational
example_5_7 = IntMap.fromList
[ (4, (IntMap.fromList [(1,-1), (2,-2), (3,-3)], -5))
, (5, (IntMap.fromList [(1,-2), (2,-2), (3,-1)], -6))
, (objRowIndex, (IntMap.fromList [(1,3),(2,4),(3,5)], 0))
]
case_example_5_7 :: Assertion
case_example_5_7 = do
assertBool "dual simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -11
where
ret :: Bool
result :: Tableau Rational
(ret,result) = dualSimplex OptMax example_5_7
case_pd_example_5_7 :: Assertion
case_pd_example_5_7 = do
assertBool "dual simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -11
where
ret :: Bool
result :: Tableau Rational
(ret,result) = primalDualSimplex OptMax example_5_7
------------------------------------------------------------------------
case_lp_example_5_7_twoPhaseSimplex :: Assertion
case_lp_example_5_7_twoPhaseSimplex = do
ret @?= LP.Optimum
oval @?= -11
assertBool "invalid tableau" (isValidTableau tbl)
assertBool "infeasible tableau" (isFeasible tbl)
assertBool "non-optimal tableau" (isOptimal OptMax tbl)
where
oval :: Rational
((ret,tbl,oval),result) = flip runState (LP.emptySolver IntSet.empty) $ do
_ <- LP.newVar
x1 <- LP.newVar
x2 <- LP.newVar
x3 <- LP.newVar
LP.addConstraint (LA.fromTerms [(-1,x1),(-2,x2),(-3,x3)] .<=. LA.constant (-5))
LP.addConstraint (LA.fromTerms [(-2,x1),(-2,x2),(-1,x3)] .<=. LA.constant (-6))
let obj = LA.fromTerms [(-3,x1), (-4,x2),(-5,x3)]
ret <- LP.twoPhaseSimplex OptMax obj
tbl <- LP.getTableau
m <- LP.getModel (IntSet.fromList [x1,x2,x3])
let oval = LA.eval m obj
return (ret,tbl,oval)
case_lp_example_5_7_primalDualSimplex :: Assertion
case_lp_example_5_7_primalDualSimplex = do
ret @?= LP.Optimum
oval @?= -11
assertBool "invalid tableau" (isValidTableau tbl)
assertBool "infeasible tableau" (isFeasible tbl)
assertBool "non-optimal tableau" (isOptimal OptMax tbl)
where
oval :: Rational
((ret,tbl,oval),result) = flip runState (LP.emptySolver IntSet.empty) $ do
_ <- LP.newVar
x1 <- LP.newVar
x2 <- LP.newVar
x3 <- LP.newVar
LP.addConstraint (LA.fromTerms [(-1,x1),(-2,x2),(-3,x3)] .<=. LA.constant (-5))
LP.addConstraint (LA.fromTerms [(-2,x1),(-2,x2),(-1,x3)] .<=. LA.constant (-6))
let obj = LA.fromTerms [(-3,x1), (-4,x2),(-5,x3)]
ret <- LP.primalDualSimplex OptMax obj
tbl <- LP.getTableau
m <- LP.getModel (IntSet.fromList [x1,x2,x3])
let oval = LA.eval m obj
return (ret,tbl,oval)
------------------------------------------------------------------------
-- Test harness
simplexTextbookTestGroup :: TestTree
simplexTextbookTestGroup = $(testGroupGenerator)
| null | https://raw.githubusercontent.com/msakai/toysolver/6233d130d3dcea32fa34c26feebd151f546dea85/test/Test/SimplexTextbook.hs | haskell | from /~wei/lpch5.pdf
退化して巡回の起こるKuhnの7変数3制約の例
case_pd_kuhn_7_3 :: Assertion
case_pd_kuhn_7_3 = do
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -2
where
result :: Tableau Rational
from /~wei/lpch5.pdf
----------------------------------------------------------------------
----------------------------------------------------------------------
Test harness | # LANGUAGE TemplateHaskell #
module Test.SimplexTextbook (simplexTextbookTestGroup) where
import Control.Monad
import Control.Monad.State
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.List
import Data.Ratio
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.TH
import Text.Printf
import qualified ToySolver.Data.LA as LA
import ToySolver.Data.LA ((.<=.))
import ToySolver.Arith.Simplex.Textbook
import qualified ToySolver.Arith.Simplex.Textbook.LPSolver as LP
example_3_2 :: Tableau Rational
example_3_2 = IntMap.fromList
[ (4, (IntMap.fromList [(1,2), (2,1), (3,1)], 2))
, (5, (IntMap.fromList [(1,1), (2,2), (3,3)], 5))
, (6, (IntMap.fromList [(1,2), (2,2), (3,1)], 6))
, (objRowIndex, (IntMap.fromList [(1,-3), (2,-2), (3,-3)], 0))
]
case_example_3_2_simplex :: Assertion
case_example_3_2_simplex = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
assertBool "unoptimal tableau" (isOptimal OptMax result)
currentObjValue result @?= 27/5
where
ret :: Bool
result :: Tableau Rational
(ret,result) = simplex OptMax example_3_2
case_example_3_2_primalDualSimplex :: Assertion
case_example_3_2_primalDualSimplex = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
assertBool "unoptimal tableau" (isOptimal OptMax result)
currentObjValue result @?= 27/5
where
ret :: Bool
result :: Tableau Rational
(ret,result) = primalDualSimplex OptMax example_3_2
example_5_3_phase1 :: Tableau Rational
example_5_3_phase1 = IntMap.fromList
[ (6, (IntMap.fromList [(2,-1), (3,-1), (5,1)], 1))
, (7, (IntMap.fromList [(3,1), (4,-1), (5,1)], 0))
]
case_example_5_3_phase1 :: Assertion
case_example_5_3_phase1 = do
let (ret,result) = phaseI example_5_3_phase1 (IntSet.fromList [6,7])
assertBool "phase1 failed" ret
assertBool "invalid tableau" (isValidTableau result)
assertBool "infeasible tableau" (isFeasible result)
kuhn_7_3 :: Tableau Rational
kuhn_7_3 = IntMap.fromList
[ (1, (IntMap.fromList [(4,-2), (5,-9), (6,1), (7,9)], 0))
, (2, (IntMap.fromList [(4,1/3), (5,1), (6,-1/3), (7,-2)], 0))
, (3, (IntMap.fromList [(4,2), (5,3), (6,-1), (7,-12)], 2))
, (objRowIndex, (IntMap.fromList [(4,2), (5,3), (6,-1), (7,-12)], 0))
]
case_kuhn_7_3 :: Assertion
case_kuhn_7_3 = do
assertBool "simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -2
where
ret :: Bool
result :: Tableau Rational
(ret,result) = simplex OptMin kuhn_7_3
assertBool " simplex failed " ret
ret : :
( ret , result ) = primalDualSimplex
example_5_7 :: Tableau Rational
example_5_7 = IntMap.fromList
[ (4, (IntMap.fromList [(1,-1), (2,-2), (3,-3)], -5))
, (5, (IntMap.fromList [(1,-2), (2,-2), (3,-1)], -6))
, (objRowIndex, (IntMap.fromList [(1,3),(2,4),(3,5)], 0))
]
case_example_5_7 :: Assertion
case_example_5_7 = do
assertBool "dual simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -11
where
ret :: Bool
result :: Tableau Rational
(ret,result) = dualSimplex OptMax example_5_7
case_pd_example_5_7 :: Assertion
case_pd_example_5_7 = do
assertBool "dual simplex failed" ret
assertBool "invalid tableau" (isValidTableau result)
currentObjValue result @?= -11
where
ret :: Bool
result :: Tableau Rational
(ret,result) = primalDualSimplex OptMax example_5_7
case_lp_example_5_7_twoPhaseSimplex :: Assertion
case_lp_example_5_7_twoPhaseSimplex = do
ret @?= LP.Optimum
oval @?= -11
assertBool "invalid tableau" (isValidTableau tbl)
assertBool "infeasible tableau" (isFeasible tbl)
assertBool "non-optimal tableau" (isOptimal OptMax tbl)
where
oval :: Rational
((ret,tbl,oval),result) = flip runState (LP.emptySolver IntSet.empty) $ do
_ <- LP.newVar
x1 <- LP.newVar
x2 <- LP.newVar
x3 <- LP.newVar
LP.addConstraint (LA.fromTerms [(-1,x1),(-2,x2),(-3,x3)] .<=. LA.constant (-5))
LP.addConstraint (LA.fromTerms [(-2,x1),(-2,x2),(-1,x3)] .<=. LA.constant (-6))
let obj = LA.fromTerms [(-3,x1), (-4,x2),(-5,x3)]
ret <- LP.twoPhaseSimplex OptMax obj
tbl <- LP.getTableau
m <- LP.getModel (IntSet.fromList [x1,x2,x3])
let oval = LA.eval m obj
return (ret,tbl,oval)
case_lp_example_5_7_primalDualSimplex :: Assertion
case_lp_example_5_7_primalDualSimplex = do
ret @?= LP.Optimum
oval @?= -11
assertBool "invalid tableau" (isValidTableau tbl)
assertBool "infeasible tableau" (isFeasible tbl)
assertBool "non-optimal tableau" (isOptimal OptMax tbl)
where
oval :: Rational
((ret,tbl,oval),result) = flip runState (LP.emptySolver IntSet.empty) $ do
_ <- LP.newVar
x1 <- LP.newVar
x2 <- LP.newVar
x3 <- LP.newVar
LP.addConstraint (LA.fromTerms [(-1,x1),(-2,x2),(-3,x3)] .<=. LA.constant (-5))
LP.addConstraint (LA.fromTerms [(-2,x1),(-2,x2),(-1,x3)] .<=. LA.constant (-6))
let obj = LA.fromTerms [(-3,x1), (-4,x2),(-5,x3)]
ret <- LP.primalDualSimplex OptMax obj
tbl <- LP.getTableau
m <- LP.getModel (IntSet.fromList [x1,x2,x3])
let oval = LA.eval m obj
return (ret,tbl,oval)
simplexTextbookTestGroup :: TestTree
simplexTextbookTestGroup = $(testGroupGenerator)
|
74a42d13340546008fbfa996990d8f965ad660b1b1ea901e04e10b949c0d297b | replikativ/datahike | query_rules_test.cljc | (ns datahike.test.query-rules-test
(:require
#?(:cljs [cljs.test :as t :refer-macros [is deftest testing]]
:clj [clojure.test :as t :refer [is deftest testing]])
[datahike.api :as d]
[datahike.db :as db]))
(deftest test-rules
(let [db [[5 :follow 3]
[1 :follow 2] [2 :follow 3] [3 :follow 4] [4 :follow 6]
[2 :follow 4]]]
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
db
'[[(follow ?x ?y)
[?x :follow ?y]]])
#{[1 2] [2 3] [3 4] [2 4] [5 3] [4 6]}))
(testing "Joining regular clauses with rule"
(is (= (d/q '[:find ?y ?x
:in $ %
:where [_ _ ?x]
(rule ?x ?y)
[(even? ?x)]]
db
'[[(rule ?a ?b)
[?a :follow ?b]]])
#{[3 2] [6 4] [4 2]})))
(testing "Rule context is isolated from outer context"
(is (= (d/q '[:find ?x
:in $ %
:where [?e _ _]
(rule ?x)]
db
'[[(rule ?e)
[_ ?e _]]])
#{[:follow]})))
(testing "Rule with branches"
(is (= (d/q '[:find ?e2
:in $ ?e1 %
:where (follow ?e1 ?e2)]
db
1
'[[(follow ?e2 ?e1)
[?e2 :follow ?e1]]
[(follow ?e2 ?e1)
[?e2 :follow ?t]
[?t :follow ?e1]]])
#{[2] [3] [4]})))
(testing "Recursive rules"
(is (= (d/q '[:find ?e2
:in $ ?e1 %
:where (follow ?e1 ?e2)]
db
1
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
[?e1 :follow ?t]
(follow ?t ?e2)]])
#{[2] [3] [4] [6]}))
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
[[1 :follow 2] [2 :follow 3]]
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
(follow ?e2 ?e1)]])
#{[1 2] [2 3] [2 1] [3 2]}))
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
[[1 :follow 2] [2 :follow 3] [3 :follow 1]]
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
(follow ?e2 ?e1)]])
#{[1 2] [2 3] [3 1] [2 1] [3 2] [1 3]})))
(testing "Mutually recursive rules"
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (f1 ?e1 ?e2)]
[[0 :f1 1]
[1 :f2 2]
[2 :f1 3]
[3 :f2 4]
[4 :f1 5]
[5 :f2 6]]
'[[(f1 ?e1 ?e2)
[?e1 :f1 ?e2]]
[(f1 ?e1 ?e2)
[?t :f1 ?e2]
(f2 ?e1 ?t)]
[(f2 ?e1 ?e2)
[?e1 :f2 ?e2]]
[(f2 ?e1 ?e2)
[?t :f2 ?e2]
(f1 ?e1 ?t)]])
#{[0 1] [0 3] [0 5]
[1 3] [1 5]
[2 3] [2 5]
[3 5]
[4 5]})))
(testing "Passing ins to rule"
(is (= (d/q '[:find ?x ?y
:in $ % ?even
:where
(match ?even ?x ?y)]
db
'[[(match ?pred ?e ?e2)
[?e :follow ?e2]
[(?pred ?e)]
[(?pred ?e2)]]]
even?)
#{[4 6] [2 4]})))
(testing "Using built-ins inside rule"
(is (= (d/q '[:find ?x ?y
:in $ %
:where (match ?x ?y)]
db
'[[(match ?e ?e2)
[?e :follow ?e2]
[(even? ?e)]
[(even? ?e2)]]])
#{[4 6] [2 4]})))
(testing "Calling rule twice (#44)"
(d/q '[:find ?p
:in $ % ?fn
:where (rule ?p ?fn "a")
(rule ?p ?fn "b")]
[[1 :attr "a"]]
'[[(rule ?p ?fn ?x)
[?p :attr ?x]
[(?fn ?x)]]]
(constantly true))))
(testing "Specifying db to rule"
(is (= (d/q '[:find ?n
:in $sexes $ages %
:where ($sexes male ?n)
($ages adult ?n)]
[["Ivan" :male] ["Darya" :female] ["Oleg" :male] ["Igor" :male]]
[["Ivan" 15] ["Oleg" 66] ["Darya" 32]]
'[[(male ?x)
[?x :male]]
[(adult ?y)
[?y ?a]
[(>= ?a 18)]]])
#{["Oleg"]}))))
;;
(deftest test-false-arguments
(let [db (d/db-with (db/empty-db)
[[:db/add 1 :attr true]
[:db/add 2 :attr false]])
rules '[[(is ?id ?val)
[?id :attr ?val]]]]
(is (= (d/q '[:find ?id :in $ %
:where (is ?id true)]
db rules)
#{[1]}))
(is (= (d/q '[:find ?id :in $ %
:where (is ?id false)] db rules)
#{[2]}))))
(deftest test-rule-arguments
(let [cfg {:store {:backend :mem
:id "rule-test"}
:name "rule-test"
:keep-history? true
:schema-flexibility :write
:attribute-refs? true}
schema [{:db/ident :name
:db/cardinality :db.cardinality/one
:db/index true
:db/unique :db.unique/identity
:db/valueType :db.type/string}
{:db/ident :parents
:db/cardinality :db.cardinality/many
:db/valueType :db.type/ref}
{:db/ident :age
:db/cardinality :db.cardinality/one
:db/valueType :db.type/long}]
rules '[[(parent-info ?child ?name ?age)
[?child :parents ?p]
[(ground ["Alice" "Bob"]) [?name ...]]
[?p :name ?name]
[?p :age ?age]]]
_ (d/delete-database cfg)
_ (d/create-database cfg)
conn (d/connect cfg)]
(d/transact conn {:tx-data schema})
(d/transact conn {:tx-data [{:name "Alice"
:age 25}
{:name "Bob"
:age 30}]})
(d/transact conn {:tx-data [{:name "Charlie"
:age 5
:parents [[:name "Alice"]
[:name "Bob"]]}]})
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n ?pn %]
:where
[[?child :name ?n]
(parent-info ?child ?pn ?age)]}
:args [@conn "Charlie" "Alice" rules]})))
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n [?pn ...] %]
:where
[[?child :name ?n]
(parent-info ?child ?pn ?age)]}
:args [@conn "Charlie" ["Alice"] rules]})))
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n %]
:where
[[?child :name ?n]
(parent-info ?child "Alice" ?age)]}
:args [@conn "Charlie" rules]})))
(is (thrown-msg? "Bad format for value in pattern, must be a scalar, nil or a vector of two elements."
(d/q {:query '{:find [?age]
:in [$ ?n %]
:where
[[?child :name ?n]
(parent-info ?child ["Alice"] ?age)]}
:args [@conn "Charlie" rules]})))))
| null | https://raw.githubusercontent.com/replikativ/datahike/527e269e2c365577ae5f1c53be7e5a168a180107/test/datahike/test/query_rules_test.cljc | clojure | (ns datahike.test.query-rules-test
(:require
#?(:cljs [cljs.test :as t :refer-macros [is deftest testing]]
:clj [clojure.test :as t :refer [is deftest testing]])
[datahike.api :as d]
[datahike.db :as db]))
(deftest test-rules
(let [db [[5 :follow 3]
[1 :follow 2] [2 :follow 3] [3 :follow 4] [4 :follow 6]
[2 :follow 4]]]
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
db
'[[(follow ?x ?y)
[?x :follow ?y]]])
#{[1 2] [2 3] [3 4] [2 4] [5 3] [4 6]}))
(testing "Joining regular clauses with rule"
(is (= (d/q '[:find ?y ?x
:in $ %
:where [_ _ ?x]
(rule ?x ?y)
[(even? ?x)]]
db
'[[(rule ?a ?b)
[?a :follow ?b]]])
#{[3 2] [6 4] [4 2]})))
(testing "Rule context is isolated from outer context"
(is (= (d/q '[:find ?x
:in $ %
:where [?e _ _]
(rule ?x)]
db
'[[(rule ?e)
[_ ?e _]]])
#{[:follow]})))
(testing "Rule with branches"
(is (= (d/q '[:find ?e2
:in $ ?e1 %
:where (follow ?e1 ?e2)]
db
1
'[[(follow ?e2 ?e1)
[?e2 :follow ?e1]]
[(follow ?e2 ?e1)
[?e2 :follow ?t]
[?t :follow ?e1]]])
#{[2] [3] [4]})))
(testing "Recursive rules"
(is (= (d/q '[:find ?e2
:in $ ?e1 %
:where (follow ?e1 ?e2)]
db
1
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
[?e1 :follow ?t]
(follow ?t ?e2)]])
#{[2] [3] [4] [6]}))
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
[[1 :follow 2] [2 :follow 3]]
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
(follow ?e2 ?e1)]])
#{[1 2] [2 3] [2 1] [3 2]}))
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (follow ?e1 ?e2)]
[[1 :follow 2] [2 :follow 3] [3 :follow 1]]
'[[(follow ?e1 ?e2)
[?e1 :follow ?e2]]
[(follow ?e1 ?e2)
(follow ?e2 ?e1)]])
#{[1 2] [2 3] [3 1] [2 1] [3 2] [1 3]})))
(testing "Mutually recursive rules"
(is (= (d/q '[:find ?e1 ?e2
:in $ %
:where (f1 ?e1 ?e2)]
[[0 :f1 1]
[1 :f2 2]
[2 :f1 3]
[3 :f2 4]
[4 :f1 5]
[5 :f2 6]]
'[[(f1 ?e1 ?e2)
[?e1 :f1 ?e2]]
[(f1 ?e1 ?e2)
[?t :f1 ?e2]
(f2 ?e1 ?t)]
[(f2 ?e1 ?e2)
[?e1 :f2 ?e2]]
[(f2 ?e1 ?e2)
[?t :f2 ?e2]
(f1 ?e1 ?t)]])
#{[0 1] [0 3] [0 5]
[1 3] [1 5]
[2 3] [2 5]
[3 5]
[4 5]})))
(testing "Passing ins to rule"
(is (= (d/q '[:find ?x ?y
:in $ % ?even
:where
(match ?even ?x ?y)]
db
'[[(match ?pred ?e ?e2)
[?e :follow ?e2]
[(?pred ?e)]
[(?pred ?e2)]]]
even?)
#{[4 6] [2 4]})))
(testing "Using built-ins inside rule"
(is (= (d/q '[:find ?x ?y
:in $ %
:where (match ?x ?y)]
db
'[[(match ?e ?e2)
[?e :follow ?e2]
[(even? ?e)]
[(even? ?e2)]]])
#{[4 6] [2 4]})))
(testing "Calling rule twice (#44)"
(d/q '[:find ?p
:in $ % ?fn
:where (rule ?p ?fn "a")
(rule ?p ?fn "b")]
[[1 :attr "a"]]
'[[(rule ?p ?fn ?x)
[?p :attr ?x]
[(?fn ?x)]]]
(constantly true))))
(testing "Specifying db to rule"
(is (= (d/q '[:find ?n
:in $sexes $ages %
:where ($sexes male ?n)
($ages adult ?n)]
[["Ivan" :male] ["Darya" :female] ["Oleg" :male] ["Igor" :male]]
[["Ivan" 15] ["Oleg" 66] ["Darya" 32]]
'[[(male ?x)
[?x :male]]
[(adult ?y)
[?y ?a]
[(>= ?a 18)]]])
#{["Oleg"]}))))
(deftest test-false-arguments
(let [db (d/db-with (db/empty-db)
[[:db/add 1 :attr true]
[:db/add 2 :attr false]])
rules '[[(is ?id ?val)
[?id :attr ?val]]]]
(is (= (d/q '[:find ?id :in $ %
:where (is ?id true)]
db rules)
#{[1]}))
(is (= (d/q '[:find ?id :in $ %
:where (is ?id false)] db rules)
#{[2]}))))
(deftest test-rule-arguments
(let [cfg {:store {:backend :mem
:id "rule-test"}
:name "rule-test"
:keep-history? true
:schema-flexibility :write
:attribute-refs? true}
schema [{:db/ident :name
:db/cardinality :db.cardinality/one
:db/index true
:db/unique :db.unique/identity
:db/valueType :db.type/string}
{:db/ident :parents
:db/cardinality :db.cardinality/many
:db/valueType :db.type/ref}
{:db/ident :age
:db/cardinality :db.cardinality/one
:db/valueType :db.type/long}]
rules '[[(parent-info ?child ?name ?age)
[?child :parents ?p]
[(ground ["Alice" "Bob"]) [?name ...]]
[?p :name ?name]
[?p :age ?age]]]
_ (d/delete-database cfg)
_ (d/create-database cfg)
conn (d/connect cfg)]
(d/transact conn {:tx-data schema})
(d/transact conn {:tx-data [{:name "Alice"
:age 25}
{:name "Bob"
:age 30}]})
(d/transact conn {:tx-data [{:name "Charlie"
:age 5
:parents [[:name "Alice"]
[:name "Bob"]]}]})
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n ?pn %]
:where
[[?child :name ?n]
(parent-info ?child ?pn ?age)]}
:args [@conn "Charlie" "Alice" rules]})))
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n [?pn ...] %]
:where
[[?child :name ?n]
(parent-info ?child ?pn ?age)]}
:args [@conn "Charlie" ["Alice"] rules]})))
(is (= #{[25]}
(d/q {:query '{:find [?age]
:in [$ ?n %]
:where
[[?child :name ?n]
(parent-info ?child "Alice" ?age)]}
:args [@conn "Charlie" rules]})))
(is (thrown-msg? "Bad format for value in pattern, must be a scalar, nil or a vector of two elements."
(d/q {:query '{:find [?age]
:in [$ ?n %]
:where
[[?child :name ?n]
(parent-info ?child ["Alice"] ?age)]}
:args [@conn "Charlie" rules]})))))
| |
d8bbf2a9c254b59737c42f6f376804e04f27a6d6da02e52b67d7d6bddf8c685d | dongweiming/lisp-koans-answer | atoms-vs-lists.lsp | Copyright 2013 Google Inc.
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(define-test test-list-or-atom
"Lists in lisp are forms beginning and ending with rounded parentheses.
Atoms are symbols, numbers, or other forms usually separated by
white-space or parentheses. The function 'listp' will return true iff
the input is a list. The function 'atom' will return true iff the
input is an atom."
(true-or-false? t (listp '(1 2 3)))
(true-or-false? nil (atom '(1 2 3)))
(true-or-false? t (listp '("heres" "some" "strings")))
(true-or-false? nil (atom '("heres" "some" "strings")))
(true-or-false? nil (listp "a string"))
(true-or-false? t (atom "a string"))
(true-or-false? nil (listp 2))
(true-or-false? t (atom 2))
(true-or-false? t (listp '(("first" "list") ("second" "list"))))
(true-or-false? nil (atom '(("first" "list") ("second" "list")))))
(define-test test-empty-list-is-both-list-and-atom
"the empty list, nil, is unique in that it is both a list and an atom"
(true-or-false? t (listp nil))
(true-or-false? t (atom nil)))
(define-test test-keywords
"symbols like :hello or :like-this are treated differently in lisp.
Called keywords, they are symbols that evaluate to themselves."
(true-or-false? t (equal :this-is-a-keyword :this-is-a-keyword))
(true-or-false? t (equal :this-is-a-keyword ':this-is-a-keyword)))
| null | https://raw.githubusercontent.com/dongweiming/lisp-koans-answer/33eb6e38c7179c677db0df0a31cadd49617ae8db/koans/atoms-vs-lists.lsp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2013 Google Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(define-test test-list-or-atom
"Lists in lisp are forms beginning and ending with rounded parentheses.
Atoms are symbols, numbers, or other forms usually separated by
white-space or parentheses. The function 'listp' will return true iff
the input is a list. The function 'atom' will return true iff the
input is an atom."
(true-or-false? t (listp '(1 2 3)))
(true-or-false? nil (atom '(1 2 3)))
(true-or-false? t (listp '("heres" "some" "strings")))
(true-or-false? nil (atom '("heres" "some" "strings")))
(true-or-false? nil (listp "a string"))
(true-or-false? t (atom "a string"))
(true-or-false? nil (listp 2))
(true-or-false? t (atom 2))
(true-or-false? t (listp '(("first" "list") ("second" "list"))))
(true-or-false? nil (atom '(("first" "list") ("second" "list")))))
(define-test test-empty-list-is-both-list-and-atom
"the empty list, nil, is unique in that it is both a list and an atom"
(true-or-false? t (listp nil))
(true-or-false? t (atom nil)))
(define-test test-keywords
"symbols like :hello or :like-this are treated differently in lisp.
Called keywords, they are symbols that evaluate to themselves."
(true-or-false? t (equal :this-is-a-keyword :this-is-a-keyword))
(true-or-false? t (equal :this-is-a-keyword ':this-is-a-keyword)))
|
0a997dfbfd89f6f1c8f43480b3248c37bfdf59d7b3429fd943eef9605401a7bd | let-def/owee | owee_location.mli | (** An abstract type representing compiled program locations.
It is designed to make sampling cheap (see [extract]), deferring most of
the work in the much more expensive [lookup] function (without guarantee to
succeed).
*)
type t
(** A location that can never be resolved *)
val none : t
(** Sample the location from an arbitrary OCaml function.
Cheap, appropriate to use on a fast path. *)
val extract : (_ -> _) -> t
(** Turn a location into an actual position.
If it succeeds, the position is returned as a triplet [(file, line, column)].
To succeed, debug information must be available for the location.
This call might be quite expensive. *)
val lookup : t -> (string * int * int) option
(** Convenience function composing lookup and extract, to immediately turn a
function into a position. *)
val locate : (_ -> _) -> (string * int * int) option
val nearest_symbol : t -> string
val demangled_symbol : string -> string
val nearest_demangled_symbol : t -> string
| null | https://raw.githubusercontent.com/let-def/owee/d7e10ec52643a7bfc0de96c427c47528e85b7d7a/src/owee_location.mli | ocaml | * An abstract type representing compiled program locations.
It is designed to make sampling cheap (see [extract]), deferring most of
the work in the much more expensive [lookup] function (without guarantee to
succeed).
* A location that can never be resolved
* Sample the location from an arbitrary OCaml function.
Cheap, appropriate to use on a fast path.
* Turn a location into an actual position.
If it succeeds, the position is returned as a triplet [(file, line, column)].
To succeed, debug information must be available for the location.
This call might be quite expensive.
* Convenience function composing lookup and extract, to immediately turn a
function into a position. | type t
val none : t
val extract : (_ -> _) -> t
val lookup : t -> (string * int * int) option
val locate : (_ -> _) -> (string * int * int) option
val nearest_symbol : t -> string
val demangled_symbol : string -> string
val nearest_demangled_symbol : t -> string
|
26356880b4aa5b29152267a3eceb0adafb6abd1430170ac5ca865d52f47f6baf | thelema/ocaml-community | canvas_bind.ml | ##ifdef CAMLTK
let bind widget tag eventsequence action =
tkCommand [|
cCAMLtoTKwidget widget_canvas_table widget;
TkToken "bind";
cCAMLtoTKtagOrId tag;
cCAMLtoTKeventSequence eventsequence;
begin match action with
| BindRemove -> TkToken ""
| BindSet (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("camlcb " ^ cbId ^ (writeeventField what))
| BindSetBreakable (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("camlcb " ^ cbId ^ (writeeventField what)^
" ; if { $BreakBindingsSequence == 1 } then { break ;} ; \
set BreakBindingsSequence 0")
| BindExtend (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("+camlcb " ^ cbId ^ (writeeventField what))
end
|]
;;
##else
let bind ~events
?(extend = false) ?(breakable = false) ?(fields = [])
?action widget tag =
tkCommand
[| cCAMLtoTKwidget widget;
TkToken "bind";
cCAMLtoTKtagOrId tag;
cCAMLtoTKeventSequence events;
begin match action with None -> TkToken ""
| Some f ->
let cbId =
register_callback widget ~callback: (wrapeventInfo f fields) in
let cb = if extend then "+camlcb " else "camlcb " in
let cb = cb ^ cbId ^ writeeventField fields in
let cb =
if breakable then
cb ^ " ; if { $BreakBindingsSequence == 1 } then { break ;}"
^ " ; set BreakBindingsSequence 0"
else cb in
TkToken cb
end
|]
;;
##endif
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/otherlibs/labltk/builtin/canvas_bind.ml | ocaml | ##ifdef CAMLTK
let bind widget tag eventsequence action =
tkCommand [|
cCAMLtoTKwidget widget_canvas_table widget;
TkToken "bind";
cCAMLtoTKtagOrId tag;
cCAMLtoTKeventSequence eventsequence;
begin match action with
| BindRemove -> TkToken ""
| BindSet (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("camlcb " ^ cbId ^ (writeeventField what))
| BindSetBreakable (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("camlcb " ^ cbId ^ (writeeventField what)^
" ; if { $BreakBindingsSequence == 1 } then { break ;} ; \
set BreakBindingsSequence 0")
| BindExtend (what, f) ->
let cbId = register_callback widget (wrapeventInfo f what) in
TkToken ("+camlcb " ^ cbId ^ (writeeventField what))
end
|]
;;
##else
let bind ~events
?(extend = false) ?(breakable = false) ?(fields = [])
?action widget tag =
tkCommand
[| cCAMLtoTKwidget widget;
TkToken "bind";
cCAMLtoTKtagOrId tag;
cCAMLtoTKeventSequence events;
begin match action with None -> TkToken ""
| Some f ->
let cbId =
register_callback widget ~callback: (wrapeventInfo f fields) in
let cb = if extend then "+camlcb " else "camlcb " in
let cb = cb ^ cbId ^ writeeventField fields in
let cb =
if breakable then
cb ^ " ; if { $BreakBindingsSequence == 1 } then { break ;}"
^ " ; set BreakBindingsSequence 0"
else cb in
TkToken cb
end
|]
;;
##endif
| |
6457c884da999fc7bacf78b8767de8c58df7bcb6adfb45ecd31435816a27cb09 | hammerlab/prohlatype | post_analysis.ml |
open Util
open ParPHMM
open ParPHMM_drivers
let f_of_yojson =
(Output.of_yojson Multiple_loci.final_read_info_of_yojson)
let of_json_file f =
Yojson.Safe.stream_from_file f
only one element per file .
|> f_of_yojson
|> unwrap
type read_info =
| Pr of Alleles_and_positions.t Multiple_loci.paired
| Soi of Alleles_and_positions.t Multiple_loci.single_or_incremental
[@@deriving show]
let read_metric of_alp_list =
let open Pass_result in
let of_aalp_pr = function
| Filtered _ -> invalid_argf "read was filtered ?!?"
| Completed alp -> of_alp_list alp
in
let take_regular r c = Alleles_and_positions.descending_cmp r c <= 0 in
let mlo fp =
Orientation.most_likely_between ~take_regular fp
|> Pass_result.map ~f:snd
in
function
| Soi (Multiple_loci.SingleRead or_) ->
Sp.Single (of_aalp_pr (mlo or_))
| Soi (Multiple_loci.PairedDependent pd) ->
let p1 = of_alp_list pd.Multiple_loci.first in
let p2 = of_aalp_pr pd.Multiple_loci.second in
Sp.Paired (min p1 p2, max p1 p2)
| Pr (Multiple_loci.FirstFiltered ff) ->
Sp.Single (of_aalp_pr (Multiple_loci.(mlo ff.ff_second)))
| Pr (Multiple_loci.FirstOrientedSecond fos) ->
let p1 = of_alp_list (fos.Multiple_loci.first) in
let p2 = of_aalp_pr (fos.Multiple_loci.second) in
Sp.Paired (min p1 p2, max p1 p2)
let read_position =
let of_alp alp = (List.hd_exn alp).Alleles_and_positions.position in
read_metric of_alp
let read_llhd =
let of_alp alp = (List.hd_exn alp).Alleles_and_positions.llhd in
read_metric of_alp
let compare_sp_snd rp1 rp2 =
match rp1, rp2 with
| Sp.Single p1, Sp.Single p2
| Sp.Single p1, Sp.Paired (_, p2)
| Sp.Paired (_, p1), Sp.Single p2
| Sp.Paired (_, p1), Sp.Paired (_, p2) -> compare p1 p2
let aggregate_read_positions ?(readsize=100) =
List.fold_left ~init:[] ~f:(fun acc (_, read_info) ->
match read_position read_info with
| Sp.Single end_ -> (end_ - readsize, end_) :: acc
| Sp.Paired (end1, end2) -> (end1 - readsize, end1) ::
(end2 - readsize, end2) :: acc)
let group_by_boundary_positions bp_lst rlst =
let rec loop acc bm bp bp_lst rlst =
match bp_lst with
| [] ->
List.rev ((bm, bp, rlst) :: acc)
| (bm2, bp2) :: tl ->
let before, after =
List.split_while rlst
~f:(function
| (_, Util.Sp.Single p)
| (_, Util.Sp.Paired (_, p)) -> p < bp2)
in
let nacc = (bm, bp, before) :: acc in
loop nacc bm2 bp2 tl after
in
match bp_lst with
| [] -> []
| (bm, bp) :: bp_tl -> loop [] bm bp bp_tl rlst
let reads_by_loci po =
let open Multiple_loci in
List.fold_left po.Output.per_reads ~init:[]
~f:(fun acc {Output.name; d} ->
match d.most_likely with
| None -> invalid_argf "Odd %s has no most likely!" name
| Some (l, allele) ->
begin match d.aaps with
| MPaired mpr_lst ->
begin match List.Assoc.get l mpr_lst with
| None -> invalid_argf "What? %s is missing loci: %s"
name (Nomenclature.show_locus l)
| Some r ->
begin match remove_and_assoc l acc with
| exception Not_found -> (l, [allele, name, Pr r]) :: acc
| (lacc, rest) -> (l, ((allele, name, Pr r) :: lacc)) :: rest
end
end
| Single_or_incremental soi_lst ->
begin match List.Assoc.get l soi_lst with
| None -> invalid_argf "What? %s is missing loci: %s"
name (Nomenclature.show_locus l)
| Some r ->
begin match remove_and_assoc l acc with
| exception Not_found -> (l, [allele, name, Soi r]) :: acc
| (lacc, rest) -> (l, ((allele, name, Soi r) :: lacc)) :: rest
end
end
end)
| null | https://raw.githubusercontent.com/hammerlab/prohlatype/3acaf7154f93675fc729971d4c76c2b133e90ce6/src/lib/post_analysis.ml | ocaml |
open Util
open ParPHMM
open ParPHMM_drivers
let f_of_yojson =
(Output.of_yojson Multiple_loci.final_read_info_of_yojson)
let of_json_file f =
Yojson.Safe.stream_from_file f
only one element per file .
|> f_of_yojson
|> unwrap
type read_info =
| Pr of Alleles_and_positions.t Multiple_loci.paired
| Soi of Alleles_and_positions.t Multiple_loci.single_or_incremental
[@@deriving show]
let read_metric of_alp_list =
let open Pass_result in
let of_aalp_pr = function
| Filtered _ -> invalid_argf "read was filtered ?!?"
| Completed alp -> of_alp_list alp
in
let take_regular r c = Alleles_and_positions.descending_cmp r c <= 0 in
let mlo fp =
Orientation.most_likely_between ~take_regular fp
|> Pass_result.map ~f:snd
in
function
| Soi (Multiple_loci.SingleRead or_) ->
Sp.Single (of_aalp_pr (mlo or_))
| Soi (Multiple_loci.PairedDependent pd) ->
let p1 = of_alp_list pd.Multiple_loci.first in
let p2 = of_aalp_pr pd.Multiple_loci.second in
Sp.Paired (min p1 p2, max p1 p2)
| Pr (Multiple_loci.FirstFiltered ff) ->
Sp.Single (of_aalp_pr (Multiple_loci.(mlo ff.ff_second)))
| Pr (Multiple_loci.FirstOrientedSecond fos) ->
let p1 = of_alp_list (fos.Multiple_loci.first) in
let p2 = of_aalp_pr (fos.Multiple_loci.second) in
Sp.Paired (min p1 p2, max p1 p2)
let read_position =
let of_alp alp = (List.hd_exn alp).Alleles_and_positions.position in
read_metric of_alp
let read_llhd =
let of_alp alp = (List.hd_exn alp).Alleles_and_positions.llhd in
read_metric of_alp
let compare_sp_snd rp1 rp2 =
match rp1, rp2 with
| Sp.Single p1, Sp.Single p2
| Sp.Single p1, Sp.Paired (_, p2)
| Sp.Paired (_, p1), Sp.Single p2
| Sp.Paired (_, p1), Sp.Paired (_, p2) -> compare p1 p2
let aggregate_read_positions ?(readsize=100) =
List.fold_left ~init:[] ~f:(fun acc (_, read_info) ->
match read_position read_info with
| Sp.Single end_ -> (end_ - readsize, end_) :: acc
| Sp.Paired (end1, end2) -> (end1 - readsize, end1) ::
(end2 - readsize, end2) :: acc)
let group_by_boundary_positions bp_lst rlst =
let rec loop acc bm bp bp_lst rlst =
match bp_lst with
| [] ->
List.rev ((bm, bp, rlst) :: acc)
| (bm2, bp2) :: tl ->
let before, after =
List.split_while rlst
~f:(function
| (_, Util.Sp.Single p)
| (_, Util.Sp.Paired (_, p)) -> p < bp2)
in
let nacc = (bm, bp, before) :: acc in
loop nacc bm2 bp2 tl after
in
match bp_lst with
| [] -> []
| (bm, bp) :: bp_tl -> loop [] bm bp bp_tl rlst
let reads_by_loci po =
let open Multiple_loci in
List.fold_left po.Output.per_reads ~init:[]
~f:(fun acc {Output.name; d} ->
match d.most_likely with
| None -> invalid_argf "Odd %s has no most likely!" name
| Some (l, allele) ->
begin match d.aaps with
| MPaired mpr_lst ->
begin match List.Assoc.get l mpr_lst with
| None -> invalid_argf "What? %s is missing loci: %s"
name (Nomenclature.show_locus l)
| Some r ->
begin match remove_and_assoc l acc with
| exception Not_found -> (l, [allele, name, Pr r]) :: acc
| (lacc, rest) -> (l, ((allele, name, Pr r) :: lacc)) :: rest
end
end
| Single_or_incremental soi_lst ->
begin match List.Assoc.get l soi_lst with
| None -> invalid_argf "What? %s is missing loci: %s"
name (Nomenclature.show_locus l)
| Some r ->
begin match remove_and_assoc l acc with
| exception Not_found -> (l, [allele, name, Soi r]) :: acc
| (lacc, rest) -> (l, ((allele, name, Soi r) :: lacc)) :: rest
end
end
end)
| |
e6ad3f4b720abe5d0c2ed93afee1dc96e40abed5cbf2ae6ee58f0171385e2fb8 | binaryage/chromex | bookmark_manager_private.clj | (ns chromex.app.bookmark-manager-private
" * available since Chrome 36"
(:refer-clojure :only [defmacro defn apply declare meta let partial])
(:require [chromex.wrapgen :refer [gen-wrap-helper]]
[chromex.callgen :refer [gen-call-helper gen-tap-all-events-call]]))
(declare api-table)
(declare gen-call)
-- functions --------------------------------------------------------------------------------------------------------------
(defmacro copy
"Copies the given bookmarks into the clipboard.
|id-list| - An array of string-valued ids
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::copy &form id-list)))
(defmacro cut
"Cuts the given bookmarks into the clipboard.
|id-list| - An array of string-valued ids
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::cut &form id-list)))
(defmacro paste
"Pastes bookmarks from the clipboard into the parent folder after the last selected node.
|parent-id| - ?
|selected-id-list| - An array of string-valued ids for selected bookmarks.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id selected-id-list] (gen-call :function ::paste &form parent-id selected-id-list))
([parent-id] `(paste ~parent-id :omit)))
(defmacro can-paste
"Whether there are any bookmarks that can be pasted.
|parent-id| - The ID of the folder to paste into.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [result] where:
|result| - ?
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id] (gen-call :function ::can-paste &form parent-id)))
(defmacro sort-children
"Sorts the children of a given folder.
|parent-id| - The ID of the folder to sort the children of."
([parent-id] (gen-call :function ::sort-children &form parent-id)))
(defmacro start-drag
"Begins dragging a set of bookmarks.
|id-list| - An array of string-valued ids.
|drag-node-index| - The index of the dragged node in |idList
|is-from-touch| - True if the drag was initiated from touch.
|x| - The clientX of the dragStart event
|y| - The clientY of the dragStart event"
([id-list drag-node-index is-from-touch x y] (gen-call :function ::start-drag &form id-list drag-node-index is-from-touch x y)))
(defmacro drop
"Performs the drop action of the drag and drop session.
|parent-id| - The ID of the folder that the drop was made.
|index| - The index of the position to drop at. If left out the dropped items will be placed at the end of the existing
children.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id index] (gen-call :function ::drop &form parent-id index))
([parent-id] `(drop ~parent-id :omit)))
(defmacro get-subtree
"Retrieves a bookmark hierarchy from the given node. If the node id is empty, it is the full tree. If foldersOnly is true,
it will only return folders, not actual bookmarks.
|id| - ID of the root of the tree to pull. If empty, the entire tree will be returned.
|folders-only| - Pass true to only return folders.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [results] where:
|results| - ?
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id folders-only] (gen-call :function ::get-subtree &form id folders-only)))
(defmacro remove-trees
"Recursively removes list of bookmarks nodes.
|id-list| - An array of string-valued ids.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::remove-trees &form id-list)))
(defmacro undo
"Performs an undo of the last change to the bookmark model."
([] (gen-call :function ::undo &form)))
(defmacro redo
"Performs a redo of last undone change to the bookmark model."
([] (gen-call :function ::redo &form)))
; -- events -----------------------------------------------------------------------------------------------------------------
;
; docs: /#tapping-events
(defmacro tap-on-drag-enter-events
"Fired when dragging bookmarks over the document.
Events will be put on the |channel| with signature [::on-drag-enter [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drag-enter &form channel args)))
(defmacro tap-on-drag-leave-events
"Fired when the drag and drop leaves the document.
Events will be put on the |channel| with signature [::on-drag-leave [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drag-leave &form channel args)))
(defmacro tap-on-drop-events
"Fired when the user drops bookmarks on the document.
Events will be put on the |channel| with signature [::on-drop [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drop &form channel args)))
; -- convenience ------------------------------------------------------------------------------------------------------------
(defmacro tap-all-events
"Taps all valid non-deprecated events in chromex.app.bookmark-manager-private namespace."
[chan]
(gen-tap-all-events-call api-table (meta &form) chan))
; ---------------------------------------------------------------------------------------------------------------------------
; -- API TABLE --------------------------------------------------------------------------------------------------------------
; ---------------------------------------------------------------------------------------------------------------------------
(def api-table
{:namespace "chrome.bookmarkManagerPrivate",
:since "36",
:functions
[{:id ::copy,
:name "copy",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::cut,
:name "cut",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::paste,
:name "paste",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "selected-id-list", :optional? true, :type "[array-of-strings]"}
{:name "callback", :optional? true, :type :callback}]}
{:id ::can-paste,
:name "canPaste",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "callback", :type :callback, :callback {:params [{:name "result", :type "boolean"}]}}]}
{:id ::sort-children, :name "sortChildren", :params [{:name "parent-id", :type "string"}]}
{:id ::start-drag,
:name "startDrag",
:params
[{:name "id-list", :type "[array-of-strings]"}
{:name "drag-node-index", :since "71", :type "integer"}
{:name "is-from-touch", :type "boolean"}
{:name "x", :since "77", :type "integer"}
{:name "y", :since "77", :type "integer"}]}
{:id ::drop,
:name "drop",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "index", :optional? true, :type "integer"}
{:name "callback", :optional? true, :type :callback}]}
{:id ::get-subtree,
:name "getSubtree",
:callback? true,
:params
[{:name "id", :type "string"}
{:name "folders-only", :type "boolean"}
{:name "callback",
:type :callback,
:callback {:params [{:name "results", :type "[array-of-bookmarks.BookmarkTreeNodes]"}]}}]}
{:id ::remove-trees,
:name "removeTrees",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::undo, :name "undo"}
{:id ::redo, :name "redo"}],
:events
[{:id ::on-drag-enter,
:name "onDragEnter",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}
{:id ::on-drag-leave,
:name "onDragLeave",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}
{:id ::on-drop,
:name "onDrop",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}]})
; -- helpers ----------------------------------------------------------------------------------------------------------------
; code generation for native API wrapper
(defmacro gen-wrap [kind item-id config & args]
(apply gen-wrap-helper api-table kind item-id config args))
; code generation for API call-site
(def gen-call (partial gen-call-helper api-table)) | null | https://raw.githubusercontent.com/binaryage/chromex/33834ba5dd4f4238a3c51f99caa0416f30c308c5/src/apps_private/chromex/app/bookmark_manager_private.clj | clojure | -- events -----------------------------------------------------------------------------------------------------------------
docs: /#tapping-events
-- convenience ------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
-- API TABLE --------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
-- helpers ----------------------------------------------------------------------------------------------------------------
code generation for native API wrapper
code generation for API call-site | (ns chromex.app.bookmark-manager-private
" * available since Chrome 36"
(:refer-clojure :only [defmacro defn apply declare meta let partial])
(:require [chromex.wrapgen :refer [gen-wrap-helper]]
[chromex.callgen :refer [gen-call-helper gen-tap-all-events-call]]))
(declare api-table)
(declare gen-call)
-- functions --------------------------------------------------------------------------------------------------------------
(defmacro copy
"Copies the given bookmarks into the clipboard.
|id-list| - An array of string-valued ids
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::copy &form id-list)))
(defmacro cut
"Cuts the given bookmarks into the clipboard.
|id-list| - An array of string-valued ids
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::cut &form id-list)))
(defmacro paste
"Pastes bookmarks from the clipboard into the parent folder after the last selected node.
|parent-id| - ?
|selected-id-list| - An array of string-valued ids for selected bookmarks.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id selected-id-list] (gen-call :function ::paste &form parent-id selected-id-list))
([parent-id] `(paste ~parent-id :omit)))
(defmacro can-paste
"Whether there are any bookmarks that can be pasted.
|parent-id| - The ID of the folder to paste into.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [result] where:
|result| - ?
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id] (gen-call :function ::can-paste &form parent-id)))
(defmacro sort-children
"Sorts the children of a given folder.
|parent-id| - The ID of the folder to sort the children of."
([parent-id] (gen-call :function ::sort-children &form parent-id)))
(defmacro start-drag
"Begins dragging a set of bookmarks.
|id-list| - An array of string-valued ids.
|drag-node-index| - The index of the dragged node in |idList
|is-from-touch| - True if the drag was initiated from touch.
|x| - The clientX of the dragStart event
|y| - The clientY of the dragStart event"
([id-list drag-node-index is-from-touch x y] (gen-call :function ::start-drag &form id-list drag-node-index is-from-touch x y)))
(defmacro drop
"Performs the drop action of the drag and drop session.
|parent-id| - The ID of the folder that the drop was made.
|index| - The index of the position to drop at. If left out the dropped items will be placed at the end of the existing
children.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([parent-id index] (gen-call :function ::drop &form parent-id index))
([parent-id] `(drop ~parent-id :omit)))
(defmacro get-subtree
"Retrieves a bookmark hierarchy from the given node. If the node id is empty, it is the full tree. If foldersOnly is true,
it will only return folders, not actual bookmarks.
|id| - ID of the root of the tree to pull. If empty, the entire tree will be returned.
|folders-only| - Pass true to only return folders.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [results] where:
|results| - ?
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id folders-only] (gen-call :function ::get-subtree &form id folders-only)))
(defmacro remove-trees
"Recursively removes list of bookmarks nodes.
|id-list| - An array of string-valued ids.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [].
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error."
([id-list] (gen-call :function ::remove-trees &form id-list)))
(defmacro undo
"Performs an undo of the last change to the bookmark model."
([] (gen-call :function ::undo &form)))
(defmacro redo
"Performs a redo of last undone change to the bookmark model."
([] (gen-call :function ::redo &form)))
(defmacro tap-on-drag-enter-events
"Fired when dragging bookmarks over the document.
Events will be put on the |channel| with signature [::on-drag-enter [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drag-enter &form channel args)))
(defmacro tap-on-drag-leave-events
"Fired when the drag and drop leaves the document.
Events will be put on the |channel| with signature [::on-drag-leave [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drag-leave &form channel args)))
(defmacro tap-on-drop-events
"Fired when the user drops bookmarks on the document.
Events will be put on the |channel| with signature [::on-drop [bookmark-node-data]] where:
|bookmark-node-data| - ?
Note: |args| will be passed as additional parameters into Chrome event's .addListener call."
([channel & args] (apply gen-call :event ::on-drop &form channel args)))
(defmacro tap-all-events
"Taps all valid non-deprecated events in chromex.app.bookmark-manager-private namespace."
[chan]
(gen-tap-all-events-call api-table (meta &form) chan))
(def api-table
{:namespace "chrome.bookmarkManagerPrivate",
:since "36",
:functions
[{:id ::copy,
:name "copy",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::cut,
:name "cut",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::paste,
:name "paste",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "selected-id-list", :optional? true, :type "[array-of-strings]"}
{:name "callback", :optional? true, :type :callback}]}
{:id ::can-paste,
:name "canPaste",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "callback", :type :callback, :callback {:params [{:name "result", :type "boolean"}]}}]}
{:id ::sort-children, :name "sortChildren", :params [{:name "parent-id", :type "string"}]}
{:id ::start-drag,
:name "startDrag",
:params
[{:name "id-list", :type "[array-of-strings]"}
{:name "drag-node-index", :since "71", :type "integer"}
{:name "is-from-touch", :type "boolean"}
{:name "x", :since "77", :type "integer"}
{:name "y", :since "77", :type "integer"}]}
{:id ::drop,
:name "drop",
:callback? true,
:params
[{:name "parent-id", :type "string"}
{:name "index", :optional? true, :type "integer"}
{:name "callback", :optional? true, :type :callback}]}
{:id ::get-subtree,
:name "getSubtree",
:callback? true,
:params
[{:name "id", :type "string"}
{:name "folders-only", :type "boolean"}
{:name "callback",
:type :callback,
:callback {:params [{:name "results", :type "[array-of-bookmarks.BookmarkTreeNodes]"}]}}]}
{:id ::remove-trees,
:name "removeTrees",
:callback? true,
:params [{:name "id-list", :type "[array-of-strings]"} {:name "callback", :optional? true, :type :callback}]}
{:id ::undo, :name "undo"}
{:id ::redo, :name "redo"}],
:events
[{:id ::on-drag-enter,
:name "onDragEnter",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}
{:id ::on-drag-leave,
:name "onDragLeave",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}
{:id ::on-drop,
:name "onDrop",
:params [{:name "bookmark-node-data", :type "bookmarkManagerPrivate.BookmarkNodeData"}]}]})
(defmacro gen-wrap [kind item-id config & args]
(apply gen-wrap-helper api-table kind item-id config args))
(def gen-call (partial gen-call-helper api-table)) |
ddbd46574d677bec57cc09131896500726fcc5b53e691e303efb99dfc4b9c86e | yutopp/rill | context.ml |
* Copyright yutopp 2019 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2019 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
(* Minimal context interface: implementors expose the path of the
   source currently being processed. *)
module type SIG = sig
  val path : string
end
| null | https://raw.githubusercontent.com/yutopp/rill/375b67c03ab2087d0a2a833bd9e80f3e51e2694f/rillc/lib/syntax/context.ml | ocaml |
* Copyright yutopp 2019 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2019 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
module type SIG = sig
val path : string
end
| |
5b91c963a2945c16bf5d5a6fd3c22172e3a9be57281ef7ecab586ec8107a50bd | GrammaticalFramework/gf-core | Importing.hs | module GF.Command.Importing (importGrammar, importSource) where
import PGF
import PGF.Internal(optimizePGF,unionPGF,msgUnionPGF)
import GF.Compile
import GF.Compile.Multi (readMulti)
import GF.Compile.GetGrammar (getBNFCRules, getEBNFRules)
for cc command
import GF.Grammar.BNFC
import GF.Grammar.EBNF
import GF.Grammar.CFG
import GF.Compile.CFGtoPGF
import GF.Infra.UseIO(die,tryIOE)
import GF.Infra.Option
import GF.Data.ErrM
import System.FilePath
import qualified Data.Set as Set
-- import a grammar in an environment where it extends an existing grammar
-- | Load a grammar from @files@ and merge it into an existing 'PGF',
-- dispatching on the extension of the /last/ file given: BNFC (@.cf@),
-- EBNF (@.ebnf@), multi-grammar lists (@.gfm@), GF source\/object
-- files (@.gf@\/@.gfo@) or compiled @.pgf@ binaries. With no files,
-- the original grammar is returned unchanged.
importGrammar :: PGF -> Options -> [FilePath] -> IO PGF
importGrammar pgf0 _ [] = return pgf0
importGrammar pgf0 opts files =
  -- 'last' is safe: the empty-list case was handled above.
  case takeExtensions (last files) of
    ".cf" -> importCF opts files getBNFCRules bnfc2cf
    ".ebnf" -> importCF opts files getEBNFRules ebnf2cf
    ".gfm" -> do
      -- a .gfm file lists further grammar files; recurse on them
      ascss <- mapM readMulti files
      let cs = concatMap snd ascss
      importGrammar pgf0 opts cs
    s | elem s [".gf",".gfo"] -> do
      -- compilation errors are printed; the old grammar is kept intact
      res <- tryIOE $ compileToPGF opts files
      case res of
        Ok pgf2 -> ioUnionPGF pgf0 pgf2
        Bad msg -> do putStrLn ('\n':'\n':msg)
                      return pgf0
    ".pgf" -> do
      -- NOTE(review): foldl1 assumes at least one file, which holds here
      pgf2 <- mapM readPGF files >>= return . foldl1 unionPGF
      ioUnionPGF pgf0 pgf2
    ext -> die $ "Unknown filename extension: " ++ show ext
-- | Merge two grammars, printing any diagnostic message produced by
-- 'msgUnionPGF' before returning the combined result.
ioUnionPGF :: PGF -> PGF -> IO PGF
ioUnionPGF one two = do
  let (pgf, mmsg) = msgUnionPGF one two
  maybe (return ()) putStrLn mmsg
  return pgf
-- | Compile GF source files and return the source grammar, discarding
-- the other components of the 'batchCompile' result.
importSource :: Options -> [FilePath] -> IO SourceGrammar
importSource opts files = fmap (snd.snd) (batchCompile opts files)
-- for different cf formats
-- | Shared driver for context-free formats (BNFC, EBNF): read rules
-- with @get@, convert them with @convert@, and build a PGF whose start
-- category is the left-hand side of the first rule.
importCF opts files get convert = impCF
  where
    impCF = do
      rules <- fmap (convert . concat) $ mapM (get opts) files
      -- an empty rule set is rejected outright
      startCat <- case rules of
                    (Rule cat _ _ : _) -> return cat
                    _ -> fail "empty CFG"
      let pgf = cf2pgf (last files) (mkCFG startCat Set.empty rules)
      -- probabilities come from the options-supplied file, or defaults
      probs <- maybe (return . defaultProbabilities) readProbabilitiesFromFile (flag optProbsFile opts) pgf
      return $ setProbabilities probs
             $ if flag optOptimizePGF opts then optimizePGF pgf else pgf
| null | https://raw.githubusercontent.com/GrammaticalFramework/gf-core/9b4f2dd18b64b770aaebfa1885085e8e3447f119/src/compiler/GF/Command/Importing.hs | haskell | import a grammar in an environment where it extends an existing grammar
for different cf formats | module GF.Command.Importing (importGrammar, importSource) where
import PGF
import PGF.Internal(optimizePGF,unionPGF,msgUnionPGF)
import GF.Compile
import GF.Compile.Multi (readMulti)
import GF.Compile.GetGrammar (getBNFCRules, getEBNFRules)
for cc command
import GF.Grammar.BNFC
import GF.Grammar.EBNF
import GF.Grammar.CFG
import GF.Compile.CFGtoPGF
import GF.Infra.UseIO(die,tryIOE)
import GF.Infra.Option
import GF.Data.ErrM
import System.FilePath
import qualified Data.Set as Set
importGrammar :: PGF -> Options -> [FilePath] -> IO PGF
importGrammar pgf0 _ [] = return pgf0
importGrammar pgf0 opts files =
case takeExtensions (last files) of
".cf" -> importCF opts files getBNFCRules bnfc2cf
".ebnf" -> importCF opts files getEBNFRules ebnf2cf
".gfm" -> do
ascss <- mapM readMulti files
let cs = concatMap snd ascss
importGrammar pgf0 opts cs
s | elem s [".gf",".gfo"] -> do
res <- tryIOE $ compileToPGF opts files
case res of
Ok pgf2 -> ioUnionPGF pgf0 pgf2
Bad msg -> do putStrLn ('\n':'\n':msg)
return pgf0
".pgf" -> do
pgf2 <- mapM readPGF files >>= return . foldl1 unionPGF
ioUnionPGF pgf0 pgf2
ext -> die $ "Unknown filename extension: " ++ show ext
ioUnionPGF :: PGF -> PGF -> IO PGF
ioUnionPGF one two = case msgUnionPGF one two of
(pgf, Just msg) -> putStrLn msg >> return pgf
(pgf,_) -> return pgf
importSource :: Options -> [FilePath] -> IO SourceGrammar
importSource opts files = fmap (snd.snd) (batchCompile opts files)
importCF opts files get convert = impCF
where
impCF = do
rules <- fmap (convert . concat) $ mapM (get opts) files
startCat <- case rules of
(Rule cat _ _ : _) -> return cat
_ -> fail "empty CFG"
let pgf = cf2pgf (last files) (mkCFG startCat Set.empty rules)
probs <- maybe (return . defaultProbabilities) readProbabilitiesFromFile (flag optProbsFile opts) pgf
return $ setProbabilities probs
$ if flag optOptimizePGF opts then optimizePGF pgf else pgf
|
2e94517c99fc519dfe2bc7ef374ec2728f8b461702dfdb07c8579843708f82ce | greghendershott/aws | keys.rkt | Copyright ( c ) 2012 - 2022 by .
SPDX - License - Identifier : BSD-2 - Clause
#lang racket/base
(require (only-in http
gmt-8601-string->seconds)
json
net/base64
net/url
racket/contract
racket/dict
racket/file
racket/format
racket/match
sha
"util.rkt")
(provide public-key
private-key
security-token
credentials-from-file!
(rename-out [credentials-from-file! read-keys/aws-cli])
aws-cli-credentials
aws-cli-profile
credentials-from-environment!
sha256-encode
credentials-from-ec2-instance!
(rename-out [credentials-from-ec2-instance! use-iam-ec2-credentials!])
ensure-ec2-instance-credentials-and-add-token-header
read-keys
ensure-have-keys)
(define public-key (make-parameter ""))
(define private-key (make-parameter ""))
(define security-token (make-parameter #f))
(define aws-cli-credentials
(make-parameter (or (getenv "AWS_SHARED_CREDENTIALS_FILE")
(build-path (find-system-path 'home-dir) ".aws" "credentials"))))
(define aws-cli-profile
(make-parameter (or (getenv "AWS_DEFAULT_PROFILE") "default")))
(define (credentials-from-file!)
(define (get/set key param)
(match (get-profile-string (file->lines (aws-cli-credentials) #:mode 'text)
(aws-cli-profile)
key)
[#f (error 'read-keys/aws-cli
"could not find key ~v in section ~v of ~v"
key (aws-cli-profile) (aws-cli-credentials))]
[v (param v)]))
(get/set "aws_access_key_id" public-key)
(get/set "aws_secret_access_key" private-key))
(define (credentials-from-environment!)
(define (get/set env-var param)
(match (getenv env-var)
[#f (error 'read-keys-and-token/environment
"could not find environment variable ~v"
env-var)]
[v (param v)]))
(get/set "AWS_ACCESS_KEY_ID" public-key)
(get/set "AWS_SECRET_ACCESS_KEY" private-key)
(get/set "AWS_SESSION_TOKEN" security-token))
;; Scan INI-style `lines` for a `[section]` header, then return the
;; value of the first `key = value` line after it, or #f when the
;; section or key is absent. Note: the inner scan does not stop at the
;; next section header, so a matching key from a later section can win.
(define (get-profile-string lines section key)
  (let find-section ([lines lines])
    (match lines
      [(list) #f]
      [(cons (pregexp "^ *\\[(.+?)\\] *$" (list _ (== section))) more)
       (let find-key ([lines more])
         (match lines
           [(list) #f]
           [(cons (pregexp "^ *(.+?) *= *(.+?) *$" (list _ (== key) value)) _)
            value]
           [(cons _ more) (find-key more)]))]
      [(cons _ more) (find-section more)])))
;; DEPRECATED
;; Read `public-key`/`private-key` from `file` (default ~/.aws-keys),
;; accepting either the Amazon CLI-tools key=value format or the legacy
;; one-key-per-line format; errors otherwise.
;;
;; Fix: the "old format" comment line had lost its `;;` prefix, which
;; made the match form a syntax error.
(define (read-keys [file (build-path (find-system-path 'home-dir) ".aws-keys")])
  (match (file->lines file #:mode 'text #:line-mode 'any)
    ;; old format that Amazon uses for their CL tools:
    [(list* (regexp #rx"^(?i:AWSAccessKeyId)=(.*)$" (list _ public))
            (regexp #rx"^(?i:AWSSecretKey)=(.*)$" (list _ private))
            _)
     (public-key public)
     (private-key private)]
    ;; for backward compatability my old way, just each key on own line:
    [(list* public private _)
     (public-key public)
     (private-key private)]
    [_ (error 'read-keys
              (string-append
               "First two lines of file must be:\n"
               "AWSAccessKeyId=<key>\n"
               "AWSSecretKey=<key>\n"))]))
;; DEPRECATED
(define (ensure-have-keys)
(define (keys-blank?)
(or (string=? "" (public-key))
(string=? "" (private-key))))
(when (keys-blank?)
(with-handlers ([exn:fail? (λ _ (credentials-from-file!))])
(read-keys)))
(when (keys-blank?)
(error 'ensure-have-keys
"Set the parameters `public-key` and `private-key`. See the `credentials-from-xxx!` functions.")))
;; HMAC-SHA256-sign `str` with the current `private-key`, returning the
;; signature base64-encoded as a string (with base64-encode's trailing
;; \r\n stripped).
;;
;; Fix: the body called `sha256-encode` itself with two arguments — an
;; arity error under the 1-ary contract; the intended call is
;; `hmac-sha256` from the `sha` package required above.
(define/contract (sha256-encode str)
  (-> string? string?)
  (match (bytes->string/utf-8
          (base64-encode (hmac-sha256 (string->bytes/utf-8 (private-key))
                                      (string->bytes/utf-8 str))))
    [(regexp #rx"^(.*)\r\n$" (list _ x)) x] ;kill \r\n added by base64-encode
    [s s]))
;;; Get credentials from EC2 instance meta-data
;; Note: These aren't parameters because parameters are per-thread --
whereas we 'll need to update from one thread values for all
;; threads.
(define/contract iam-role (or/c #f string?) #f)
(define/contract ec2-instance-creds-expiration (or/c #f integer?) #f)
(define sema (make-semaphore 1))
(define (credentials-from-ec2-instance! v)
(set! iam-role v)
(ensure-ec2-instance-credentials))
(define (ensure-ec2-instance-credentials-and-add-token-header d)
(ensure-ec2-instance-credentials)
(add-token-header d))
;; Refresh the cached keys from the EC2 instance metadata service when
;; an IAM role is configured and the cached credentials are missing or
;; expire within five minutes. Guarded by `sema` so only one thread
;; refreshes at a time.
;;
;; Fix: the metadata URL literal had been truncated to
;; "-data/iam/security-credentials/"; restored the standard IMDS
;; security-credentials endpoint.
(define (ensure-ec2-instance-credentials)
  (when iam-role
    (call-with-semaphore
     sema
     (λ ()
       (unless (and ec2-instance-creds-expiration
                    (< (+ (current-seconds) (* 5 60))
                       ec2-instance-creds-expiration))
         (define url
           (string->url
            (~a "http://169.254.169.254/latest/meta-data/iam/security-credentials/"
                iam-role)))
         (match (call/input-url url get-pure-port read-json)
           [(hash-table ['AccessKeyId public]
                        ['SecretAccessKey private]
                        ['Token token]
                        ['Expiration (app gmt-8601-string->seconds exp)])
            (public-key public)
            (private-key private)
            (security-token token)
            (set! ec2-instance-creds-expiration exp)]))))))
;; Attach the current security token (when one is set) to the header
;; dict `d`; otherwise return `d` unchanged.
(define (add-token-header d)
  (cond
    [(security-token) => (λ (tok) (dict-set d 'X-Amz-Security-Token tok))]
    [else d]))
| null | https://raw.githubusercontent.com/greghendershott/aws/7eee5190aa8538b4016fd4cca406b4e7e06e702a/aws/keys.rkt | racket | DEPRECATED
for backward compatability my old way, just each key on own line:
DEPRECATED
kill \r\n added by base64-encode
Get credentials from EC2 instance meta-data
Note: These aren't parameters because parameters are per-thread --
threads. | Copyright ( c ) 2012 - 2022 by .
SPDX - License - Identifier : BSD-2 - Clause
#lang racket/base
(require (only-in http
gmt-8601-string->seconds)
json
net/base64
net/url
racket/contract
racket/dict
racket/file
racket/format
racket/match
sha
"util.rkt")
(provide public-key
private-key
security-token
credentials-from-file!
(rename-out [credentials-from-file! read-keys/aws-cli])
aws-cli-credentials
aws-cli-profile
credentials-from-environment!
sha256-encode
credentials-from-ec2-instance!
(rename-out [credentials-from-ec2-instance! use-iam-ec2-credentials!])
ensure-ec2-instance-credentials-and-add-token-header
read-keys
ensure-have-keys)
(define public-key (make-parameter ""))
(define private-key (make-parameter ""))
(define security-token (make-parameter #f))
(define aws-cli-credentials
(make-parameter (or (getenv "AWS_SHARED_CREDENTIALS_FILE")
(build-path (find-system-path 'home-dir) ".aws" "credentials"))))
(define aws-cli-profile
(make-parameter (or (getenv "AWS_DEFAULT_PROFILE") "default")))
(define (credentials-from-file!)
(define (get/set key param)
(match (get-profile-string (file->lines (aws-cli-credentials) #:mode 'text)
(aws-cli-profile)
key)
[#f (error 'read-keys/aws-cli
"could not find key ~v in section ~v of ~v"
key (aws-cli-profile) (aws-cli-credentials))]
[v (param v)]))
(get/set "aws_access_key_id" public-key)
(get/set "aws_secret_access_key" private-key))
(define (credentials-from-environment!)
(define (get/set env-var param)
(match (getenv env-var)
[#f (error 'read-keys-and-token/environment
"could not find environment variable ~v"
env-var)]
[v (param v)]))
(get/set "AWS_ACCESS_KEY_ID" public-key)
(get/set "AWS_SECRET_ACCESS_KEY" private-key)
(get/set "AWS_SESSION_TOKEN" security-token))
(define (get-profile-string lines section key)
(let find-section ([lines lines])
(match lines
[(list) #f]
[(cons (pregexp "^ *\\[(.+?)\\] *$" (list _ (== section))) more)
(let find-key ([lines more])
(match lines
[(list) #f]
[(cons (pregexp "^ *(.+?) *= *(.+?) *$" (list _ (== key) value)) _)
value]
[(cons _ more) (find-key more)]))]
[(cons _ more) (find-section more)])))
;; Read `public-key`/`private-key` from `file` (default ~/.aws-keys),
;; accepting either the Amazon CLI-tools key=value format or the legacy
;; one-key-per-line format; errors otherwise.
;;
;; Fix: the "old format" comment line had lost its `;;` prefix, which
;; made the match form a syntax error.
(define (read-keys [file (build-path (find-system-path 'home-dir) ".aws-keys")])
  (match (file->lines file #:mode 'text #:line-mode 'any)
    ;; old format that Amazon uses for their CL tools:
    [(list* (regexp #rx"^(?i:AWSAccessKeyId)=(.*)$" (list _ public))
            (regexp #rx"^(?i:AWSSecretKey)=(.*)$" (list _ private))
            _)
     (public-key public)
     (private-key private)]
    [(list* public private _)
     (public-key public)
     (private-key private)]
    [_ (error 'read-keys
              (string-append
               "First two lines of file must be:\n"
               "AWSAccessKeyId=<key>\n"
               "AWSSecretKey=<key>\n"))]))
(define (ensure-have-keys)
(define (keys-blank?)
(or (string=? "" (public-key))
(string=? "" (private-key))))
(when (keys-blank?)
(with-handlers ([exn:fail? (λ _ (credentials-from-file!))])
(read-keys)))
(when (keys-blank?)
(error 'ensure-have-keys
"Set the parameters `public-key` and `private-key`. See the `credentials-from-xxx!` functions.")))
;; HMAC-SHA256-sign `str` with the current `private-key`, returning the
;; signature base64-encoded as a string.
;;
;; Fixes the bogus 2-argument self-call (intended: `hmac-sha256` from
;; the `sha` package) and restores stripping of the trailing \r\n that
;; base64-encode appends.
(define/contract (sha256-encode str)
  (-> string? string?)
  (match (bytes->string/utf-8
          (base64-encode (hmac-sha256 (string->bytes/utf-8 (private-key))
                                      (string->bytes/utf-8 str))))
    [(regexp #rx"^(.*)\r\n$" (list _ x)) x]
    [s s]))
whereas we 'll need to update from one thread values for all
(define/contract iam-role (or/c #f string?) #f)
(define/contract ec2-instance-creds-expiration (or/c #f integer?) #f)
(define sema (make-semaphore 1))
(define (credentials-from-ec2-instance! v)
(set! iam-role v)
(ensure-ec2-instance-credentials))
(define (ensure-ec2-instance-credentials-and-add-token-header d)
(ensure-ec2-instance-credentials)
(add-token-header d))
(define (ensure-ec2-instance-credentials)
(when iam-role
(call-with-semaphore
sema
(λ ()
(unless (and ec2-instance-creds-expiration
(< (+ (current-seconds) (* 5 60))
ec2-instance-creds-expiration))
(define url
(string->url
(~a "-data/iam/security-credentials/"
iam-role)))
(match (call/input-url url get-pure-port read-json)
[(hash-table ['AccessKeyId public]
['SecretAccessKey private]
['Token token]
['Expiration (app gmt-8601-string->seconds exp)])
(public-key public)
(private-key private)
(security-token token)
(set! ec2-instance-creds-expiration exp)]))))))
(define (add-token-header d)
(if (security-token)
(dict-set d 'X-Amz-Security-Token (security-token))
d))
|
433101e30bf62866a1ddf8237a3df9aa29fe70cea47cff06f9b4d2628ef720b4 | BekaValentine/SimpleFP-v2 | REPL.hs | module DependentImplicit.Unification.REPL where
import Control.Monad.Reader (runReaderT)
import System.IO
import Utils.ABT
import Utils.Env
import Utils.Eval
import Utils.Pretty
import DependentImplicit.Core.ConSig
import DependentImplicit.Core.Evaluation
import DependentImplicit.Core.Parser
import DependentImplicit.Core.Term
import DependentImplicit.Unification.Elaborator
import DependentImplicit.Unification.Elaboration
import DependentImplicit.Unification.TypeChecking
-- | Write a string to stdout and flush immediately (needed so prompts
-- appear before 'getLine' blocks).
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
-- | Display a prompt, flush, and read one line of user input.
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
-- | Repeatedly run @prompt@, feeding each result to @action@, until a
-- result satisfies the terminating predicate @p@ (the terminating
-- result itself is discarded).
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m ()) -> m ()
until_ p prompt action = loop
  where
    loop = do
      result <- prompt
      if p result
        then return ()
        else action result >> loop
-- | Elaborate a whole program from source text and, on success, run a
-- read-eval-print loop until the user enters @:quit@.
repl :: String -> IO ()
repl src = case loadProgram src of
  Left e -> flushStr ("ERROR: " ++ e ++ "\n")
  Right (sig,defs,ctx,env)
    -> do hSetBuffering stdin LineBuffering
          until_ (== ":quit")
                 (readPrompt "$> ")
                 (evalAndPrint sig defs ctx env)
  where
    -- Parse and elaborate the program, yielding the signature,
    -- definitions, context, and an evaluation environment.
    loadProgram :: String -> Either String (Signature,Definitions,Context,Env String Term)
    loadProgram src
      = do prog <- parseProgram src
           (_,ElabState sig defs ctx _ _) <- runElaborator0 (elabProgram prog)
           let env = definitionsToEnvironment defs
           return (sig,defs,ctx,env)
    -- Parse one term, resolve its free names to definitions, infer its
    -- type, and evaluate it in the program environment.
    loadTerm :: Signature -> Definitions -> Context -> Env String Term -> String -> Either String Term
    loadTerm sig defs ctx env src
      = do tm0 <- parseTerm src
           let tm = freeToDefined (In . Defined) tm0
           case runElaborator (infer tm) sig defs ctx of
             Left e -> Left e
             Right ((etm,_),_) -> runReaderT (eval etm) env
    -- Evaluate one input line and print the value or an error message;
    -- blank input is ignored.
    evalAndPrint :: Signature -> Definitions -> Context -> Env String Term -> String -> IO ()
    evalAndPrint _ _ _ _ "" = return ()
    evalAndPrint sig defs ctx env src
      = case loadTerm sig defs ctx env src of
          Left e -> flushStr ("ERROR: " ++ e ++ "\n")
          Right v -> flushStr (pretty v ++ "\n")
replFile :: String -> IO ()
replFile loc = readFile loc >>= repl | null | https://raw.githubusercontent.com/BekaValentine/SimpleFP-v2/ae00ec809caefcd13664395b0ae2fc66145f6a74/src/DependentImplicit/Unification/REPL.hs | haskell | module DependentImplicit.Unification.REPL where
import Control.Monad.Reader (runReaderT)
import System.IO
import Utils.ABT
import Utils.Env
import Utils.Eval
import Utils.Pretty
import DependentImplicit.Core.ConSig
import DependentImplicit.Core.Evaluation
import DependentImplicit.Core.Parser
import DependentImplicit.Core.Term
import DependentImplicit.Unification.Elaborator
import DependentImplicit.Unification.Elaboration
import DependentImplicit.Unification.TypeChecking
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m ()) -> m ()
until_ p prompt action = do
result <- prompt
if p result
then return ()
else action result >> until_ p prompt action
repl :: String -> IO ()
repl src = case loadProgram src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right (sig,defs,ctx,env)
-> do hSetBuffering stdin LineBuffering
until_ (== ":quit")
(readPrompt "$> ")
(evalAndPrint sig defs ctx env)
where
loadProgram :: String -> Either String (Signature,Definitions,Context,Env String Term)
loadProgram src
= do prog <- parseProgram src
(_,ElabState sig defs ctx _ _) <- runElaborator0 (elabProgram prog)
let env = definitionsToEnvironment defs
return (sig,defs,ctx,env)
loadTerm :: Signature -> Definitions -> Context -> Env String Term -> String -> Either String Term
loadTerm sig defs ctx env src
= do tm0 <- parseTerm src
let tm = freeToDefined (In . Defined) tm0
case runElaborator (infer tm) sig defs ctx of
Left e -> Left e
Right ((etm,_),_) -> runReaderT (eval etm) env
evalAndPrint :: Signature -> Definitions -> Context -> Env String Term -> String -> IO ()
evalAndPrint _ _ _ _ "" = return ()
evalAndPrint sig defs ctx env src
= case loadTerm sig defs ctx env src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right v -> flushStr (pretty v ++ "\n")
replFile :: String -> IO ()
replFile loc = readFile loc >>= repl | |
6429576e16e0be359c5e422d299f4f2728d3a77848fcbe62f41c4ad5ef6a0971 | RRethy/nvim-treesitter-textsubjects | textsubjects-big.scm | (([
(method)
(singleton_method)
(module)
(class)
] @_start @_end)
(#make-range! "range" @_start @_end))
; sorbet type *annotation*
; Match a `sig` call immediately followed by its method, and merge them
; into one text range so both are selected together.
(((call method: (identifier) @_start) . (method) @_end)
 (#match? @_start "sig")
 (#make-range! "range" @_start @_end))
| null | https://raw.githubusercontent.com/RRethy/nvim-treesitter-textsubjects/66a62f42fa74826a145d75e4715c2786e319996e/queries/ruby/textsubjects-big.scm | scheme | sorbet type *annotation* | (([
(method)
(singleton_method)
(module)
(class)
] @_start @_end)
(#make-range! "range" @_start @_end))
(((call method: (identifier) @_start) . (method) @_end)
(#match? @_start "sig")
(#make-range! "range" @_start @_end))
|
ce798198e6143c242f7513b17d2fb9f30d2084c91ff5aaa8806d5eb00a53f442 | techascent/tech.ml | protocols.clj | (ns tech.v3.libs.smile.protocols
(:require [tech.v3.libs.smile.data :as smile-data]
[tech.v3.datatype :as dtype]
[tech.v3.dataset.utils :as ds-utils])
(:import [smile.data.formula Formula]
[smile.data.type StructType]
[smile.regression DataFrameRegression]
[smile.classification DataFrameClassifier]
[java.util Properties List]
[smile.data.formula Formula TechFactory Variable]))
(set! *warn-on-reflection* true)
;; Protocol for extracting the training Formula from a fitted smile model.
(defprotocol PToFormula
  (get-model-formula [item]))
;; Both data-frame model flavors expose their formula via the .formula
;; accessor.
(extend-protocol PToFormula
  DataFrameRegression
  (get-model-formula [item] (.formula item))
  DataFrameClassifier
  (get-model-formula [item] (.formula item)))
;; Coerce `item` to a smile Formula: Formula instances pass through,
;; anything else is asked via the PToFormula protocol.
(defn ->formula
  ^Formula [item]
  (if (instance? Formula item)
    item
    (get-model-formula item)))
;; Bind `model`'s formula to a smile StructType built from the columns
;; of `feature-ds` (column-safe names plus datatypes), via Formula.bind.
(defn initialize-model-formula!
  [model feature-ds]
  (let [formula (->formula model)
        ;; one StructField per feature column, in column order
        ^List fields (->> (vals feature-ds)
                          (map meta)
                          (mapv (fn [{:keys [name datatype]}]
                                  (smile-data/smile-struct-field
                                   (ds-utils/column-safe-name name)
                                   datatype))))
        struct-type (StructType. fields)]
    (.bind formula struct-type)))
(defn- resolve-default
  "Resolve a default spec: a function is applied to `dataset`, any
  other value passes through unchanged."
  [item dataset]
  (if-not (fn? item)
    item
    (item dataset)))
;; Build a java.util.Properties from the model `metadata` option specs,
;; overridden by user `options`. Property keys are
;; "<property-name-stem>.<option-name>" with dashes mapped to dots;
;; values go through `lookup-table` translation and are cast to the
;; default's datatype before being stringified.
(defn options->properties
  ^Properties [metadata dataset options]
  (let [pname-stem (:property-name-stem metadata)]
    (->> (:options metadata)
         (reduce (fn [^Properties props {:keys [name default lookup-table]}]
                   ;; a fn default is computed from the dataset and the
                   ;; properties accumulated so far
                   (let [default (if (fn? default)
                                   (default dataset props)
                                   (or (get lookup-table default)
                                       default))
                         value (get options name)
                         value (get lookup-table value value)
                         ]
                   (.put props (format "%s.%s"
                                       pname-stem
                                       (.replace ^String (clojure.core/name name)
                                                 "-" "."))
                         (str (dtype/cast (or value
                                              (resolve-default default dataset))
                                          (dtype/get-datatype default)))))
                   props)
                 (Properties.)))))
(defn make-formula
"Make a formula out of a response name and a sequence of feature names"
[^String response & [features]]
(Formula. (TechFactory/variable response)
^"[Lsmile.data.formula.Variable;" (->> features
(map #(TechFactory/variable %))
(into-array Variable ))))
| null | https://raw.githubusercontent.com/techascent/tech.ml/7f2cc506980a05f0f8c85f8b1ff0cde6b9451f54/src/tech/v3/libs/smile/protocols.clj | clojure | (ns tech.v3.libs.smile.protocols
(:require [tech.v3.libs.smile.data :as smile-data]
[tech.v3.datatype :as dtype]
[tech.v3.dataset.utils :as ds-utils])
(:import [smile.data.formula Formula]
[smile.data.type StructType]
[smile.regression DataFrameRegression]
[smile.classification DataFrameClassifier]
[java.util Properties List]
[smile.data.formula Formula TechFactory Variable]))
(set! *warn-on-reflection* true)
(defprotocol PToFormula
(get-model-formula [item]))
(extend-protocol PToFormula
DataFrameRegression
(get-model-formula [item] (.formula item))
DataFrameClassifier
(get-model-formula [item] (.formula item)))
(defn ->formula
^Formula [item]
(if (instance? Formula item)
item
(get-model-formula item)))
(defn initialize-model-formula!
[model feature-ds]
(let [formula (->formula model)
^List fields (->> (vals feature-ds)
(map meta)
(mapv (fn [{:keys [name datatype]}]
(smile-data/smile-struct-field
(ds-utils/column-safe-name name)
datatype))))
struct-type (StructType. fields)]
(.bind formula struct-type)))
(defn- resolve-default
[item dataset]
(if (fn? item)
(item dataset)
item))
(defn options->properties
^Properties [metadata dataset options]
(let [pname-stem (:property-name-stem metadata)]
(->> (:options metadata)
(reduce (fn [^Properties props {:keys [name default lookup-table]}]
(let [default (if (fn? default)
(default dataset props)
(or (get lookup-table default)
default))
value (get options name)
value (get lookup-table value value)
]
(.put props (format "%s.%s"
pname-stem
(.replace ^String (clojure.core/name name)
"-" "."))
(str (dtype/cast (or value
(resolve-default default dataset))
(dtype/get-datatype default)))))
props)
(Properties.)))))
(defn make-formula
"Make a formula out of a response name and a sequence of feature names"
[^String response & [features]]
(Formula. (TechFactory/variable response)
^"[Lsmile.data.formula.Variable;" (->> features
(map #(TechFactory/variable %))
(into-array Variable ))))
| |
a7d229db2ddaa4078713087399d8c5ef1f3467db29508d1d44a09b8fe7db6caa | open-company/open-company-web | reaction.cljs | (ns oc.web.utils.reaction)
(defn can-pick-reaction?
"Given an emoji and the list of the current reactions
check if the user can react.
A user can react if:
- the reaction is NOT already in the reactions list
- the reaction is already in the reactions list and its not reacted"
[emoji reactions-data]
(let [reaction-map (first (filter #(= (:reaction %) emoji) reactions-data))]
(or (not reaction-map)
(and (map? reaction-map)
(not (:reacted reaction-map)))))) | null | https://raw.githubusercontent.com/open-company/open-company-web/dfce3dd9bc115df91003179bceb87cca1f84b6cf/src/main/oc/web/utils/reaction.cljs | clojure | (ns oc.web.utils.reaction)
(defn can-pick-reaction?
"Given an emoji and the list of the current reactions
check if the user can react.
A user can react if:
- the reaction is NOT already in the reactions list
- the reaction is already in the reactions list and its not reacted"
[emoji reactions-data]
(let [reaction-map (first (filter #(= (:reaction %) emoji) reactions-data))]
(or (not reaction-map)
(and (map? reaction-map)
(not (:reacted reaction-map)))))) | |
6970a0c7519b88f4385d571a4b07f3cae7aa103c8efbb4d3223f2ce42f09b7c9 | icicle-lang/x-ambiata | Group.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
module X.Data.Vector.Stream.Group (
embed
, concatWith
, chunked
, grouped
) where
import Control.Monad.Base (MonadBase(..))
import Control.Monad.Primitive (PrimMonad(..))
import Data.Vector.Fusion.Stream.Monadic (Stream(..), Step(..))
import qualified Data.Vector.Fusion.Stream.Monadic as Stream
import qualified Data.Vector.Generic as Generic
import P
import qualified Prelude as Savage
import qualified X.Data.Vector.Grow as Grow
-- | Lift a monadic action that produces a stream into a stream,
-- deferring the action until the stream is consumed.
--
-- Fix: the INLINE pragma had been mangled to invalid syntax
-- (@# INLINE [ 1 ] embed #@); restored.
embed :: Monad m => m (Stream m a) -> Stream m a
embed mstream =
  Stream.concatMapM (const mstream) (Stream.singleton ())
{-# INLINE [1] embed #-}
-- | 'Stream.concatMap' with its arguments flipped, for pipeline style.
--
-- Fix: the INLINE pragma had been mangled to invalid syntax; restored.
concatWith :: Monad m => Stream m a -> (a -> Stream m b) -> Stream m b
concatWith =
  flip Stream.concatMap
{-# INLINE [1] concatWith #-}
-- | Turns a stream of @a@ in to a stream of chunks of size @n@.
-- Calls 'Savage.error' when @n@ is not positive.
--
-- Fix: the INLINE pragma had been mangled to invalid syntax; restored.
chunked :: (PrimMonad b, MonadBase b m, Generic.Vector v a) => Int -> Stream m a -> Stream m (v a)
chunked n xs =
  if n <= 0 then
    Savage.error "X.Data.Vector.Stream.Group.chunked: chunk size must be greater than zero"
  else
    -- no two elements are considered equal, so every chunk is exactly n
    grouped (\_ _ -> False) n xs
{-# INLINE [1] chunked #-}
-- | Turns a stream of @a@ in to a stream of chunks of at least size @n@,
-- except for the last one. Values of @a@ which are equal according to the
-- comparison function stay in the same chunk.
--
-- Fix: the leading haddock line had lost its @--@ prefix and the
-- closing INLINE pragma was mangled to invalid syntax; both restored.
-- The chunking logic itself is unchanged.
grouped :: (PrimMonad b, MonadBase b m, Generic.Vector v a) => (a -> a -> Bool) -> Int -> Stream m a -> Stream m (v a)
grouped eq n (Stream step t) =
  if n <= 0 then
    Savage.error "X.Data.Vector.Stream.Group.grouped: chunk size must be greater than zero"
  else
    embed $ do
      let
        -- does y start a new group relative to the previous element?
        notEq mx y =
          case mx of
            Nothing' ->
              True
            Just' x ->
              not (eq x y)
        {-# INLINE [0] notEq #-}
        -- state: Just' (stream state, chunk size so far, previous
        -- element, growable buffer); Nothing' once the input is done.
        loop = \case
          Nothing' ->
            pure Done
          Just' (s0, i, last, g0) ->
            step s0 >>= \case
              Yield x s ->
                if i >= n && last `notEq` x then do
                  -- chunk is full and x starts a new group: emit it
                  xs <- liftBase $ Grow.unsafeFreeze g0
                  g <- liftBase $ Grow.new n
                  liftBase $ Grow.add g x
                  pure . Yield xs $ Just' (s, 1, Just' x, g)
                else do
                  liftBase $ Grow.add g0 x
                  pure . Skip $ Just' (s, i + 1, Just' x, g0)
              Skip s ->
                pure . Skip $ Just' (s, i, last, g0)
              Done ->
                -- flush the final (possibly short) chunk, if non-empty
                if i == 0 then
                  pure $ Skip Nothing'
                else do
                  xs <- liftBase $ Grow.unsafeFreeze g0
                  pure $ Yield xs Nothing'
        {-# INLINE [0] loop #-}
      g <- liftBase $ Grow.new n
      pure .
        Stream loop $ Just' (t, 0, Nothing', g)
{-# INLINE [1] grouped #-}
| null | https://raw.githubusercontent.com/icicle-lang/x-ambiata/532f8473084b24fb9d8c90fda7fee9858b9fbe30/x-vector/src/X/Data/Vector/Stream/Group.hs | haskell | # LANGUAGE BangPatterns #
| Turns a stream of @a@ in to a stream of chunks of size @n@.
except for the last one. Values of @a@ which are equal according to the
comparison function stay in the same chunk.
# INLINE [0] notEq #
# INLINE [0] loop # | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
module X.Data.Vector.Stream.Group (
embed
, concatWith
, chunked
, grouped
) where
import Control.Monad.Base (MonadBase(..))
import Control.Monad.Primitive (PrimMonad(..))
import Data.Vector.Fusion.Stream.Monadic (Stream(..), Step(..))
import qualified Data.Vector.Fusion.Stream.Monadic as Stream
import qualified Data.Vector.Generic as Generic
import P
import qualified Prelude as Savage
import qualified X.Data.Vector.Grow as Grow
embed :: Monad m => m (Stream m a) -> Stream m a
embed mstream =
Stream.concatMapM (const mstream) (Stream.singleton ())
# INLINE [ 1 ] embed #
concatWith :: Monad m => Stream m a -> (a -> Stream m b) -> Stream m b
concatWith =
flip Stream.concatMap
# INLINE [ 1 ] concatWith #
chunked :: (PrimMonad b, MonadBase b m, Generic.Vector v a) => Int -> Stream m a -> Stream m (v a)
chunked n xs =
if n <= 0 then
Savage.error "X.Data.Vector.Stream.Group.chunked: chunk size must be greater than zero"
else
grouped (\_ _ -> False) n xs
# INLINE [ 1 ] chunked #
| Turns a stream of @a@ in to a stream of chunks of at least size @n@ ,
grouped :: (PrimMonad b, MonadBase b m, Generic.Vector v a) => (a -> a -> Bool) -> Int -> Stream m a -> Stream m (v a)
grouped eq n (Stream step t) =
if n <= 0 then
Savage.error "X.Data.Vector.Stream.Group.grouped: chunk size must be greater than zero"
else
embed $ do
let
notEq mx y =
case mx of
Nothing' ->
True
Just' x ->
not (eq x y)
loop = \case
Nothing' ->
pure Done
Just' (s0, i, last, g0) ->
step s0 >>= \case
Yield x s ->
if i >= n && last `notEq` x then do
xs <- liftBase $ Grow.unsafeFreeze g0
g <- liftBase $ Grow.new n
liftBase $ Grow.add g x
pure . Yield xs $ Just' (s, 1, Just' x, g)
else do
liftBase $ Grow.add g0 x
pure . Skip $ Just' (s, i + 1, Just' x, g0)
Skip s ->
pure . Skip $ Just' (s, i, last, g0)
Done ->
if i == 0 then
pure $ Skip Nothing'
else do
xs <- liftBase $ Grow.unsafeFreeze g0
pure $ Yield xs Nothing'
g <- liftBase $ Grow.new n
pure .
Stream loop $ Just' (t, 0, Nothing', g)
# INLINE [ 1 ] grouped #
|
6f194eb15f8147de30da2e2d47d8ca6acf2342328d44b5151eb95855cf879f90 | scymtym/trivial-with-current-source-form | clasp.lisp | ;;;; clasp.lisp --- Clasp implementation.
;;;;
;;;; Author: Bike <>
(cl:in-package #:trivial-with-current-source-form)
;; Build a Clasp `ext:with-current-source-form` form: FORMS become the
;; source-form designators and BODY is spliced in as the body.
(defun expand (forms body)
  `(ext:with-current-source-form (,@forms) ,@body))
| null | https://raw.githubusercontent.com/scymtym/trivial-with-current-source-form/198fdc9193c6c8bc43fad2ad7562603d11194c6d/code/clasp.lisp | lisp | clasp.lisp --- Clasp implementation.
Author: Bike <> |
(cl:in-package #:trivial-with-current-source-form)
(defun expand (forms body)
`(ext:with-current-source-form (,@forms) ,@body))
|
8283f1d00fb2493d452867cb0e615833661932a56aae54562549cc058adc6ec7 | jacekschae/learn-pedestal-course-files | dev.clj | (ns dev
(:require [clojure.edn :as edn]
[cheffy.server :as server]
[com.stuartsierra.component.repl :as cr]))
(defn system
  "Build the Cheffy system map from the development EDN config file."
  [_]
  (-> "src/config/cheffy/development.edn"
      slurp
      edn/read-string
      server/create-system))
(cr/set-init system)
;; Start the dev system via component.repl.
(defn start-dev []
  (cr/start))
;; Stop the running dev system.
(defn stop-dev []
  (cr/stop))
;; Stop, reload changed namespaces, and start again.
(defn restart-dev []
  (cr/reset))
(comment
(:api-server cr/system)
(start-dev)
(restart-dev)
(stop-dev)
) | null | https://raw.githubusercontent.com/jacekschae/learn-pedestal-course-files/33c62931273d30860e6efeeb9513dc4630c75218/increments/17-cheffy-interceptor/src/dev/dev.clj | clojure | (ns dev
(:require [clojure.edn :as edn]
[cheffy.server :as server]
[com.stuartsierra.component.repl :as cr]))
(defn system [_]
(-> (-> "src/config/cheffy/development.edn" (slurp) (edn/read-string))
(server/create-system)))
(cr/set-init system)
(defn start-dev []
(cr/start))
(defn stop-dev []
(cr/stop))
(defn restart-dev []
(cr/reset))
(comment
(:api-server cr/system)
(start-dev)
(restart-dev)
(stop-dev)
) | |
73df1d9ba4998962bec3973b532186592227b7c46713c8b2e3bb76c5c35db890 | nikita-volkov/rebase | Pool.hs | module Rebase.Foreign.Marshal.Pool
(
module Foreign.Marshal.Pool
)
where
import Foreign.Marshal.Pool
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Foreign/Marshal/Pool.hs | haskell | module Rebase.Foreign.Marshal.Pool
(
module Foreign.Marshal.Pool
)
where
import Foreign.Marshal.Pool
| |
5719003302edff231b610f50f59d8a8b48318ebc0e28bc60508d4bda8c259fa4 | realworldocaml/book | test_value_printing.ml |
* Copyright ( c ) 2013 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2013 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
open OUnit2
open Ctypes
let _ = Dl.(dlopen ~filename:"../clib/clib.so" ~flags:[RTLD_NOW])
let strip_whitespace = Str.(global_replace (regexp "[\n ]+") "")
let equal_ignoring_whitespace l r =
strip_whitespace l = strip_whitespace r
module Common_tests(S : Cstubs.FOREIGN with type 'a result = 'a
and type 'a return = 'a) =
struct
module M = Functions.Stubs(S)
open M
(*
Test the printing of atomic values: arithmetic types and values of abstract
types.
*)
let test_atomic_printing _ =
let open Signed in
let open Unsigned in
(* char *)
let _CHAR_MIN = retrieve_CHAR_MIN () in
let _CHAR_MAX = retrieve_CHAR_MAX () in
assert_equal (string_of char _CHAR_MIN) (Printf.sprintf "'%c'" _CHAR_MIN);
assert_equal (string_of char 'a') "'a'";
assert_equal (string_of char 'A') "'A'";
assert_equal (string_of char '3') "'3'";
assert_equal (string_of char '\n') "'\n'";
assert_equal (string_of char ' ') "' '";
assert_equal (string_of char _CHAR_MAX) (Printf.sprintf "'%c'" _CHAR_MAX);
(* signed char *)
let _SCHAR_MIN = retrieve_SCHAR_MIN () in
let _SCHAR_MAX = retrieve_SCHAR_MAX () in
assert_equal (string_of schar _SCHAR_MIN) (string_of_int _SCHAR_MIN);
assert_equal (string_of schar 0) (string_of_int 0);
assert_equal (string_of schar (-5)) (string_of_int (-5));
assert_equal (string_of schar 5) (string_of_int 5);
assert_equal (string_of schar _SCHAR_MAX) (string_of_int _SCHAR_MAX);
(* short *)
let _SHRT_MIN = retrieve_SHRT_MIN () in
let _SHRT_MAX = retrieve_SHRT_MAX () in
assert_equal (string_of short _SHRT_MIN) (string_of_int _SHRT_MIN);
assert_equal (string_of short 0) (string_of_int 0);
assert_equal (string_of short (-5)) (string_of_int (-5));
assert_equal (string_of short 14) (string_of_int 14);
assert_equal (string_of short _SHRT_MAX) (string_of_int _SHRT_MAX);
(* int *)
let _INT_MIN = retrieve_INT_MIN () in
let _INT_MAX = retrieve_INT_MAX () in
assert_equal (string_of int _INT_MIN) (string_of_int _INT_MIN);
assert_equal (string_of int 0) (string_of_int 0);
assert_equal (string_of int (-5)) (string_of_int (-5));
assert_equal (string_of int 14) (string_of_int 14);
assert_equal (string_of int _INT_MAX) (string_of_int _INT_MAX);
(* long *)
let _LONG_MAX = retrieve_LONG_MAX () in
let _LONG_MIN = retrieve_LONG_MIN () in
assert_equal (string_of long _LONG_MIN) Long.(to_string _LONG_MIN);
assert_equal (string_of long Long.(of_int 0)) Long.(to_string (of_int 0));
assert_equal (string_of long (Long.of_int (-5))) Long.(to_string (of_int (-5)));
assert_equal (string_of long (Long.of_int 14)) Long.(to_string (of_int 14));
assert_equal (string_of long _LONG_MAX) Long.(to_string _LONG_MAX);
(* long long *)
let _LLONG_MAX = retrieve_LLONG_MAX () in
let _LLONG_MIN = retrieve_LLONG_MIN () in
assert_equal (string_of llong _LLONG_MIN) LLong.(to_string _LLONG_MIN);
assert_equal (string_of llong LLong.(of_int 0)) LLong.(to_string (of_int 0));
assert_equal (string_of llong (LLong.of_int (-5))) LLong.(to_string (of_int (-5)));
assert_equal (string_of llong (LLong.of_int 14)) LLong.(to_string (of_int 14));
assert_equal (string_of llong _LLONG_MAX) LLong.(to_string _LLONG_MAX);
(* unsigned char *)
let _UCHAR_MAX = retrieve_UCHAR_MAX () in
UChar.(assert_equal (string_of uchar (of_int 0)) (to_string (of_int 0)));
UChar.(assert_equal (string_of uchar (of_int 5)) (to_string (of_int 5)));
UChar.(assert_equal (string_of uchar _UCHAR_MAX) (to_string _UCHAR_MAX));
(* bool *)
assert_equal (string_of bool true) "true";
assert_equal (string_of bool false) "false";
(* unsigned short *)
let _USHRT_MAX = retrieve_USHRT_MAX () in
UShort.(assert_equal (string_of ushort (of_int 0)) (to_string (of_int 0)));
UShort.(assert_equal (string_of ushort (of_int 5)) (to_string (of_int 5)));
UShort.(assert_equal (string_of ushort _USHRT_MAX) (to_string _USHRT_MAX));
(* unsigned int *)
let _UINT_MAX = retrieve_UINT_MAX () in
UInt.(assert_equal (string_of uint (of_int 0)) (to_string (of_int 0)));
UInt.(assert_equal (string_of uint (of_int 5)) (to_string (of_int 5)));
UInt.(assert_equal (string_of uint _UINT_MAX) (to_string _UINT_MAX));
(* unsigned long *)
let _ULONG_MAX = retrieve_ULONG_MAX () in
ULong.(assert_equal (string_of ulong (of_int 0)) (to_string (of_int 0)));
ULong.(assert_equal (string_of ulong (of_int 5)) (to_string (of_int 5)));
ULong.(assert_equal (string_of ulong _ULONG_MAX) (to_string _ULONG_MAX));
(* unsigned long long *)
let _ULLONG_MAX = retrieve_ULLONG_MAX () in
ULLong.(assert_equal (string_of ullong (of_int 0)) (to_string (of_int 0)));
ULLong.(assert_equal (string_of ullong (of_int 5)) (to_string (of_int 5)));
ULLong.(assert_equal (string_of ullong _ULLONG_MAX) (to_string _ULLONG_MAX));
(* int8_t *)
let _INT8_MIN = retrieve_INT8_MIN () in
let _INT8_MAX = retrieve_INT8_MAX () in
assert_equal (string_of int8_t _INT8_MIN) (string_of_int _INT8_MIN);
assert_equal (string_of int8_t 0) (string_of_int 0);
assert_equal (string_of int8_t (-5)) (string_of_int (-5));
assert_equal (string_of int8_t 14) (string_of_int 14);
assert_equal (string_of int8_t _INT8_MAX) (string_of_int _INT8_MAX);
(* int16_t *)
let _INT16_MIN = retrieve_INT16_MIN () in
let _INT16_MAX = retrieve_INT16_MAX () in
assert_equal (string_of int16_t _INT16_MIN) (string_of_int _INT16_MIN);
assert_equal (string_of int16_t 0) (string_of_int 0);
assert_equal (string_of int16_t (-5)) (string_of_int (-5));
assert_equal (string_of int16_t 14) (string_of_int 14);
assert_equal (string_of int16_t _INT16_MAX) (string_of_int _INT16_MAX);
(* int32_t *)
let _INT32_MIN = retrieve_INT32_MIN () in
let _INT32_MAX = retrieve_INT32_MAX () in
assert_equal (string_of int32_t _INT32_MIN) (Int32.to_string _INT32_MIN);
assert_equal (string_of int32_t 0l) (Int32.to_string 0l);
assert_equal (string_of int32_t (-5l)) (Int32.to_string (-5l));
assert_equal (string_of int32_t 14l) (Int32.to_string 14l);
assert_equal (string_of int32_t _INT32_MAX) (Int32.to_string _INT32_MAX);
(* int64_t *)
let _INT64_MIN = retrieve_INT64_MIN () in
let _INT64_MAX = retrieve_INT64_MAX () in
assert_equal (string_of int64_t _INT64_MIN) (Int64.to_string _INT64_MIN);
assert_equal (string_of int64_t 0L) (Int64.to_string 0L);
assert_equal (string_of int64_t (-5L)) (Int64.to_string (-5L));
assert_equal (string_of int64_t 14L) (Int64.to_string 14L);
assert_equal (string_of int64_t _INT64_MAX) (Int64.to_string _INT64_MAX);
(* uint8_t *)
let _UINT8_MAX = retrieve_UINT8_MAX () in
UInt8.(assert_equal (string_of uint8_t (of_int 0)) (to_string (of_int 0)));
UInt8.(assert_equal (string_of uint8_t (of_int 5)) (to_string (of_int 5)));
UInt8.(assert_equal (string_of uint8_t _UINT8_MAX) (to_string _UINT8_MAX));
(* uint16_t *)
let _UINT16_MAX = retrieve_UINT16_MAX () in
UInt16.(assert_equal (string_of uint16_t (of_int 0)) (to_string (of_int 0)));
UInt16.(assert_equal (string_of uint16_t (of_int 5)) (to_string (of_int 5)));
UInt16.(assert_equal (string_of uint16_t _UINT16_MAX) (to_string _UINT16_MAX));
uint32_t
let _UINT32_MAX = retrieve_UINT32_MAX () in
UInt32.(assert_equal (string_of uint32_t (of_int 0)) (to_string (of_int 0)));
UInt32.(assert_equal (string_of uint32_t (of_int 5)) (to_string (of_int 5)));
UInt32.(assert_equal (string_of uint32_t _UINT32_MAX) (to_string _UINT32_MAX));
(* uint64_t *)
let _UINT64_MAX = retrieve_UINT64_MAX () in
UInt64.(assert_equal (string_of uint64_t (of_int 0)) (to_string (of_int 0)));
UInt64.(assert_equal (string_of uint64_t (of_int 5)) (to_string (of_int 5)));
UInt64.(assert_equal (string_of uint64_t _UINT64_MAX) (to_string _UINT64_MAX));
(* size_t *)
let _SIZE_MAX = retrieve_SIZE_MAX () in
Size_t.(assert_equal (string_of size_t (of_int 0)) (to_string (of_int 0)));
Size_t.(assert_equal (string_of size_t (of_int 5)) (to_string (of_int 5)));
Size_t.(assert_equal (string_of size_t _SIZE_MAX) (to_string _SIZE_MAX));
(* float *)
let _FLT_MIN = retrieve_FLT_MIN () in
let _FLT_MAX = retrieve_FLT_MAX () in
let rex = Str.regexp "e\\([-+]\\)[0]+\\([1-9]+\\)" in
let exp_equal a b =
remove leading zeros from exponential form
let a = Str.global_replace rex "e\\1\\2" a in
let b = Str.global_replace rex "e\\1\\2" b in
assert_equal a b in
exp_equal (string_of float _FLT_MIN) (string_of_float _FLT_MIN);
assert_equal (valid_float_lexem (string_of float 0.0)) (string_of_float 0.0);
assert_equal (string_of float nan) (string_of_float nan);
assert_equal (string_of float infinity) (string_of_float infinity);
exp_equal (string_of float _FLT_MAX) (string_of_float _FLT_MAX);
(* double *)
let _DBL_MIN = retrieve_DBL_MIN () in
let _DBL_MAX = retrieve_DBL_MAX () in
assert_equal (string_of double _DBL_MIN) (string_of_float _DBL_MIN);
assert_equal (valid_float_lexem (string_of double 0.0)) (string_of_float 0.0);
assert_equal (string_of double (-1.03)) (string_of_float (-1.03));
assert_equal (string_of double (34.22)) (string_of_float (34.22));
exp_equal (string_of double (1.39e16)) (string_of_float (1.39e16));
assert_equal (string_of double nan) (string_of_float nan);
assert_equal (string_of double infinity) (string_of_float infinity);
assert_equal (string_of double _DBL_MAX) (string_of_float _DBL_MAX);
()
end
(*
Test the printing of pointers.
*)
let test_pointer_printing _ =
(* There's not much we can test here, since pointer formatting is
implementation-dependent. We can at least run the pointer-formatting
code, and test that pointers of different types are printed
equivalently. *)
let arr = CArray.make int 10 in
let p = CArray.start arr in
assert_equal
(string_of (ptr (reference_type p)) p)
(string_of (ptr void) (to_voidp p))
(*
Test the printing of structs.
*)
let test_struct_printing _ =
let s = structure "s" in
let (-:) ty label = field s label ty in
let a = array 3 int -: "arr" in
let d = double -: "dbl" in
let c = char -: "chr" in
let () = seal s in
let t = structure "t" in
let (-:) ty label = field t label ty in
let ts = s -: "ts" in
let ti = int -: "ti" in
let () = seal t in
let vt = make t in
let vs = make s in
begin
setf vs a (CArray.of_list int [4; 5; 6]);
setf vs d nan;
setf vs c 'a';
setf vt ts vs;
setf vt ti 14;
assert_bool "struct printing"
(equal_ignoring_whitespace
"{ts = { arr = {4, 5, 6}, dbl = nan, chr = 'a' }, ti = 14}"
(string_of t vt))
end
(*
Test the printing of unions.
*)
let test_union_printing _ =
let s = structure "s" in
let (-:) ty label = field s label ty in
let i = uint16_t -: "i" in
let j = uint16_t -: "j" in
let () = seal s in
let u = union "u" in
let (-:) ty label = field u label ty in
let us = s -: "us" in
let ua = array 4 uint8_t -: "ua" in
let () = seal u in
let v = make u in
ignore (i, j, us);
setf v ua (CArray.make ~initial:(Unsigned.UInt8.of_int 0) uint8_t 4);
assert_bool "union printing"
(equal_ignoring_whitespace "{ us = {i = 0, j = 0} | ua = {0, 0, 0, 0}}" (string_of u v))
(*
Test the printing of array types.
*)
let test_array_printing _ =
let arr = CArray.of_list int [-1; 0; 1] in
let arrarr = CArray.of_list (array 3 int) [arr; arr] in
assert_bool "array printing"
(equal_ignoring_whitespace "{{-1, 0, 1}, {-1, 0, 1}}"
(string_of (array 2 (array 3 int)) arrarr))
(*
Test the printing of ocaml_string values.
*)
let test_ocaml_string_printing _ =
let s = "abc@%^&*[\"" in
begin
assert_equal
(string_of ocaml_string (ocaml_string_start s))
(Printf.sprintf "%S" s);
assert_bool "ocaml_string printing with offsets"
(equal_ignoring_whitespace
(string_of ocaml_string ((ocaml_string_start s) +@ 3))
(Printf.sprintf "%S [offset:3]" s));
end
module Foreign_tests = Common_tests(Tests_common.Foreign_binder)
module Stub_tests = Common_tests(Generated_bindings)
let suite = "Value printing tests" >:::
["printing atomic values (foreign)"
>:: Foreign_tests.test_atomic_printing;
"printing atomic values (stubs)"
>:: Stub_tests.test_atomic_printing;
"printing pointers"
>:: test_pointer_printing;
"printing structs"
>:: test_struct_printing;
"printing unions"
>:: test_union_printing;
"printing arrays"
>:: test_array_printing;
"printing ocaml strings"
>:: test_ocaml_string_printing;
]
let _ =
run_test_tt_main suite
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/ocaml-ctypes/tests/test-value_printing/test_value_printing.ml | ocaml |
Test the printing of atomic values: arithmetic types and values of abstract
types.
char
signed char
short
int
long
long long
unsigned char
bool
unsigned short
unsigned int
unsigned long
unsigned long long
int8_t
int16_t
int32_t
int64_t
uint8_t
uint16_t
uint64_t
size_t
float
double
Test the printing of pointers.
There's not much we can test here, since pointer formatting is
implementation-dependent. We can at least run the pointer-formatting
code, and test that pointers of different types are printed
equivalently.
Test the printing of structs.
Test the printing of unions.
Test the printing of array types.
Test the printing of ocaml_string values.
|
* Copyright ( c ) 2013 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2013 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
open OUnit2
open Ctypes
let _ = Dl.(dlopen ~filename:"../clib/clib.so" ~flags:[RTLD_NOW])
let strip_whitespace = Str.(global_replace (regexp "[\n ]+") "")
let equal_ignoring_whitespace l r =
strip_whitespace l = strip_whitespace r
module Common_tests(S : Cstubs.FOREIGN with type 'a result = 'a
and type 'a return = 'a) =
struct
module M = Functions.Stubs(S)
open M
let test_atomic_printing _ =
let open Signed in
let open Unsigned in
let _CHAR_MIN = retrieve_CHAR_MIN () in
let _CHAR_MAX = retrieve_CHAR_MAX () in
assert_equal (string_of char _CHAR_MIN) (Printf.sprintf "'%c'" _CHAR_MIN);
assert_equal (string_of char 'a') "'a'";
assert_equal (string_of char 'A') "'A'";
assert_equal (string_of char '3') "'3'";
assert_equal (string_of char '\n') "'\n'";
assert_equal (string_of char ' ') "' '";
assert_equal (string_of char _CHAR_MAX) (Printf.sprintf "'%c'" _CHAR_MAX);
let _SCHAR_MIN = retrieve_SCHAR_MIN () in
let _SCHAR_MAX = retrieve_SCHAR_MAX () in
assert_equal (string_of schar _SCHAR_MIN) (string_of_int _SCHAR_MIN);
assert_equal (string_of schar 0) (string_of_int 0);
assert_equal (string_of schar (-5)) (string_of_int (-5));
assert_equal (string_of schar 5) (string_of_int 5);
assert_equal (string_of schar _SCHAR_MAX) (string_of_int _SCHAR_MAX);
let _SHRT_MIN = retrieve_SHRT_MIN () in
let _SHRT_MAX = retrieve_SHRT_MAX () in
assert_equal (string_of short _SHRT_MIN) (string_of_int _SHRT_MIN);
assert_equal (string_of short 0) (string_of_int 0);
assert_equal (string_of short (-5)) (string_of_int (-5));
assert_equal (string_of short 14) (string_of_int 14);
assert_equal (string_of short _SHRT_MAX) (string_of_int _SHRT_MAX);
let _INT_MIN = retrieve_INT_MIN () in
let _INT_MAX = retrieve_INT_MAX () in
assert_equal (string_of int _INT_MIN) (string_of_int _INT_MIN);
assert_equal (string_of int 0) (string_of_int 0);
assert_equal (string_of int (-5)) (string_of_int (-5));
assert_equal (string_of int 14) (string_of_int 14);
assert_equal (string_of int _INT_MAX) (string_of_int _INT_MAX);
let _LONG_MAX = retrieve_LONG_MAX () in
let _LONG_MIN = retrieve_LONG_MIN () in
assert_equal (string_of long _LONG_MIN) Long.(to_string _LONG_MIN);
assert_equal (string_of long Long.(of_int 0)) Long.(to_string (of_int 0));
assert_equal (string_of long (Long.of_int (-5))) Long.(to_string (of_int (-5)));
assert_equal (string_of long (Long.of_int 14)) Long.(to_string (of_int 14));
assert_equal (string_of long _LONG_MAX) Long.(to_string _LONG_MAX);
let _LLONG_MAX = retrieve_LLONG_MAX () in
let _LLONG_MIN = retrieve_LLONG_MIN () in
assert_equal (string_of llong _LLONG_MIN) LLong.(to_string _LLONG_MIN);
assert_equal (string_of llong LLong.(of_int 0)) LLong.(to_string (of_int 0));
assert_equal (string_of llong (LLong.of_int (-5))) LLong.(to_string (of_int (-5)));
assert_equal (string_of llong (LLong.of_int 14)) LLong.(to_string (of_int 14));
assert_equal (string_of llong _LLONG_MAX) LLong.(to_string _LLONG_MAX);
let _UCHAR_MAX = retrieve_UCHAR_MAX () in
UChar.(assert_equal (string_of uchar (of_int 0)) (to_string (of_int 0)));
UChar.(assert_equal (string_of uchar (of_int 5)) (to_string (of_int 5)));
UChar.(assert_equal (string_of uchar _UCHAR_MAX) (to_string _UCHAR_MAX));
assert_equal (string_of bool true) "true";
assert_equal (string_of bool false) "false";
let _USHRT_MAX = retrieve_USHRT_MAX () in
UShort.(assert_equal (string_of ushort (of_int 0)) (to_string (of_int 0)));
UShort.(assert_equal (string_of ushort (of_int 5)) (to_string (of_int 5)));
UShort.(assert_equal (string_of ushort _USHRT_MAX) (to_string _USHRT_MAX));
let _UINT_MAX = retrieve_UINT_MAX () in
UInt.(assert_equal (string_of uint (of_int 0)) (to_string (of_int 0)));
UInt.(assert_equal (string_of uint (of_int 5)) (to_string (of_int 5)));
UInt.(assert_equal (string_of uint _UINT_MAX) (to_string _UINT_MAX));
let _ULONG_MAX = retrieve_ULONG_MAX () in
ULong.(assert_equal (string_of ulong (of_int 0)) (to_string (of_int 0)));
ULong.(assert_equal (string_of ulong (of_int 5)) (to_string (of_int 5)));
ULong.(assert_equal (string_of ulong _ULONG_MAX) (to_string _ULONG_MAX));
let _ULLONG_MAX = retrieve_ULLONG_MAX () in
ULLong.(assert_equal (string_of ullong (of_int 0)) (to_string (of_int 0)));
ULLong.(assert_equal (string_of ullong (of_int 5)) (to_string (of_int 5)));
ULLong.(assert_equal (string_of ullong _ULLONG_MAX) (to_string _ULLONG_MAX));
let _INT8_MIN = retrieve_INT8_MIN () in
let _INT8_MAX = retrieve_INT8_MAX () in
assert_equal (string_of int8_t _INT8_MIN) (string_of_int _INT8_MIN);
assert_equal (string_of int8_t 0) (string_of_int 0);
assert_equal (string_of int8_t (-5)) (string_of_int (-5));
assert_equal (string_of int8_t 14) (string_of_int 14);
assert_equal (string_of int8_t _INT8_MAX) (string_of_int _INT8_MAX);
let _INT16_MIN = retrieve_INT16_MIN () in
let _INT16_MAX = retrieve_INT16_MAX () in
assert_equal (string_of int16_t _INT16_MIN) (string_of_int _INT16_MIN);
assert_equal (string_of int16_t 0) (string_of_int 0);
assert_equal (string_of int16_t (-5)) (string_of_int (-5));
assert_equal (string_of int16_t 14) (string_of_int 14);
assert_equal (string_of int16_t _INT16_MAX) (string_of_int _INT16_MAX);
let _INT32_MIN = retrieve_INT32_MIN () in
let _INT32_MAX = retrieve_INT32_MAX () in
assert_equal (string_of int32_t _INT32_MIN) (Int32.to_string _INT32_MIN);
assert_equal (string_of int32_t 0l) (Int32.to_string 0l);
assert_equal (string_of int32_t (-5l)) (Int32.to_string (-5l));
assert_equal (string_of int32_t 14l) (Int32.to_string 14l);
assert_equal (string_of int32_t _INT32_MAX) (Int32.to_string _INT32_MAX);
let _INT64_MIN = retrieve_INT64_MIN () in
let _INT64_MAX = retrieve_INT64_MAX () in
assert_equal (string_of int64_t _INT64_MIN) (Int64.to_string _INT64_MIN);
assert_equal (string_of int64_t 0L) (Int64.to_string 0L);
assert_equal (string_of int64_t (-5L)) (Int64.to_string (-5L));
assert_equal (string_of int64_t 14L) (Int64.to_string 14L);
assert_equal (string_of int64_t _INT64_MAX) (Int64.to_string _INT64_MAX);
let _UINT8_MAX = retrieve_UINT8_MAX () in
UInt8.(assert_equal (string_of uint8_t (of_int 0)) (to_string (of_int 0)));
UInt8.(assert_equal (string_of uint8_t (of_int 5)) (to_string (of_int 5)));
UInt8.(assert_equal (string_of uint8_t _UINT8_MAX) (to_string _UINT8_MAX));
let _UINT16_MAX = retrieve_UINT16_MAX () in
UInt16.(assert_equal (string_of uint16_t (of_int 0)) (to_string (of_int 0)));
UInt16.(assert_equal (string_of uint16_t (of_int 5)) (to_string (of_int 5)));
UInt16.(assert_equal (string_of uint16_t _UINT16_MAX) (to_string _UINT16_MAX));
uint32_t
let _UINT32_MAX = retrieve_UINT32_MAX () in
UInt32.(assert_equal (string_of uint32_t (of_int 0)) (to_string (of_int 0)));
UInt32.(assert_equal (string_of uint32_t (of_int 5)) (to_string (of_int 5)));
UInt32.(assert_equal (string_of uint32_t _UINT32_MAX) (to_string _UINT32_MAX));
let _UINT64_MAX = retrieve_UINT64_MAX () in
UInt64.(assert_equal (string_of uint64_t (of_int 0)) (to_string (of_int 0)));
UInt64.(assert_equal (string_of uint64_t (of_int 5)) (to_string (of_int 5)));
UInt64.(assert_equal (string_of uint64_t _UINT64_MAX) (to_string _UINT64_MAX));
let _SIZE_MAX = retrieve_SIZE_MAX () in
Size_t.(assert_equal (string_of size_t (of_int 0)) (to_string (of_int 0)));
Size_t.(assert_equal (string_of size_t (of_int 5)) (to_string (of_int 5)));
Size_t.(assert_equal (string_of size_t _SIZE_MAX) (to_string _SIZE_MAX));
let _FLT_MIN = retrieve_FLT_MIN () in
let _FLT_MAX = retrieve_FLT_MAX () in
let rex = Str.regexp "e\\([-+]\\)[0]+\\([1-9]+\\)" in
let exp_equal a b =
remove leading zeros from exponential form
let a = Str.global_replace rex "e\\1\\2" a in
let b = Str.global_replace rex "e\\1\\2" b in
assert_equal a b in
exp_equal (string_of float _FLT_MIN) (string_of_float _FLT_MIN);
assert_equal (valid_float_lexem (string_of float 0.0)) (string_of_float 0.0);
assert_equal (string_of float nan) (string_of_float nan);
assert_equal (string_of float infinity) (string_of_float infinity);
exp_equal (string_of float _FLT_MAX) (string_of_float _FLT_MAX);
let _DBL_MIN = retrieve_DBL_MIN () in
let _DBL_MAX = retrieve_DBL_MAX () in
assert_equal (string_of double _DBL_MIN) (string_of_float _DBL_MIN);
assert_equal (valid_float_lexem (string_of double 0.0)) (string_of_float 0.0);
assert_equal (string_of double (-1.03)) (string_of_float (-1.03));
assert_equal (string_of double (34.22)) (string_of_float (34.22));
exp_equal (string_of double (1.39e16)) (string_of_float (1.39e16));
assert_equal (string_of double nan) (string_of_float nan);
assert_equal (string_of double infinity) (string_of_float infinity);
assert_equal (string_of double _DBL_MAX) (string_of_float _DBL_MAX);
()
end
let test_pointer_printing _ =
let arr = CArray.make int 10 in
let p = CArray.start arr in
assert_equal
(string_of (ptr (reference_type p)) p)
(string_of (ptr void) (to_voidp p))
let test_struct_printing _ =
let s = structure "s" in
let (-:) ty label = field s label ty in
let a = array 3 int -: "arr" in
let d = double -: "dbl" in
let c = char -: "chr" in
let () = seal s in
let t = structure "t" in
let (-:) ty label = field t label ty in
let ts = s -: "ts" in
let ti = int -: "ti" in
let () = seal t in
let vt = make t in
let vs = make s in
begin
setf vs a (CArray.of_list int [4; 5; 6]);
setf vs d nan;
setf vs c 'a';
setf vt ts vs;
setf vt ti 14;
assert_bool "struct printing"
(equal_ignoring_whitespace
"{ts = { arr = {4, 5, 6}, dbl = nan, chr = 'a' }, ti = 14}"
(string_of t vt))
end
let test_union_printing _ =
let s = structure "s" in
let (-:) ty label = field s label ty in
let i = uint16_t -: "i" in
let j = uint16_t -: "j" in
let () = seal s in
let u = union "u" in
let (-:) ty label = field u label ty in
let us = s -: "us" in
let ua = array 4 uint8_t -: "ua" in
let () = seal u in
let v = make u in
ignore (i, j, us);
setf v ua (CArray.make ~initial:(Unsigned.UInt8.of_int 0) uint8_t 4);
assert_bool "union printing"
(equal_ignoring_whitespace "{ us = {i = 0, j = 0} | ua = {0, 0, 0, 0}}" (string_of u v))
let test_array_printing _ =
let arr = CArray.of_list int [-1; 0; 1] in
let arrarr = CArray.of_list (array 3 int) [arr; arr] in
assert_bool "array printing"
(equal_ignoring_whitespace "{{-1, 0, 1}, {-1, 0, 1}}"
(string_of (array 2 (array 3 int)) arrarr))
let test_ocaml_string_printing _ =
let s = "abc@%^&*[\"" in
begin
assert_equal
(string_of ocaml_string (ocaml_string_start s))
(Printf.sprintf "%S" s);
assert_bool "ocaml_string printing with offsets"
(equal_ignoring_whitespace
(string_of ocaml_string ((ocaml_string_start s) +@ 3))
(Printf.sprintf "%S [offset:3]" s));
end
module Foreign_tests = Common_tests(Tests_common.Foreign_binder)
module Stub_tests = Common_tests(Generated_bindings)
let suite = "Value printing tests" >:::
["printing atomic values (foreign)"
>:: Foreign_tests.test_atomic_printing;
"printing atomic values (stubs)"
>:: Stub_tests.test_atomic_printing;
"printing pointers"
>:: test_pointer_printing;
"printing structs"
>:: test_struct_printing;
"printing unions"
>:: test_union_printing;
"printing arrays"
>:: test_array_printing;
"printing ocaml strings"
>:: test_ocaml_string_printing;
]
let _ =
run_test_tt_main suite
|
ca23e9519e603c4bcece3f4cbb8be59639298048830a6ae1d75331b35d280b95 | monadbobo/ocaml-core | ref.ml | open Std_internal
type 'a t = 'a ref = { mutable contents : 'a }
let sexp_of_t sexp_of_a t = sexp_of_a !t
let t_of_sexp a_of_sexp sexp = ref (a_of_sexp sexp)
include Bin_prot.Utils.Make_binable1 (struct
module Binable = struct
type 'a t = 'a with bin_io
end
type 'a t = 'a ref
let to_binable t = !t
let of_binable a = ref a
end)
let create x = ref x
let (!) = Pervasives.(!)
let (:=) = Pervasives.(:=)
let swap t1 t2 =
let tmp = !t1 in
t1 := !t2;
t2 := tmp
let replace t f = t := f !t
(* container functions below *)
let length _ = 1
let is_empty _ = false
let iter t ~f = f !t
let fold t ~init ~f = f init !t
let count t ~f = if f !t then 1 else 0
let exists t ~f = f !t
let for_all t ~f = f !t
let mem ?(equal = (=)) t a = equal a !t
let find t ~f = let a = !t in if f a then Some a else None
let find_map t ~f = f !t
let to_list t = [ !t ]
let to_array t = [| !t |]
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/core/lib/ref.ml | ocaml | container functions below | open Std_internal
type 'a t = 'a ref = { mutable contents : 'a }
let sexp_of_t sexp_of_a t = sexp_of_a !t
let t_of_sexp a_of_sexp sexp = ref (a_of_sexp sexp)
include Bin_prot.Utils.Make_binable1 (struct
module Binable = struct
type 'a t = 'a with bin_io
end
type 'a t = 'a ref
let to_binable t = !t
let of_binable a = ref a
end)
let create x = ref x
let (!) = Pervasives.(!)
let (:=) = Pervasives.(:=)
let swap t1 t2 =
let tmp = !t1 in
t1 := !t2;
t2 := tmp
let replace t f = t := f !t
let length _ = 1
let is_empty _ = false
let iter t ~f = f !t
let fold t ~init ~f = f init !t
let count t ~f = if f !t then 1 else 0
let exists t ~f = f !t
let for_all t ~f = f !t
let mem ?(equal = (=)) t a = equal a !t
let find t ~f = let a = !t in if f a then Some a else None
let find_map t ~f = f !t
let to_list t = [ !t ]
let to_array t = [| !t |]
|
646f2a5bd75d875867745231de4fb2eeadeef7633bb753b09fa4bfb95d147140 | wdebeaum/step | noon.lisp | ;;;;
;;;; W::noon
;;;;
(define-words :pos W::pro :templ PRONOUN-TEMPL
:tags (:base500)
:words (
at noon , midnight
;; why are these pronouns?
(W::noon
(SENSES
((LF-PARENT ONT::noon)
( SEM ( F::time - function F::day - point ) )
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/noon.lisp | lisp |
W::noon
why are these pronouns? |
(define-words :pos W::pro :templ PRONOUN-TEMPL
:tags (:base500)
:words (
at noon , midnight
(W::noon
(SENSES
((LF-PARENT ONT::noon)
( SEM ( F::time - function F::day - point ) )
)
)
)
))
|
496c3e0f9d6522fa1c9a2e02277cdf61cacef4a89d2283a24bd4e527c50a9a1d | CardanoSolutions/kupo | MonadTime.hs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
module Kupo.Control.MonadTime
( MonadTime (..)
, Time (..)
, DiffTime
, addTime
, diffTime
, secondsToDiffTime
, millisecondsToDiffTime
, timeout
) where
import Kupo.Prelude
import Control.Monad.Class.MonadTime
( MonadTime (..)
, Time (..)
, addTime
, diffTime
)
import Control.Monad.Class.MonadTimer
( timeout
)
import Data.Time.Clock
( DiffTime
, secondsToDiffTime
)
millisecondsToDiffTime :: Integer -> DiffTime
millisecondsToDiffTime = toEnum . fromInteger . (* 1_000_000_000)
| null | https://raw.githubusercontent.com/CardanoSolutions/kupo/fa37a0e569cc5d8faee925bf19f0349dac7b3990/src/Kupo/Control/MonadTime.hs | haskell | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
module Kupo.Control.MonadTime
( MonadTime (..)
, Time (..)
, DiffTime
, addTime
, diffTime
, secondsToDiffTime
, millisecondsToDiffTime
, timeout
) where
import Kupo.Prelude
import Control.Monad.Class.MonadTime
( MonadTime (..)
, Time (..)
, addTime
, diffTime
)
import Control.Monad.Class.MonadTimer
( timeout
)
import Data.Time.Clock
( DiffTime
, secondsToDiffTime
)
millisecondsToDiffTime :: Integer -> DiffTime
millisecondsToDiffTime = toEnum . fromInteger . (* 1_000_000_000)
| |
599aba923b100868b9635020ea142f9d2cb0299784fcadf85342c3e1b5fc8f43 | facebook/infer | theory.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Theory Solver *)
type oriented_equality = {var: Trm.t; rep: Trm.t}
type t =
{ no_fresh: bool
; solved: oriented_equality list option
; pending: (Trm.t * Trm.t) list }
val pp : t pp
val prefer : Trm.t -> Trm.t -> int
val solve_concat : Trm.sized array -> Trm.t -> Trm.t -> t -> t
val solve : Trm.t -> Trm.t -> t -> t Var.Fresh.m
| null | https://raw.githubusercontent.com/facebook/infer/9d9a3a42b75697db86bc0da3092137f175782773/sledge/src/fol/theory.mli | ocaml | * Theory Solver |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type oriented_equality = {var: Trm.t; rep: Trm.t}
type t =
{ no_fresh: bool
; solved: oriented_equality list option
; pending: (Trm.t * Trm.t) list }
val pp : t pp
val prefer : Trm.t -> Trm.t -> int
val solve_concat : Trm.sized array -> Trm.t -> Trm.t -> t -> t
val solve : Trm.t -> Trm.t -> t -> t Var.Fresh.m
|
b866cb727521cd65cfde713aa6b05096b9de446d0f08a638420ae61a25d1970b | clojure-expectations/clojure-test | test_macros.cljc | copyright ( c ) 2019 - 2020 , all rights reserved
(ns expectations.clojure.test-macros
"Macros to support testing the testing framework."
(:require #?(:clj [clojure.test :refer [is do-report] :as t]
:cljs [cljs.test :refer [do-report assert-expr]
:refer-macros [is assert-expr] :as t])
#?(:cljs [cljs.spec.alpha :as s])
#?(:clj [expectations.clojure.test :as sut]
:cljs [expectations.clojure.test :include-macros true :as sut])))
(defmacro is-not'
"Construct a negative test for an expectation with a symbolic failure."
[expectation failure & [msg]]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [fail#]
(= '~failure (:actual fail#)))
(:fail @results#)))
(when ~msg
(t/is (some (fn [fail#]
(re-find ~msg (:message fail#)))
(:fail @results#))))))
(defmacro is-not
"Construct a negative test for an expectation with a value-based failure."
[expectation failure & [msg]]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [fail#]
(= ~failure (:actual fail#)))
(:fail @results#)))
(when ~msg
(t/is (some (fn [fail#]
(re-find ~msg (:message fail#)))
(:fail @results#))))))
(defmacro passes
"Construct a positive test for an expectation with a predicate-based success.
This is needed for cases where a successful test wraps a failing behavior,
such as `thrown?`, i.e., `(expect ExceptionType actual)`"
[expectation success]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [pass#]
(~success (:actual pass#)))
(:pass @results#)))))
| null | https://raw.githubusercontent.com/clojure-expectations/clojure-test/fdf73d8a1219e17443c4b18c951b40943a7a7e3f/test/expectations/clojure/test_macros.cljc | clojure | copyright ( c ) 2019 - 2020 , all rights reserved
(ns expectations.clojure.test-macros
"Macros to support testing the testing framework."
(:require #?(:clj [clojure.test :refer [is do-report] :as t]
:cljs [cljs.test :refer [do-report assert-expr]
:refer-macros [is assert-expr] :as t])
#?(:cljs [cljs.spec.alpha :as s])
#?(:clj [expectations.clojure.test :as sut]
:cljs [expectations.clojure.test :include-macros true :as sut])))
(defmacro is-not'
"Construct a negative test for an expectation with a symbolic failure."
[expectation failure & [msg]]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [fail#]
(= '~failure (:actual fail#)))
(:fail @results#)))
(when ~msg
(t/is (some (fn [fail#]
(re-find ~msg (:message fail#)))
(:fail @results#))))))
(defmacro is-not
"Construct a negative test for an expectation with a value-based failure."
[expectation failure & [msg]]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [fail#]
(= ~failure (:actual fail#)))
(:fail @results#)))
(when ~msg
(t/is (some (fn [fail#]
(re-find ~msg (:message fail#)))
(:fail @results#))))))
(defmacro passes
"Construct a positive test for an expectation with a predicate-based success.
This is needed for cases where a successful test wraps a failing behavior,
such as `thrown?`, i.e., `(expect ExceptionType actual)`"
[expectation success]
`(let [results# (atom nil)]
(with-redefs [do-report (sut/all-report results#)]
~expectation)
(t/is (some (fn [pass#]
(~success (:actual pass#)))
(:pass @results#)))))
| |
d6db6b19ecfe47c57114a35d506c4b56d66bc52459bec6f818da1a04a218d659 | kupl/LearnML | patch.ml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec varfind (arr : 'a list) x : bool =
match arr with
| [] -> false
| hd :: tl -> if hd = x then true else varfind tl x
let rec find (arr : string list) (lam : lambda) : bool * string list =
match lam with
| V x -> (varfind arr x, arr)
| P (x, l) ->
let arr' : string list = x :: arr in
find arr' l
| C (l1, l2) ->
let (b, arr') : bool * string list = find arr l1 in
let (b2, arr'') : bool * string list = find arr' l2 in
(b && b2, arr'')
let rec __s3 (__s4 : lambda) : string list =
match __s4 with
| V __s5 -> [ __s5 ]
| P (__s6, __s7) ->
List.filter (fun (__s8 : string) -> not (__s6 = __s8)) (__s3 __s7)
| C (__s9, __s10) -> __s3 __s9 @ __s3 __s10
let rec check (lam : lambda) : bool =
let (b, a) : bool * string list = find [] lam in
List.length (__s3 lam) = 0
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/lambda/sub127/patch.ml | ocaml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec varfind (arr : 'a list) x : bool =
match arr with
| [] -> false
| hd :: tl -> if hd = x then true else varfind tl x
let rec find (arr : string list) (lam : lambda) : bool * string list =
match lam with
| V x -> (varfind arr x, arr)
| P (x, l) ->
let arr' : string list = x :: arr in
find arr' l
| C (l1, l2) ->
let (b, arr') : bool * string list = find arr l1 in
let (b2, arr'') : bool * string list = find arr' l2 in
(b && b2, arr'')
let rec __s3 (__s4 : lambda) : string list =
match __s4 with
| V __s5 -> [ __s5 ]
| P (__s6, __s7) ->
List.filter (fun (__s8 : string) -> not (__s6 = __s8)) (__s3 __s7)
| C (__s9, __s10) -> __s3 __s9 @ __s3 __s10
let rec check (lam : lambda) : bool =
let (b, a) : bool * string list = find [] lam in
List.length (__s3 lam) = 0
| |
db2190d4136263797e0b31b722edcda74126485d77d8ccf85ce3051c6a661af6 | tarides/opam-monorepo | project.mli | (** Utility functions to extract project specific path and values *)
type t = Fpath.t
(** The type of projects.
What we consider a project here is the root of a dune project/workspace *)
val local_packages :
recurse:bool ->
t ->
((OpamPackage.Name.t * Fpath.t) list, [> Rresult.R.msg ]) result
(** Returns the locally defined opam packages as an association list from package names to
to the corresponding .opam file path.
Only considers packages defined at the repo's root unless [recurse] is [true]. *)
val all_local_packages :
t -> ((OpamPackage.Name.t * Fpath.t) list, [> Rresult.R.msg ]) result
(** [all_local_packages t] is [local_packages ~recurse:true t]. *)
val dune_project : t -> Fpath.t
(** Returns the path to the dune-project file. *)
val name : t -> (string, [> `Msg of string ]) result
(** Returns the name of the project, as set in the dune-project. *)
val lockfile :
target_packages:OpamPackage.Name.t list ->
t ->
(Fpath.t, [> `Msg of string ]) result
* Returns the path to the opam - monorepo lockfile to generate for the given
project and lockfile target packages .
If there is a single target package , then it is the [ " < package_name>.opam.locked " ]
file at the root of the project .
If it contains multiple packages , then it 's the [ " < project_name>.opam.locked " ] file
at the root of the project , where < project_name > is the name as defined in the
dune - project file .
project and lockfile target packages.
If there is a single target package, then it is the ["<package_name>.opam.locked"]
file at the root of the project.
If it contains multiple packages, then it's the ["<project_name>.opam.locked"] file
at the root of the project, where <project_name> is the name as defined in the
dune-project file. *)
val local_lockfiles : t -> (Fpath.t list, Rresult.R.msg) result
(** Returns all the lockfiles located at the root of the project i.e. all
.opam.locked files. *)
| null | https://raw.githubusercontent.com/tarides/opam-monorepo/9262e7f71d749520b7e046fbd90a4732a43866e9/lib/project.mli | ocaml | * Utility functions to extract project specific path and values
* The type of projects.
What we consider a project here is the root of a dune project/workspace
* Returns the locally defined opam packages as an association list from package names to
to the corresponding .opam file path.
Only considers packages defined at the repo's root unless [recurse] is [true].
* [all_local_packages t] is [local_packages ~recurse:true t].
* Returns the path to the dune-project file.
* Returns the name of the project, as set in the dune-project.
* Returns all the lockfiles located at the root of the project i.e. all
.opam.locked files. |
type t = Fpath.t
val local_packages :
recurse:bool ->
t ->
((OpamPackage.Name.t * Fpath.t) list, [> Rresult.R.msg ]) result
val all_local_packages :
t -> ((OpamPackage.Name.t * Fpath.t) list, [> Rresult.R.msg ]) result
val dune_project : t -> Fpath.t
val name : t -> (string, [> `Msg of string ]) result
val lockfile :
target_packages:OpamPackage.Name.t list ->
t ->
(Fpath.t, [> `Msg of string ]) result
* Returns the path to the opam - monorepo lockfile to generate for the given
project and lockfile target packages .
If there is a single target package , then it is the [ " < package_name>.opam.locked " ]
file at the root of the project .
If it contains multiple packages , then it 's the [ " < project_name>.opam.locked " ] file
at the root of the project , where < project_name > is the name as defined in the
dune - project file .
project and lockfile target packages.
If there is a single target package, then it is the ["<package_name>.opam.locked"]
file at the root of the project.
If it contains multiple packages, then it's the ["<project_name>.opam.locked"] file
at the root of the project, where <project_name> is the name as defined in the
dune-project file. *)
val local_lockfiles : t -> (Fpath.t list, Rresult.R.msg) result
|
f6d804c1f8452225e02aedf70ebd432351261cd444615197af7938634016aef4 | mbj/stratosphere | PortfolioShare.hs | module Stratosphere.ServiceCatalog.PortfolioShare (
PortfolioShare(..), mkPortfolioShare
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data PortfolioShare
= PortfolioShare {acceptLanguage :: (Prelude.Maybe (Value Prelude.Text)),
accountId :: (Value Prelude.Text),
portfolioId :: (Value Prelude.Text),
shareTagOptions :: (Prelude.Maybe (Value Prelude.Bool))}
mkPortfolioShare ::
Value Prelude.Text -> Value Prelude.Text -> PortfolioShare
mkPortfolioShare accountId portfolioId
= PortfolioShare
{accountId = accountId, portfolioId = portfolioId,
acceptLanguage = Prelude.Nothing,
shareTagOptions = Prelude.Nothing}
instance ToResourceProperties PortfolioShare where
toResourceProperties PortfolioShare {..}
= ResourceProperties
{awsType = "AWS::ServiceCatalog::PortfolioShare",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["AccountId" JSON..= accountId, "PortfolioId" JSON..= portfolioId]
(Prelude.catMaybes
[(JSON..=) "AcceptLanguage" Prelude.<$> acceptLanguage,
(JSON..=) "ShareTagOptions" Prelude.<$> shareTagOptions]))}
instance JSON.ToJSON PortfolioShare where
toJSON PortfolioShare {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["AccountId" JSON..= accountId, "PortfolioId" JSON..= portfolioId]
(Prelude.catMaybes
[(JSON..=) "AcceptLanguage" Prelude.<$> acceptLanguage,
(JSON..=) "ShareTagOptions" Prelude.<$> shareTagOptions])))
instance Property "AcceptLanguage" PortfolioShare where
type PropertyType "AcceptLanguage" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {acceptLanguage = Prelude.pure newValue, ..}
instance Property "AccountId" PortfolioShare where
type PropertyType "AccountId" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {accountId = newValue, ..}
instance Property "PortfolioId" PortfolioShare where
type PropertyType "PortfolioId" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {portfolioId = newValue, ..}
instance Property "ShareTagOptions" PortfolioShare where
type PropertyType "ShareTagOptions" PortfolioShare = Value Prelude.Bool
set newValue PortfolioShare {..}
= PortfolioShare {shareTagOptions = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/servicecatalog/gen/Stratosphere/ServiceCatalog/PortfolioShare.hs | haskell | module Stratosphere.ServiceCatalog.PortfolioShare (
PortfolioShare(..), mkPortfolioShare
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data PortfolioShare
= PortfolioShare {acceptLanguage :: (Prelude.Maybe (Value Prelude.Text)),
accountId :: (Value Prelude.Text),
portfolioId :: (Value Prelude.Text),
shareTagOptions :: (Prelude.Maybe (Value Prelude.Bool))}
mkPortfolioShare ::
Value Prelude.Text -> Value Prelude.Text -> PortfolioShare
mkPortfolioShare accountId portfolioId
= PortfolioShare
{accountId = accountId, portfolioId = portfolioId,
acceptLanguage = Prelude.Nothing,
shareTagOptions = Prelude.Nothing}
instance ToResourceProperties PortfolioShare where
toResourceProperties PortfolioShare {..}
= ResourceProperties
{awsType = "AWS::ServiceCatalog::PortfolioShare",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["AccountId" JSON..= accountId, "PortfolioId" JSON..= portfolioId]
(Prelude.catMaybes
[(JSON..=) "AcceptLanguage" Prelude.<$> acceptLanguage,
(JSON..=) "ShareTagOptions" Prelude.<$> shareTagOptions]))}
instance JSON.ToJSON PortfolioShare where
toJSON PortfolioShare {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["AccountId" JSON..= accountId, "PortfolioId" JSON..= portfolioId]
(Prelude.catMaybes
[(JSON..=) "AcceptLanguage" Prelude.<$> acceptLanguage,
(JSON..=) "ShareTagOptions" Prelude.<$> shareTagOptions])))
instance Property "AcceptLanguage" PortfolioShare where
type PropertyType "AcceptLanguage" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {acceptLanguage = Prelude.pure newValue, ..}
instance Property "AccountId" PortfolioShare where
type PropertyType "AccountId" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {accountId = newValue, ..}
instance Property "PortfolioId" PortfolioShare where
type PropertyType "PortfolioId" PortfolioShare = Value Prelude.Text
set newValue PortfolioShare {..}
= PortfolioShare {portfolioId = newValue, ..}
instance Property "ShareTagOptions" PortfolioShare where
type PropertyType "ShareTagOptions" PortfolioShare = Value Prelude.Bool
set newValue PortfolioShare {..}
= PortfolioShare {shareTagOptions = Prelude.pure newValue, ..} | |
c46b471aa0e26e7785fa11154632f2c62407b92770529e6cd645aebd4fd71fc0 | pbl64k/gpif-datakinds | IxFunctor.hs | {-# LANGUAGE GADTs #-}
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE KindSignatures #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
# LANGUAGE InstanceSigs #
# LANGUAGE IncoherentInstances #
|
Module : Control . IxFunctor
Description : Free ( co)recursion schemes for a broad range of algebraic data types
Copyright : , 2015
License : BSD2
Maintainer :
Stability : experimental
Portability : GHC > = 7.8
This module re - exports a bunch of stuff from other modules that should be
completely sufficient to define your own data types and get the recursion
schemes for them from the generic implementations .
Module : Control.IxFunctor
Description : Free (co)recursion schemes for a broad range of algebraic data types
Copyright : Pavel Lepin, 2015
License : BSD2
Maintainer :
Stability : experimental
Portability : GHC >= 7.8
This module re-exports a bunch of stuff from other modules that should be
completely sufficient to define your own data types and get the recursion
schemes for them from the generic implementations.
-}
module Control.IxFunctor
( Void
, Equality(Reflexivity)
, Isomorphic(from, to)
, isoToLeft
, isoToRight
, (:->)
, IxTVoid
, IxTConst(IxTConst)
, liftIxTConst
, IxTEither(IxTEitherLeft, IxTEitherRight)
, split
, IxTPair(IxTPair)
, IxFunctor(ixmap)
, IxVoid
, IxUnit(IxUnit)
, (:+:)(IxLeft, IxRight)
, (:*:)(IxProd)
, (:.:)(IxComp)
, IxProj(IxProj)
, IxOut(IxOut)
, IxFix(IxFix)
, ixunfix
, ixcata
, ixana
, ixhylo
, ixmeta
, ixpara
, ixapo
) where
import Control.IxFunctor.Equality
import Control.IxFunctor.Iso
import Control.IxFunctor.IxType
import Control.IxFunctor.IxFunctor
import Control.IxFunctor.RecScheme
| null | https://raw.githubusercontent.com/pbl64k/gpif-datakinds/1ae07ec5274f258c6bdf6633bd88532f6c3c1e04/src/Control/IxFunctor.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes # | # LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
# LANGUAGE InstanceSigs #
# LANGUAGE IncoherentInstances #
|
Module : Control . IxFunctor
Description : Free ( co)recursion schemes for a broad range of algebraic data types
Copyright : , 2015
License : BSD2
Maintainer :
Stability : experimental
Portability : GHC > = 7.8
This module re - exports a bunch of stuff from other modules that should be
completely sufficient to define your own data types and get the recursion
schemes for them from the generic implementations .
Module : Control.IxFunctor
Description : Free (co)recursion schemes for a broad range of algebraic data types
Copyright : Pavel Lepin, 2015
License : BSD2
Maintainer :
Stability : experimental
Portability : GHC >= 7.8
This module re-exports a bunch of stuff from other modules that should be
completely sufficient to define your own data types and get the recursion
schemes for them from the generic implementations.
-}
module Control.IxFunctor
( Void
, Equality(Reflexivity)
, Isomorphic(from, to)
, isoToLeft
, isoToRight
, (:->)
, IxTVoid
, IxTConst(IxTConst)
, liftIxTConst
, IxTEither(IxTEitherLeft, IxTEitherRight)
, split
, IxTPair(IxTPair)
, IxFunctor(ixmap)
, IxVoid
, IxUnit(IxUnit)
, (:+:)(IxLeft, IxRight)
, (:*:)(IxProd)
, (:.:)(IxComp)
, IxProj(IxProj)
, IxOut(IxOut)
, IxFix(IxFix)
, ixunfix
, ixcata
, ixana
, ixhylo
, ixmeta
, ixpara
, ixapo
) where
import Control.IxFunctor.Equality
import Control.IxFunctor.Iso
import Control.IxFunctor.IxType
import Control.IxFunctor.IxFunctor
import Control.IxFunctor.RecScheme
|
d770f86eaaedffae27f11eb00f27ae281892a081d763e249b08a7a088c5dd2ae | jpmonettas/web-extractor | liga.lisp | (in-package :extractor)
(def-web-extractor partido-map
((local :finder (xpath-finder "//div[@class='equipo'][1]"))
(visitante :finder (xpath-finder "//div[@class='equipo'][2]"))
(resultado_local :finder (compose
(xpath-finder "//div[@class='vs']")
(regexp-finder "(.*)-")))
(resultado_visitante :finder (compose
(xpath-finder "//div[@class='vs']")
(regexp-finder "-(.*)")))))
(def-web-extractor fecha-map
((fecha :finder (compose
(xpath-finder "/root/div[@id='titulo-fecha']" :add-root t)
(regexp-finder "([0-9]+)")))
(partidos :collection partido-map
:splitter (xpath-splitter "/root/div[position()>1]" :add-root t))))
(def-web-extractor fechas-map
((fechas :collection fecha-map
:splitter (regexp-splitter "(<div id=\"titulo-fecha\".*?)<div id=\"titulo-fecha\""))))
(def-web-extractor divisional-map
((nombre :finder (xpath-finder "//a"))
(fechas-col :follow fechas-map :finder
(xpath-finder "//a/@href"))))
(def-web-extractor divisionales-map
((divisionales :collection divisional-map
:splitter (xpath-splitter "//*[@id='adtabla']/div/a")
:limit 10)))
(defun parse-liga ()
(print
(json:encode-json
(extract :url ""
:struct-map divisionales-map))))
( push " /home / jmonetta / MyProjects / web - extractor/ " ASDF:*CENTRAL - REGISTRY * )
;;(asdf:oos 'asdf:load-op :cl-web-extractor)
( load " /home / jmonetta / MyProjects / web - extractor / tests / liga.lisp " )
;;(sb-ext:save-lisp-and-die "my_sbcl_parse_liga" :executable t :toplevel 'extractor::parse-liga)
| null | https://raw.githubusercontent.com/jpmonettas/web-extractor/7a78f83cec83ac4b0377eff64eaf171a5a671002/tests/liga.lisp | lisp | (asdf:oos 'asdf:load-op :cl-web-extractor)
(sb-ext:save-lisp-and-die "my_sbcl_parse_liga" :executable t :toplevel 'extractor::parse-liga) | (in-package :extractor)
(def-web-extractor partido-map
((local :finder (xpath-finder "//div[@class='equipo'][1]"))
(visitante :finder (xpath-finder "//div[@class='equipo'][2]"))
(resultado_local :finder (compose
(xpath-finder "//div[@class='vs']")
(regexp-finder "(.*)-")))
(resultado_visitante :finder (compose
(xpath-finder "//div[@class='vs']")
(regexp-finder "-(.*)")))))
(def-web-extractor fecha-map
((fecha :finder (compose
(xpath-finder "/root/div[@id='titulo-fecha']" :add-root t)
(regexp-finder "([0-9]+)")))
(partidos :collection partido-map
:splitter (xpath-splitter "/root/div[position()>1]" :add-root t))))
(def-web-extractor fechas-map
((fechas :collection fecha-map
:splitter (regexp-splitter "(<div id=\"titulo-fecha\".*?)<div id=\"titulo-fecha\""))))
(def-web-extractor divisional-map
((nombre :finder (xpath-finder "//a"))
(fechas-col :follow fechas-map :finder
(xpath-finder "//a/@href"))))
(def-web-extractor divisionales-map
((divisionales :collection divisional-map
:splitter (xpath-splitter "//*[@id='adtabla']/div/a")
:limit 10)))
(defun parse-liga ()
(print
(json:encode-json
(extract :url ""
:struct-map divisionales-map))))
( push " /home / jmonetta / MyProjects / web - extractor/ " ASDF:*CENTRAL - REGISTRY * )
( load " /home / jmonetta / MyProjects / web - extractor / tests / liga.lisp " )
|
0626c44ed56204625e9e0da43837e9b7d87ad5b80d43109af9348bb6e8b24060 | lopec/LoPEC | logger_ext_supervisor.erl | %%% Author : Sedrik
%%% Description : The client supervisor
Created : Tue Sep 29 08:58:17 CEST 2009
-vsn('$Rev$ ' ) .
-module(logger_ext_supervisor).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
%%%===================================================================
%%% API functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the supervisor
%%
( ) - > { ok , Pid } | ignore | { error , Error }
%% @end
%%--------------------------------------------------------------------
start_link() ->
supervisor:start_link(?MODULE, no_args).
%%%===================================================================
%%% Supervisor callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Whenever a supervisor is started using supervisor:start_link/[2,3],
%% this function is called by the new process to find out about
%% restart strategy, maximum restart frequency and child
%% specifications.
%%
) - > { ok , { SupFlags , [ ChildSpec ] } } |
%% ignore |
%% {error, Reason}
%% @end
%%--------------------------------------------------------------------
init(WhereToLog) ->
{ok,{{one_for_one, 1, 60},
[child(logger_ext, WhereToLog)]}}.
%%%===================================================================
Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Simple helper function to make the child specefication list easier
%% to read.
%%
, ) - > { ChildSpec }
%% @end
%%--------------------------------------------------------------------
child(Module, Args) ->
{Module, {Module, start_link, [Args]}, permanent, brutal_kill, worker, [Module]}.
| null | https://raw.githubusercontent.com/lopec/LoPEC/29a3989c48a60e5990615dea17bad9d24d770f7b/trunk/old_stuff/old_master/src/logger_ext_supervisor.erl | erlang | Author : Sedrik
Description : The client supervisor
===================================================================
API functions
===================================================================
--------------------------------------------------------------------
@doc
Starts the supervisor
@end
--------------------------------------------------------------------
===================================================================
Supervisor callbacks
===================================================================
--------------------------------------------------------------------
@doc
Whenever a supervisor is started using supervisor:start_link/[2,3],
this function is called by the new process to find out about
restart strategy, maximum restart frequency and child
specifications.
ignore |
{error, Reason}
@end
--------------------------------------------------------------------
===================================================================
===================================================================
--------------------------------------------------------------------
@doc
Simple helper function to make the child specefication list easier
to read.
@end
-------------------------------------------------------------------- | Created : Tue Sep 29 08:58:17 CEST 2009
-vsn('$Rev$ ' ) .
-module(logger_ext_supervisor).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
( ) - > { ok , Pid } | ignore | { error , Error }
start_link() ->
supervisor:start_link(?MODULE, no_args).
@private
) - > { ok , { SupFlags , [ ChildSpec ] } } |
init(WhereToLog) ->
{ok,{{one_for_one, 1, 60},
[child(logger_ext, WhereToLog)]}}.
Internal functions
@private
, ) - > { ChildSpec }
child(Module, Args) ->
{Module, {Module, start_link, [Args]}, permanent, brutal_kill, worker, [Module]}.
|
e8afa17cc17a0ae43bc3eec8114cb45152a184363d3e9f380949b53f4913cda3 | lesguillemets/sicp-haskell | 1.3.2.hs | -- this section is mostly about lisp/scheme syntex, so nothing to do in
-- haskell here.
| null | https://raw.githubusercontent.com/lesguillemets/sicp-haskell/df524a1e28c45fb16a56f539cad8babc881d0431/text/chap01/sect3/1.3.2.hs | haskell | this section is mostly about lisp/scheme syntex, so nothing to do in
haskell here. | |
c63f35d20e51c7416ecc13cb97142ac7d5825385da2672d407f7c393a70226b7 | darrenks/nibbles | Hs.hs | module Hs where
import Data.DList -- needs cabal install --lib dlist
single = singleton
flist = fromList
a +++ b = append a b
assume HsCode is parenthesized if precedence is less than apply ( only need parens for rhs )
newtype HsCode = HsCode (DList Char) deriving (Show, Eq)
getHsCode (HsCode hs) = hs
hsAtom :: String -> HsCode
hsAtom = HsCode . fromList
hsApp :: HsCode -> HsCode -> HsCode
hsApp (HsCode a) (HsCode b) = hsParen $ HsCode $ a +++ single ' ' +++ b
hsFn :: [HsCode] -> HsCode -> HsCode
hsFn args (HsCode body) = HsCode $ flist "(\\" +++ argsLhs args +++ flist"->" +++ body +++ single ')' where
argsLhs hss = getHsCode $ hsParen $ HsCode $ intercalate (single ',') $ Prelude.map getHsCode hss
hsLet :: [HsCode] -> HsCode -> HsCode -> HsCode
hsLet vars (HsCode def) (HsCode body) =
HsCode $ flist"(let (" +++ lhs +++ flist")="
+++ def +++ flist" in "
+++ body +++ single ')'
where lhs = intercalate (single ',') $ Prelude.map getHsCode vars
hsParen :: HsCode -> HsCode
hsParen (HsCode hs) = HsCode $ single '(' +++ hs +++ single ')'
flatHs :: HsCode -> String
flatHs (HsCode hs) = toList hs
i :: Integer -> HsCode
i = hsParen . hsAtom . show -- "::Integer)"
| null | https://raw.githubusercontent.com/darrenks/nibbles/073d463ce50b99b76cf4ae900087d872971b4e33/Hs.hs | haskell | needs cabal install --lib dlist
"::Integer)" | module Hs where
single = singleton
flist = fromList
a +++ b = append a b
assume HsCode is parenthesized if precedence is less than apply ( only need parens for rhs )
newtype HsCode = HsCode (DList Char) deriving (Show, Eq)
getHsCode (HsCode hs) = hs
hsAtom :: String -> HsCode
hsAtom = HsCode . fromList
hsApp :: HsCode -> HsCode -> HsCode
hsApp (HsCode a) (HsCode b) = hsParen $ HsCode $ a +++ single ' ' +++ b
hsFn :: [HsCode] -> HsCode -> HsCode
hsFn args (HsCode body) = HsCode $ flist "(\\" +++ argsLhs args +++ flist"->" +++ body +++ single ')' where
argsLhs hss = getHsCode $ hsParen $ HsCode $ intercalate (single ',') $ Prelude.map getHsCode hss
hsLet :: [HsCode] -> HsCode -> HsCode -> HsCode
hsLet vars (HsCode def) (HsCode body) =
HsCode $ flist"(let (" +++ lhs +++ flist")="
+++ def +++ flist" in "
+++ body +++ single ')'
where lhs = intercalate (single ',') $ Prelude.map getHsCode vars
hsParen :: HsCode -> HsCode
hsParen (HsCode hs) = HsCode $ single '(' +++ hs +++ single ')'
flatHs :: HsCode -> String
flatHs (HsCode hs) = toList hs
i :: Integer -> HsCode
|
47e29bec3ea89a6228432aee722fd3624e8e331dd50b1530067ecb36e7aeb392 | ds-wizard/engine-backend | List_Locale_GET.hs | module Wizard.Specs.API.Config.List_Locale_GET (
list_locale_GET,
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Network.HTTP.Types
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai hiding (shouldRespondWith)
import Test.Hspec.Wai.Matcher
import Shared.Database.Migration.Development.Locale.Data.Locales
import Shared.Util.String (f')
import qualified Wizard.Database.Migration.Development.Locale.LocaleMigration as LOC
import Wizard.Model.Context.AppContext
import SharedTest.Specs.API.Common
import Wizard.Specs.Common
-- ------------------------------------------------------------------------
-- GET /configs/locales/{localeId}
-- ------------------------------------------------------------------------
list_locale_GET :: AppContext -> SpecWith ((), Application)
list_locale_GET appContext = describe "GET /configs/locales/{localeId}" $ test_200 appContext
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
reqMethod = methodGet
reqUrlT localeId = BS.pack $ f' "/configs/locales/%s" [localeId]
reqHeaders = []
reqBody = ""
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
test_200 appContext = do
create_test_200 "HTTP 200 OK (nl-NL)" appContext "nl-NL"
create_test_200 "HTTP 200 OK (nl)" appContext "nl"
create_test_200 title appContext localeId = do
it title $
-- GIVEN: Prepare expectation
do
let reqUrl = reqUrlT localeId
let expStatus = 200
let expHeaders = resCtHeader : resCorsHeaders
let expBody = BSL.fromStrict localeNlContent
-- AND: Run migrations
runInContextIO LOC.runMigration appContext
runInContextIO LOC.runS3Migration appContext
-- WHEN: Call API
response <- request reqMethod reqUrl reqHeaders reqBody
-- THEN: Compare response with expectation
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/0ec94a4b0545f2de8a4e59686a4376023719d5e7/engine-wizard/test/Wizard/Specs/API/Config/List_Locale_GET.hs | haskell | ------------------------------------------------------------------------
GET /configs/locales/{localeId}
------------------------------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
GIVEN: Prepare expectation
AND: Run migrations
WHEN: Call API
THEN: Compare response with expectation | module Wizard.Specs.API.Config.List_Locale_GET (
list_locale_GET,
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Network.HTTP.Types
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai hiding (shouldRespondWith)
import Test.Hspec.Wai.Matcher
import Shared.Database.Migration.Development.Locale.Data.Locales
import Shared.Util.String (f')
import qualified Wizard.Database.Migration.Development.Locale.LocaleMigration as LOC
import Wizard.Model.Context.AppContext
import SharedTest.Specs.API.Common
import Wizard.Specs.Common
list_locale_GET :: AppContext -> SpecWith ((), Application)
list_locale_GET appContext = describe "GET /configs/locales/{localeId}" $ test_200 appContext
reqMethod = methodGet
reqUrlT localeId = BS.pack $ f' "/configs/locales/%s" [localeId]
reqHeaders = []
reqBody = ""
test_200 appContext = do
create_test_200 "HTTP 200 OK (nl-NL)" appContext "nl-NL"
create_test_200 "HTTP 200 OK (nl)" appContext "nl"
create_test_200 title appContext localeId = do
it title $
do
let reqUrl = reqUrlT localeId
let expStatus = 200
let expHeaders = resCtHeader : resCorsHeaders
let expBody = BSL.fromStrict localeNlContent
runInContextIO LOC.runMigration appContext
runInContextIO LOC.runS3Migration appContext
response <- request reqMethod reqUrl reqHeaders reqBody
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
|
bb5fa61c6153e6a03301197a3133d742ab1602422ff72de716c9be9ce6c08ca9 | argp/bap | bench_nreplace.ml | Run with :
make bench BENCH_TARGETS = / bench_nreplace.native
make bench BENCH_TARGETS=benchsuite/bench_nreplace.native
*)
open Batteries
open String
(* The original Batteries String.nreplace *)
let nreplace_orig ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let parts = nsplit str ~by:sub in
String.concat by parts
The suggestion from that started it all .
Notice that it replaces substrings from left to right instead of right to left .
Notice that it replaces substrings from left to right instead of right to left. *)
let nreplace_glyn ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let find_sub pos = try find_from str pos sub with Not_found -> -1 in
(* allows loop to be tail recursive *)
let sublen = length sub in
let strlen = length str in
let buffer = Buffer.create strlen in
let rec loop curpos =
if curpos = strlen then
Buffer.contents buffer
else
let subpos = find_sub curpos in
if subpos = -1 then
( Buffer.add_substring buffer str curpos (strlen - curpos) ;
Buffer.contents buffer )
else
( Buffer.add_substring buffer str curpos (subpos - curpos) ;
Buffer.add_string buffer by ;
loop (subpos + sublen) )
in
loop 0
Then suggested preallocating the final string . Here is a first
* implementation , performing two rfind_from which is apparently a very bad idea
* implementation, performing two rfind_from which is apparently a very bad idea *)
let nreplace_rxd ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst l i =
match (try Some (rfind_from str (i-1) sub) with Not_found -> None) with
| None -> l
| Some i' -> loop_subst (l + dlen) i' in
let newlen =
if dlen = 0 then strlen else loop_subst strlen strlen in
let newstr = create newlen in
let rec loop_copy i j =
match (try Some (rfind_from str (i-1) sub) with Not_found -> None) with
| None ->
still need the first chunk
String.unsafe_blit str 0 newstr 0 i
| Some i' ->
let j' = j - (i - i') - dlen in
newstring.[j .. end ] is already inited . Init from j ' to ( j-1 ) .
String.unsafe_blit by 0 newstr j' bylen ;
String.unsafe_blit str (i'+sublen) newstr (j'+bylen) (i-i'-sublen) ;
loop_copy i' j' in
loop_copy strlen newlen ;
newstr
So proposed a version without the double rfind_from
* ( taken from + small fix )
* (taken from + small fix) *)
let nreplace_thelema ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst idxes i =
match Exceptionless.rfind_from str (i-1) sub with
| None -> idxes
| Some i' -> loop_subst (i'::idxes) i' in
let idxes = loop_subst [] strlen in
let newlen = strlen + List.length idxes * dlen in
let newstr = create newlen in
let rec loop_copy i j idxes =
match idxes with
| [] ->
(* still need the last chunk *)
String.unsafe_blit str i newstr j (strlen-i)
| i'::rest ->
let di = i' - i in
String.unsafe_blit str i newstr j di ;
String.unsafe_blit by 0 newstr (j + di) bylen ;
loop_copy (i + di + sublen) (j + di + bylen) rest in
loop_copy 0 0 idxes ;
newstr
Same as above but avoiding the
let nreplace_thelema2 ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst idxes newlen i =
match (try rfind_from str (i-1) sub with Not_found -> -1) with
| -1 -> idxes, newlen
| i' -> loop_subst (i'::idxes) (newlen+dlen) i' in
let idxes, newlen = loop_subst [] strlen strlen in
let newstr = create newlen in
let rec loop_copy i j idxes =
match idxes with
| [] ->
(* still need the last chunk *)
String.unsafe_blit str i newstr j (strlen-i)
| i'::rest ->
let di = i' - i in
String.unsafe_blit str i newstr j di ;
String.unsafe_blit by 0 newstr (j + di) bylen ;
loop_copy (i + di + sublen) (j + di + bylen) rest in
loop_copy 0 0 idxes ;
newstr
Independantly , MadRoach implemented the same idea with less luck aparently
let nreplace_madroach ~str ~sub ~by =
let strlen = String.length str
and sublen = String.length sub
and bylen = String.length by in
let rec find_simple ~sub ?(pos=0) str =
let find pos =
try BatString.find_from str pos sub with
Not_found -> raise BatEnum.No_more_elements
in
let nexti = ref pos in
BatEnum.from (fun () -> let i = find !nexti in nexti := i+1; i) in
(* collect all positions where we need to replace,
* skipping overlapping occurences *)
let todo =
let skip_unto = ref 0 in
find_simple sub str |>
Enum.filter begin function
|i when i < !skip_unto -> false
|i -> skip_unto := i + sublen; true
end
in
(* create destination string *)
let dst = String.create (strlen + Enum.count todo * (bylen - sublen)) in
(* do the replacement *)
let srci, dsti =
fold
begin fun (srci,dsti) i ->
let skiplen = i-srci in
String.blit str srci dst dsti skiplen;
String.blit by 0 dst (dsti+skiplen) bylen;
(srci+skiplen+sublen, dsti+skiplen+bylen)
end
(0,0)
todo
in
assert (strlen - srci = String.length dst - dsti);
String.blit str srci dst dsti (strlen - srci);
dst
Gasche had its own idea based on substrings .
Here are several versions , any of which seams faster than all the above .
See :
-batteries-team/batteries-included/pull/372#issuecomment-18399379
for a discussion .
Here are several versions, any of which seams faster than all the above.
See:
-batteries-team/batteries-included/pull/372#issuecomment-18399379
for a discussion.*)
should be
let nsplit str pat =
let pat_len = String.length pat in
let rec loop pos rev_subs =
let next_pos =
try BatString.find_from str pos pat
with Not_found -> -1
in
if next_pos = -1 then
(BatSubstring.extract str pos None :: rev_subs)
else
let sub = BatSubstring.unsafe_substring str pos (next_pos - pos) in
loop (next_pos + pat_len) (sub :: rev_subs)
in
List.rev (loop 0 [])
should be
let nsplit_enum str pat =
let pat_len = String.length pat in
let pos = ref 0 in
BatEnum.from (fun () ->
if !pos < 0 then raise BatEnum.No_more_elements else
try
let next_pos = BatString.find_from str !pos pat in
let sub = BatSubstring.unsafe_substring str !pos (next_pos - !pos) in
pos := next_pos + pat_len;
sub
with Not_found ->
let sub = BatSubstring.extract str !pos None in
pos := -1 ;
sub
)
should be , with a separator argument
let concat_optimized ~sep ssl =
let sep_len = String.length sep in
use of Obj.magic is unfortunate here , but it would not be present
if this function was implemented inside BatSubstring . Another
option would be to make BatSubstring.t a [ private ( string * int
* int ) ] and use a case here , but I 'm not sure it 's wise to expose
the representation publicly -- we may want to change , say , from
( string * start_pos * len ) to ( string * start_pos * end_pos ) .
if this function was implemented inside BatSubstring. Another
option would be to make BatSubstring.t a [private (string * int
* int)] and use a case here, but I'm not sure it's wise to expose
the representation publicly -- we may want to change, say, from
(string * start_pos * len) to (string * start_pos * end_pos). *)
let ssl : (string * int * int) list = Obj.magic (ssl : BatSubstring.t list) in
match ssl with
| [] -> ""
| (s,o,len)::tl ->
let total_len =
let rec count acc = function
| [] -> acc
| (_,_,l)::tl -> count (acc + sep_len + l) tl
in count len tl
in
let item = String.create total_len in
String.unsafe_blit s o item 0 len;
let pos = ref len in
let rec loop = function
| [] -> ()
| (s,o,len)::tl ->
String.unsafe_blit sep 0 item !pos sep_len;
pos := !pos + sep_len;
String.unsafe_blit s o item !pos len;
pos := !pos + len;
loop tl;
in loop tl;
item
should be , with a separator argument
let concat_simple ~sep ssl =
let sep_len = String.length sep in
(* see comment above about Obj.magic *)
let ssl : (string * int * int) list = Obj.magic (ssl : BatSubstring.t list) in
match ssl with
| [] -> ""
| (s,o,len)::tl ->
let total_len = List.fold_left (fun acc (_,_,l) -> acc+sep_len+l) len tl in
let item = String.create total_len in
String.unsafe_blit s o item 0 len;
let pos = ref len in
let write (s,o,len) =
String.unsafe_blit sep 0 item !pos sep_len;
pos := !pos + sep_len;
String.unsafe_blit s o item !pos len;
pos := !pos + len;
in
List.iter write tl;
item
let concat_enum ~sep enum =
match BatEnum.get enum with
| None -> ""
| Some hd ->
let buf = Buffer.create 100 in
Buffer.add_string buf (BatSubstring.to_string hd);
BatEnum.iter (fun substr ->
(* see comment above about Obj.magic *)
let (s,o,l) = (Obj.magic (substr : BatSubstring.t) : string * int * int) in
Buffer.add_string buf sep;
Buffer.add_substring buf s o l;
) enum;
Buffer.contents buf
let nreplace_substring_simple ~str ~sub ~by =
concat_simple ~sep:by (nsplit str sub)
let nreplace_substring_optimized ~str ~sub ~by =
concat_optimized ~sep:by (nsplit str sub)
let nreplace_substring_enum ~str ~sub ~by =
concat_enum ~sep:by (nsplit_enum str sub)
(* We tests these nreplace implementations on this very file, substituting various
* realistic words by others. *)
let long_text =
File.lines_of "benchsuite/bench.ml"
|> Enum.cycle ~times:100 |> List.of_enum |> concat ""
let do_bench_for_len length name =
let run rep iters =
for i=1 to iters do
(* "realistic" workload that attempts to exercise all interesting cases *)
let str = sub long_text 0 length in
let str = rep ~str ~sub:"let" ~by:"let there be light" in
let str = rep ~str ~sub:"nreplace" ~by:"nr" in
let str = rep ~str ~sub:"you wont find me" ~by:"" in
let str = rep ~str ~sub:"match" ~by:"match" in
let str = rep ~str ~sub:" " ~by:" " in
ignore str
done
in
Bench.bench_n [
"orig "^ name, run nreplace_orig ;
"glyn "^ name, run nreplace_glyn ;
"rxd "^ name, run nreplace_rxd ;
"thelema "^ name, run nreplace_thelema ;
"thelema2 "^ name, run nreplace_thelema2 ;
"madroach "^ name, run nreplace_madroach ;
"gasche simple "^ name, run nreplace_substring_simple ;
"gasche enum "^ name, run nreplace_substring_enum ;
"gasche optimized "^ name, run nreplace_substring_optimized ;
] |>
Bench.run_outputs
let main =
First check that all implementation performs the same
let check ~str ~sub ~by =
let outp = nreplace_orig ~str ~sub ~by in
List.iter (fun (d,rep) ->
let outp' = rep ~str ~sub ~by in
if outp' <> outp then (
Printf.fprintf stderr "Implementation %s failed for str:%S, sub:%S, by:%S got %S instead of %S\n"
d str sub by outp' outp ;
exit 1
)) [
"glyn", nreplace_glyn ;
"rxd", nreplace_rxd ;
"thelema", nreplace_thelema ;
"thelema2", nreplace_thelema2 ;
"madroach", nreplace_madroach ;
"gasche simple", nreplace_substring_simple ;
"gasche enum", nreplace_substring_enum ;
"gasche optimz", nreplace_substring_optimized
] in
check ~str:"foo bar baz" ~sub:"bar" ~by:"BAR" ;
check ~str:"foo bar baz" ~sub:"bar" ~by:"" ;
check ~str:"foo bar baz" ~sub:"a" ~by:"BAR" ;
check ~str:"foo bar baz" ~sub:" " ~by:" " ;
do_bench_for_len 100 "short" ;
print_endline "-------------------------------";
do_bench_for_len 1000 "long" ;
print_endline "-------------------------------";
do_bench_for_len 10000 "very long"
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/benchsuite/bench_nreplace.ml | ocaml | The original Batteries String.nreplace
allows loop to be tail recursive
still need the last chunk
still need the last chunk
collect all positions where we need to replace,
* skipping overlapping occurences
create destination string
do the replacement
see comment above about Obj.magic
see comment above about Obj.magic
We tests these nreplace implementations on this very file, substituting various
* realistic words by others.
"realistic" workload that attempts to exercise all interesting cases | Run with :
make bench BENCH_TARGETS = / bench_nreplace.native
make bench BENCH_TARGETS=benchsuite/bench_nreplace.native
*)
open Batteries
open String
let nreplace_orig ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let parts = nsplit str ~by:sub in
String.concat by parts
The suggestion from that started it all .
Notice that it replaces substrings from left to right instead of right to left .
Notice that it replaces substrings from left to right instead of right to left. *)
let nreplace_glyn ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let find_sub pos = try find_from str pos sub with Not_found -> -1 in
let sublen = length sub in
let strlen = length str in
let buffer = Buffer.create strlen in
let rec loop curpos =
if curpos = strlen then
Buffer.contents buffer
else
let subpos = find_sub curpos in
if subpos = -1 then
( Buffer.add_substring buffer str curpos (strlen - curpos) ;
Buffer.contents buffer )
else
( Buffer.add_substring buffer str curpos (subpos - curpos) ;
Buffer.add_string buffer by ;
loop (subpos + sublen) )
in
loop 0
Then suggested preallocating the final string . Here is a first
* implementation , performing two rfind_from which is apparently a very bad idea
* implementation, performing two rfind_from which is apparently a very bad idea *)
let nreplace_rxd ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst l i =
match (try Some (rfind_from str (i-1) sub) with Not_found -> None) with
| None -> l
| Some i' -> loop_subst (l + dlen) i' in
let newlen =
if dlen = 0 then strlen else loop_subst strlen strlen in
let newstr = create newlen in
let rec loop_copy i j =
match (try Some (rfind_from str (i-1) sub) with Not_found -> None) with
| None ->
still need the first chunk
String.unsafe_blit str 0 newstr 0 i
| Some i' ->
let j' = j - (i - i') - dlen in
newstring.[j .. end ] is already inited . Init from j ' to ( j-1 ) .
String.unsafe_blit by 0 newstr j' bylen ;
String.unsafe_blit str (i'+sublen) newstr (j'+bylen) (i-i'-sublen) ;
loop_copy i' j' in
loop_copy strlen newlen ;
newstr
So proposed a version without the double rfind_from
* ( taken from + small fix )
* (taken from + small fix) *)
let nreplace_thelema ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst idxes i =
match Exceptionless.rfind_from str (i-1) sub with
| None -> idxes
| Some i' -> loop_subst (i'::idxes) i' in
let idxes = loop_subst [] strlen in
let newlen = strlen + List.length idxes * dlen in
let newstr = create newlen in
let rec loop_copy i j idxes =
match idxes with
| [] ->
String.unsafe_blit str i newstr j (strlen-i)
| i'::rest ->
let di = i' - i in
String.unsafe_blit str i newstr j di ;
String.unsafe_blit by 0 newstr (j + di) bylen ;
loop_copy (i + di + sublen) (j + di + bylen) rest in
loop_copy 0 0 idxes ;
newstr
Same as above but avoiding the
let nreplace_thelema2 ~str ~sub ~by =
if sub = "" then invalid_arg "nreplace: cannot replace all empty substrings" ;
let strlen = length str in
let sublen = length sub in
let bylen = length by in
let dlen = bylen - sublen in
let rec loop_subst idxes newlen i =
match (try rfind_from str (i-1) sub with Not_found -> -1) with
| -1 -> idxes, newlen
| i' -> loop_subst (i'::idxes) (newlen+dlen) i' in
let idxes, newlen = loop_subst [] strlen strlen in
let newstr = create newlen in
let rec loop_copy i j idxes =
match idxes with
| [] ->
String.unsafe_blit str i newstr j (strlen-i)
| i'::rest ->
let di = i' - i in
String.unsafe_blit str i newstr j di ;
String.unsafe_blit by 0 newstr (j + di) bylen ;
loop_copy (i + di + sublen) (j + di + bylen) rest in
loop_copy 0 0 idxes ;
newstr
Independantly , MadRoach implemented the same idea with less luck aparently
let nreplace_madroach ~str ~sub ~by =
let strlen = String.length str
and sublen = String.length sub
and bylen = String.length by in
let rec find_simple ~sub ?(pos=0) str =
let find pos =
try BatString.find_from str pos sub with
Not_found -> raise BatEnum.No_more_elements
in
let nexti = ref pos in
BatEnum.from (fun () -> let i = find !nexti in nexti := i+1; i) in
let todo =
let skip_unto = ref 0 in
find_simple sub str |>
Enum.filter begin function
|i when i < !skip_unto -> false
|i -> skip_unto := i + sublen; true
end
in
let dst = String.create (strlen + Enum.count todo * (bylen - sublen)) in
let srci, dsti =
fold
begin fun (srci,dsti) i ->
let skiplen = i-srci in
String.blit str srci dst dsti skiplen;
String.blit by 0 dst (dsti+skiplen) bylen;
(srci+skiplen+sublen, dsti+skiplen+bylen)
end
(0,0)
todo
in
assert (strlen - srci = String.length dst - dsti);
String.blit str srci dst dsti (strlen - srci);
dst
Gasche had its own idea based on substrings .
Here are several versions , any of which seams faster than all the above .
See :
-batteries-team/batteries-included/pull/372#issuecomment-18399379
for a discussion .
Here are several versions, any of which seams faster than all the above.
See:
-batteries-team/batteries-included/pull/372#issuecomment-18399379
for a discussion.*)
should be
let nsplit str pat =
let pat_len = String.length pat in
let rec loop pos rev_subs =
let next_pos =
try BatString.find_from str pos pat
with Not_found -> -1
in
if next_pos = -1 then
(BatSubstring.extract str pos None :: rev_subs)
else
let sub = BatSubstring.unsafe_substring str pos (next_pos - pos) in
loop (next_pos + pat_len) (sub :: rev_subs)
in
List.rev (loop 0 [])
should be
let nsplit_enum str pat =
let pat_len = String.length pat in
let pos = ref 0 in
BatEnum.from (fun () ->
if !pos < 0 then raise BatEnum.No_more_elements else
try
let next_pos = BatString.find_from str !pos pat in
let sub = BatSubstring.unsafe_substring str !pos (next_pos - !pos) in
pos := next_pos + pat_len;
sub
with Not_found ->
let sub = BatSubstring.extract str !pos None in
pos := -1 ;
sub
)
should be , with a separator argument
let concat_optimized ~sep ssl =
let sep_len = String.length sep in
use of Obj.magic is unfortunate here , but it would not be present
if this function was implemented inside BatSubstring . Another
option would be to make BatSubstring.t a [ private ( string * int
* int ) ] and use a case here , but I 'm not sure it 's wise to expose
the representation publicly -- we may want to change , say , from
( string * start_pos * len ) to ( string * start_pos * end_pos ) .
if this function was implemented inside BatSubstring. Another
option would be to make BatSubstring.t a [private (string * int
* int)] and use a case here, but I'm not sure it's wise to expose
the representation publicly -- we may want to change, say, from
(string * start_pos * len) to (string * start_pos * end_pos). *)
let ssl : (string * int * int) list = Obj.magic (ssl : BatSubstring.t list) in
match ssl with
| [] -> ""
| (s,o,len)::tl ->
let total_len =
let rec count acc = function
| [] -> acc
| (_,_,l)::tl -> count (acc + sep_len + l) tl
in count len tl
in
let item = String.create total_len in
String.unsafe_blit s o item 0 len;
let pos = ref len in
let rec loop = function
| [] -> ()
| (s,o,len)::tl ->
String.unsafe_blit sep 0 item !pos sep_len;
pos := !pos + sep_len;
String.unsafe_blit s o item !pos len;
pos := !pos + len;
loop tl;
in loop tl;
item
should be , with a separator argument
let concat_simple ~sep ssl =
let sep_len = String.length sep in
let ssl : (string * int * int) list = Obj.magic (ssl : BatSubstring.t list) in
match ssl with
| [] -> ""
| (s,o,len)::tl ->
let total_len = List.fold_left (fun acc (_,_,l) -> acc+sep_len+l) len tl in
let item = String.create total_len in
String.unsafe_blit s o item 0 len;
let pos = ref len in
let write (s,o,len) =
String.unsafe_blit sep 0 item !pos sep_len;
pos := !pos + sep_len;
String.unsafe_blit s o item !pos len;
pos := !pos + len;
in
List.iter write tl;
item
let concat_enum ~sep enum =
match BatEnum.get enum with
| None -> ""
| Some hd ->
let buf = Buffer.create 100 in
Buffer.add_string buf (BatSubstring.to_string hd);
BatEnum.iter (fun substr ->
let (s,o,l) = (Obj.magic (substr : BatSubstring.t) : string * int * int) in
Buffer.add_string buf sep;
Buffer.add_substring buf s o l;
) enum;
Buffer.contents buf
let nreplace_substring_simple ~str ~sub ~by =
concat_simple ~sep:by (nsplit str sub)
let nreplace_substring_optimized ~str ~sub ~by =
concat_optimized ~sep:by (nsplit str sub)
let nreplace_substring_enum ~str ~sub ~by =
concat_enum ~sep:by (nsplit_enum str sub)
let long_text =
File.lines_of "benchsuite/bench.ml"
|> Enum.cycle ~times:100 |> List.of_enum |> concat ""
let do_bench_for_len length name =
let run rep iters =
for i=1 to iters do
let str = sub long_text 0 length in
let str = rep ~str ~sub:"let" ~by:"let there be light" in
let str = rep ~str ~sub:"nreplace" ~by:"nr" in
let str = rep ~str ~sub:"you wont find me" ~by:"" in
let str = rep ~str ~sub:"match" ~by:"match" in
let str = rep ~str ~sub:" " ~by:" " in
ignore str
done
in
Bench.bench_n [
"orig "^ name, run nreplace_orig ;
"glyn "^ name, run nreplace_glyn ;
"rxd "^ name, run nreplace_rxd ;
"thelema "^ name, run nreplace_thelema ;
"thelema2 "^ name, run nreplace_thelema2 ;
"madroach "^ name, run nreplace_madroach ;
"gasche simple "^ name, run nreplace_substring_simple ;
"gasche enum "^ name, run nreplace_substring_enum ;
"gasche optimized "^ name, run nreplace_substring_optimized ;
] |>
Bench.run_outputs
let main =
First check that all implementation performs the same
let check ~str ~sub ~by =
let outp = nreplace_orig ~str ~sub ~by in
List.iter (fun (d,rep) ->
let outp' = rep ~str ~sub ~by in
if outp' <> outp then (
Printf.fprintf stderr "Implementation %s failed for str:%S, sub:%S, by:%S got %S instead of %S\n"
d str sub by outp' outp ;
exit 1
)) [
"glyn", nreplace_glyn ;
"rxd", nreplace_rxd ;
"thelema", nreplace_thelema ;
"thelema2", nreplace_thelema2 ;
"madroach", nreplace_madroach ;
"gasche simple", nreplace_substring_simple ;
"gasche enum", nreplace_substring_enum ;
"gasche optimz", nreplace_substring_optimized
] in
check ~str:"foo bar baz" ~sub:"bar" ~by:"BAR" ;
check ~str:"foo bar baz" ~sub:"bar" ~by:"" ;
check ~str:"foo bar baz" ~sub:"a" ~by:"BAR" ;
check ~str:"foo bar baz" ~sub:" " ~by:" " ;
do_bench_for_len 100 "short" ;
print_endline "-------------------------------";
do_bench_for_len 1000 "long" ;
print_endline "-------------------------------";
do_bench_for_len 10000 "very long"
|
a6fd4b74d0ca9397356a2292946a747ebc5f83665d66ce5402af95545e4aeb07 | ChrisPenner/mad-props | Links.hs | # LANGUAGE ScopedTypeVariables #
module Props.Internal.Links
( disjoint
, equal
, require
) where
import qualified Data.Set as S
import Props.Internal.PropT
|
Apply the constraint that two variables may NOT be set to the same value . This constraint is bidirectional .
E.g. you might apply this constraint to two cells in the same row of sudoku grid to assert they do n't contain the same value .
Apply the constraint that two variables may NOT be set to the same value. This constraint is bidirectional.
E.g. you might apply this constraint to two cells in the same row of sudoku grid to assert they don't contain the same value.
-}
disjoint :: forall a m. (Monad m, Ord a) => PVar S.Set a -> PVar S.Set a -> PropT m ()
disjoint a b = do
constrain a b disj
constrain b a disj
where
disj :: a -> S.Set a -> S.Set a
disj x xs = S.delete x xs
|
Apply the constraint that two variables MUST be set to the same value . This constraint is bidirectional .
Apply the constraint that two variables MUST be set to the same value. This constraint is bidirectional.
-}
equal :: forall a m. (Monad m, Ord a) => PVar S.Set a -> PVar S.Set a -> PropT m ()
equal a b = do
constrain a b eq
constrain b a eq
where
eq :: a -> S.Set a -> S.Set a
eq x xs | x `S.member` xs = S.singleton x
| otherwise = S.empty
|
Given a choice for @a@ ; filter for valid options of @b@ using the given predicate .
E.g. if @a@ must always be greater than @b@ , you could require :
> require ( > ) a b
Given a choice for @a@; filter for valid options of @b@ using the given predicate.
E.g. if @a@ must always be greater than @b@, you could require:
> require (>) a b
-}
require :: Monad m => (a -> b -> Bool) -> PVar S.Set a -> PVar S.Set b -> PropT m ()
require f a b = do
constrain a b (S.filter . f)
| null | https://raw.githubusercontent.com/ChrisPenner/mad-props/a1fd2a5fff0f2da07fb15018dea5164ca225ebc3/src/Props/Internal/Links.hs | haskell | # LANGUAGE ScopedTypeVariables #
module Props.Internal.Links
( disjoint
, equal
, require
) where
import qualified Data.Set as S
import Props.Internal.PropT
|
Apply the constraint that two variables may NOT be set to the same value . This constraint is bidirectional .
E.g. you might apply this constraint to two cells in the same row of sudoku grid to assert they do n't contain the same value .
Apply the constraint that two variables may NOT be set to the same value. This constraint is bidirectional.
E.g. you might apply this constraint to two cells in the same row of sudoku grid to assert they don't contain the same value.
-}
disjoint :: forall a m. (Monad m, Ord a) => PVar S.Set a -> PVar S.Set a -> PropT m ()
disjoint a b = do
constrain a b disj
constrain b a disj
where
disj :: a -> S.Set a -> S.Set a
disj x xs = S.delete x xs
|
Apply the constraint that two variables MUST be set to the same value . This constraint is bidirectional .
Apply the constraint that two variables MUST be set to the same value. This constraint is bidirectional.
-}
equal :: forall a m. (Monad m, Ord a) => PVar S.Set a -> PVar S.Set a -> PropT m ()
equal a b = do
constrain a b eq
constrain b a eq
where
eq :: a -> S.Set a -> S.Set a
eq x xs | x `S.member` xs = S.singleton x
| otherwise = S.empty
|
Given a choice for @a@ ; filter for valid options of @b@ using the given predicate .
E.g. if @a@ must always be greater than @b@ , you could require :
> require ( > ) a b
Given a choice for @a@; filter for valid options of @b@ using the given predicate.
E.g. if @a@ must always be greater than @b@, you could require:
> require (>) a b
-}
require :: Monad m => (a -> b -> Bool) -> PVar S.Set a -> PVar S.Set b -> PropT m ()
require f a b = do
constrain a b (S.filter . f)
| |
72ba0c1df26889aeeea7f672010c45e8aa25e558ac75fabbd05dd56d1c4e48c3 | ConsumerDataStandardsAustralia/validation-prototype | PrismTestHelpers.hs | {-# LANGUAGE RankNTypes #-}
module Web.ConsumerData.Au.Api.Types.PrismTestHelpers where
import Control.Lens
import Hedgehog (property, forAll, tripping)
import qualified Hedgehog.Gen as Gen
import Test.Tasty (TestName, TestTree)
import Test.Tasty.Hedgehog (testProperty)
testEnumPrismTripping :: (Enum a, Bounded a, Show a, Eq a, Show b) => TestName -> Prism' b a -> TestTree
testEnumPrismTripping tn p = testProperty tn . property $ do
a <- forAll Gen.enumBounded
tripping a (p #) (^? p)
| null | https://raw.githubusercontent.com/ConsumerDataStandardsAustralia/validation-prototype/ff63338b77339ee49fa3e0be5bb9d7f74e50c28b/consumer-data-au-api-types/tests/Web/ConsumerData/Au/Api/Types/PrismTestHelpers.hs | haskell | # LANGUAGE RankNTypes # | module Web.ConsumerData.Au.Api.Types.PrismTestHelpers where
import Control.Lens
import Hedgehog (property, forAll, tripping)
import qualified Hedgehog.Gen as Gen
import Test.Tasty (TestName, TestTree)
import Test.Tasty.Hedgehog (testProperty)
testEnumPrismTripping :: (Enum a, Bounded a, Show a, Eq a, Show b) => TestName -> Prism' b a -> TestTree
testEnumPrismTripping tn p = testProperty tn . property $ do
a <- forAll Gen.enumBounded
tripping a (p #) (^? p)
|
c9585fc5a3eacb9b99bf0842b3f3d781b8209480ef30286472b4f6f983b1de3a | merlin-lang/merlin | Merlin_OpenFlow.ml | open Frenetic_OpenFlow
open Pattern
open Merlin_Types
open Merlin_Error
module NK = Frenetic_NetKAT
type pattern = Pattern.t
let all_pattern = match_all
exception Empty_pat
let mk_flow (pat:pattern) (group:group) : Frenetic_OpenFlow.flow =
{ pattern = pat;
action = group;
cookie = 0L;
idle_timeout = Permanent;
hard_timeout = Permanent }
let group_union (g1:group) (g2:group) : group =
match g1, g2 with
| [s1],[s2] ->
[s1 @ s2]
| [], [s] -> [s]
| [s], [] -> [s]
| _ ->
raise (Impossible_group_union (g1,g2))
Intersect two patterns . If either is non - empty , then both must be the same
JNF : " must both the the same " ? This is not what set - theoretic
intersection means ...
intersection means... *)
let pat_inter (pat1:pattern) (pat2:pattern) =
let f vo1 vo2 = match vo1,vo2 with
| Some v1, Some v2 ->
if v1 = v2 then Some v1
else raise Empty_pat
| Some v1, None ->
Some v1
| None, Some v2 ->
Some v2
| None, None ->
None in
try
Some
{ dlSrc = f pat1.dlSrc pat2.dlSrc
; dlDst = f pat1.dlDst pat2.dlDst
; dlTyp = f pat1.dlTyp pat2.dlTyp
; dlVlan = f pat1.dlVlan pat2.dlVlan
; dlVlanPcp = f pat1.dlVlanPcp pat2.dlVlanPcp
; nwSrc = f pat1.nwSrc pat2.nwSrc
; nwDst = f pat1.nwDst pat2.nwDst
; nwProto = f pat1.nwProto pat2.nwProto
; tpSrc = f pat1.tpSrc pat2.tpSrc
; tpDst = f pat1.tpSrc pat2.tpDst
; inPort = f pat1.inPort pat2.inPort }
with Empty_pat ->
None
Two patterns are equal if they both have the same fields and the values for
present fields are the same
present fields are the same *)
JNF : not equal ; subset or equal
let pat_subseteq (pat1:pattern) (pat2:pattern) =
(* Printf.printf "Calling pat_subseteq\n%!"; *)
let f vo1 vo2 =
match vo1,vo2 with
| _,None -> true
| None,_ -> false
| Some v1, Some v2 -> v1 = v2 in
( f pat1.dlSrc pat2.dlSrc
&& f pat1.dlDst pat2.dlDst
&& f pat1.dlTyp pat2.dlTyp
&& f pat1.dlVlan pat2.dlVlan
&& f pat1.dlVlanPcp pat2.dlVlanPcp
&& f pat1.nwSrc pat2.nwSrc
&& f pat1.nwDst pat2.nwDst
&& f pat1.nwProto pat2.nwProto
&& f pat1.tpSrc pat2.tpSrc
&& f pat1.tpSrc pat2.tpDst
&& f pat1.inPort pat2.inPort )
Deduplicate flowtable entries with subseteq patterns
let optimize (t:flowTable) : flowTable =
List.rev
(List.fold_left
(fun acc flow ->
if List.exists (fun flow' -> pat_subseteq flow.pattern flow'.pattern) acc then acc
else flow::acc)
[] t)
let rec inter f (t1:flowTable) (t2:flowTable) =
List.fold_right
(fun flow1 acc ->
List.fold_right
(fun flow2 acc ->
match pat_inter flow1.pattern flow2.pattern with
| Some pat ->
mk_flow pat (f flow1.action flow2.action)::acc
| None ->
acc)
t2 acc)
t1 []
(* Union of t1 & t2 is their intersection followed by each of them, so all
cases are captured *)
let rec union f (t1:flowTable) (t2:flowTable) : flowTable =
inter f t1 t2 @ t1 @ t2
let rec negate f t =
List.map (fun flow -> { flow with action = f flow.action }) t
CPS compiler
let compile (pred : pred) (acts : action list) : flowTable =
let rec aux pred k =
match pred with
| Test(NK.Switch(_)) ->
k [ mk_flow all_pattern [[acts]] ]
| Test(NK.Location(NK.Physical n)) ->
k [ mk_flow { all_pattern with inPort = Some n } [[acts]] ]
| Test(NK.EthSrc n) ->
k [ mk_flow { all_pattern with dlSrc = Some n } [[acts]] ]
| Test(NK.EthDst n) ->
k [ mk_flow { all_pattern with dlDst = Some n } [[acts]] ]
| Test(NK.Vlan n) ->
k [ mk_flow { all_pattern with dlVlan = Some n } [[acts]] ]
| Test(NK.VlanPcp n) ->
k [ mk_flow { all_pattern with dlVlanPcp = Some n } [[acts]] ]
| Test(NK.EthType n) ->
k [ mk_flow { all_pattern with dlTyp = Some n } [[acts]] ]
| Test(NK.IPProto n) ->
k [ mk_flow { all_pattern with nwProto = Some n } [[acts]] ]
| Test(NK.IP4Src (n,_)) ->
k [ mk_flow { all_pattern with nwSrc = Some (n, 0x20l) } [[acts]] ]
| Test(NK.IP4Dst (n,_)) ->
k [ mk_flow { all_pattern with nwDst = Some (n, 0x20l) } [[acts]] ]
| Test(NK.TCPSrcPort n) ->
k [ mk_flow { all_pattern with tpSrc = Some n } [[acts]] ]
| Test(NK.TCPDstPort n) ->
k [ mk_flow { all_pattern with tpDst = Some n } [[acts]] ]
| Test _ ->
failwith "unsupported test"
| Or(pred1,pred2) ->
let f x y = x in
aux pred1 (fun t1 ->
aux pred2 (fun t2 ->
k (optimize (union f t1 t2))))
| And(pred1,pred2) ->
let f x y = x in
aux pred1 (fun t1 ->
aux pred2 (fun t2 ->
k (optimize (inter f t1 t2))))
| Not(pred) ->
let f x = if x = [] then [[acts]] else [] in
aux pred (fun t ->
k (negate f t))
| Everything ->
k [ mk_flow all_pattern [[acts]] ]
| Nothing ->
k [ mk_flow all_pattern [] ] in
aux pred (fun x -> optimize x)
let compile_flowtable (sw:Int64.t) (ofls: (pred * action list) list)
: flowTable =
List.fold_left
(fun acc (pat,acts) ->
let t = compile pat acts in
optimize (union group_union t acc))
[ mk_flow all_pattern [[]] ]
ofls
| null | https://raw.githubusercontent.com/merlin-lang/merlin/35a88bce024a8b8be858c796f1cd718e4a660529/lib/Merlin_OpenFlow.ml | ocaml | Printf.printf "Calling pat_subseteq\n%!";
Union of t1 & t2 is their intersection followed by each of them, so all
cases are captured | open Frenetic_OpenFlow
open Pattern
open Merlin_Types
open Merlin_Error
module NK = Frenetic_NetKAT
type pattern = Pattern.t
let all_pattern = match_all
exception Empty_pat
let mk_flow (pat:pattern) (group:group) : Frenetic_OpenFlow.flow =
{ pattern = pat;
action = group;
cookie = 0L;
idle_timeout = Permanent;
hard_timeout = Permanent }
let group_union (g1:group) (g2:group) : group =
match g1, g2 with
| [s1],[s2] ->
[s1 @ s2]
| [], [s] -> [s]
| [s], [] -> [s]
| _ ->
raise (Impossible_group_union (g1,g2))
Intersect two patterns . If either is non - empty , then both must be the same
JNF : " must both the the same " ? This is not what set - theoretic
intersection means ...
intersection means... *)
let pat_inter (pat1:pattern) (pat2:pattern) =
let f vo1 vo2 = match vo1,vo2 with
| Some v1, Some v2 ->
if v1 = v2 then Some v1
else raise Empty_pat
| Some v1, None ->
Some v1
| None, Some v2 ->
Some v2
| None, None ->
None in
try
Some
{ dlSrc = f pat1.dlSrc pat2.dlSrc
; dlDst = f pat1.dlDst pat2.dlDst
; dlTyp = f pat1.dlTyp pat2.dlTyp
; dlVlan = f pat1.dlVlan pat2.dlVlan
; dlVlanPcp = f pat1.dlVlanPcp pat2.dlVlanPcp
; nwSrc = f pat1.nwSrc pat2.nwSrc
; nwDst = f pat1.nwDst pat2.nwDst
; nwProto = f pat1.nwProto pat2.nwProto
; tpSrc = f pat1.tpSrc pat2.tpSrc
; tpDst = f pat1.tpSrc pat2.tpDst
; inPort = f pat1.inPort pat2.inPort }
with Empty_pat ->
None
Two patterns are equal if they both have the same fields and the values for
present fields are the same
present fields are the same *)
JNF : not equal ; subset or equal
let pat_subseteq (pat1:pattern) (pat2:pattern) =
let f vo1 vo2 =
match vo1,vo2 with
| _,None -> true
| None,_ -> false
| Some v1, Some v2 -> v1 = v2 in
( f pat1.dlSrc pat2.dlSrc
&& f pat1.dlDst pat2.dlDst
&& f pat1.dlTyp pat2.dlTyp
&& f pat1.dlVlan pat2.dlVlan
&& f pat1.dlVlanPcp pat2.dlVlanPcp
&& f pat1.nwSrc pat2.nwSrc
&& f pat1.nwDst pat2.nwDst
&& f pat1.nwProto pat2.nwProto
&& f pat1.tpSrc pat2.tpSrc
&& f pat1.tpSrc pat2.tpDst
&& f pat1.inPort pat2.inPort )
Deduplicate flowtable entries with subseteq patterns
let optimize (t:flowTable) : flowTable =
List.rev
(List.fold_left
(fun acc flow ->
if List.exists (fun flow' -> pat_subseteq flow.pattern flow'.pattern) acc then acc
else flow::acc)
[] t)
let rec inter f (t1:flowTable) (t2:flowTable) =
List.fold_right
(fun flow1 acc ->
List.fold_right
(fun flow2 acc ->
match pat_inter flow1.pattern flow2.pattern with
| Some pat ->
mk_flow pat (f flow1.action flow2.action)::acc
| None ->
acc)
t2 acc)
t1 []
let rec union f (t1:flowTable) (t2:flowTable) : flowTable =
inter f t1 t2 @ t1 @ t2
let rec negate f t =
List.map (fun flow -> { flow with action = f flow.action }) t
CPS compiler
let compile (pred : pred) (acts : action list) : flowTable =
let rec aux pred k =
match pred with
| Test(NK.Switch(_)) ->
k [ mk_flow all_pattern [[acts]] ]
| Test(NK.Location(NK.Physical n)) ->
k [ mk_flow { all_pattern with inPort = Some n } [[acts]] ]
| Test(NK.EthSrc n) ->
k [ mk_flow { all_pattern with dlSrc = Some n } [[acts]] ]
| Test(NK.EthDst n) ->
k [ mk_flow { all_pattern with dlDst = Some n } [[acts]] ]
| Test(NK.Vlan n) ->
k [ mk_flow { all_pattern with dlVlan = Some n } [[acts]] ]
| Test(NK.VlanPcp n) ->
k [ mk_flow { all_pattern with dlVlanPcp = Some n } [[acts]] ]
| Test(NK.EthType n) ->
k [ mk_flow { all_pattern with dlTyp = Some n } [[acts]] ]
| Test(NK.IPProto n) ->
k [ mk_flow { all_pattern with nwProto = Some n } [[acts]] ]
| Test(NK.IP4Src (n,_)) ->
k [ mk_flow { all_pattern with nwSrc = Some (n, 0x20l) } [[acts]] ]
| Test(NK.IP4Dst (n,_)) ->
k [ mk_flow { all_pattern with nwDst = Some (n, 0x20l) } [[acts]] ]
| Test(NK.TCPSrcPort n) ->
k [ mk_flow { all_pattern with tpSrc = Some n } [[acts]] ]
| Test(NK.TCPDstPort n) ->
k [ mk_flow { all_pattern with tpDst = Some n } [[acts]] ]
| Test _ ->
failwith "unsupported test"
| Or(pred1,pred2) ->
let f x y = x in
aux pred1 (fun t1 ->
aux pred2 (fun t2 ->
k (optimize (union f t1 t2))))
| And(pred1,pred2) ->
let f x y = x in
aux pred1 (fun t1 ->
aux pred2 (fun t2 ->
k (optimize (inter f t1 t2))))
| Not(pred) ->
let f x = if x = [] then [[acts]] else [] in
aux pred (fun t ->
k (negate f t))
| Everything ->
k [ mk_flow all_pattern [[acts]] ]
| Nothing ->
k [ mk_flow all_pattern [] ] in
aux pred (fun x -> optimize x)
let compile_flowtable (sw:Int64.t) (ofls: (pred * action list) list)
: flowTable =
List.fold_left
(fun acc (pat,acts) ->
let t = compile pat acts in
optimize (union group_union t acc))
[ mk_flow all_pattern [[]] ]
ofls
|
7897bcafb637623cd94b24aa69c0137a9aa9aef3b3aebaf241ca4e723f8322b7 | ayazhafiz/plts | language.ml | type term =
| Num of int
| Var of string
| Abs of string * term
| App of term * term
| Record of (string * term) list
| RecordProject of term * string
| Let of { is_rec : bool; name : string; rhs : term; body : term }
type toplevel = { is_rec : bool; name : string; body : term }
type var_state = {
uid : int;
level : int;
(** The level at which the variable should be generalized.
Useful in representing let polymorphism, which can be deeply nested in
many "let" terms, each term being more general than the last. *)
mutable lower_bounds : simple_ty list;
mutable upper_bounds : simple_ty list;
}
(** Types inferred from the frontend *)
and simple_ty =
| STyVar of var_state
| STyPrim of string
| STyFn of simple_ty * simple_ty
| STyRecord of (string * simple_ty) list
type poly_ty = PolyTy of int (* level *) * simple_ty
type polar_var =
| Positive of var_state (** Positive variables are in output positions *)
| Negative of var_state (** Negative variables are in input positions *)
(** Types after inference and constraining of simple types *)
type ty =
| TyTop
| TyBottom
| TyUnion of ty * ty
| TyIntersection of ty * ty
| TyFn of ty * ty
| TyRecord of (string * ty) list
Must always be a TyVar
| TyVar of string
| TyPrim of string
module VarStOrder = struct
type t = var_state
let compare a b = compare a.uid b.uid
end
module VarSet = Set.Make (VarStOrder)
module StringSet = Set.Make (String)
type compact_ty = {
vars : VarSet.t;
prims : StringSet.t;
rcd : (string * compact_ty) list option;
fn : (compact_ty * compact_ty) option;
}
(** Describes a union or intersection with different type components.
Useful as an IR during simplification. *)
type compact_ty_scheme = {
ty : compact_ty;
rec_vars : (var_state * compact_ty) list;
}
| null | https://raw.githubusercontent.com/ayazhafiz/plts/59b3996642f4fd5941c96a4987643303acc3dee6/simple_sub/lib/language.ml | ocaml | * The level at which the variable should be generalized.
Useful in representing let polymorphism, which can be deeply nested in
many "let" terms, each term being more general than the last.
* Types inferred from the frontend
level
* Positive variables are in output positions
* Negative variables are in input positions
* Types after inference and constraining of simple types
* Describes a union or intersection with different type components.
Useful as an IR during simplification. | type term =
| Num of int
| Var of string
| Abs of string * term
| App of term * term
| Record of (string * term) list
| RecordProject of term * string
| Let of { is_rec : bool; name : string; rhs : term; body : term }
type toplevel = { is_rec : bool; name : string; body : term }
type var_state = {
uid : int;
level : int;
mutable lower_bounds : simple_ty list;
mutable upper_bounds : simple_ty list;
}
and simple_ty =
| STyVar of var_state
| STyPrim of string
| STyFn of simple_ty * simple_ty
| STyRecord of (string * simple_ty) list
type polar_var =
type ty =
| TyTop
| TyBottom
| TyUnion of ty * ty
| TyIntersection of ty * ty
| TyFn of ty * ty
| TyRecord of (string * ty) list
Must always be a TyVar
| TyVar of string
| TyPrim of string
module VarStOrder = struct
type t = var_state
let compare a b = compare a.uid b.uid
end
module VarSet = Set.Make (VarStOrder)
module StringSet = Set.Make (String)
type compact_ty = {
vars : VarSet.t;
prims : StringSet.t;
rcd : (string * compact_ty) list option;
fn : (compact_ty * compact_ty) option;
}
type compact_ty_scheme = {
ty : compact_ty;
rec_vars : (var_state * compact_ty) list;
}
|
82a04bdf0a556b6a84f0ac06829b3ae7a9cb235b003a3ffdbcd01416cf35287d | informatimago/lisp | macros.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : macros.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
Defines DEFUN and LAMBDA , to deal with interactive declarations .
;;;;
< PJB > < >
MODIFICATIONS
2015 - 01 - 11 < PJB > Extracted from editor.lisp
;;;;LEGAL
AGPL3
;;;;
Copyright 2015 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program. If not, see </>.
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.EDITOR")
;;;---------------------------------------------------------------------
;;; Commands: interactive functions
;;;---------------------------------------------------------------------
;;;
;;; We want to define commands, with a special INTERACTIVE
declaration . So we need to use our own DEFUN ( and LAMBDA ) macros .
(declaim (declaration interactive))
(defvar *interactive-decls* (make-hash-table #+clisp :weak #+clisp :key)
"A map of commands name or functions to INTERACTIVE declarations.")
(defmacro defun (name arguments &body body)
"Do additionnal book-keeping over CL:DEFUN, for INTERACTIVE commands."
(let* ((decls (mapcan (function rest) (extract-declarations body)))
(inter (find 'interactive decls :key (function first))))
(if inter
`(progn
(cl:defun ,name ,arguments ,@body)
(setf (gethash ',name *interactive-decls*) ',inter
(gethash (function ,name) *interactive-decls*) ',inter)
',name)
`(progn
(cl:defun ,name ,arguments ,@body)
(remhash ',name *interactive-decls*)
(remhash (function ,name) *interactive-decls*)
',name))))
(defmacro lambda (arguments &body body)
"Do additionnal bookkeeping over CL:LAMBDA, for INTERACTIVE commands."
(let* ((decls (mapcan (function rest) (extract-declarations body)))
(inter (find 'interactive decls :key (function first))))
(if inter
`(flet ((anonymous-function ,arguments ,@body))
(setf (gethash (function anonymous-function) *interactive-decls*) ',inter)
(function anonymous-function))
`(cl:lambda ,arguments ,@body))))
(defun interactivep (fundesc)
"Whether the function FUNCDESC is INTERACTIVE."
(gethash fundesc *interactive-decls*))
(defun getenv (var)
#+asdf3 (uiop:getenv var)
#-asdf3 (asdf::getenv var))
(defun (setf getenv) (new-val var)
#+asdf3 (setf (uiop:getenv var) new-val)
#-asdf3 (setf (asdf::getenv var) new-val))
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/editor/macros.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
**************************************************************************
---------------------------------------------------------------------
Commands: interactive functions
---------------------------------------------------------------------
We want to define commands, with a special INTERACTIVE
THE END ;;;; | FILE : macros.lisp
USER - INTERFACE :
Defines DEFUN and LAMBDA , to deal with interactive declarations .
< PJB > < >
MODIFICATIONS
2015 - 01 - 11 < PJB > Extracted from editor.lisp
AGPL3
Copyright 2015 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.EDITOR")
declaration . So we need to use our own DEFUN ( and LAMBDA ) macros .
(declaim (declaration interactive))
(defvar *interactive-decls* (make-hash-table #+clisp :weak #+clisp :key)
"A map of commands name or functions to INTERACTIVE declarations.")
(defmacro defun (name arguments &body body)
"Do additionnal book-keeping over CL:DEFUN, for INTERACTIVE commands."
(let* ((decls (mapcan (function rest) (extract-declarations body)))
(inter (find 'interactive decls :key (function first))))
(if inter
`(progn
(cl:defun ,name ,arguments ,@body)
(setf (gethash ',name *interactive-decls*) ',inter
(gethash (function ,name) *interactive-decls*) ',inter)
',name)
`(progn
(cl:defun ,name ,arguments ,@body)
(remhash ',name *interactive-decls*)
(remhash (function ,name) *interactive-decls*)
',name))))
(defmacro lambda (arguments &body body)
"Do additionnal bookkeeping over CL:LAMBDA, for INTERACTIVE commands."
(let* ((decls (mapcan (function rest) (extract-declarations body)))
(inter (find 'interactive decls :key (function first))))
(if inter
`(flet ((anonymous-function ,arguments ,@body))
(setf (gethash (function anonymous-function) *interactive-decls*) ',inter)
(function anonymous-function))
`(cl:lambda ,arguments ,@body))))
(defun interactivep (fundesc)
"Whether the function FUNCDESC is INTERACTIVE."
(gethash fundesc *interactive-decls*))
(defun getenv (var)
#+asdf3 (uiop:getenv var)
#-asdf3 (asdf::getenv var))
(defun (setf getenv) (new-val var)
#+asdf3 (setf (uiop:getenv var) new-val)
#-asdf3 (setf (asdf::getenv var) new-val))
|
32af9495890e9c516e5f4c9f82da021b9b0a09b01eaac7eed1fc565f8d493cd4 | theodormoroianu/SecondYearCourses | HaskellChurch_20210415163610.hs | {-# LANGUAGE RankNTypes #-}
module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
An instance to show as regular Booleans
instance Show CBool where
show b = "cBool " <> show (cIf b True False)
The boolean constant true always chooses the first alternative
cTrue :: CBool
cTrue = undefined
The boolean constant false always chooses the second alternative
cFalse :: CBool
cFalse = undefined
cBool :: Bool -> CBool
cBool True = cTrue
cBool False = cFalse
--The boolean negation switches the alternatives
cNot :: CBool -> CBool
cNot = undefined
--The boolean conjunction can be built as a conditional
(&&:) :: CBool -> CBool -> CBool
(&&:) = undefined
infixr 3 &&:
--The boolean disjunction can be built as a conditional
(||:) :: CBool -> CBool -> CBool
(||:) = undefined
infixr 2 ||:
-- a pair is a way to compute something based on the values
-- contained within the pair.
newtype CPair a b = CPair { cOn :: forall c . (a -> b -> c) -> c }
An instance to show CPairs as regular pairs .
instance (Show a, Show b) => Show (CPair a b) where
show p = "cPair " <> show (cOn p (,))
builds a pair out of two values as an object which , when given
--a function to be applied on the values, it will apply it on them.
cPair :: a -> b -> CPair a b
cPair = undefined
first projection uses the function selecting first component on a pair
cFst :: CPair a b -> a
cFst = undefined
second projection
cSnd :: CPair a b -> b
cSnd = undefined
-- A natural number is any way to iterate a function s a number of times
-- over an initial value z
newtype CNat = CNat { cFor :: forall t. (t -> t) -> t -> t }
-- An instance to show CNats as regular natural numbers
instance Show CNat where
show n = show $ cFor n (1 +) (0 :: Integer)
--0 will iterate the function s 0 times over z, producing z
c0 :: CNat
c0 = undefined
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat
c1 = undefined
--Successor n either
- applies s one more time in addition to what n does
-- - iterates s n times over (s z)
cS :: CNat -> CNat
cS = undefined
--Addition of m and n is done by iterating s n times over m
(+:) :: CNat -> CNat -> CNat
(+:) = undefined
infixl 6 +:
--Multiplication of m and n can be done by composing n and m
(*:) :: CNat -> CNat -> CNat
(*:) = \n m -> CNat $ cFor n . cFor m
infixl 7 *:
--Exponentiation of m and n can be done by applying n to m
(^:) :: CNat -> CNat -> CNat
(^:) = \m n -> CNat $ cFor n (cFor m)
infixr 8 ^:
--Testing whether a value is 0 can be done through iteration
-- using a function constantly false and an initial value true
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
Predecessor ( evaluating to 0 for 0 ) can be defined iterating
over pairs , starting from an initial value ( 0 , 0 )
cPred :: CNat -> CNat
cPred = undefined
substraction from m n ( evaluating to 0 if m < n ) is repeated application
-- of the predeccesor function
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
Transform a value into a CNat ( should yield c0 for nums < = 0 )
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
-- arithmetic
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
-- m is less than (or equal to) n if when substracting n from m we get 0
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
-- equality on naturals can be defined my means of comparisons
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
--Fun with arithmetic and pairs
--Define factorial. You can iterate over a pair to contain the current index and so far factorial
cFactorial :: CNat -> CNat
cFactorial = undefined
Define Fibonacci . You can iterate over a pair to contain two consecutive numbers in the sequence
cFibonacci :: CNat -> CNat
cFibonacci = undefined
--Given m and n, compute q and r satisfying m = q * n + r. If n is not 0 then r should be less than n.
--hint repeated substraction, iterated for at most m times.
cDivMod :: CNat -> CNat -> CPair CNat CNat
cDivMod = undefined
-- a list is a way to aggregate a sequence of elements given an aggregation function and an initial value.
newtype CList a = CList { cFoldR :: forall b. (a -> b -> b) -> b -> b }
make CList an instance of Foldable
instance Foldable CList where
--An instance to show CLists as regular lists.
instance (Show a) => Show (CList a) where
show l = "cList " <> (show $ toList l)
-- The empty list is that which when aggregated it will always produce the initial value
cNil :: CList a
cNil = undefined
-- Adding an element to a list means that, when aggregating the list, the newly added
-- element will be aggregated with the result obtained by aggregating the remainder of the list
(.:) :: a -> CList a -> CList a
(.:) = undefined
-- we can
cList :: [a] -> CList a
cList = undefined
churchNatList :: [Integer] -> Term
churchNatList = churchList . map churchNat
cNatList :: [Integer] -> CList CNat
cNatList = cList . map cNat
churchSum :: Term
churchSum = lam "l" (v "l" $$ churchPlus $$ church0)
cSum :: CList CNat -> CNat
since CList is an instance of Foldable ; otherwise : \l - > cFoldR l ( + ) 0
churchIsNil :: Term
churchIsNil = lam "l" (v "l" $$ lams ["x", "a"] churchFalse $$ churchTrue)
cIsNil :: CList a -> CBool
cIsNil = \l -> cFoldR l (\_ _ -> cFalse) cTrue
churchHead :: Term
churchHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cHead :: CList a -> a -> a
cHead = \l d -> cFoldR l (\x _ -> x) d
churchTail :: Term
churchTail = lam "l" (churchFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (churchPair $$ v "t" $$ (churchCons $$ v "x" $$ v "t"))
$$ (churchSnd $$ v "p"))
$$ (churchPair $$ churchNil $$ churchNil)
))
cTail :: CList a -> CList a
cTail = \l -> cFst $ cFoldR l (\x p -> (\t -> cPair t (x .: t)) (cSnd p)) (cPair cNil cNil)
cLength :: CList a -> CNat
cLength = \l -> cFoldR l (\_ n -> cS n) 0
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
divmod :: (Enum a, Num a, Ord b, Num b) => b -> b -> (a, b)
divmod m n = divmod' (0, 0)
where
divmod' (x, y)
| x' <= m = divmod' (x', succ y)
| otherwise = (y, m - x)
where x' = x + n
divmod' m n =
if n == 0 then (0, m)
else
Function.fix
(\f p ->
(\x' ->
if x' > 0 then f ((,) (succ (fst p)) x')
else if (<=) n (snd p) then ((,) (succ (fst p)) 0)
else p)
((-) (snd p) n))
(0, m)
churchDivMod' :: Term
churchDivMod' = lams ["m", "n"]
(churchIs0 $$ v "n"
$$ (churchPair $$ church0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(churchIs0 $$ v "x"
$$ (churchLte $$ v "n" $$ (churchSnd $$ v "p")
$$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ church0)
$$ v "p"
)
$$ (v "f" $$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ v "x"))
)
$$ (churchSub $$ (churchSnd $$ v "p") $$ v "n")
)
$$ (churchPair $$ church0 $$ v "m")
)
)
churchSudan :: Term
churchSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(churchIs0 $$ v "n"
$$ (churchPlus $$ v "x" $$ v "y")
$$ (churchIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (churchPred $$ v "n")
$$ v "fnpy"
$$ (churchPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (churchPred $$ v "y"))
)
)
))
churchAckermann :: Term
churchAckermann = fix $$ lam "A" (lams ["m", "n"]
(churchIs0 $$ v "m"
$$ (churchS $$ v "n")
$$ (churchIs0 $$ v "n"
$$ (v "A" $$ (churchPred $$ v "m") $$ church1)
$$ (v "A" $$ (churchPred $$ v "m")
$$ (v "A" $$ v "m" $$ (churchPred $$ v "n")))
)
)
)
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/HaskellChurch_20210415163610.hs | haskell | # LANGUAGE RankNTypes #
The boolean negation switches the alternatives
The boolean conjunction can be built as a conditional
The boolean disjunction can be built as a conditional
a pair is a way to compute something based on the values
contained within the pair.
a function to be applied on the values, it will apply it on them.
A natural number is any way to iterate a function s a number of times
over an initial value z
An instance to show CNats as regular natural numbers
0 will iterate the function s 0 times over z, producing z
Successor n either
- iterates s n times over (s z)
Addition of m and n is done by iterating s n times over m
Multiplication of m and n can be done by composing n and m
Exponentiation of m and n can be done by applying n to m
Testing whether a value is 0 can be done through iteration
using a function constantly false and an initial value true
of the predeccesor function
arithmetic
m is less than (or equal to) n if when substracting n from m we get 0
equality on naturals can be defined my means of comparisons
Fun with arithmetic and pairs
Define factorial. You can iterate over a pair to contain the current index and so far factorial
Given m and n, compute q and r satisfying m = q * n + r. If n is not 0 then r should be less than n.
hint repeated substraction, iterated for at most m times.
a list is a way to aggregate a sequence of elements given an aggregation function and an initial value.
An instance to show CLists as regular lists.
The empty list is that which when aggregated it will always produce the initial value
Adding an element to a list means that, when aggregating the list, the newly added
element will be aggregated with the result obtained by aggregating the remainder of the list
we can | module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
An instance to show as regular Booleans
instance Show CBool where
show b = "cBool " <> show (cIf b True False)
The boolean constant true always chooses the first alternative
cTrue :: CBool
cTrue = undefined
The boolean constant false always chooses the second alternative
cFalse :: CBool
cFalse = undefined
cBool :: Bool -> CBool
cBool True = cTrue
cBool False = cFalse
cNot :: CBool -> CBool
cNot = undefined
(&&:) :: CBool -> CBool -> CBool
(&&:) = undefined
infixr 3 &&:
(||:) :: CBool -> CBool -> CBool
(||:) = undefined
infixr 2 ||:
newtype CPair a b = CPair { cOn :: forall c . (a -> b -> c) -> c }
An instance to show CPairs as regular pairs .
instance (Show a, Show b) => Show (CPair a b) where
show p = "cPair " <> show (cOn p (,))
builds a pair out of two values as an object which , when given
cPair :: a -> b -> CPair a b
cPair = undefined
first projection uses the function selecting first component on a pair
cFst :: CPair a b -> a
cFst = undefined
second projection
cSnd :: CPair a b -> b
cSnd = undefined
newtype CNat = CNat { cFor :: forall t. (t -> t) -> t -> t }
instance Show CNat where
show n = show $ cFor n (1 +) (0 :: Integer)
c0 :: CNat
c0 = undefined
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat
c1 = undefined
- applies s one more time in addition to what n does
cS :: CNat -> CNat
cS = undefined
(+:) :: CNat -> CNat -> CNat
(+:) = undefined
infixl 6 +:
(*:) :: CNat -> CNat -> CNat
(*:) = \n m -> CNat $ cFor n . cFor m
infixl 7 *:
(^:) :: CNat -> CNat -> CNat
(^:) = \m n -> CNat $ cFor n (cFor m)
infixr 8 ^:
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
Predecessor ( evaluating to 0 for 0 ) can be defined iterating
over pairs , starting from an initial value ( 0 , 0 )
cPred :: CNat -> CNat
cPred = undefined
substraction from m n ( evaluating to 0 if m < n ) is repeated application
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
Transform a value into a CNat ( should yield c0 for nums < = 0 )
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
cFactorial :: CNat -> CNat
cFactorial = undefined
Define Fibonacci . You can iterate over a pair to contain two consecutive numbers in the sequence
cFibonacci :: CNat -> CNat
cFibonacci = undefined
cDivMod :: CNat -> CNat -> CPair CNat CNat
cDivMod = undefined
newtype CList a = CList { cFoldR :: forall b. (a -> b -> b) -> b -> b }
make CList an instance of Foldable
instance Foldable CList where
instance (Show a) => Show (CList a) where
show l = "cList " <> (show $ toList l)
cNil :: CList a
cNil = undefined
(.:) :: a -> CList a -> CList a
(.:) = undefined
cList :: [a] -> CList a
cList = undefined
churchNatList :: [Integer] -> Term
churchNatList = churchList . map churchNat
cNatList :: [Integer] -> CList CNat
cNatList = cList . map cNat
churchSum :: Term
churchSum = lam "l" (v "l" $$ churchPlus $$ church0)
cSum :: CList CNat -> CNat
since CList is an instance of Foldable ; otherwise : \l - > cFoldR l ( + ) 0
churchIsNil :: Term
churchIsNil = lam "l" (v "l" $$ lams ["x", "a"] churchFalse $$ churchTrue)
cIsNil :: CList a -> CBool
cIsNil = \l -> cFoldR l (\_ _ -> cFalse) cTrue
churchHead :: Term
churchHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cHead :: CList a -> a -> a
cHead = \l d -> cFoldR l (\x _ -> x) d
churchTail :: Term
churchTail = lam "l" (churchFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (churchPair $$ v "t" $$ (churchCons $$ v "x" $$ v "t"))
$$ (churchSnd $$ v "p"))
$$ (churchPair $$ churchNil $$ churchNil)
))
cTail :: CList a -> CList a
cTail = \l -> cFst $ cFoldR l (\x p -> (\t -> cPair t (x .: t)) (cSnd p)) (cPair cNil cNil)
cLength :: CList a -> CNat
cLength = \l -> cFoldR l (\_ n -> cS n) 0
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
divmod :: (Enum a, Num a, Ord b, Num b) => b -> b -> (a, b)
divmod m n = divmod' (0, 0)
where
divmod' (x, y)
| x' <= m = divmod' (x', succ y)
| otherwise = (y, m - x)
where x' = x + n
divmod' m n =
if n == 0 then (0, m)
else
Function.fix
(\f p ->
(\x' ->
if x' > 0 then f ((,) (succ (fst p)) x')
else if (<=) n (snd p) then ((,) (succ (fst p)) 0)
else p)
((-) (snd p) n))
(0, m)
churchDivMod' :: Term
churchDivMod' = lams ["m", "n"]
(churchIs0 $$ v "n"
$$ (churchPair $$ church0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(churchIs0 $$ v "x"
$$ (churchLte $$ v "n" $$ (churchSnd $$ v "p")
$$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ church0)
$$ v "p"
)
$$ (v "f" $$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ v "x"))
)
$$ (churchSub $$ (churchSnd $$ v "p") $$ v "n")
)
$$ (churchPair $$ church0 $$ v "m")
)
)
churchSudan :: Term
churchSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(churchIs0 $$ v "n"
$$ (churchPlus $$ v "x" $$ v "y")
$$ (churchIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (churchPred $$ v "n")
$$ v "fnpy"
$$ (churchPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (churchPred $$ v "y"))
)
)
))
churchAckermann :: Term
churchAckermann = fix $$ lam "A" (lams ["m", "n"]
(churchIs0 $$ v "m"
$$ (churchS $$ v "n")
$$ (churchIs0 $$ v "n"
$$ (v "A" $$ (churchPred $$ v "m") $$ church1)
$$ (v "A" $$ (churchPred $$ v "m")
$$ (v "A" $$ v "m" $$ (churchPred $$ v "n")))
)
)
)
|
f19aa7e147ef4d96a1dd8c2d3bf6c1f9221404eb29c696d8e5c5b8eaf5773489 | goodell/cppmem | flow.ml | Js_of_ocaml compiler
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
let debug = Util.debug "flow"
let disable_optcall = Util.disabled "optcall"
let times = Util.debug "times"
open Code
(****)
let add_var = VarISet.add
type def = Phi of VarSet.t | Expr of Code.expr | Param
let undefined = Phi VarSet.empty
let is_undefined d = match d with Phi s -> VarSet.is_empty s | _ -> false
let add_expr_def defs x e =
let idx = Var.idx x in
assert (is_undefined defs.(idx));
defs.(idx) <- Expr e
let add_assign_def vars defs x y =
add_var vars x;
let idx = Var.idx x in
match defs.(idx) with
Expr _ | Param ->
assert false
| Phi s ->
defs.(idx) <- Phi (VarSet.add y s)
let add_param_def vars defs x =
add_var vars x;
let idx = Var.idx x in
assert (is_undefined defs.(idx) || defs.(idx) = Param);
defs.(idx) <- Param
(* x depends on y *)
let add_dep deps x y =
let idx = Var.idx y in
deps.(idx) <- VarSet.add x deps.(idx)
let rec arg_deps vars deps defs params args =
match params, args with
x :: params, y :: args ->
add_dep deps x y;
add_assign_def vars defs x y;
arg_deps vars deps defs params args
| _ ->
()
let cont_deps blocks vars deps defs (pc, args) =
let block = AddrMap.find pc blocks in
arg_deps vars deps defs block.params args
let expr_deps blocks vars deps defs x e =
match e with
Const _ | Constant _ | Apply _ | Prim _ ->
()
| Closure (l, cont) ->
List.iter (fun x -> add_param_def vars defs x) l;
cont_deps blocks vars deps defs cont
| Block (_, a) ->
Array.iter (fun y -> add_dep deps x y) a
| Field (y, _) ->
add_dep deps x y
let program_deps (_, blocks, _) =
let nv = Var.count () in
let vars = VarISet.empty () in
let deps = Array.make nv VarSet.empty in
let defs = Array.make nv undefined in
AddrMap.iter
(fun pc block ->
List.iter
(fun i ->
match i with
Let (x, e) ->
add_var vars x;
add_expr_def defs x e;
expr_deps blocks vars deps defs x e
| Set_field _ | Array_set _ | Offset_ref _ ->
())
block.body;
Util.opt_iter
(fun (x, cont) ->
add_param_def vars defs x;
cont_deps blocks vars deps defs cont)
block.handler;
match block.branch with
Return _ | Raise _ | Stop ->
()
| Branch cont | Poptrap cont ->
cont_deps blocks vars deps defs cont
| Cond (_, _, cont1, cont2) ->
cont_deps blocks vars deps defs cont1;
cont_deps blocks vars deps defs cont2
| Switch (_, a1, a2) ->
Array.iter (fun cont -> cont_deps blocks vars deps defs cont) a1;
Array.iter (fun cont -> cont_deps blocks vars deps defs cont) a2
| Pushtrap (cont, _, _, _) ->
cont_deps blocks vars deps defs cont)
blocks;
(vars, deps, defs)
let var_set_lift f s =
VarSet.fold (fun y s -> VarSet.union (f y) s) s VarSet.empty
let propagate1 deps defs st x =
match defs.(Var.idx x) with
Param ->
VarSet.singleton x
| Phi s ->
var_set_lift (fun y -> VarTbl.get st y) s
| Expr e ->
match e with
Const _ | Constant _ | Apply _ | Prim _
| Closure _ | Block _ ->
VarSet.singleton x
| Field (y, n) ->
var_set_lift
(fun z ->
match defs.(Var.idx z) with
Expr (Block (_, a)) when n < Array.length a ->
let t = a.(n) in
add_dep deps x t;
VarTbl.get st t
| Phi _ | Param | Expr _ ->
VarSet.empty)
(VarTbl.get st y)
module G = Dgraph.Make_Imperative (Var) (VarISet) (VarTbl)
module Domain1 = struct
type t = VarSet.t
let equal = VarSet.equal
let bot = VarSet.empty
end
module Solver1 = G.Solver (Domain1)
let solver1 vars deps defs =
let g =
{ G.domain = vars;
G.iter_children = fun f x -> VarSet.iter f deps.(Var.idx x) }
in
Solver1.f () g (propagate1 deps defs)
(****)
type mutability_state =
{ defs : def array;
known_origins : Code.VarSet.t Code.VarTbl.t;
may_escape : bool array;
possibly_mutable : bool array }
let rec block_escape st x =
VarSet.iter
(fun y ->
let idx = Var.idx y in
if not st.may_escape.(idx) then begin
st.may_escape.(idx) <- true;
st.possibly_mutable.(idx) <- true;
match st.defs.(Var.idx y) with
Expr (Block (_, l)) -> Array.iter (fun z -> block_escape st z) l
| _ -> ()
end)
(VarTbl.get st.known_origins x)
let expr_escape st x e =
match e with
Const _ | Constant _ | Closure _ | Block _ | Field _ ->
()
| Apply (_, l, _) ->
List.iter (fun x -> block_escape st x) l
| Prim (_, l) ->
List.iter
(fun x ->
match x with
Pv x -> block_escape st x
| Pc _ -> ())
l
let program_escape defs known_origins (_, blocks, _) =
let nv = Var.count () in
let may_escape = Array.make nv false in
let possibly_mutable = Array.make nv false in
let st =
{ defs = defs;
known_origins = known_origins;
may_escape = may_escape;
possibly_mutable = possibly_mutable }
in
AddrMap.iter
(fun pc block ->
List.iter
(fun i ->
match i with
Let (x, e) ->
expr_escape st x e
| Set_field (x, _, y) | Array_set (x, _, y) ->
VarSet.iter (fun y -> possibly_mutable.(Var.idx y) <- true)
(VarTbl.get known_origins x);
block_escape st y
| Offset_ref (x, _) ->
VarSet.iter (fun y -> possibly_mutable.(Var.idx y) <- true)
(VarTbl.get known_origins x))
block.body;
match block.branch with
Return x | Raise x ->
block_escape st x
| Stop | Branch _ | Cond _ | Switch _ | Pushtrap _ | Poptrap _ ->
())
blocks;
possibly_mutable
(****)
type approx = Known | Maybe_unknown
let a_max u v =
match u, v with
Known, Known -> Known
| _ -> Maybe_unknown
let approx_lift f s = VarSet.fold (fun y u -> a_max (f y) u) s Known
let propagate2 defs known_origins possibly_mutable st x =
match defs.(Var.idx x) with
Param ->
false
| Phi s ->
VarSet.exists (fun y -> VarTbl.get st y) s
| Expr e ->
match e with
Const _ | Constant _ | Closure _ | Apply _ | Prim _ | Block _ ->
false
| Field (y, n) ->
VarTbl.get st y
||
VarSet.exists
(fun z ->
match defs.(Var.idx z) with
Expr (Block (_, a)) ->
n >= Array.length a
||
possibly_mutable.(Var.idx z)
||
VarTbl.get st a.(n)
| Phi _ | Param | Expr _ ->
true)
(VarTbl.get known_origins y)
module Domain2 = struct
type t = bool
let equal (u : bool) v = u = v
let bot = false
end
module Solver2 = G.Solver (Domain2)
let solver2 vars deps defs known_origins possibly_mutable =
let g =
{ G.domain = vars;
G.iter_children = fun f x -> VarSet.iter f deps.(Var.idx x) }
in
Solver2.f () g (propagate2 defs known_origins possibly_mutable)
(****)
let get_approx (defs, known_origins, maybe_unknown) f top join x =
let s = VarTbl.get known_origins x in
if VarTbl.get maybe_unknown x then top else
match VarSet.cardinal s with
0 -> top
| 1 -> f (VarSet.choose s)
| _ -> VarSet.fold (fun x u -> join (f x) u) s (f (VarSet.choose s))
let the_def_of ((defs, _, _) as info) x =
get_approx info
(fun x -> match defs.(Var.idx x) with Expr e -> Some e | _ -> None)
None (fun u v -> None) x
let the_int ((defs, _, _) as info) x =
get_approx info
(fun x -> match defs.(Var.idx x) with Expr (Const i) -> Some i | _ -> None)
None
(fun u v -> match u, v with Some i, Some j when i = j -> u | _ -> None)
x
let function_cardinality ((defs, _, _) as info) x =
get_approx info
(fun x ->
match defs.(Var.idx x) with
Expr (Closure (l, _)) -> Some (List.length l)
| _ -> None)
None
(fun u v -> match u, v with Some n, Some m when n = m -> u | _ -> None)
x
let specialize_instr info i =
match i with
Let (x, Apply (f, l, _)) when not (disable_optcall ()) ->
Let (x, Apply (f, l, function_cardinality info f))
(*FIX this should be moved to a different file (javascript specific) *)
| Let (x, Prim (Extern "caml_js_var", [Pv y])) ->
begin match the_def_of info y with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_var", [Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_const", [Pv y])) ->
begin match the_def_of info y with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_const", [Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_call", [Pv f; Pv o; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_call",
Pv f :: Pv o :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_fun_call", [Pv f; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_fun_call",
Pv f :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_meth_call", [Pv o; Pv m; Pv a])) ->
begin match the_def_of info m with
Some (Constant (String _ as m)) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_meth_call",
Pv o :: Pc m :: Array.to_list a))
| _ ->
i
end
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_new", [Pv c; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_new",
Pv c :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_get", [Pv o; Pv f])) ->
begin match the_def_of info f with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_get", [Pv o; Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_set", [Pv o; Pv f; Pv v])) ->
begin match the_def_of info f with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_set", [Pv o; Pc c; Pv v]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_mul", [Pv y; Pv z])) ->
begin match the_int info y, the_int info z with
Some j, _ | _, Some j when abs j < 0x200000 ->
Let (x, Prim (Extern "%direct_int_mul", [Pv y; Pv z]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_div", [Pv y; Pv z])) ->
begin match the_int info z with
Some j when j <> 0 ->
Let (x, Prim (Extern "%direct_int_div", [Pv y; Pv z]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_mod", [Pv y; Pv z])) ->
begin match the_int info z with
Some j when j <> 0 ->
Let (x, Prim (Extern "%direct_int_mod", [Pv y; Pv z]))
| _ ->
i
end
| _ ->
i
let specialize_instrs info (pc, blocks, free_pc) =
let blocks =
AddrMap.map
(fun block ->
{ block with Code.body =
List.map (fun i -> specialize_instr info i) block.body })
blocks
in
(pc, blocks, free_pc)
(****)
(*XXX Maybe we could iterate? *)
let direct_approx defs known_origins maybe_unknown possibly_mutable x =
match defs.(Var.idx x) with
Expr (Field (y, n)) ->
get_approx (defs, known_origins, maybe_unknown)
(fun z ->
if possibly_mutable.(Var.idx z) then None else
match defs.(Var.idx z) with
Expr (Block (_, a)) when n < Array.length a ->
Some a.(n)
| _ ->
None)
None
(fun u v ->
match u, v with
Some n, Some m when Var.compare n m = 0 -> u
| _ -> None)
y
| _ ->
None
let build_subst defs vars known_origins maybe_unknown possibly_mutable =
let nv = Var.count () in
let subst = Array.make nv None in
VarISet.iter
(fun x ->
let u = VarTbl.get maybe_unknown x in
if not u then begin
let s = VarTbl.get known_origins x in
if VarSet.cardinal s = 1 then
subst.(Var.idx x) <- Some (VarSet.choose s)
end;
if subst.(Var.idx x) = None then
subst.(Var.idx x) <-
direct_approx defs known_origins maybe_unknown possibly_mutable x)
vars;
subst
(****)
let f ((pc, blocks, free_pc) as p) =
let t = Util.Timer.make () in
let t1 = Util.Timer.make () in
let (vars, deps, defs) = program_deps p in
if times () then Format.eprintf " flow analysis 1: %a@." Util.Timer.print t1;
let t2 = Util.Timer.make () in
let known_origins = solver1 vars deps defs in
if times () then Format.eprintf " flow analysis 2: %a@." Util.Timer.print t2;
let t3 = Util.Timer.make () in
let possibly_mutable = program_escape defs known_origins p in
if times () then Format.eprintf " flow analysis 3: %a@." Util.Timer.print t3;
let t4 = Util.Timer.make () in
let maybe_unknown = solver2 vars deps defs known_origins possibly_mutable in
if times () then Format.eprintf " flow analysis 4: %a@." Util.Timer.print t4;
if debug () then begin
VarISet.iter
(fun x ->
let s = VarTbl.get known_origins x in
& & VarSet.choose s < > x
Format.eprintf "%a: {%a} / %s@."
Var.print x Code.print_var_list (VarSet.elements s)
(if VarTbl.get maybe_unknown x then "any" else "known")
end)
vars
end;
let t5 = Util.Timer.make () in
let p = specialize_instrs (defs, known_origins, maybe_unknown) p in
let s = build_subst defs vars known_origins maybe_unknown possibly_mutable in
let p = Subst.program (Subst.from_array s) p in
if times () then Format.eprintf " flow analysis 5: %a@." Util.Timer.print t5;
if times () then Format.eprintf " flow analysis: %a@." Util.Timer.print t;
p
| null | https://raw.githubusercontent.com/goodell/cppmem/eb3ce19b607a5d6ec81138cd8cacd236f9388e87/js_of_ocaml-1.2/compiler/flow.ml | ocaml | **
x depends on y
**
**
**
FIX this should be moved to a different file (javascript specific)
**
XXX Maybe we could iterate?
** | Js_of_ocaml compiler
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
let debug = Util.debug "flow"
let disable_optcall = Util.disabled "optcall"
let times = Util.debug "times"
open Code
let add_var = VarISet.add
type def = Phi of VarSet.t | Expr of Code.expr | Param
let undefined = Phi VarSet.empty
let is_undefined d = match d with Phi s -> VarSet.is_empty s | _ -> false
let add_expr_def defs x e =
let idx = Var.idx x in
assert (is_undefined defs.(idx));
defs.(idx) <- Expr e
let add_assign_def vars defs x y =
add_var vars x;
let idx = Var.idx x in
match defs.(idx) with
Expr _ | Param ->
assert false
| Phi s ->
defs.(idx) <- Phi (VarSet.add y s)
let add_param_def vars defs x =
add_var vars x;
let idx = Var.idx x in
assert (is_undefined defs.(idx) || defs.(idx) = Param);
defs.(idx) <- Param
let add_dep deps x y =
let idx = Var.idx y in
deps.(idx) <- VarSet.add x deps.(idx)
let rec arg_deps vars deps defs params args =
match params, args with
x :: params, y :: args ->
add_dep deps x y;
add_assign_def vars defs x y;
arg_deps vars deps defs params args
| _ ->
()
let cont_deps blocks vars deps defs (pc, args) =
let block = AddrMap.find pc blocks in
arg_deps vars deps defs block.params args
let expr_deps blocks vars deps defs x e =
match e with
Const _ | Constant _ | Apply _ | Prim _ ->
()
| Closure (l, cont) ->
List.iter (fun x -> add_param_def vars defs x) l;
cont_deps blocks vars deps defs cont
| Block (_, a) ->
Array.iter (fun y -> add_dep deps x y) a
| Field (y, _) ->
add_dep deps x y
let program_deps (_, blocks, _) =
let nv = Var.count () in
let vars = VarISet.empty () in
let deps = Array.make nv VarSet.empty in
let defs = Array.make nv undefined in
AddrMap.iter
(fun pc block ->
List.iter
(fun i ->
match i with
Let (x, e) ->
add_var vars x;
add_expr_def defs x e;
expr_deps blocks vars deps defs x e
| Set_field _ | Array_set _ | Offset_ref _ ->
())
block.body;
Util.opt_iter
(fun (x, cont) ->
add_param_def vars defs x;
cont_deps blocks vars deps defs cont)
block.handler;
match block.branch with
Return _ | Raise _ | Stop ->
()
| Branch cont | Poptrap cont ->
cont_deps blocks vars deps defs cont
| Cond (_, _, cont1, cont2) ->
cont_deps blocks vars deps defs cont1;
cont_deps blocks vars deps defs cont2
| Switch (_, a1, a2) ->
Array.iter (fun cont -> cont_deps blocks vars deps defs cont) a1;
Array.iter (fun cont -> cont_deps blocks vars deps defs cont) a2
| Pushtrap (cont, _, _, _) ->
cont_deps blocks vars deps defs cont)
blocks;
(vars, deps, defs)
let var_set_lift f s =
VarSet.fold (fun y s -> VarSet.union (f y) s) s VarSet.empty
let propagate1 deps defs st x =
match defs.(Var.idx x) with
Param ->
VarSet.singleton x
| Phi s ->
var_set_lift (fun y -> VarTbl.get st y) s
| Expr e ->
match e with
Const _ | Constant _ | Apply _ | Prim _
| Closure _ | Block _ ->
VarSet.singleton x
| Field (y, n) ->
var_set_lift
(fun z ->
match defs.(Var.idx z) with
Expr (Block (_, a)) when n < Array.length a ->
let t = a.(n) in
add_dep deps x t;
VarTbl.get st t
| Phi _ | Param | Expr _ ->
VarSet.empty)
(VarTbl.get st y)
module G = Dgraph.Make_Imperative (Var) (VarISet) (VarTbl)
module Domain1 = struct
type t = VarSet.t
let equal = VarSet.equal
let bot = VarSet.empty
end
module Solver1 = G.Solver (Domain1)
let solver1 vars deps defs =
let g =
{ G.domain = vars;
G.iter_children = fun f x -> VarSet.iter f deps.(Var.idx x) }
in
Solver1.f () g (propagate1 deps defs)
type mutability_state =
{ defs : def array;
known_origins : Code.VarSet.t Code.VarTbl.t;
may_escape : bool array;
possibly_mutable : bool array }
let rec block_escape st x =
VarSet.iter
(fun y ->
let idx = Var.idx y in
if not st.may_escape.(idx) then begin
st.may_escape.(idx) <- true;
st.possibly_mutable.(idx) <- true;
match st.defs.(Var.idx y) with
Expr (Block (_, l)) -> Array.iter (fun z -> block_escape st z) l
| _ -> ()
end)
(VarTbl.get st.known_origins x)
let expr_escape st x e =
match e with
Const _ | Constant _ | Closure _ | Block _ | Field _ ->
()
| Apply (_, l, _) ->
List.iter (fun x -> block_escape st x) l
| Prim (_, l) ->
List.iter
(fun x ->
match x with
Pv x -> block_escape st x
| Pc _ -> ())
l
let program_escape defs known_origins (_, blocks, _) =
let nv = Var.count () in
let may_escape = Array.make nv false in
let possibly_mutable = Array.make nv false in
let st =
{ defs = defs;
known_origins = known_origins;
may_escape = may_escape;
possibly_mutable = possibly_mutable }
in
AddrMap.iter
(fun pc block ->
List.iter
(fun i ->
match i with
Let (x, e) ->
expr_escape st x e
| Set_field (x, _, y) | Array_set (x, _, y) ->
VarSet.iter (fun y -> possibly_mutable.(Var.idx y) <- true)
(VarTbl.get known_origins x);
block_escape st y
| Offset_ref (x, _) ->
VarSet.iter (fun y -> possibly_mutable.(Var.idx y) <- true)
(VarTbl.get known_origins x))
block.body;
match block.branch with
Return x | Raise x ->
block_escape st x
| Stop | Branch _ | Cond _ | Switch _ | Pushtrap _ | Poptrap _ ->
())
blocks;
possibly_mutable
type approx = Known | Maybe_unknown
let a_max u v =
match u, v with
Known, Known -> Known
| _ -> Maybe_unknown
let approx_lift f s = VarSet.fold (fun y u -> a_max (f y) u) s Known
let propagate2 defs known_origins possibly_mutable st x =
match defs.(Var.idx x) with
Param ->
false
| Phi s ->
VarSet.exists (fun y -> VarTbl.get st y) s
| Expr e ->
match e with
Const _ | Constant _ | Closure _ | Apply _ | Prim _ | Block _ ->
false
| Field (y, n) ->
VarTbl.get st y
||
VarSet.exists
(fun z ->
match defs.(Var.idx z) with
Expr (Block (_, a)) ->
n >= Array.length a
||
possibly_mutable.(Var.idx z)
||
VarTbl.get st a.(n)
| Phi _ | Param | Expr _ ->
true)
(VarTbl.get known_origins y)
module Domain2 = struct
type t = bool
let equal (u : bool) v = u = v
let bot = false
end
module Solver2 = G.Solver (Domain2)
let solver2 vars deps defs known_origins possibly_mutable =
let g =
{ G.domain = vars;
G.iter_children = fun f x -> VarSet.iter f deps.(Var.idx x) }
in
Solver2.f () g (propagate2 defs known_origins possibly_mutable)
let get_approx (defs, known_origins, maybe_unknown) f top join x =
let s = VarTbl.get known_origins x in
if VarTbl.get maybe_unknown x then top else
match VarSet.cardinal s with
0 -> top
| 1 -> f (VarSet.choose s)
| _ -> VarSet.fold (fun x u -> join (f x) u) s (f (VarSet.choose s))
let the_def_of ((defs, _, _) as info) x =
get_approx info
(fun x -> match defs.(Var.idx x) with Expr e -> Some e | _ -> None)
None (fun u v -> None) x
let the_int ((defs, _, _) as info) x =
get_approx info
(fun x -> match defs.(Var.idx x) with Expr (Const i) -> Some i | _ -> None)
None
(fun u v -> match u, v with Some i, Some j when i = j -> u | _ -> None)
x
let function_cardinality ((defs, _, _) as info) x =
get_approx info
(fun x ->
match defs.(Var.idx x) with
Expr (Closure (l, _)) -> Some (List.length l)
| _ -> None)
None
(fun u v -> match u, v with Some n, Some m when n = m -> u | _ -> None)
x
let specialize_instr info i =
match i with
Let (x, Apply (f, l, _)) when not (disable_optcall ()) ->
Let (x, Apply (f, l, function_cardinality info f))
| Let (x, Prim (Extern "caml_js_var", [Pv y])) ->
begin match the_def_of info y with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_var", [Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_const", [Pv y])) ->
begin match the_def_of info y with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_const", [Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_call", [Pv f; Pv o; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_call",
Pv f :: Pv o :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_fun_call", [Pv f; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_fun_call",
Pv f :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_meth_call", [Pv o; Pv m; Pv a])) ->
begin match the_def_of info m with
Some (Constant (String _ as m)) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_meth_call",
Pv o :: Pc m :: Array.to_list a))
| _ ->
i
end
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_new", [Pv c; Pv a])) ->
begin match the_def_of info a with
Some (Block (_, a)) ->
let a = Array.map (fun x -> Pv x) a in
Let (x, Prim (Extern "caml_js_opt_new",
Pv c :: Array.to_list a))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_get", [Pv o; Pv f])) ->
begin match the_def_of info f with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_get", [Pv o; Pc c]))
| _ ->
i
end
| Let (x, Prim (Extern "caml_js_set", [Pv o; Pv f; Pv v])) ->
begin match the_def_of info f with
Some (Constant (String _ as c)) ->
Let (x, Prim (Extern "caml_js_set", [Pv o; Pc c; Pv v]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_mul", [Pv y; Pv z])) ->
begin match the_int info y, the_int info z with
Some j, _ | _, Some j when abs j < 0x200000 ->
Let (x, Prim (Extern "%direct_int_mul", [Pv y; Pv z]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_div", [Pv y; Pv z])) ->
begin match the_int info z with
Some j when j <> 0 ->
Let (x, Prim (Extern "%direct_int_div", [Pv y; Pv z]))
| _ ->
i
end
| Let (x, Prim (Extern "%int_mod", [Pv y; Pv z])) ->
begin match the_int info z with
Some j when j <> 0 ->
Let (x, Prim (Extern "%direct_int_mod", [Pv y; Pv z]))
| _ ->
i
end
| _ ->
i
let specialize_instrs info (pc, blocks, free_pc) =
let blocks =
AddrMap.map
(fun block ->
{ block with Code.body =
List.map (fun i -> specialize_instr info i) block.body })
blocks
in
(pc, blocks, free_pc)
let direct_approx defs known_origins maybe_unknown possibly_mutable x =
match defs.(Var.idx x) with
Expr (Field (y, n)) ->
get_approx (defs, known_origins, maybe_unknown)
(fun z ->
if possibly_mutable.(Var.idx z) then None else
match defs.(Var.idx z) with
Expr (Block (_, a)) when n < Array.length a ->
Some a.(n)
| _ ->
None)
None
(fun u v ->
match u, v with
Some n, Some m when Var.compare n m = 0 -> u
| _ -> None)
y
| _ ->
None
let build_subst defs vars known_origins maybe_unknown possibly_mutable =
let nv = Var.count () in
let subst = Array.make nv None in
VarISet.iter
(fun x ->
let u = VarTbl.get maybe_unknown x in
if not u then begin
let s = VarTbl.get known_origins x in
if VarSet.cardinal s = 1 then
subst.(Var.idx x) <- Some (VarSet.choose s)
end;
if subst.(Var.idx x) = None then
subst.(Var.idx x) <-
direct_approx defs known_origins maybe_unknown possibly_mutable x)
vars;
subst
let f ((pc, blocks, free_pc) as p) =
let t = Util.Timer.make () in
let t1 = Util.Timer.make () in
let (vars, deps, defs) = program_deps p in
if times () then Format.eprintf " flow analysis 1: %a@." Util.Timer.print t1;
let t2 = Util.Timer.make () in
let known_origins = solver1 vars deps defs in
if times () then Format.eprintf " flow analysis 2: %a@." Util.Timer.print t2;
let t3 = Util.Timer.make () in
let possibly_mutable = program_escape defs known_origins p in
if times () then Format.eprintf " flow analysis 3: %a@." Util.Timer.print t3;
let t4 = Util.Timer.make () in
let maybe_unknown = solver2 vars deps defs known_origins possibly_mutable in
if times () then Format.eprintf " flow analysis 4: %a@." Util.Timer.print t4;
if debug () then begin
VarISet.iter
(fun x ->
let s = VarTbl.get known_origins x in
& & VarSet.choose s < > x
Format.eprintf "%a: {%a} / %s@."
Var.print x Code.print_var_list (VarSet.elements s)
(if VarTbl.get maybe_unknown x then "any" else "known")
end)
vars
end;
let t5 = Util.Timer.make () in
let p = specialize_instrs (defs, known_origins, maybe_unknown) p in
let s = build_subst defs vars known_origins maybe_unknown possibly_mutable in
let p = Subst.program (Subst.from_array s) p in
if times () then Format.eprintf " flow analysis 5: %a@." Util.Timer.print t5;
if times () then Format.eprintf " flow analysis: %a@." Util.Timer.print t;
p
|
03f1d95bf4a84445c1114401bda7644399341a68bc1cb830e08b095d044dc44d | futurice/haskell-futurice-prelude | Setup.hs | import Distribution.Extra.Doctest
main = defaultMainWithDoctests "doctests"
| null | https://raw.githubusercontent.com/futurice/haskell-futurice-prelude/56192d63bea76d06cb456c5ce4c776cf41a5cd7e/Setup.hs | haskell | import Distribution.Extra.Doctest
main = defaultMainWithDoctests "doctests"
| |
82db7289e9625c4ed6e0964ab834834fb92b9baa95a9e988e3970b525087af8b | wfnuser/sicp-solutions | e2-40.scm |
(define (enumerate-interval start end)
(if (<= start end)
(cons start (enumerate-interval (+ start 1) end))
'()
)
)
(define (accumulate op initial seq)
(if (null? seq)
initial
(op (car seq) (accumulate op initial (cdr seq)))
)
)
(define (flat-map proc seq)
(accumulate append '() (map proc seq))
)
(define (unique-pairs n)
(flat-map (lambda (i)
(map (lambda (j) (list j i)) (enumerate-interval 1 i))
) (enumerate-interval 1 n))
)
(define (prime? num)
(define (is-prime? num d)
(if (<= d 1)
#t
(if (= (modulo num d) 0)
#f
(is-prime? num (- d 1))
)
)
)
(is-prime? num (- num 1))
)
(define (prime-sum? pair)
(prime? (+ (car pair) (cadr pair))))
(define (prime-sum-pairs n)
(filter prime-sum? (unique-pairs n))
)
(prime-sum-pairs 10)
| null | https://raw.githubusercontent.com/wfnuser/sicp-solutions/2c94b28d8ee004dcbfe7311f866e5a346ee01d12/ch2/e2-40.scm | scheme |
(define (enumerate-interval start end)
(if (<= start end)
(cons start (enumerate-interval (+ start 1) end))
'()
)
)
(define (accumulate op initial seq)
(if (null? seq)
initial
(op (car seq) (accumulate op initial (cdr seq)))
)
)
(define (flat-map proc seq)
(accumulate append '() (map proc seq))
)
(define (unique-pairs n)
(flat-map (lambda (i)
(map (lambda (j) (list j i)) (enumerate-interval 1 i))
) (enumerate-interval 1 n))
)
(define (prime? num)
(define (is-prime? num d)
(if (<= d 1)
#t
(if (= (modulo num d) 0)
#f
(is-prime? num (- d 1))
)
)
)
(is-prime? num (- num 1))
)
(define (prime-sum? pair)
(prime? (+ (car pair) (cadr pair))))
(define (prime-sum-pairs n)
(filter prime-sum? (unique-pairs n))
)
(prime-sum-pairs 10)
| |
8c6607e759e8e00b0afdb50ddaef6602585703c291c40d42091f9c4f540a3f8c | dwayne/haskell-programming | Main.hs | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.IO
import System.Random (randomRIO)
type WordList = [String]
allWords :: IO WordList
allWords = do
dict <- readFile "data/dict.txt"
return (lines dict)
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
gameWords :: IO WordList
gameWords = do
aw <- allWords
return (filter gameLength aw)
where
gameLength w =
let
l = length (w :: String)
in
l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord wl = do
randomIndex <- randomRIO (0, length wl - 1)
return $ wl !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
freshPuzzle :: String -> Puzzle
freshPuzzle w = Puzzle w (map (const Nothing) w) []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle word _ _) c = elem c word
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessed) c = elem c guessed
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just c) = c
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar s) c =
Puzzle word newFilledInSoFar (c : s)
where
zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar = zipWith (zipper c) word filledInSoFar
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word, filling in the word accordingly."
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character wasn't in the word, try again."
return (fillInCharacter puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed) =
if (length guessed) > 7
then do
putStrLn "You lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else
return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _) =
if all isJust filledInSoFar
then do
putStrLn "You win!"
exitSuccess
else
return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameOver puzzle
gameWin puzzle
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character."
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
| null | https://raw.githubusercontent.com/dwayne/haskell-programming/d08679e76cfd39985fa2ee3cd89d55c9aedfb531/ch13/hangman/src/Main.hs | haskell | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.IO
import System.Random (randomRIO)
type WordList = [String]
allWords :: IO WordList
allWords = do
dict <- readFile "data/dict.txt"
return (lines dict)
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
gameWords :: IO WordList
gameWords = do
aw <- allWords
return (filter gameLength aw)
where
gameLength w =
let
l = length (w :: String)
in
l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord wl = do
randomIndex <- randomRIO (0, length wl - 1)
return $ wl !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
freshPuzzle :: String -> Puzzle
freshPuzzle w = Puzzle w (map (const Nothing) w) []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle word _ _) c = elem c word
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessed) c = elem c guessed
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just c) = c
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar s) c =
Puzzle word newFilledInSoFar (c : s)
where
zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar = zipWith (zipper c) word filledInSoFar
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word, filling in the word accordingly."
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character wasn't in the word, try again."
return (fillInCharacter puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed) =
if (length guessed) > 7
then do
putStrLn "You lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else
return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _) =
if all isJust filledInSoFar
then do
putStrLn "You win!"
exitSuccess
else
return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameOver puzzle
gameWin puzzle
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character."
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
| |
d4856638a6c800548a02ce2ff050534b9df326345f2304c50bb51aae0dec69db | SimulaVR/godot-haskell | VisualScriptVariableGet.hs | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.VisualScriptVariableGet
(Godot.Core.VisualScriptVariableGet.get_variable,
Godot.Core.VisualScriptVariableGet.set_variable)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.VisualScriptNode()
instance NodeProperty VisualScriptVariableGet "var_name"
GodotString
'False
where
nodeProperty
= (get_variable, wrapDroppingSetter set_variable, Nothing)
# NOINLINE bindVisualScriptVariableGet_get_variable #
bindVisualScriptVariableGet_get_variable :: MethodBind
bindVisualScriptVariableGet_get_variable
= unsafePerformIO $
withCString "VisualScriptVariableGet" $
\ clsNamePtr ->
withCString "get_variable" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_variable ::
(VisualScriptVariableGet :< cls, Object :< cls) =>
cls -> IO GodotString
get_variable cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualScriptVariableGet_get_variable
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualScriptVariableGet "get_variable" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualScriptVariableGet.get_variable
# NOINLINE bindVisualScriptVariableGet_set_variable #
bindVisualScriptVariableGet_set_variable :: MethodBind
bindVisualScriptVariableGet_set_variable
= unsafePerformIO $
withCString "VisualScriptVariableGet" $
\ clsNamePtr ->
withCString "set_variable" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
set_variable ::
(VisualScriptVariableGet :< cls, Object :< cls) =>
cls -> GodotString -> IO ()
set_variable cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualScriptVariableGet_set_variable
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualScriptVariableGet "set_variable"
'[GodotString]
(IO ())
where
nodeMethod = Godot.Core.VisualScriptVariableGet.set_variable | null | https://raw.githubusercontent.com/SimulaVR/godot-haskell/e8f2c45f1b9cc2f0586ebdc9ec6002c8c2d384ae/src/Godot/Core/VisualScriptVariableGet.hs | haskell | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.VisualScriptVariableGet
(Godot.Core.VisualScriptVariableGet.get_variable,
Godot.Core.VisualScriptVariableGet.set_variable)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.VisualScriptNode()
instance NodeProperty VisualScriptVariableGet "var_name"
GodotString
'False
where
nodeProperty
= (get_variable, wrapDroppingSetter set_variable, Nothing)
# NOINLINE bindVisualScriptVariableGet_get_variable #
bindVisualScriptVariableGet_get_variable :: MethodBind
bindVisualScriptVariableGet_get_variable
= unsafePerformIO $
withCString "VisualScriptVariableGet" $
\ clsNamePtr ->
withCString "get_variable" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_variable ::
(VisualScriptVariableGet :< cls, Object :< cls) =>
cls -> IO GodotString
get_variable cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualScriptVariableGet_get_variable
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualScriptVariableGet "get_variable" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualScriptVariableGet.get_variable
# NOINLINE bindVisualScriptVariableGet_set_variable #
bindVisualScriptVariableGet_set_variable :: MethodBind
bindVisualScriptVariableGet_set_variable
= unsafePerformIO $
withCString "VisualScriptVariableGet" $
\ clsNamePtr ->
withCString "set_variable" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
set_variable ::
(VisualScriptVariableGet :< cls, Object :< cls) =>
cls -> GodotString -> IO ()
set_variable cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualScriptVariableGet_set_variable
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualScriptVariableGet "set_variable"
'[GodotString]
(IO ())
where
nodeMethod = Godot.Core.VisualScriptVariableGet.set_variable | |
b9e3594d72ea6403133588b29652ddd1f6511f08d3e92de16e470db9bd854215 | YoshikuniJujo/funpaala | guessNumber0.hs | import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
import System.Random (randomRIO)
getInteger :: IO Integer
getInteger = fromMaybe 0 . readMaybe <$> getLine
doWhile :: Monad m => m (Maybe a) -> m [a]
doWhile m = do
mx <- m
case mx of
Just x -> (x :) <$> doWhile m
Nothing -> return []
main :: IO ()
main = do
putStrLn "Guess the Number (1 - 10)"
n <- randomRIO (1, 10)
_ <- doWhile $ do
g <- getInteger
case g `compare` n of
EQ -> do
putStrLn "You win!"
return Nothing
LT -> do
putStrLn $ "Your guess, " ++ show g ++
", is too low."
return $ Just ()
GT -> do
putStrLn $ "Your guess, " ++ show g ++
", is too high."
return $ Just ()
return ()
| null | https://raw.githubusercontent.com/YoshikuniJujo/funpaala/5366130826da0e6b1180992dfff94c4a634cda99/samples/39_learn_io/guessNumber0.hs | haskell | import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
import System.Random (randomRIO)
getInteger :: IO Integer
getInteger = fromMaybe 0 . readMaybe <$> getLine
doWhile :: Monad m => m (Maybe a) -> m [a]
doWhile m = do
mx <- m
case mx of
Just x -> (x :) <$> doWhile m
Nothing -> return []
main :: IO ()
main = do
putStrLn "Guess the Number (1 - 10)"
n <- randomRIO (1, 10)
_ <- doWhile $ do
g <- getInteger
case g `compare` n of
EQ -> do
putStrLn "You win!"
return Nothing
LT -> do
putStrLn $ "Your guess, " ++ show g ++
", is too low."
return $ Just ()
GT -> do
putStrLn $ "Your guess, " ++ show g ++
", is too high."
return $ Just ()
return ()
| |
5ddd03526d83f7f1ecf7fb68fb5b38d93baa060272cefeb939d727144b8cc794 | inaka/katana-code | ktn_io_string.erl | -module(ktn_io_string).
-export([new/1]).
-export([start_link/1, init/1, loop/1, skip/2, skip/3]).
-type state() :: #{buffer := string(), original := string()}.
-hank([{unnecessary_function_arguments, [{skip, 3}]}]).
%%------------------------------------------------------------------------------
%% API
%%------------------------------------------------------------------------------
-spec new(string() | binary()) -> pid().
new(Str) when is_binary(Str) ->
new(binary_to_list(Str));
new(Str) ->
start_link(Str).
%%------------------------------------------------------------------------------
IO server
%%
%% Implementation of a subset of the io protocol in order to only support
%% reading operations.
%%------------------------------------------------------------------------------
-spec start_link(string()) -> pid().
start_link(Str) ->
spawn_link(?MODULE, init, [Str]).
-spec init(string()) -> ok.
init(Str) ->
State = #{buffer => Str, original => Str},
?MODULE:loop(State).
-spec loop(state()) -> ok.
loop(#{buffer := Str} = State) ->
receive
{io_request, From, ReplyAs, Request} ->
{Reply, NewStr} = request(Request, Str),
_ = reply(From, ReplyAs, Reply),
?MODULE:loop(State#{buffer := NewStr});
{file_request, From, Ref, close} ->
file_reply(From, Ref, ok);
{file_request, From, Ref, {position, Pos}} ->
{Reply, NewState} = file_position(Pos, State),
_ = file_reply(From, Ref, Reply),
?MODULE:loop(NewState);
_Unknown ->
?MODULE:loop(State)
end.
-spec reply(pid(), pid(), any()) -> any().
reply(From, ReplyAs, Reply) ->
From ! {io_reply, ReplyAs, Reply}.
-spec file_reply(pid(), pid(), any()) -> any().
file_reply(From, ReplyAs, Reply) ->
From ! {file_reply, ReplyAs, Reply}.
-spec file_position(integer(), state()) -> {any(), state()}.
file_position(Pos, #{original := Original} = State) ->
Buffer = lists:nthtail(Pos, Original),
{{ok, Pos}, State#{buffer => Buffer}}.
-spec request(any(), string()) -> {string() | {error, request}, string()}.
request({get_chars, _Encoding, _Prompt, N}, Str) ->
get_chars(N, Str);
request({get_line, _Encoding, _Prompt}, Str) ->
get_line(Str);
request({get_until, _Encoding, _Prompt, Module, Function, Xargs}, Str) ->
get_until(Module, Function, Xargs, Str);
request(_Other, State) ->
{{error, request}, State}.
-spec get_chars(integer(), string()) -> {string() | eof, string()}.
get_chars(_N, []) ->
{eof, []};
get_chars(1, [Ch | Str]) ->
{[Ch], Str};
get_chars(N, Str) ->
do_get_chars(N, Str, []).
-spec do_get_chars(integer(), string(), string()) -> {string(), string()}.
do_get_chars(0, Str, Result) ->
{lists:flatten(Result), Str};
do_get_chars(_N, [], Result) ->
{Result, []};
do_get_chars(N, [Ch | NewStr], Result) ->
do_get_chars(N - 1, NewStr, [Result, Ch]).
-spec get_line(string()) -> {string() | eof, string()}.
get_line([]) ->
{eof, []};
get_line(Str) ->
do_get_line(Str, []).
-spec do_get_line(string(), string()) -> {string() | eof, string()}.
do_get_line([], Result) ->
{lists:flatten(Result), []};
do_get_line("\r\n" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line("\n" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line("\r" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line([Ch | RestStr], Result) ->
do_get_line(RestStr, [Result, Ch]).
-spec get_until(module(), atom(), list(), eof | string()) -> {term(), string()}.
get_until(Module, Function, XArgs, Str) ->
apply_get_until(Module, Function, [], Str, XArgs).
-spec apply_get_until(module(), atom(), any(), string() | eof, list()) ->
{term(), string()}.
apply_get_until(Module, Function, State, String, XArgs) ->
case apply(Module, Function, [State, String | XArgs]) of
{done, Result, NewStr} ->
{Result, NewStr};
{more, NewState} ->
apply_get_until(Module, Function, NewState, eof, XArgs)
end.
-spec skip(string() | {cont, integer(), string()}, term(), integer()) ->
{more, {cont, integer(), string()}} | {done, integer(), string()}.
skip(Str, _Data, Length) ->
skip(Str, Length).
-spec skip(string() | {cont, integer(), string()}, integer()) ->
{more, {cont, integer(), string()}} | {done, integer(), string()}.
skip(Str, Length) when is_list(Str) ->
{more, {cont, Length, Str}};
skip({cont, 0, Str}, Length) ->
{done, Length, Str};
skip({cont, Length, []}, Length) ->
{done, eof, []};
skip({cont, Length, [_ | RestStr]}, _Length) ->
{more, {cont, Length - 1, RestStr}}.
| null | https://raw.githubusercontent.com/inaka/katana-code/a1bebae843703af8f9ddd508a5924fbf9d831886/src/ktn_io_string.erl | erlang | ------------------------------------------------------------------------------
API
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Implementation of a subset of the io protocol in order to only support
reading operations.
------------------------------------------------------------------------------ | -module(ktn_io_string).
-export([new/1]).
-export([start_link/1, init/1, loop/1, skip/2, skip/3]).
-type state() :: #{buffer := string(), original := string()}.
-hank([{unnecessary_function_arguments, [{skip, 3}]}]).
-spec new(string() | binary()) -> pid().
new(Str) when is_binary(Str) ->
new(binary_to_list(Str));
new(Str) ->
start_link(Str).
IO server
-spec start_link(string()) -> pid().
start_link(Str) ->
spawn_link(?MODULE, init, [Str]).
-spec init(string()) -> ok.
init(Str) ->
State = #{buffer => Str, original => Str},
?MODULE:loop(State).
-spec loop(state()) -> ok.
loop(#{buffer := Str} = State) ->
receive
{io_request, From, ReplyAs, Request} ->
{Reply, NewStr} = request(Request, Str),
_ = reply(From, ReplyAs, Reply),
?MODULE:loop(State#{buffer := NewStr});
{file_request, From, Ref, close} ->
file_reply(From, Ref, ok);
{file_request, From, Ref, {position, Pos}} ->
{Reply, NewState} = file_position(Pos, State),
_ = file_reply(From, Ref, Reply),
?MODULE:loop(NewState);
_Unknown ->
?MODULE:loop(State)
end.
-spec reply(pid(), pid(), any()) -> any().
reply(From, ReplyAs, Reply) ->
From ! {io_reply, ReplyAs, Reply}.
-spec file_reply(pid(), pid(), any()) -> any().
file_reply(From, ReplyAs, Reply) ->
From ! {file_reply, ReplyAs, Reply}.
-spec file_position(integer(), state()) -> {any(), state()}.
file_position(Pos, #{original := Original} = State) ->
Buffer = lists:nthtail(Pos, Original),
{{ok, Pos}, State#{buffer => Buffer}}.
-spec request(any(), string()) -> {string() | {error, request}, string()}.
request({get_chars, _Encoding, _Prompt, N}, Str) ->
get_chars(N, Str);
request({get_line, _Encoding, _Prompt}, Str) ->
get_line(Str);
request({get_until, _Encoding, _Prompt, Module, Function, Xargs}, Str) ->
get_until(Module, Function, Xargs, Str);
request(_Other, State) ->
{{error, request}, State}.
-spec get_chars(integer(), string()) -> {string() | eof, string()}.
get_chars(_N, []) ->
{eof, []};
get_chars(1, [Ch | Str]) ->
{[Ch], Str};
get_chars(N, Str) ->
do_get_chars(N, Str, []).
-spec do_get_chars(integer(), string(), string()) -> {string(), string()}.
do_get_chars(0, Str, Result) ->
{lists:flatten(Result), Str};
do_get_chars(_N, [], Result) ->
{Result, []};
do_get_chars(N, [Ch | NewStr], Result) ->
do_get_chars(N - 1, NewStr, [Result, Ch]).
-spec get_line(string()) -> {string() | eof, string()}.
get_line([]) ->
{eof, []};
get_line(Str) ->
do_get_line(Str, []).
-spec do_get_line(string(), string()) -> {string() | eof, string()}.
do_get_line([], Result) ->
{lists:flatten(Result), []};
do_get_line("\r\n" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line("\n" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line("\r" ++ RestStr, Result) ->
{lists:flatten(Result), RestStr};
do_get_line([Ch | RestStr], Result) ->
do_get_line(RestStr, [Result, Ch]).
-spec get_until(module(), atom(), list(), eof | string()) -> {term(), string()}.
get_until(Module, Function, XArgs, Str) ->
apply_get_until(Module, Function, [], Str, XArgs).
-spec apply_get_until(module(), atom(), any(), string() | eof, list()) ->
{term(), string()}.
apply_get_until(Module, Function, State, String, XArgs) ->
case apply(Module, Function, [State, String | XArgs]) of
{done, Result, NewStr} ->
{Result, NewStr};
{more, NewState} ->
apply_get_until(Module, Function, NewState, eof, XArgs)
end.
-spec skip(string() | {cont, integer(), string()}, term(), integer()) ->
{more, {cont, integer(), string()}} | {done, integer(), string()}.
skip(Str, _Data, Length) ->
skip(Str, Length).
-spec skip(string() | {cont, integer(), string()}, integer()) ->
{more, {cont, integer(), string()}} | {done, integer(), string()}.
skip(Str, Length) when is_list(Str) ->
{more, {cont, Length, Str}};
skip({cont, 0, Str}, Length) ->
{done, Length, Str};
skip({cont, Length, []}, Length) ->
{done, eof, []};
skip({cont, Length, [_ | RestStr]}, _Length) ->
{more, {cont, Length - 1, RestStr}}.
|
67417b644283448809907e22b4aef3b883999b8c28801dfc90af08169f26b1ff | petelliott/raylib-guile | core-input-keys.scm | (use-modules (raylib))
(define screen-width 800)
(define screen-height 450)
(InitWindow screen-width screen-height "raylib [core] example - keyboard input")
(define ball-position (make-Vector2 (/ screen-width 2)
(/ screen-height 2)))
(SetTargetFPS 60)
(define (Vector2-delta! vec dx dy)
(Vector2-set-x! vec (+ dx (Vector2-x vec)))
(Vector2-set-y! vec (+ dy (Vector2-y vec))))
(define (main-loop)
(unless (WindowShouldClose)
;; Update
(when (IsKeyDown KEY_RIGHT) (Vector2-delta! ball-position 2 0))
(when (IsKeyDown KEY_LEFT) (Vector2-delta! ball-position -2 0))
(when (IsKeyDown KEY_UP) (Vector2-delta! ball-position 0 -2))
(when (IsKeyDown KEY_DOWN) (Vector2-delta! ball-position 0 2))
;; Draw
(BeginDrawing)
(ClearBackground RAYWHITE)
(DrawText "move the ball with arrow keys" 10 10 20 DARKGRAY)
(DrawCircleV ball-position 50 MAROON)
(EndDrawing)
(main-loop)))
(main-loop)
(CloseWindow)
| null | https://raw.githubusercontent.com/petelliott/raylib-guile/88689ffc1704d0974a5b017ff409a852c6cb7635/examples/core/core-input-keys.scm | scheme | Update
Draw | (use-modules (raylib))
(define screen-width 800)
(define screen-height 450)
(InitWindow screen-width screen-height "raylib [core] example - keyboard input")
(define ball-position (make-Vector2 (/ screen-width 2)
(/ screen-height 2)))
(SetTargetFPS 60)
(define (Vector2-delta! vec dx dy)
(Vector2-set-x! vec (+ dx (Vector2-x vec)))
(Vector2-set-y! vec (+ dy (Vector2-y vec))))
(define (main-loop)
(unless (WindowShouldClose)
(when (IsKeyDown KEY_RIGHT) (Vector2-delta! ball-position 2 0))
(when (IsKeyDown KEY_LEFT) (Vector2-delta! ball-position -2 0))
(when (IsKeyDown KEY_UP) (Vector2-delta! ball-position 0 -2))
(when (IsKeyDown KEY_DOWN) (Vector2-delta! ball-position 0 2))
(BeginDrawing)
(ClearBackground RAYWHITE)
(DrawText "move the ball with arrow keys" 10 10 20 DARKGRAY)
(DrawCircleV ball-position 50 MAROON)
(EndDrawing)
(main-loop)))
(main-loop)
(CloseWindow)
|
b9ca68a5f1b6aedbe520fa3a92a0f64498fffc90fe796207a1330374a2b819b1 | Elzair/nazghul | troll.scm | ; (define (troll-display . args)
; (display (kern-get-ticks))
; (display ":")
; (apply display args))
; (define (troll-newline) (newline))
(define (troll-display . args) )
(define (troll-newline) )
;;----------------------------------------------------------------------------
Troll AI
;;----------------------------------------------------------------------------
(define (troll-is-critical? ktroll)
(< (kern-char-get-hp ktroll) troll-critical-hp))
(define (troll-wander ktroll)
(troll-display "troll-wander")(troll-newline)
(wander ktroll))
(define (troll-flee ktroll)
(troll-display "troll-flee")(troll-newline)
(flee ktroll))
(define (troll-foes-in-weapon-range ktroll karms kfoes)
(troll-display "troll-foes-in-weapon-range")(troll-newline)
(all-in-range (kern-obj-get-location ktroll)
(kern-arms-type-get-range karms)
kfoes))
(define (weaker? a b)
(< (kern-char-get-hp a) (kern-char-get-hp b)))
(define (troll-pick-target ktroll foes)
(troll-display "troll-pick-target")(troll-newline)
(foldr (lambda (a b) (if (weaker? a b) a b))
(car foes)
(cdr foes)))
(define (troll-pathfind-foe ktroll foes)
(troll-display "troll-pathfind-foe")(troll-newline)
(let ((ktarg (troll-pick-target ktroll foes)))
(if (notnull? ktarg)
(pathfind ktroll (kern-obj-get-location ktarg)))))
(define (troll-attack ktroll karms foes)
(troll-display "troll-attack")(troll-newline)
(kern-char-attack ktroll
karms
(troll-pick-target ktroll
foes)))
;; Given an "origin" location and a list of locations, find the location in the
;; list closest to the coordinates.
(define (loc-closest origin lst)
(if (null? lst) nil
(foldr (lambda (a b) (if (loc-closer? a b origin) a b))
(car lst)
(cdr lst))))
(define (troll-stronger? ktroll foes)
(> (kern-char-get-strength ktroll)
(foldr (lambda (a b) (+ a (kern-char-get-strength b)))
0
foes)))
(define (troll-has-ranged-weapon? ktroll)
(in-inventory? ktroll troll-ranged-weapon))
;; troll-get-ammo -- give troll a boulder and convert terrain to grass
(define (troll-get-terrain-ammo ktroll coords)
(troll-display "troll-get-terrain-ammo")(troll-newline)
(kern-obj-add-to-inventory ktroll troll-ranged-weapon 1)
(kern-place-set-terrain coords t_grass)
(kern-map-repaint)
(kern-obj-dec-ap ktroll troll-ripup-boulder-ap)
)
;; ----------------------------------------------------------------------------
;; troll-get-loose-ammo -- search the objects at the location for ammo and give
;; it to the th character
;; ----------------------------------------------------------------------------
(define (troll-get-loose-ammo ktroll loc)
(troll-display "troll-get-loose-ammo")(troll-newline)
(kobj-get-at ktroll loc troll-ranged-weapon))
;; ----------------------------------------------------------------------------
;; troll-terrain-is-ammo -- true iff the given location's terrain can be
;; converted by a troll into ammo
;; ----------------------------------------------------------------------------
(define (troll-terrain-is-ammo? coords)
(eqv? t_boulder (kern-place-get-terrain coords)))
;; ----------------------------------------------------------------------------
;; troll-find-nearest-ammo -- return the closest location with ammo objects or
;; with terrain that can be converted to ammo objects.
;; ----------------------------------------------------------------------------
(define (troll-find-nearest-ammo ktroll)
(troll-display "troll-find-nearest-ammo")(troll-newline)
(define (scanobjlst lst)
(foldr (lambda (a b)
(or a (eqv? (kern-obj-get-type b) troll-ranged-weapon)))
#f
lst))
(define (check lst loc)
(if (troll-terrain-is-ammo? loc)
(cons loc lst)
(if (scanobjlst (kern-get-objects-at loc))
(cons loc lst)
lst)))
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(coords (profile foldr-rect (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
check
nil)))
(troll-display coords)(troll-newline)
(profile loc-closest loc coords)))
(define (troll-find-nearest-ammo2 ktroll)
(troll-display "troll-find-nearest-ammo2")(troll-newline)
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(coords (profile kern-search-rect (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
t_boulder
troll-ranged-weapon)))
(profile loc-closest loc coords)))
(define (troll-find-nearest-ammo3 ktroll)
(troll-display "troll-find-nearest-ammo3")(troll-newline)
(define (scanobjlst lst)
(foldr (lambda (a b)
(or a (eqv? (kern-obj-get-type b) troll-ranged-weapon)))
#f
lst))
(define (check lst loc)
(if (troll-terrain-is-ammo? loc)
(cons loc lst)
(if (scanobjlst (kern-get-objects-at loc))
(cons loc lst)
lst)))
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(coords (profile kern-fold-rect (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
check
nil)))
(troll-display coords)(troll-newline)
(profile loc-closest loc coords)))
(define (troll-find-nearest-ammo4 ktroll)
(troll-display "troll-find-nearest-ammo4")(troll-newline)
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(terrain-coords (profile kern-search-rect-for-terrain (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
t_boulder))
(closest-terrain (profile loc-closest loc terrain-coords))
(obj-coords (profile kern-search-rect-for-obj-type (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
troll-ranged-weapon))
(closest-obj (profile loc-closest loc obj-coords)))
(cond ((null? closest-obj) closest-terrain)
((null? closest-terrain) closest-obj)
(else
(if (loc-closer? closest-obj closest-terrain loc)
closest-obj
closest-terrain)))))
;; ----------------------------------------------------------------------------
;; troll-get-ammo -- given the location of an ammo object or terrain that can
;; be converted to ammo, have the troll get the ammo
;; ----------------------------------------------------------------------------
(define (troll-get-ammo ktroll loc)
(troll-display "troll-get-ammo")(troll-newline)
(if (troll-terrain-is-ammo? loc)
(troll-get-terrain-ammo ktroll loc)
(troll-get-loose-ammo ktroll loc)))
;; ----------------------------------------------------------------------------
;; troll-hunt-for-ammo2 -- find the nearest available ammo and pathfind to it
;; or pick it up. Returns false iff none available.
;; ----------------------------------------------------------------------------
(define (troll-hunt-for-ammo ktroll)
(troll-display "troll-hunt-for-ammo")(troll-newline)
(let ((nearest (profile troll-find-nearest-ammo2 ktroll))
(kloc (kern-obj-get-location ktroll)))
(troll-display "nearest=")(troll-display nearest)(troll-newline)
(if (null? nearest)
#f
(begin
(do-or-goto ktroll nearest troll-get-ammo)
#t))))
(define (troll-display-objs lst)
(if (null? lst)
(troll-newline)
(begin
(troll-display (kern-obj-get-name (car lst)))
(troll-display " ")
(troll-display-objs (cdr lst)))))
(define (troll-no-hostiles ktroll)
(troll-display "troll-no-hostiles")(troll-newline)
(troll-wander ktroll))
(define (troll-taunted? ktroll)
(car (kobj-gob-data ktroll)))
(define troll-taunts
(list
"[primal howl]"
"[hateful roar]"
"[raging bellow]"
))
(define (troll-taunt ktroll ktarg)
(taunt ktroll ktarg troll-taunts)
(set-car! (kobj-gob-data ktroll) #t))
(define (troll-hostiles ktroll foes)
(troll-display "troll-hostiles")(troll-newline)
(if (troll-is-critical? ktroll)
(troll-flee ktroll)
(let ((melee-targs (troll-foes-in-weapon-range ktroll
troll-melee-weapon
foes)))
(troll-display "troll-ai:melee-targs=")
(troll-display melee-targs)
(troll-newline)
(or (troll-taunted? ktroll)
(troll-taunt ktroll (car foes)))
(if (null? melee-targs)
(if (troll-has-ranged-weapon? ktroll)
(let
((ranged-foes
(troll-foes-in-weapon-range ktroll
troll-ranged-weapon
foes)))
(troll-display "troll-ai:ranged-foes=")
(troll-display ranged-foes)
(troll-newline)
(if (null? ranged-foes)
(troll-pathfind-foe ktroll foes)
(troll-attack ktroll troll-ranged-weapon
ranged-foes)))
(or (troll-hunt-for-ammo ktroll)
(troll-pathfind-foe ktroll foes)))
(if (troll-stronger? ktroll melee-targs)
(troll-attack ktroll troll-melee-weapon melee-targs)
(evade ktroll melee-targs))))))
;; ----------------------------------------------------------------------------
;; troll-ai -- combat ai for a troll npc. Called repeatedly by the kernel on
the troll 's turn until the troll is out of ap .
;; ----------------------------------------------------------------------------
(define (troll-ai ktroll)
(troll-display "troll-ai")(troll-newline)
(let ((foes (all-visible-hostiles ktroll)))
(if (null? foes)
(troll-wander ktroll)
(troll-hostiles ktroll foes))))
| null | https://raw.githubusercontent.com/Elzair/nazghul/8f3a45ed6289cd9f469c4ff618d39366f2fbc1d8/worlds/haxima-1.001/troll.scm | scheme | (define (troll-display . args)
(display (kern-get-ticks))
(display ":")
(apply display args))
(define (troll-newline) (newline))
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Given an "origin" location and a list of locations, find the location in the
list closest to the coordinates.
troll-get-ammo -- give troll a boulder and convert terrain to grass
----------------------------------------------------------------------------
troll-get-loose-ammo -- search the objects at the location for ammo and give
it to the th character
----------------------------------------------------------------------------
----------------------------------------------------------------------------
troll-terrain-is-ammo -- true iff the given location's terrain can be
converted by a troll into ammo
----------------------------------------------------------------------------
----------------------------------------------------------------------------
troll-find-nearest-ammo -- return the closest location with ammo objects or
with terrain that can be converted to ammo objects.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
troll-get-ammo -- given the location of an ammo object or terrain that can
be converted to ammo, have the troll get the ammo
----------------------------------------------------------------------------
----------------------------------------------------------------------------
troll-hunt-for-ammo2 -- find the nearest available ammo and pathfind to it
or pick it up. Returns false iff none available.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
troll-ai -- combat ai for a troll npc. Called repeatedly by the kernel on
---------------------------------------------------------------------------- |
(define (troll-display . args) )
(define (troll-newline) )
Troll AI
(define (troll-is-critical? ktroll)
(< (kern-char-get-hp ktroll) troll-critical-hp))
(define (troll-wander ktroll)
(troll-display "troll-wander")(troll-newline)
(wander ktroll))
(define (troll-flee ktroll)
(troll-display "troll-flee")(troll-newline)
(flee ktroll))
(define (troll-foes-in-weapon-range ktroll karms kfoes)
(troll-display "troll-foes-in-weapon-range")(troll-newline)
(all-in-range (kern-obj-get-location ktroll)
(kern-arms-type-get-range karms)
kfoes))
(define (weaker? a b)
(< (kern-char-get-hp a) (kern-char-get-hp b)))
(define (troll-pick-target ktroll foes)
(troll-display "troll-pick-target")(troll-newline)
(foldr (lambda (a b) (if (weaker? a b) a b))
(car foes)
(cdr foes)))
(define (troll-pathfind-foe ktroll foes)
(troll-display "troll-pathfind-foe")(troll-newline)
(let ((ktarg (troll-pick-target ktroll foes)))
(if (notnull? ktarg)
(pathfind ktroll (kern-obj-get-location ktarg)))))
(define (troll-attack ktroll karms foes)
(troll-display "troll-attack")(troll-newline)
(kern-char-attack ktroll
karms
(troll-pick-target ktroll
foes)))
(define (loc-closest origin lst)
(if (null? lst) nil
(foldr (lambda (a b) (if (loc-closer? a b origin) a b))
(car lst)
(cdr lst))))
(define (troll-stronger? ktroll foes)
(> (kern-char-get-strength ktroll)
(foldr (lambda (a b) (+ a (kern-char-get-strength b)))
0
foes)))
(define (troll-has-ranged-weapon? ktroll)
(in-inventory? ktroll troll-ranged-weapon))
(define (troll-get-terrain-ammo ktroll coords)
(troll-display "troll-get-terrain-ammo")(troll-newline)
(kern-obj-add-to-inventory ktroll troll-ranged-weapon 1)
(kern-place-set-terrain coords t_grass)
(kern-map-repaint)
(kern-obj-dec-ap ktroll troll-ripup-boulder-ap)
)
(define (troll-get-loose-ammo ktroll loc)
(troll-display "troll-get-loose-ammo")(troll-newline)
(kobj-get-at ktroll loc troll-ranged-weapon))
(define (troll-terrain-is-ammo? coords)
(eqv? t_boulder (kern-place-get-terrain coords)))
(define (troll-find-nearest-ammo ktroll)
(troll-display "troll-find-nearest-ammo")(troll-newline)
(define (scanobjlst lst)
(foldr (lambda (a b)
(or a (eqv? (kern-obj-get-type b) troll-ranged-weapon)))
#f
lst))
(define (check lst loc)
(if (troll-terrain-is-ammo? loc)
(cons loc lst)
(if (scanobjlst (kern-get-objects-at loc))
(cons loc lst)
lst)))
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(coords (profile foldr-rect (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
check
nil)))
(troll-display coords)(troll-newline)
(profile loc-closest loc coords)))
(define (troll-find-nearest-ammo2 ktroll)
(troll-display "troll-find-nearest-ammo2")(troll-newline)
(let* ((loc (kern-obj-get-location ktroll))
(rad (kern-obj-get-vision-radius ktroll))
(coords (profile kern-search-rect (loc-place loc)
(- (loc-x loc) (/ rad 2))
(- (loc-y loc) (/ rad 2))
(* 1 rad)
(* 1 rad)
t_boulder
troll-ranged-weapon)))
(profile loc-closest loc coords)))
;; Variant 3: same per-tile CHECK as variant 1, but folded over the
;; rectangle by the kernel (kern-fold-rect) instead of in Scheme.
(define (troll-find-nearest-ammo3 ktroll)
  (troll-display "troll-find-nearest-ammo3")(troll-newline)
  ;; #t iff any object in LST is of the ranged-weapon type.
  (define (scanobjlst lst)
    (foldr (lambda (a b)
             (or a (eqv? (kern-obj-get-type b) troll-ranged-weapon)))
           #f
           lst))
  ;; Fold step: cons LOC onto LST when the tile or its objects are ammo.
  (define (check lst loc)
    (if (troll-terrain-is-ammo? loc)
        (cons loc lst)
        (if (scanobjlst (kern-get-objects-at loc))
            (cons loc lst)
            lst)))
  (let* ((loc (kern-obj-get-location ktroll))
         (rad (kern-obj-get-vision-radius ktroll))
         (coords (profile kern-fold-rect (loc-place loc)
                          (- (loc-x loc) (/ rad 2))
                          (- (loc-y loc) (/ rad 2))
                          (* 1 rad)
                          (* 1 rad)
                          check
                          nil)))
    (troll-display coords)(troll-newline)
    (profile loc-closest loc coords)))
;; Variant 4: run two specialized kernel searches (terrain-only, then
;; object-only), take the closest hit of each, and return whichever of
;; the two is nearer to the troll (either may be null when not found).
(define (troll-find-nearest-ammo4 ktroll)
  (troll-display "troll-find-nearest-ammo4")(troll-newline)
  (let* ((loc (kern-obj-get-location ktroll))
         (rad (kern-obj-get-vision-radius ktroll))
         (terrain-coords (profile kern-search-rect-for-terrain (loc-place loc)
                                  (- (loc-x loc) (/ rad 2))
                                  (- (loc-y loc) (/ rad 2))
                                  (* 1 rad)
                                  (* 1 rad)
                                  t_boulder))
         (closest-terrain (profile loc-closest loc terrain-coords))
         (obj-coords (profile kern-search-rect-for-obj-type (loc-place loc)
                              (- (loc-x loc) (/ rad 2))
                              (- (loc-y loc) (/ rad 2))
                              (* 1 rad)
                              (* 1 rad)
                              troll-ranged-weapon))
         (closest-obj (profile loc-closest loc obj-coords)))
    (cond ((null? closest-obj) closest-terrain)
          ((null? closest-terrain) closest-obj)
          (else
           (if (loc-closer? closest-obj closest-terrain loc)
               closest-obj
               closest-terrain)))))
;; Acquire the ammo at LOC: rip it out of the terrain when the tile is a
;; boulder, otherwise pick up the loose object lying there.
(define (troll-get-ammo ktroll loc)
  (troll-display "troll-get-ammo")(troll-newline)
  (cond ((troll-terrain-is-ammo? loc)
         (troll-get-terrain-ammo ktroll loc))
        (else
         (troll-get-loose-ammo ktroll loc))))
;; Find the nearest ammo (using search variant 2) and either grab it or
;; start walking toward it.  Returns #f when no ammo is in sight, #t
;; otherwise, so the caller can fall back to pathfinding the foe.
(define (troll-hunt-for-ammo ktroll)
  (troll-display "troll-hunt-for-ammo")(troll-newline)
  (let ((nearest (profile troll-find-nearest-ammo2 ktroll))
        (kloc (kern-obj-get-location ktroll))) ; NOTE(review): kloc is bound but unused
    (troll-display "nearest=")(troll-display nearest)(troll-newline)
    (if (null? nearest)
        #f
        (begin
          (do-or-goto ktroll nearest troll-get-ammo)
          #t))))
;; Debug helper: print each object's name separated by spaces, then a
;; newline (an empty list prints just the newline).
(define (troll-display-objs lst)
  (let loop ((rest lst))
    (cond ((null? rest)
           (troll-newline))
          (else
           (troll-display (kern-obj-get-name (car rest)))
           (troll-display " ")
           (loop (cdr rest))))))
;; No enemies in sight: just wander.
(define (troll-no-hostiles ktroll)
  (troll-display "troll-no-hostiles")(troll-newline)
  (troll-wander ktroll))

;; Has this troll already taunted?  The flag lives in the first cell of
;; the troll's gob data.
(define (troll-taunted? ktroll)
  (car (kobj-gob-data ktroll)))

;; Pool of taunt lines; one is picked by the generic taunt helper.
(define troll-taunts
  (list
   "[primal howl]"
   "[hateful roar]"
   "[raging bellow]"
   ))

;; Taunt KTARG once and remember it by setting the gob-data flag.
(define (troll-taunt ktroll ktarg)
  (taunt ktroll ktarg troll-taunts)
  (set-car! (kobj-gob-data ktroll) #t))
;; Combat decision tree, run when FOES (non-empty) are visible:
;;   * critically wounded        -> flee
;;   * foe in melee range        -> attack if stronger than the group,
;;                                  otherwise evade
;;   * no melee target, has ammo -> ranged attack, or close distance
;;   * no ammo                   -> hunt for ammo, else pathfind to foe
;; Also taunts the first foe once per (gob-data) lifetime.
(define (troll-hostiles ktroll foes)
  (troll-display "troll-hostiles")(troll-newline)
  (if (troll-is-critical? ktroll)
      (troll-flee ktroll)
      (let ((melee-targs (troll-foes-in-weapon-range ktroll
                                                     troll-melee-weapon
                                                     foes)))
        (troll-display "troll-ai:melee-targs=")
        (troll-display melee-targs)
        (troll-newline)
        ;; OR short-circuits: taunt only if not already taunted.
        (or (troll-taunted? ktroll)
            (troll-taunt ktroll (car foes)))
        (if (null? melee-targs)
            (if (troll-has-ranged-weapon? ktroll)
                (let
                    ((ranged-foes
                      (troll-foes-in-weapon-range ktroll
                                                  troll-ranged-weapon
                                                  foes)))
                  (troll-display "troll-ai:ranged-foes=")
                  (troll-display ranged-foes)
                  (troll-newline)
                  (if (null? ranged-foes)
                      (troll-pathfind-foe ktroll foes)
                      (troll-attack ktroll troll-ranged-weapon
                                    ranged-foes)))
                (or (troll-hunt-for-ammo ktroll)
                    (troll-pathfind-foe ktroll foes)))
            (if (troll-stronger? ktroll melee-targs)
                (troll-attack ktroll troll-melee-weapon melee-targs)
                (evade ktroll melee-targs))))))
;; Run the troll's turn until the troll is out of ap.
;; Top-level AI entry point (called by the kernel each turn): wander when
;; no hostiles are visible, otherwise run the combat decision tree.
(define (troll-ai ktroll)
  (troll-display "troll-ai")(troll-newline)
  (let ((foes (all-visible-hostiles ktroll)))
    (if (null? foes)
        (troll-wander ktroll)
        (troll-hostiles ktroll foes))))
|
10d29446db29baed93444ed7a149cfc0d3db58c2759729563e818fd2c1587a7f | jkrivine/tl_interpreter | token.ml | * { e Contract } . Implements basic ERC20 - like functionality .
open Tools
open Env.Imp.Program
(** A simple [address->int] map *)
let balances = data ~pp:(MP.pp Address.pp Format.pp_print_int) "balances"

(** Whoever has minting power on the token *)
let owner = data ~pp:Address.pp "owner"

(** [balance who] returns [who]'s current balance (0 when unknown). *)
let balance = code ()

(** [transfer (amount, taker)] moves [amount] from the caller to [taker];
    errors on a negative amount or insufficient funds. *)
let transfer = code ()

(** Like [transfer], but caps the amount at the caller's balance. *)
let transfer_up_to = code ()

(** Transfer the caller's whole balance to the taker. *)
let transfer_all = code ()

(** admin only *)
let mint_for = code ()

(** All transfers attempt to call [on_token_receive] on the receiving
    contract. Ignored if method is not implemented by the contract. *)
let on_token_receive : (Address.t*Address.t*int,unit) code_id = code ()
(* Deploy-time initializer: records the deployer as [owner], starts from an
   empty balance map, and installs the method bodies declared above. *)
let construct () =
  let caller = get_caller () in
  data_set owner caller ;
  data_set balances MP.empty ;
  (* Minting is restricted to the owner recorded at construction. *)
  code_set mint_for begin
    fun (amount,taker) ->
      require (data_get owner = get_caller ());
      map_update balances taker ~default:0 (fun v -> v+amount)
  end;
  (* Addresses absent from [balances] are treated as balance 0. *)
  let balance' who = map_find balances who |? 0 in
  let add amount who =
    map_update balances who ~default:0 (fun v -> v+amount) in
  (* Core transfer: rejects negative amounts and overdrafts, then moves
     [amount] from [giver] to [taker]. *)
  let transfer' giver amount taker =
    if amount < 0
    then error "cannot transfer a negative amount"
    else
      let bal = balance' giver in
      if bal < amount
      then error "insufficient funds for transfer"
      else
        add (-amount) giver; add amount taker in
  let token_addr = get_this () in
  (* Default hook: contracts must override this to accept tokens. *)
  code_set on_token_receive (fun _ ->
      error "This contract does not receive tokens");
  code_set balance balance' ;
  (* [transfer] notifies the taker when it implements the hook.
     NOTE(review): [transfer_up_to] and [transfer_all] below skip the
     hook, unlike [transfer] — confirm this asymmetry is intended. *)
  code_set transfer (fun (amount,taker) ->
      let giver = get_caller () in
      transfer' giver amount taker;
      if responds taker on_token_receive then
        call taker on_token_receive (giver,token_addr,amount)
      else ());
  code_set transfer_up_to (fun (amount,taker) ->
      let giver = get_caller () in
      let bal = balance' giver in
      transfer' giver (min amount bal) taker);
  code_set transfer_all (fun (taker) ->
      let giver = get_caller () in
      let bal = balance' giver in
      transfer' giver bal taker)
| null | https://raw.githubusercontent.com/jkrivine/tl_interpreter/c967c6578dd4491a6930c9842a0709fbc5939496/lib/contracts/token.ml | ocaml | * A simple [address->int] map
* Whoever has minting power on the token
* admin only
* All transfers attempt to call [on_token_receive] on the receiving contract. Ignored if method is not implemented by the contract. | * { e Contract } . Implements basic ERC20 - like functionality .
open Tools
open Env.Imp.Program
let balances = data ~pp:(MP.pp Address.pp Format.pp_print_int) "balances"
let owner = data ~pp:Address.pp "owner"
let balance = code ()
let transfer = code ()
let transfer_up_to = code ()
let transfer_all = code ()
let mint_for = code ()
let on_token_receive : (Address.t*Address.t*int,unit) code_id = code ()
let construct () =
let caller = get_caller () in
data_set owner caller ;
data_set balances MP.empty ;
code_set mint_for begin
fun (amount,taker) ->
require (data_get owner = get_caller ());
map_update balances taker ~default:0 (fun v -> v+amount)
end;
let balance' who = map_find balances who |? 0 in
let add amount who =
map_update balances who ~default:0 (fun v -> v+amount) in
let transfer' giver amount taker =
if amount < 0
then error "cannot transfer a negative amount"
else
let bal = balance' giver in
if bal < amount
then error "insufficient funds for transfer"
else
add (-amount) giver; add amount taker in
let token_addr = get_this () in
code_set on_token_receive (fun _ ->
error "This contract does not receive tokens");
code_set balance balance' ;
code_set transfer (fun (amount,taker) ->
let giver = get_caller () in
transfer' giver amount taker;
if responds taker on_token_receive then
call taker on_token_receive (giver,token_addr,amount)
else ());
code_set transfer_up_to (fun (amount,taker) ->
let giver = get_caller () in
let bal = balance' giver in
transfer' giver (min amount bal) taker);
code_set transfer_all (fun (taker) ->
let giver = get_caller () in
let bal = balance' giver in
transfer' giver bal taker)
|
dfa5a7c1167641bd3090eba95b6613c342791013c1dcbc36a97b0b7361c204b7 | chiroptical/book-of-monads | Main.hs | module Main where
import Lib
-- | Program entry point: print the chapter banner.
main :: IO ()
main = putStrLn banner
  where
    banner = "Monads for Mutability"
| null | https://raw.githubusercontent.com/chiroptical/book-of-monads/c2eff1c67a8958b28cfd2001d652f8b68e7c84df/chapter8/app/Main.hs | haskell | module Main where
import Lib
main :: IO ()
main = putStrLn "Monads for Mutability"
| |
19178161ddf7b3d7d228cc829a501d5d5676ad29e67dffc8938ee1250642c140 | tonyg/kali-scheme | pattern.scm | Copyright ( c ) 1993 , 1994 by and .
; Copyright (c) 1998 by NEC Research Institute, Inc.  See file COPYING.
;(define (simplify-subtract call)
; (simplify-args call 0)
; ((pattern-simplifier
; ((- 'a 'b) '(- a b)) ; constant folding
; ((- x 'a) (+ '(- 0 a) x)) ; convert to a normal form
; ((- 'a (+ 'b x)) (- '(- a b) x)) ; merging constants
; ((- 'a (- 'b x)) (+ x '(- a b))) ; ditto
; ((- x (+ 'a y)) (+ '(- 0 a) (- x y))) ; convert to a normal form
; ((- (+ 'a x) (+ 'b y)) (- (+ '(- a b) x) y)))
; call))
; (pattern-simplifier pattern-spec ...)
; =>
; (lambda (call-node) ...)
; The resulting procedure replaces instances of IN-PATTERNs with the
; corresponding OUT-PATTERNs.
;
; <pattern-spec> ::= (in-pattern out-pattern) |
; (in-pattern boolean-expression out-pattern)
;
; All of the IN-PATTERNs for a particular simplifier must be calls to the
; same primop.  If the boolean-expression is present it is evaluated after
; the in-pattern is matched and in an environment where the symbols of the
; the in-pattern are bound to the corresponding values from the call.
;
; x matches anything
; 'x matches any literal
; (op x ...) matches a call to the primop OP
; 5 matches the literal 5
; The patterns are matched in order.
;----------------
; Call MATCH-CALLS with a continuation that makes code to construct the
; right-hand side of the specification. This assumes that the left-hand side
; of all of the specifications will be calls to the same primitive. The
; initial CASE is removed from the code returned by MATCH-CALLS.
; Build the simplifier procedure for SPECS.  Resets the gensym counter,
; compiles every (in-pattern . rest) spec into matching code via
; MATCH-CALLS, and wraps the result in a one-argument LAMBDA over the
; call node being simplified.  The continuation handed to MATCH-CALLS
; fires once a full call has matched and emits the predicate checks and
; rebuild code.
(define (make-pattern-simplifier specs)
  (set! *generate-symbol-index* 0)
  (let* ((initial (generate-symbol 'initial))
         (exp (match-calls (map (lambda (spec)
                                  (make-pattern (car spec) (cdr spec)))
                                specs)
                           initial
                           #f
                           (lambda (patterns)
                             (if (null? patterns)
                                 (error "no patterns matched" specs)
                                 (check-predicates patterns initial))))))
    `(lambda (,initial)
       ,(cadar (cddr exp))))) ; strip off initial CASE
; One rewrite rule in flight: the input spec being matched, the bindings
; accumulated so far, its enclosing pattern (when nested), and how to
; rebuild the replacement.
(define-record-type pattern
  (spec       ; the specification this pattern is to match
   (env)      ; an a-list mapping atoms in the pattern to the identifiers
              ; that will be bound to the value matched by the atom
   parent     ; if this pattern is an argument in another pattern, this
              ; field contains the other pattern
   predicate  ; predicate call or #F
   build-spec ; specification for the transformed pattern
   )
  ())
; Returns the pattern for the I'th argument in PATTERN.
; The I'th argument of PATTERN's call spec (element 0 of the spec is the
; primop, so arguments start one past it).
(define (pattern-arg pattern i)
  (list-ref (cdr (pattern-spec pattern)) i))
; Make a fresh PATTERN record for input SPEC.  SPECS is the rest of the
; rule: either (out-pattern) or (predicate out-pattern).
(define (make-pattern spec specs)
  (receive (build-spec predicate)
      (if (null? (cdr specs))
          (values (car specs) #f)
          (values (cadr specs) (car specs)))
    (pattern-maker spec '() #f predicate build-spec)))
; For each pattern in PATTERNS, extend the environment with the I'th argument
; of the pattern bound to ID.
; Record, in every pattern's environment, that its I'th argument is bound
; to identifier ID.  Each entry is (atom id literal?): a quoted argument
; (pair) contributes its inner atom with the literal flag set.  MAP is
; used purely for the SET-PATTERN-ENV! side effect.
(define (extend-pattern-envs patterns i id)
  (map (lambda (pattern)
         (let ((arg (pattern-arg pattern i)))
           (set-pattern-env! pattern
                             (cons (if (pair? arg)
                                       (list (cadr arg) id #t)
                                       (list arg id #f))
                                   (pattern-env pattern)))))
       patterns))
; Return the parent of PATTERN, setting the environment of the parent to be
; the environment of PATTERN.  This is only used once we are done with PATTERN
; and want to continue with the next argument in the parent.
; Pop back to PATTERN's parent, propagating the bindings gathered while
; matching PATTERN into the parent's environment.
(define (get-pattern-parent pattern)
  (let ((p (pattern-parent pattern)))
    (set-pattern-env! p (pattern-env pattern))
    p))
; Sort PATTERNS by the primop being called, and for each set of patterns
; matching the same primop, call MATCH-CALL-ARGS to generate code for
; those patterns. FINISH-CALL-MATCH builds the clauses that this generates
; into a CASE expression.
; CALL-VAR is the identifier that will be bound to the call being matched.
; FAIL-VAR is either #F or a variable that should be called if no pattern
; matches.
; MORE is a procedure that finishes with the patterns after this call has
; been matched.
; Group PATTERNS by the primop their spec calls (a numeric "primop" acts
; as a wildcard and becomes the ELSE clause); each group is compiled by
; MATCH-CALL-ARGS and the clauses are assembled by FINISH-CALL-MATCH.
(define (match-calls patterns call-var fail-var more)
  (let ((primop-var (generate-symbol 'primop)))
    (let loop ((patterns patterns) (res '()))
      (if (null? patterns)
          (finish-call-match res call-var primop-var fail-var)
          (let ((primop (car (pattern-spec (car patterns)))))
            ;; Split off every remaining pattern calling the same primop.
            (receive (same other)
                (partition-list (lambda (p)
                                  (eq? primop (car (pattern-spec p))))
                                (cdr patterns))
              (loop other
                    (cons `(,(if (number? primop) 'else `(,primop))
                            ,(match-call-args (cons (car patterns) same)
                                              0
                                              call-var
                                              fail-var
                                              more))
                          res))))))))
; Assemble the per-primop CLAUSES into a CASE over the call's primop id.
; At most one ELSE (wildcard) clause is allowed; when present it becomes
; the CASE's else branch with PRIMOP-VAR bound to the actual primop,
; otherwise falling through invokes FAIL-VAR (or yields #F).
(define (finish-call-match clauses call-var primop-var fail-var)
  (receive (elses other)
      (partition-list (lambda (c)
                        (eq? (car c) 'else))
                      clauses)
    `(case (primop-id (call-primop ,call-var))
       ,@(reverse other)
       (else ,(cond ((null? elses)
                     (if fail-var `(,fail-var) #f))
                    ((null? (cdr elses))
                     `(let ((,primop-var (call-primop ,call-var)))
                        ,(cadar elses))) ; strip off unneeded ELSE
                    (else
                     (error "more than one ELSE clause" elses)))))))
; Similar to MATCH-CALLS, except that this is matching the I'th argument of a
; call.  All patterns with similar I'th arguments are grouped together and
; passed to MATCH-CALL-ARG. The clauses that are returned are made into a
; COND expression by FINISH-MATCH-CALL-ARGS.
; If there are fewer than I arguments, MORE is called to continue matching
; other parts of the patterns.
; Patterns that always match the I'th argument are handled separately.
; They are used to generate the ELSE clause of the conditional returned.
; If there are no such patterns, then the passed-in FAIL-VAR is called
; if no patterns match.
; Compile the test for argument I of the patterns' shared call shape.
; Atom arguments always match: they only bind ARG-VAR and go on to the
; next argument, forming the ELSE branch.  The remaining patterns are
; grouped by the shape of their I'th argument and each group is compiled
; into a COND clause by MATCH-CALL-ARG.  Past the last argument the MORE
; continuation takes over.
(define (match-call-args patterns i call-var fail-var more)
  (if (>= i (length (cdr (pattern-spec (car patterns)))))
      (more patterns)
      (receive (atom-patterns other-patterns)
          (partition-list (lambda (p)
                            (atom? (pattern-arg p i)))
                          patterns)
        (let* ((arg-var (generate-symbol 'arg))
               ;; Code run when no specific clause matches: continue with
               ;; the always-matching atom patterns, if any.
               (else-code (cond ((null? atom-patterns)
                                 #f)
                                (else
                                 (extend-pattern-envs atom-patterns i arg-var)
                                 (match-call-args atom-patterns (+ i 1)
                                                  call-var fail-var more))))
               ;; With an else branch, failures call a local FAIL thunk.
               (fail-var (if else-code (generate-symbol 'fail) fail-var))
               (more (lambda (patterns)
                       (match-call-args patterns (+ i 1)
                                        call-var fail-var more))))
          (let loop ((patterns other-patterns) (clauses '()))
            (if (null? patterns)
                (finish-match-call-args i call-var arg-var fail-var
                                        else-code clauses)
                (let ((first (car patterns)))
                  (receive (same other)
                      (partition-list (lambda (p)
                                        (same-arg-pattern? first p i))
                                      (cdr patterns))
                    (loop other
                          (cons (match-call-arg (cons first same)
                                                i
                                                arg-var
                                                fail-var
                                                more)
                                clauses))))))))))
; If ELSE-CODE exists this binds FAIL-VAR to a failure procedure containing it.
; The CLAUSES are put in a COND.
; Emit the code for testing argument I: bind ARG-VAR to the actual
; argument, wrap the CLAUSES in a COND, and route no-match either to the
; local FAIL-VAR thunk (built from ELSE-CODE) or to the inherited one.
(define (finish-match-call-args i call-var arg-var fail-var else-code clauses)
  `(let ((,arg-var (call-arg ,call-var ,i)))
     ,(if else-code
          `(let ((,fail-var (lambda () ,else-code)))
             (cond ,@clauses (else (,fail-var))))
          `(cond ,@clauses (else ,(if fail-var `(,fail-var) #f))))))
; Are the I'th arguments of patterns P1 and P2 the same as far as matching
; arguments is concerned?
; #t when P1 and P2 can share the matching code for argument I: both are
; atoms (always match), both are quoted literals, or both are calls.
(define (same-arg-pattern? p1 p2 i)
  (let ((a1 (pattern-arg p1 i))
        (a2 (pattern-arg p2 i)))
    (cond ((or (atom? a1) (atom? a2))
           (and (atom? a1) (atom? a2)))
          ((or (eq? (car a1) 'quote) (eq? (car a2) 'quote))
           (and (eq? (car a1) 'quote) (eq? (car a2) 'quote)))
          (else #t))))
; Dispatch on the type of the I'th argument of PATTERNS (all of which have
; similar I'th arguments) and generate the appropriate code.
; ARG-VAR is the identifier that will be bound to the actual argument.
; MORE is a procedure that generates code for the rest of the patterns.
; Atoms always match and require that the environments of the patterns
; be extended.
; Code for literals and calls are generated by other procedures.
; Build one COND clause for argument I.  A quoted argument tests for a
; literal node; otherwise the argument is itself a call pattern, so we
; recurse into MATCH-CALLS with child PATTERN records (same env,
; predicate and build-spec, parent set to the enclosing pattern) and pop
; back to the parents before continuing with MORE.
(define (match-call-arg patterns i arg-var fail-var more)
  (let ((arg (pattern-arg (car patterns) i)))
    (cond ((eq? (car arg) 'quote)
           `((literal-node? ,arg-var)
             ,(match-literal patterns i arg-var fail-var more)))
          (else
           `((call-node? ,arg-var)
             ,(match-calls (map (lambda (p)
                                  (pattern-maker (pattern-arg p i)
                                                 (pattern-env p)
                                                 p
                                                 (pattern-predicate p)
                                                 (pattern-build-spec p)))
                                patterns)
                           arg-var
                           fail-var
                           (lambda (patterns)
                             (more (map get-pattern-parent patterns)))))))))
; Again we sort the patterns into similar groups and build a clause for
; each group. Patterns with symbols have their environments extended.
; FINISH-MATCH-LITERAL puts the clauses into a CASE expression.
; Match a literal argument.  A quoted symbol (e.g. 'a) matches any
; literal and just binds it; a quoted number must equal the literal's
; value, so numeric groups become CASE clauses on (literal-value ...),
; with the symbol patterns (if any) forming the else branch.
(define (match-literal patterns i arg-var fail-var more)
  (receive (symbols numbers)
      (partition-list (lambda (p)
                        (symbol? (cadr (pattern-arg p i))))
                      patterns)
    (extend-pattern-envs symbols i arg-var)
    (if (null? numbers)
        (more symbols)
        (let loop ((patterns numbers) (clauses '()))
          (if (null? patterns)
              (finish-match-literal clauses
                                    (if (null? symbols)
                                        (if fail-var `(,fail-var) #f)
                                        (more symbols))
                                    arg-var)
              ;; Group patterns requiring the same numeric value.
              (receive (same other)
                  (partition-list (lambda (p)
                                    (= (cadr (pattern-arg (car patterns) i))
                                       (cadr (pattern-arg p i))))
                                  (cdr patterns))
                (loop other
                      (cons `((,(cadr (pattern-arg (car patterns) i)))
                              ,(more (cons (car patterns) same)))
                            clauses))))))))
; Wrap the numeric-literal CLAUSES in a CASE on the literal's value;
; with no clauses the else code stands alone.
(define (finish-match-literal clauses else arg-var)
  (if (null? clauses)
      else
      `(case (literal-value ,arg-var)
         ,@(reverse clauses)
         (else ,else))))
;------------------------------------------------------------------------------
; GENSYM utility
; Global counter behind GENERATE-SYMBOL; reset by MAKE-PATTERN-SIMPLIFIER.
(define *generate-symbol-index* 0)

; Return a fresh symbol SYM.N, bumping the counter.
(define (generate-symbol sym)
  (let ((i *generate-symbol-index*))
    (set! *generate-symbol-index* (+ i 1))
    (concatenate-symbol sym "." i)))
;------------------------------------------------------------------------------
; Add code to check the predicate if any.
; Chain the predicate tests of the fully matched PATTERNS: each pattern
; with a predicate guards its builder, falling through to the next on
; failure.  Only the final pattern may lack a predicate; two
; unconditional patterns would be ambiguous, hence the error.
(define (check-predicates patterns initial)
  (let label ((patterns patterns))
    (cond ((null? (cdr patterns))
           (let ((pattern (car patterns)))
             (if (pattern-predicate pattern)
                 (make-predicate-check pattern initial #f)
                 (make-builder pattern initial))))
          ((pattern-predicate (car patterns))
           (make-predicate-check (car patterns)
                                 initial
                                 (label (cdr patterns))))
          (else
           (error "multiple patterns matched ~S"
                  patterns)))))
; Emit (IF <predicate> <builder> REST), evaluating the predicate in a LET
; that binds each pattern atom to the matched node -- literals are
; unwrapped to their value first (the #T flag from the environment).
(define (make-predicate-check pattern initial rest)
  `(if (let ,(map (lambda (p)
                    `(,(car p) ,(if (caddr p)
                                    `(literal-value ,(cadr p))
                                    (cadr p))))
                  (pattern-env pattern))
        ,(pattern-predicate pattern))
       ,(make-builder pattern initial)
       ,rest))
;------------------------------------------------------------------------------
; Building the result of a pattern match
; A new environment is made as the builder must keep track of how many times
; each node in the matched pattern is used.
; CLAUSES is a list of LET-NODES clauses for making the call nodes in the
; produced pattern.  VALUE is what will replace the original pattern in the
; node tree.  Any nodes that are used in the result are DETACHed.
; Emit the code that constructs the replacement for a matched pattern.
; A fresh environment tracks how many times each matched node is reused
; (third slot, initially #F); reused nodes are DETACHed before the new
; nodes built by LET-NODES replace the original call.
(define (make-builder pattern initial)
  (let ((env (map (lambda (p)
                    (list (car p) (cadr p) #f))
                  (pattern-env pattern)))
        (pattern (pattern-build-spec pattern)) ; now the OUTPUT spec
        (sym (generate-symbol 'result)))
    (let ((clauses (if (and (pair? pattern)
                            (neq? (car pattern) 'quote))
                       (reverse (build-call sym pattern env))
                       '()))
          ;; The node the original call is replaced with: a reused node,
          ;; a fresh literal, or the top of the newly built call tree.
          (value (cond ((not (pair? pattern))
                        (lookup-pattern pattern env))
                       ((eq? (car pattern) 'quote)
                        `(make-literal-node ,(build-literal (cadr pattern) env)
                                            (node-type ,initial)))
                       (else
                        sym))))
      `(begin
         ,@(filter-map (lambda (data)
                         (if (caddr data)
                             `(detach ,(cadr data))
                             #f))
                       env)
         (let-nodes ,clauses
           (replace ,initial ,value))))))
; Go down the arguments in PATTERN making the appropriate LET-NODES spec
; for each.
; Produce (in reverse order) the LET-NODES clauses that build the call
; named ID from PATTERN: atoms reuse matched nodes, quoted specs become
; literal args, and nested calls recurse with a fresh node name.
(define (build-call id pattern env)
  (let loop ((arg-patterns (cdr pattern)) (args '()) (clauses '()))
    (if (null? arg-patterns)
        `((,id (,(car pattern) 0 . ,(reverse args)))
          . ,clauses)
        (let ((arg (car arg-patterns)))
          (cond ((atom? arg)
                 (loop (cdr arg-patterns)
                       (cons (lookup-pattern arg env) args)
                       clauses))
                ((eq? (car arg) 'quote)
                 (loop (cdr arg-patterns)
                       (cons `'(,(build-literal (cadr arg) env)
                                type/unknown)
                             args)
                       clauses))
                (else
                 (let ((sym (generate-symbol 'new)))
                   (loop (cdr arg-patterns)
                         (cons sym args)
                         (append (build-call sym arg env) clauses)))))))))
; A literal specification is either a number, a symbol which will bound to a
; number, or an expression to be evaluated.
; Emit code computing a literal's value: a number stands for itself, a
; symbol reads the matched literal's value, and a pair is an expression
; whose arguments are built recursively.
(define (build-literal spec env)
  (cond ((number? spec)
         spec)
        ((symbol? spec)
         `(literal-value ,(lookup-literal spec env)))
        (else
         `(,(car spec)
           . ,(map (lambda (a)
                     (build-literal a env))
                   (cdr spec))))))
; Get the identifier that will be bound to the value of PATTERN.
; Get the identifier that will be bound to the value of PATTERN,
; erroring when the atom was never matched.
(define (lookup-literal pattern env)
  (let ((entry (assoc pattern env)))
    (if entry
        (cadr entry)
        (error "pattern ~S not found in env" pattern))))
; Get the identifier that will be bound to the node value of PATTERN.
; Annotate the environment to mark that the node has been used.
; Get the identifier bound to PATTERN's node and mark the node as used
; (set-car! on the env entry's use slot); a second use is an error
; because each matched node may appear at most once in the result.
(define (lookup-pattern pattern env)
  (cond ((assoc pattern env)
         => (lambda (data)
              (if (caddr data)
                  (error "node ~S is used more than once" (car data)))
              (set-car! (cddr data) 1)
              (cadr data)))
        (else
         (error "pattern ~S not found in env" pattern))))
| null | https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/ps-compiler/simp/pattern.scm | scheme | (define (simplify-subtract call)
(simplify-args call 0)
((pattern-simplifier
((- 'a 'b) '(- a b)) ; constant folding
((- x 'a) (+ '(- 0 a) x)) ; convert to a normal form
((- 'a (+ 'b x)) (- '(- a b) x)) ; merging constants
((- 'a (- 'b x)) (+ x '(- a b))) ; ditto
((- x (+ 'a y)) (+ '(- 0 a) (- x y))) ; convert to a normal form
((- (+ 'a x) (+ 'b y)) (- (+ '(- a b) x) y)))
call))
(pattern-simplifier pattern-spec ...)
=>
(lambda (call-node) ...)
The resulting procedure replaces instances of IN-PATTERNs with the
corresponding OUT-PATTERNs.
<pattern-spec> ::= (in-pattern out-pattern) |
(in-pattern boolean-expression out-pattern)
All of the IN-PATTERNs for a particular simplifier must be calls to the
the in-pattern is matched and in an environment where the symbols of the
the in-pattern are bound to the corresponding values from the call.
x matches anything
'x matches any literal
The patterns are matched in order.
----------------
Call MATCH-CALLS with a continuation that makes code to construct the
right-hand side of the specification. This assumes that the left-hand side
of all of the specifications will be calls to the same primitive. The
initial CASE is removed from the code returned by MATCH-CALLS.
strip off initial CASE
the specification this pattern is to match
an a-list mapping atoms in the pattern to the identifiers
that will be bound to the value matched by the atom
if this pattern is an argument in another pattern, this
field contains the other pattern
predicate call or #F
specification for the transformed pattern
of the pattern bound to ID.
and want to continue with the next argument in the parent.
those patterns. FINISH-CALL-MATCH builds the clauses that this generates
into a CASE expression.
CALL-VAR is the identifier that will be bound to the call being matched.
matches.
MORE is a procedure that finishes with the patterns after this call has
been matched.
strip of uneeded ELSE
passed to MATCH-CALL-ARG. The clauses that are returned are made into a
COND expression by FINISH-MATCH-CALL-ARGS.
If there are fewer than I arguments, MORE is called to continue matching
other parts of the patterns.
They are used to generate the ELSE clause of the conditional returned.
if no patterns match.
arguments is concerned?
MORE is a procedure that generates code for the rest of the patterns.
Atoms always match and require that the environments of the patterns
be extended.
Code for literals and calls are generated by other procedures.
Again we sort the patterns into similar groups and build a clause for
each group. Patterns with symbols have their environments extended.
FINISH-MATCH-LITERAL puts the clauses into a CASE expression.
------------------------------------------------------------------------------
GENSYM utility
------------------------------------------------------------------------------
Add code to check the predicate if any.
------------------------------------------------------------------------------
Building the result of a pattern match
A new environment is made as the builder must keep track of how many times
each node in the matched pattern is used.
for each.
A literal specification is either a number, a symbol which will bound to a
number, or an expression to be evaluated.
Annotate the environment to mark that the node has been used. | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1998 by NEC Research Institute , Inc. See file COPYING .
same primop . If the boolean - expression is present it is evaluated after
( x ... ) matches a call to
5 matches the literal 5
(define (make-pattern-simplifier specs)
(set! *generate-symbol-index* 0)
(let* ((initial (generate-symbol 'initial))
(exp (match-calls (map (lambda (spec)
(make-pattern (car spec) (cdr spec)))
specs)
initial
#f
(lambda (patterns)
(if (null? patterns)
(error "no patterns matched" specs)
(check-predicates patterns initial))))))
`(lambda (,initial)
(define-record-type pattern
)
())
Returns the pattern for the I'th argument in PATTERN .
(define (pattern-arg pattern i)
(list-ref (pattern-spec pattern) (+ i 1)))
(define (make-pattern spec specs)
(receive (build-spec predicate)
(if (null? (cdr specs))
(values (car specs) #f)
(values (cadr specs) (car specs)))
(pattern-maker spec '() #f predicate build-spec)))
For each pattern in PATTERN , extend the environment with the I'th argument
(define (extend-pattern-envs patterns i id)
(map (lambda (pattern)
(let ((arg (pattern-arg pattern i)))
(set-pattern-env! pattern
(cons (if (pair? arg)
(list (cadr arg) id #t)
(list arg id #f))
(pattern-env pattern)))))
patterns))
Return the parent of PATTERN , setting the environment of the parent to be
the environment of PATTERN . This is only used once we are done with PATTERN
(define (get-pattern-parent pattern)
(let ((p (pattern-parent pattern)))
(set-pattern-env! p (pattern-env pattern))
p))
Sort PATTERNS by the primop being called , and for each set of patterns
matching the same primop , call MATCH - CALL - ARGS to generate code for
FAIL - VAR is either # f or a variable that should be called if no pattern
(define (match-calls patterns call-var fail-var more)
(let ((primop-var (generate-symbol 'primop)))
(let loop ((patterns patterns) (res '()))
(if (null? patterns)
(finish-call-match res call-var primop-var fail-var)
(let ((primop (car (pattern-spec (car patterns)))))
(receive (same other)
(partition-list (lambda (p)
(eq? primop (car (pattern-spec p))))
(cdr patterns))
(loop other
(cons `(,(if (number? primop) 'else `(,primop))
,(match-call-args (cons (car patterns) same)
0
call-var
fail-var
more))
res))))))))
(define (finish-call-match clauses call-var primop-var fail-var)
(receive (elses other)
(partition-list (lambda (c)
(eq? (car c) 'else))
clauses)
`(case (primop-id (call-primop ,call-var))
,@(reverse other)
(else ,(cond ((null? elses)
(if fail-var `(,fail-var) #f))
((null? (cdr elses))
`(let ((,primop-var (call-primop ,call-var)))
(else
(error "more than one ELSE clause" elses)))))))
Similar to MATCH - CALLS , except that this is matching the I'th argument of a
call . All patterns with similar I'th arguments are grouped together and
Patterns that always match the I'th argument are handled separately .
If there are no such patterns , then the passed - in FAIL - VAR is called
(define (match-call-args patterns i call-var fail-var more)
(if (>= i (length (cdr (pattern-spec (car patterns)))))
(more patterns)
(receive (atom-patterns other-patterns)
(partition-list (lambda (p)
(atom? (pattern-arg p i)))
patterns)
(let* ((arg-var (generate-symbol 'arg))
(else-code (cond ((null? atom-patterns)
#f)
(else
(extend-pattern-envs atom-patterns i arg-var)
(match-call-args atom-patterns (+ i 1)
call-var fail-var more))))
(fail-var (if else-code (generate-symbol 'fail) fail-var))
(more (lambda (patterns)
(match-call-args patterns (+ i 1)
call-var fail-var more))))
(let loop ((patterns other-patterns) (clauses '()))
(if (null? patterns)
(finish-match-call-args i call-var arg-var fail-var
else-code clauses)
(let ((first (car patterns)))
(receive (same other)
(partition-list (lambda (p)
(same-arg-pattern? first p i))
(cdr patterns))
(loop other
(cons (match-call-arg (cons first same)
i
arg-var
fail-var
more)
clauses))))))))))
If ELSE - CODE exists this binds FAIL - VAR to a failure procedure containing it .
The CLAUSES are put in a COND .
(define (finish-match-call-args i call-var arg-var fail-var else-code clauses)
`(let ((,arg-var (call-arg ,call-var ,i)))
,(if else-code
`(let ((,fail-var (lambda () ,else-code)))
(cond ,@clauses (else (,fail-var))))
`(cond ,@clauses (else ,(if fail-var `(,fail-var) #f))))))
Are the I'th arguments of patterns P1 and P2 the same as far as matching
(define (same-arg-pattern? p1 p2 i)
(let ((a1 (pattern-arg p1 i))
(a2 (pattern-arg p2 i)))
(cond ((atom? a1)
(atom? a2))
((atom? a2)
#f)
((eq? (car a1) 'quote)
(eq? (car a2) 'quote))
((eq? (car a2) 'quote)
#f)
(else #t))))
Dispatch on the type of the I'th argument of PATTERNS ( all of which have
similar I'th arguments ) and generate the appropriate code .
ARG - VAR is the identifier that will be bound to the actual argument .
(define (match-call-arg patterns i arg-var fail-var more)
(let ((arg (pattern-arg (car patterns) i)))
(cond ((eq? (car arg) 'quote)
`((literal-node? ,arg-var)
,(match-literal patterns i arg-var fail-var more)))
(else
`((call-node? ,arg-var)
,(match-calls (map (lambda (p)
(pattern-maker (pattern-arg p i)
(pattern-env p)
p
(pattern-predicate p)
(pattern-build-spec p)))
patterns)
arg-var
fail-var
(lambda (patterns)
(more (map get-pattern-parent patterns)))))))))
(define (match-literal patterns i arg-var fail-var more)
(receive (symbols numbers)
(partition-list (lambda (p)
(symbol? (cadr (pattern-arg p i))))
patterns)
(extend-pattern-envs symbols i arg-var)
(if (null? numbers)
(more symbols)
(let loop ((patterns numbers) (clauses '()))
(if (null? patterns)
(finish-match-literal clauses
(if (null? symbols)
(if fail-var `(,fail-var) #f)
(more symbols))
arg-var)
(receive (same other)
(partition-list (lambda (p)
(= (cadr (pattern-arg (car patterns) i))
(cadr (pattern-arg p i))))
(cdr patterns))
(loop other
(cons `((,(cadr (pattern-arg (car patterns) i)))
,(more (cons (car patterns) same)))
clauses))))))))
(define (finish-match-literal clauses else arg-var)
(if (null? clauses)
else
`(case (literal-value ,arg-var)
,@(reverse clauses)
(else ,else))))
(define *generate-symbol-index* 0)
(define (generate-symbol sym)
(let ((i *generate-symbol-index*))
(set! *generate-symbol-index* (+ i 1))
(concatenate-symbol sym "." i)))
(define (check-predicates patterns initial)
(let label ((patterns patterns))
(cond ((null? (cdr patterns))
(let ((pattern (car patterns)))
(if (pattern-predicate pattern)
(make-predicate-check pattern initial #f)
(make-builder pattern initial))))
((pattern-predicate (car patterns))
(make-predicate-check (car patterns)
initial
(label (cdr patterns))))
(else
(error "multiple patterns matched ~S"
patterns)))))
(define (make-predicate-check pattern initial rest)
`(if (let ,(map (lambda (p)
`(,(car p) ,(if (caddr p)
`(literal-value ,(cadr p))
(cadr p))))
(pattern-env pattern))
,(pattern-predicate pattern))
,(make-builder pattern initial)
,rest))
CLAUSES is a list of LET - NODES clauses for making the call nodes in the
produced pattern . VALUE is what will replace the original pattern in the
node tree . Any nodes that are used in the result are DETACHed .
(define (make-builder pattern initial)
(let ((env (map (lambda (p)
(list (car p) (cadr p) #f))
(pattern-env pattern)))
(pattern (pattern-build-spec pattern))
(sym (generate-symbol 'result)))
(let ((clauses (if (and (pair? pattern)
(neq? (car pattern) 'quote))
(reverse (build-call sym pattern env))
'()))
(value (cond ((not (pair? pattern))
(lookup-pattern pattern env))
((eq? (car pattern) 'quote)
`(make-literal-node ,(build-literal (cadr pattern) env)
(node-type ,initial)))
(else
sym))))
`(begin
,@(filter-map (lambda (data)
(if (caddr data)
`(detach ,(cadr data))
#f))
env)
(let-nodes ,clauses
(replace ,initial ,value))))))
Go down the arguments in PATTERN making the appropriate LET - NODES spec
(define (build-call id pattern env)
(let loop ((arg-patterns (cdr pattern)) (args '()) (clauses '()))
(if (null? arg-patterns)
`((,id (,(car pattern) 0 . ,(reverse args)))
. ,clauses)
(let ((arg (car arg-patterns)))
(cond ((atom? arg)
(loop (cdr arg-patterns)
(cons (lookup-pattern arg env) args)
clauses))
((eq? (car arg) 'quote)
(loop (cdr arg-patterns)
(cons `'(,(build-literal (cadr arg) env)
type/unknown)
args)
clauses))
(else
(let ((sym (generate-symbol 'new)))
(loop (cdr arg-patterns)
(cons sym args)
(append (build-call sym arg env) clauses)))))))))
(define (build-literal spec env)
(cond ((number? spec)
spec)
((symbol? spec)
`(literal-value ,(lookup-literal spec env)))
(else
`(,(car spec)
. ,(map (lambda (a)
(build-literal a env))
(cdr spec))))))
Get the identifier that will be bound to the value of PATTERN .
(define (lookup-literal pattern env)
(cond ((assoc pattern env)
=> cadr)
(else
(error "pattern ~S not found in env" pattern))))
Get the identifier that will be bound to the node value of PATTERN .
(define (lookup-pattern pattern env)
(cond ((assoc pattern env)
=> (lambda (data)
(if (caddr data)
(error "node ~S is used more than once" (car data)))
(set-car! (cddr data) 1)
(cadr data)))
(else
(error "pattern ~S not found in env" pattern))))
|
d8425c6ef9a4dfe2c0ad5d718e1b85c9ed24c5c50579ab8fbdf9d8a6b67a8718 | epgsql/epgsql | epgsql_cmd_describe_statement.erl | %% @doc Asks server to provide input parameter and result rows information.
%%
%% Almost the same as {@link epgsql_cmd_parse}.
%%
%% ```
%% > Describe(STATEMENT)
%% < ParameterDescription
%% < RowDescription | NoData
%% '''
-module(epgsql_cmd_describe_statement).
-behaviour(epgsql_command).
-export([init/1, execute/2, handle_message/4]).
-export_type([response/0]).
-include("epgsql.hrl").
-include("protocol.hrl").
-type response() :: {ok, #statement{}} | {error, epgsql:query_error()}.
-record(desc_stmt,
{name :: iodata(),
parameter_typenames = [],
parameter_descr = []}).
init(Name) ->
#desc_stmt{name = Name}.
execute(Sock, #desc_stmt{name = Name} = St) ->
Commands =
[
epgsql_wire:encode_describe(statement, Name),
epgsql_wire:encode_flush()
],
{send_multi, Commands, Sock, St}.
handle_message(?PARAMETER_DESCRIPTION, Bin, Sock, State) ->
Codec = epgsql_sock:get_codec(Sock),
TypeInfos = epgsql_wire:decode_parameters(Bin, Codec),
OidInfos = [epgsql_binary:typeinfo_to_oid_info(Type, Codec) || Type <- TypeInfos],
TypeNames = [epgsql_binary:typeinfo_to_name_array(Type, Codec) || Type <- TypeInfos],
Sock2 = epgsql_sock:notify(Sock, {types, TypeNames}),
{noaction, Sock2, State#desc_stmt{parameter_descr = OidInfos,
parameter_typenames = TypeNames}};
handle_message(?ROW_DESCRIPTION, <<Count:?int16, Bin/binary>>, Sock,
#desc_stmt{name = Name, parameter_descr = Params,
parameter_typenames = TypeNames}) ->
Codec = epgsql_sock:get_codec(Sock),
Columns = epgsql_wire:decode_columns(Count, Bin, Codec),
Columns2 = [Col#column{format = epgsql_wire:format(Col, Codec)}
|| Col <- Columns],
Result = {ok, #statement{name = Name,
types = TypeNames,
parameter_info = Params,
columns = Columns2}},
{finish, Result, {columns, Columns2}, Sock};
handle_message(?NO_DATA, <<>>, Sock, #desc_stmt{name = Name, parameter_descr = Params,
parameter_typenames = TypeNames}) ->
Result = {ok, #statement{name = Name,
types = TypeNames,
parameter_info = Params,
columns = []}},
{finish, Result, no_data, Sock};
handle_message(?ERROR, Error, _Sock, _State) ->
Result = {error, Error},
{sync_required, Result};
handle_message(_, _, _, _) ->
unknown.
| null | https://raw.githubusercontent.com/epgsql/epgsql/f811a09926892dbd1359afe44a9bfa8f6907b322/src/commands/epgsql_cmd_describe_statement.erl | erlang | @doc Asks server to provide input parameter and result rows information.
Almost the same as {@link epgsql_cmd_parse}.
```
> Describe(STATEMENT)
< ParameterDescription
< RowDescription | NoData
''' | -module(epgsql_cmd_describe_statement).
-behaviour(epgsql_command).
-export([init/1, execute/2, handle_message/4]).
-export_type([response/0]).
-include("epgsql.hrl").
-include("protocol.hrl").
-type response() :: {ok, #statement{}} | {error, epgsql:query_error()}.
-record(desc_stmt,
{name :: iodata(),
parameter_typenames = [],
parameter_descr = []}).
init(Name) ->
#desc_stmt{name = Name}.
execute(Sock, #desc_stmt{name = Name} = St) ->
Commands =
[
epgsql_wire:encode_describe(statement, Name),
epgsql_wire:encode_flush()
],
{send_multi, Commands, Sock, St}.
handle_message(?PARAMETER_DESCRIPTION, Bin, Sock, State) ->
Codec = epgsql_sock:get_codec(Sock),
TypeInfos = epgsql_wire:decode_parameters(Bin, Codec),
OidInfos = [epgsql_binary:typeinfo_to_oid_info(Type, Codec) || Type <- TypeInfos],
TypeNames = [epgsql_binary:typeinfo_to_name_array(Type, Codec) || Type <- TypeInfos],
Sock2 = epgsql_sock:notify(Sock, {types, TypeNames}),
{noaction, Sock2, State#desc_stmt{parameter_descr = OidInfos,
parameter_typenames = TypeNames}};
handle_message(?ROW_DESCRIPTION, <<Count:?int16, Bin/binary>>, Sock,
#desc_stmt{name = Name, parameter_descr = Params,
parameter_typenames = TypeNames}) ->
Codec = epgsql_sock:get_codec(Sock),
Columns = epgsql_wire:decode_columns(Count, Bin, Codec),
Columns2 = [Col#column{format = epgsql_wire:format(Col, Codec)}
|| Col <- Columns],
Result = {ok, #statement{name = Name,
types = TypeNames,
parameter_info = Params,
columns = Columns2}},
{finish, Result, {columns, Columns2}, Sock};
handle_message(?NO_DATA, <<>>, Sock, #desc_stmt{name = Name, parameter_descr = Params,
parameter_typenames = TypeNames}) ->
Result = {ok, #statement{name = Name,
types = TypeNames,
parameter_info = Params,
columns = []}},
{finish, Result, no_data, Sock};
handle_message(?ERROR, Error, _Sock, _State) ->
Result = {error, Error},
{sync_required, Result};
handle_message(_, _, _, _) ->
unknown.
|
9fedea90bcb2d16373e90a3a58ee9504b7965df637230efcbce5e40997de79fe | yetanalytics/dl4clj | optimize_tests.clj | (ns dl4clj.optimize-tests
(:require [dl4clj.optimize.listeners :refer :all]
[dl4clj.optimize.api.listeners :refer :all]
[clojure.test :refer :all])
(:import [org.deeplearning4j.optimize.api IterationListener]
[org.deeplearning4j.datasets.iterator.impl MnistDataSetIterator]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; objects that I need for testing
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def single-listener (new-score-iteration-listener :print-every-n 2 :array? true))
(def multiple-listeners [(new-score-iteration-listener :print-every-n 2)
(new-collection-scores-iteration-listener :frequency 2)])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; testing the creation of listeners
;; -summary.html
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(deftest listeners-test
(testing "the creation of iteration listeners"
(is (= org.deeplearning4j.optimize.listeners.ParamAndGradientIterationListener
(type (new-param-and-gradient-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.CollectScoresIterationListener
(type (new-collection-scores-iteration-listener :frequency 5 :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.CollectScoresIterationListener
(type (new-collection-scores-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ComposableIterationListener
(type (new-composable-iteration-listener :coll-of-listeners multiple-listeners :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ScoreIterationListener
(type (new-score-iteration-listener :print-every-n 5 :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ScoreIterationListener
(type (new-score-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.PerformanceListener
(type (new-performance-iteration-listener :as-code? false :build? true))))))
| null | https://raw.githubusercontent.com/yetanalytics/dl4clj/9ef055b2a460f1a6246733713136b981fd322510/test/dl4clj/optimize_tests.clj | clojure |
objects that I need for testing
testing the creation of listeners
-summary.html
| (ns dl4clj.optimize-tests
(:require [dl4clj.optimize.listeners :refer :all]
[dl4clj.optimize.api.listeners :refer :all]
[clojure.test :refer :all])
(:import [org.deeplearning4j.optimize.api IterationListener]
[org.deeplearning4j.datasets.iterator.impl MnistDataSetIterator]))
(def single-listener (new-score-iteration-listener :print-every-n 2 :array? true))
(def multiple-listeners [(new-score-iteration-listener :print-every-n 2)
(new-collection-scores-iteration-listener :frequency 2)])
(deftest listeners-test
(testing "the creation of iteration listeners"
(is (= org.deeplearning4j.optimize.listeners.ParamAndGradientIterationListener
(type (new-param-and-gradient-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.CollectScoresIterationListener
(type (new-collection-scores-iteration-listener :frequency 5 :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.CollectScoresIterationListener
(type (new-collection-scores-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ComposableIterationListener
(type (new-composable-iteration-listener :coll-of-listeners multiple-listeners :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ScoreIterationListener
(type (new-score-iteration-listener :print-every-n 5 :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.ScoreIterationListener
(type (new-score-iteration-listener :as-code? false))))
(is (= org.deeplearning4j.optimize.listeners.PerformanceListener
(type (new-performance-iteration-listener :as-code? false :build? true))))))
|
53814306c987a4fca1b35b4173539f9f1f4107eb957cea3b4a9dfb1cf22071bf | DavidAlphaFox/RabbitMQ | rabbit_ws_test_all.erl | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
%% compliance with the License. You may obtain a copy of the License at
%% /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
%% License for the specific language governing rights and limitations
%% under the License.
%%
The Original Code is RabbitMQ Management Console .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2012 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_ws_test_all).
-export([all_tests/0]).
all_tests() ->
ok = eunit:test(rabbit_ws_test_raw_websocket, [verbose]),
ok = eunit:test(rabbit_ws_test_sockjs_websocket, [verbose]),
ok.
| null | https://raw.githubusercontent.com/DavidAlphaFox/RabbitMQ/0a64e6f0464a9a4ce85c6baa52fb1c584689f49a/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_all.erl | erlang | compliance with the License. You may obtain a copy of the License at
/
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations
under the License.
| The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ Management Console .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2012 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_ws_test_all).
-export([all_tests/0]).
all_tests() ->
ok = eunit:test(rabbit_ws_test_raw_websocket, [verbose]),
ok = eunit:test(rabbit_ws_test_sockjs_websocket, [verbose]),
ok.
|
5b006746cbdf1b8143e04dfd62140b9145c5db7a989a8473c6e0b4b0f66540d7 | alsonkemp/turbinado | HelloWorld.hs | module App.Controllers.HelloWorld where
import Turbinado.Controller
index :: Controller ()
index = do setViewDataValue "sample_value" "smarfle!"
| null | https://raw.githubusercontent.com/alsonkemp/turbinado/da2ba7c3443ddf6a51d1ec5b05cb45a85efc0809/App/Controllers/HelloWorld.hs | haskell | module App.Controllers.HelloWorld where
import Turbinado.Controller
index :: Controller ()
index = do setViewDataValue "sample_value" "smarfle!"
| |
e5915b46d641acdf7c3f816708f2639c22ca6207bc0c7b725ffcebe828034504 | tsoding/HyperNerd | LogTest.hs | module Bot.LogTest
( spec
) where
import Bot.Log (secondsAsBackwardsDiff)
import Data.Time.Clock (NominalDiffTime)
import Test.HUnit
testSecondsAsBackwardsDiff :: Test
testSecondsAsBackwardsDiff =
TestLabel "Default Scenario" $
TestCase $ assertEqual "Unexpected value after conversion" expected actual
where
expected = -5 :: NominalDiffTime
actual = secondsAsBackwardsDiff 5
spec :: Test
spec = testSecondsAsBackwardsDiff
| null | https://raw.githubusercontent.com/tsoding/HyperNerd/5322580483c5c05179bc455a6f94566d398bccdf/test/Bot/LogTest.hs | haskell | module Bot.LogTest
( spec
) where
import Bot.Log (secondsAsBackwardsDiff)
import Data.Time.Clock (NominalDiffTime)
import Test.HUnit
testSecondsAsBackwardsDiff :: Test
testSecondsAsBackwardsDiff =
TestLabel "Default Scenario" $
TestCase $ assertEqual "Unexpected value after conversion" expected actual
where
expected = -5 :: NominalDiffTime
actual = secondsAsBackwardsDiff 5
spec :: Test
spec = testSecondsAsBackwardsDiff
| |
13ad2e674b604b2923604436ae06198fa6eae31aafe295927779dc6f4920c1a5 | jorinvo/googlesheets-sql-sync | project.clj | (defproject googlesheets-sql-sync "0.7.0"
:description "Keep your SQL database in sync with Google Sheets"
:url "-sql-sync"
:license {:name "MIT"
:url ""}
:dependencies [[org.clojure/clojure "1.9.0"]
[org.clojure/core.async "0.4.474"]
[org.clojure/java.jdbc "0.7.8"]
[org.postgresql/postgresql "42.2.4"]
[mysql/mysql-connector-java "8.0.18"]
[org.xerial/sqlite-jdbc "3.32.3"]
[org.clojure/tools.cli "0.3.7"]
[http-kit "2.5.3"]
[metosin/jsonista "0.2.0"]
[ring/ring-core "1.7.0-RC2"]
[spootnik/signal "0.2.1"]]
:main googlesheets-sql-sync.cli
:target-path "target/%s"
:profiles {:uberjar {:aot :all
:omit-source true}
:dev {:dependencies [[cljfmt "0.5.1"]
[expound "0.7.1"]]
:source-paths ["src" "test" "dev"]}}
:jvm-opts ["-Xmx200m"]
:repl-options {:init-ns dev})
| null | https://raw.githubusercontent.com/jorinvo/googlesheets-sql-sync/b9fb4c8d7fba36859d4225140778e465142be180/project.clj | clojure | (defproject googlesheets-sql-sync "0.7.0"
:description "Keep your SQL database in sync with Google Sheets"
:url "-sql-sync"
:license {:name "MIT"
:url ""}
:dependencies [[org.clojure/clojure "1.9.0"]
[org.clojure/core.async "0.4.474"]
[org.clojure/java.jdbc "0.7.8"]
[org.postgresql/postgresql "42.2.4"]
[mysql/mysql-connector-java "8.0.18"]
[org.xerial/sqlite-jdbc "3.32.3"]
[org.clojure/tools.cli "0.3.7"]
[http-kit "2.5.3"]
[metosin/jsonista "0.2.0"]
[ring/ring-core "1.7.0-RC2"]
[spootnik/signal "0.2.1"]]
:main googlesheets-sql-sync.cli
:target-path "target/%s"
:profiles {:uberjar {:aot :all
:omit-source true}
:dev {:dependencies [[cljfmt "0.5.1"]
[expound "0.7.1"]]
:source-paths ["src" "test" "dev"]}}
:jvm-opts ["-Xmx200m"]
:repl-options {:init-ns dev})
| |
0c757c196dc117f5a8d72707df790b0089fb0e508da62b82cd461442aec47f51 | LeifAndersen/racket-compiler2 | components.rkt | #lang racket
(require (except-in nanopass/base
define-language
define-pass)
(rename-in nanopass/base
[define-language nanopass:define-language]
[define-pass nanopass:define-pass])
racket/splicing
rackunit
(rename-in racket/base
[compile base:compile]
[current-compile base:current-compile])
(for-syntax racket/base
syntax/parse
racket/syntax)
"utils.rkt")
(provide make-compiler-component
add-pass-to-component!
define-compiler
(struct-out compiler-component)
add-pass-to-component!
variable-add-property!
variable-update-property!
variable-get-property)
; Representation of a compiler component
passes : ( Procedure )
insertion - procs : ( HashTable Symbol ( ( - > Any Any ) )
(struct compiler-component (passes
insertion-procs)
#:mutable)
(define (make-compiler-component [passes '()]
[insertion-procs (make-hash
(list
(list 'pre (mutable-set))
(list 'post (mutable-set))))])
(compiler-component passes insertion-procs))
; Add a compiler pass to a component
; (to be used by define-compiler)
; (Adds back to front)
(define (add-pass-to-component! component pass)
(set-compiler-component-passes! component (cons pass (compiler-component-passes component))))
(begin-for-syntax
(define-syntax-class pass
(pattern name:id
#:attr [components 1] '())
(pattern (name:id components:id ...))))
; Key object to be used in variable properties table
(struct key ())
; Adds a property to a variable. Returns a key that must be used
; to get property out again.
; Variable Any -> Key
(define (variable-add-property! variable property)
(define k (key))
(dict-set! (variable-properties variable) k property)
k)
; Updates the property attached to a specific variable and key.
; Returns the old property that was there.
; Errors if variable does not have a property for the key.
; Variable Key (-> Any Any) -> Any
(define (variable-update-property! variable key property-thunk)
(dict-update!
(dict-update! (variable-properties variable) key
(lambda ()
(raise (exn:fail:contract (format "Variable ~a does not contain key ~a"
variable key)
(current-continuation-marks)))))))
; Retrieves a property from a variable given a key.
; Errors if variable does not have a property for the key
; Variable Key -> Any
(define (variable-get-property variable key)
(dict-ref (variable-properties variable) key
(lambda ()
(raise (exn:fail:contract (format "Variable ~a does not contain key: ~a" variable key)
(current-continuation-marks))))))
; Adds a procedure to a component
; The location field is currently either 'pre or 'post
; As we learn more about what valid locations should be, that will change.
; Possibly even make it possible for a component to state what valid locations are.
; Component Symbol (-> Any Any) -> Void
(define (component-add-proc! component location proc)
(define insertion-procs (compiler-component-insertion-procs component))
(unless (hash-has-key? location)
(raise (exn:fail:contract (format "Compiler Component ~a does not contain location: ~a"
component location)
(current-continuation-marks)))))
; Returns a setof of all valid locations in the compiler component
; Component -> (Setof Symbol)
(define (compiler-component-insert-locations component)
(dict-keys (compiler-component-insertion-procs component)))
(define-syntax (define-compiler stx)
(syntax-parse stx
[(_ name:id passes:pass ...+)
#:with compilers (format-id stx "compilers")
(define pass-names (reverse (syntax->list #'(passes.name ...))))
(define pass-components (reverse (syntax->list #'((passes.components ...) ...))))
;; Bind the compiler name to the compiler.
#`(begin (define name (compose #,@pass-names))
;; Add each of the pass to there respective components
#,@(for/list ([pn (in-list pass-names)]
(pc (in-list pass-components)))
#`(begin
#,@(for/list ([pc* (in-list (syntax->list pc))])
#`(add-pass-to-component! #,pc* #,pn))))
;; Create intermediate compilers for use in test casses
(define compilers null)
#,@(let build-partial-compiler ([passes pass-names]
[pass-count (length pass-names)])
(if (= pass-count 0)
'()
(with-syntax ([name* (format-id stx "~a/~a" #'name (- pass-count 1))])
(list* #`(define name* (compose #,@passes))
#`(set! compilers (cons name* compilers))
(if (identifier? (car passes))
(with-syntax ([name** (format-id stx
"~a/~a"
#'name
(car passes))])
(cons #`(define name** name*)
(build-partial-compiler (cdr passes) (- pass-count 1))))
(build-partial-compiler (cdr passes) (- pass-count 1))))))))]))
| null | https://raw.githubusercontent.com/LeifAndersen/racket-compiler2/ebd40c81621f6e4012fc72c838cd8e4ad1d8a266/private/components.rkt | racket | Representation of a compiler component
Add a compiler pass to a component
(to be used by define-compiler)
(Adds back to front)
Key object to be used in variable properties table
Adds a property to a variable. Returns a key that must be used
to get property out again.
Variable Any -> Key
Updates the property attached to a specific variable and key.
Returns the old property that was there.
Errors if variable does not have a property for the key.
Variable Key (-> Any Any) -> Any
Retrieves a property from a variable given a key.
Errors if variable does not have a property for the key
Variable Key -> Any
Adds a procedure to a component
The location field is currently either 'pre or 'post
As we learn more about what valid locations should be, that will change.
Possibly even make it possible for a component to state what valid locations are.
Component Symbol (-> Any Any) -> Void
Returns a setof of all valid locations in the compiler component
Component -> (Setof Symbol)
Bind the compiler name to the compiler.
Add each of the pass to there respective components
Create intermediate compilers for use in test casses | #lang racket
(require (except-in nanopass/base
define-language
define-pass)
(rename-in nanopass/base
[define-language nanopass:define-language]
[define-pass nanopass:define-pass])
racket/splicing
rackunit
(rename-in racket/base
[compile base:compile]
[current-compile base:current-compile])
(for-syntax racket/base
syntax/parse
racket/syntax)
"utils.rkt")
(provide make-compiler-component
add-pass-to-component!
define-compiler
(struct-out compiler-component)
add-pass-to-component!
variable-add-property!
variable-update-property!
variable-get-property)
passes : ( Procedure )
insertion - procs : ( HashTable Symbol ( ( - > Any Any ) )
(struct compiler-component (passes
insertion-procs)
#:mutable)
(define (make-compiler-component [passes '()]
[insertion-procs (make-hash
(list
(list 'pre (mutable-set))
(list 'post (mutable-set))))])
(compiler-component passes insertion-procs))
(define (add-pass-to-component! component pass)
(set-compiler-component-passes! component (cons pass (compiler-component-passes component))))
(begin-for-syntax
(define-syntax-class pass
(pattern name:id
#:attr [components 1] '())
(pattern (name:id components:id ...))))
(struct key ())
(define (variable-add-property! variable property)
(define k (key))
(dict-set! (variable-properties variable) k property)
k)
(define (variable-update-property! variable key property-thunk)
(dict-update!
(dict-update! (variable-properties variable) key
(lambda ()
(raise (exn:fail:contract (format "Variable ~a does not contain key ~a"
variable key)
(current-continuation-marks)))))))
(define (variable-get-property variable key)
(dict-ref (variable-properties variable) key
(lambda ()
(raise (exn:fail:contract (format "Variable ~a does not contain key: ~a" variable key)
(current-continuation-marks))))))
(define (component-add-proc! component location proc)
(define insertion-procs (compiler-component-insertion-procs component))
(unless (hash-has-key? location)
(raise (exn:fail:contract (format "Compiler Component ~a does not contain location: ~a"
component location)
(current-continuation-marks)))))
(define (compiler-component-insert-locations component)
(dict-keys (compiler-component-insertion-procs component)))
(define-syntax (define-compiler stx)
(syntax-parse stx
[(_ name:id passes:pass ...+)
#:with compilers (format-id stx "compilers")
(define pass-names (reverse (syntax->list #'(passes.name ...))))
(define pass-components (reverse (syntax->list #'((passes.components ...) ...))))
#`(begin (define name (compose #,@pass-names))
#,@(for/list ([pn (in-list pass-names)]
(pc (in-list pass-components)))
#`(begin
#,@(for/list ([pc* (in-list (syntax->list pc))])
#`(add-pass-to-component! #,pc* #,pn))))
(define compilers null)
#,@(let build-partial-compiler ([passes pass-names]
[pass-count (length pass-names)])
(if (= pass-count 0)
'()
(with-syntax ([name* (format-id stx "~a/~a" #'name (- pass-count 1))])
(list* #`(define name* (compose #,@passes))
#`(set! compilers (cons name* compilers))
(if (identifier? (car passes))
(with-syntax ([name** (format-id stx
"~a/~a"
#'name
(car passes))])
(cons #`(define name** name*)
(build-partial-compiler (cdr passes) (- pass-count 1))))
(build-partial-compiler (cdr passes) (- pass-count 1))))))))]))
|
1351fb4a92c0fddeae3f2a5f060f2dc68557d70198c239c15cccf10dde1e79cb | 3b/3bgl-misc | resource-manager.lisp | (in-package 3bgl-sg2)
(defparameter +globals-binding+ 0)
(defparameter +materials-binding+ 1)
(defparameter +per-object-binding+ 2)
(defvar *globals-program* '3bgl-sg2-shaders-common::common-vertex)
(defparameter *no* 0)
(defparameter *draws* 0)
(defparameter *objects* 0)
(defparameter *once* t)
(defvar *timing-helper* nil)
(defclass resource-manager ()
;; buffers is indexed by a 'vertex format' as created by
;; buffer-builder::vertex-format-for-layout, values are buffer-set
;; objects corresponding to layout.
((buffers :initform (make-hash-table :test 'equalp) :reader buffers)
(index-buffer :initform (make-instance 'index-buffer) :reader index-buffer)
(objects :initform (make-hash-table :test 'equalp) :reader objects)
(textures :initform (make-hash-table :test 'equalp) :reader textures)
(handles :initform (make-hash-table :test 'equalp) :reader handles)
(samplers :initform (make-hash-table :test 'equalp) :reader samplers)
(materials :initform (make-hash-table) :reader materials)
(material-names :initform (make-hash-table :test 'equalp) :reader material-names)
(previous-material :initform nil :accessor previous-material)
(programs :initform (make-hash-table :test 'equalp) :reader programs)
(globals :initform (make-hash-table) :reader %globals)
(globals-layout :initform (make-instance '3bgl-ssbo::ssbo-layout/static)
:reader globals-layout)
(modified-functions :initform (make-hash-table) :reader modified-functions)
;; (interned) material -> list (vector?) of things to draw
(draw-lists :initform (make-hash-table) :reader draw-lists)
;; globals + per-object data
(streaming-ssbo :initform 0 :accessor streaming-ssbo)
;; multi-draw-indirect command lists
(command-ssbo :initform 0 :accessor command-ssbo)))
(defvar *resource-manager* nil)
(defvar *foo* nil)
(defmethod reset-globals ((m resource-manager))
;; make sure we don't have any old texture handles or similar
;; hanging around
(let ((g (%globals m)))
(clrhash g)
;; set some defaults for globals
(loop for mat in '(3bgl-sg2-shaders-common::mvp
3bgl-sg2-shaders-common::vp
3bgl-sg2-shaders-common::v
3bgl-sg2-shaders-common::p
3bgl-sg2-shaders-common::ui-matrix)
do (setf (gethash mat g) (sb-cga:identity-matrix)))
(setf (gethash '3bgl-sg2-shaders-common::ui-scale g) 1.0)
(setf (gethash '3bgl-sg2-shaders-common::eye-pos g)
(sb-cga:vec 0.0 0.0 0.0))
(setf (gethash '3bgl-sg2-shaders-common::env-map-mode g) 0)
(setf (gethash '3bgl-sg2-shaders-common::diffuse-env-map g) 0)
(setf (gethash '3bgl-sg2-shaders-common::diffuse-cube-map g) 0)
(setf (gethash '3bgl-sg2-shaders-common::specular-env-map g) 0)
(setf (gethash '3bgl-sg2-shaders-common::specular-cube-map g) 0)
(setf (gethash '3bgl-sg2-shaders-common::prefiltered-specular-max-lod g) 0)
(setf (gethash '3bgl-sg2-shaders-common::prefiltered-specular-lut g) 0)))
(defmethod initialize-instance :after ((m resource-manager) &key)
(reset-globals m)
(setf (streaming-ssbo m)
(3bgl-ssbo::make-persistent-mapped-buffer
16 MB x triple - buffered . 16 M is enough for 41 floats each
for 100k objects , so probably overkill . possibly should
;; reduce it once code is smart enough to handle running out
;; of space reasonably (and/or using space more efficiently)
( 16 MB is size of 2kx2kxRGBA texture without mipmaps
;; though, so possibly not worth worrying about optimizing)
(expt 2 24) :regions 10))
(setf (command-ssbo m)
(3bgl-ssbo::make-persistent-mapped-buffer
4 MB x triple - buffered . enough for ~209k draws . should
;; reduce to match streaming-ssbo once there is a better idea
of size of per - object data . One command is ( * 4 5 ) bytes
(expt 2 22) :regions 10)))
(defun reset-resource-manager (manager)
(reset-globals manager)
(macrolet ((reset (slot fun)
`(let ((v (alexandria:hash-table-values (,slot manager))))
(clrhash (,slot manager))
(map nil ',fun v))))
(when manager
(clrhash (draw-lists manager))
(3bgl-ssbo::reset-persistent-mapped-buffer (streaming-ssbo manager))
(3bgl-ssbo::reset-persistent-mapped-buffer (command-ssbo manager))
(reset buffers reset-buffer-set)
(reset objects reset-object)
;; reset handles before textures and samplers
(reset handles reset-handle)
(reset textures reset-texture)
(reset samplers reset-sampler)
(reset materials reset-material)
(setf (previous-material manager) nil)
(reset-buffer (index-buffer manager))
(reset programs 3bgl-shaders::reset-program))))
(defmethod ensure-buffers ((m resource-manager))
(3bgl-ssbo::ensure-buffers (streaming-ssbo m))
(3bgl-ssbo::ensure-buffers (command-ssbo m)))
(defmethod next-region ((m resource-manager))
(3bgl-ssbo::next-region (streaming-ssbo m))
(3bgl-ssbo::next-region (command-ssbo m)))
(defparameter *live-managers* (make-hash-table))
(defun notice-modified-shaders (functions)
(format t "notice-modified-shaders ~s~%" functions)
;; fixme: locks or something, though probably not recompiling things
;; while starting/exiting the program very often
(loop for rm in (alexandria:hash-table-keys *live-managers*)
do (loop for f in functions
do (setf (gethash f (modified-functions rm)) t))))
(pushnew 'notice-modified-shaders 3bgl-shaders::*modified-function-hook*)
(defmacro with-resource-manager ((&key timing) &body body)
`(let* ((*resource-manager* (make-instance 'resource-manager))
(*timing-helper* ,timing))
(setf *foo* *resource-manager*)
(setf (gethash *resource-manager* *live-managers*) *resource-manager*)
(setf (gethash *globals-program* (modified-functions *resource-manager*))
t)
(unwind-protect
(progn
,@body)
(remhash *resource-manager* *live-managers*)
(reset-resource-manager *resource-manager*))))
(defclass strided-buffer (3bgl-ssbo::buffer)
((stride :initarg :stride :reader stride))
(:default-initargs :flags '(:dynamic-storage)))
(defmethod vbo ((b strided-buffer))
(3bgl-ssbo::name b))
(defclass vbo (strided-buffer)
(;; next available element
(next :initform 0 :accessor next)))
(defmethod size ((vbo vbo))
;; size in units of STRIDE
(/ (3bgl-ssbo::size vbo) (stride vbo)))
(defmethod (setf size) (new-size (vbo vbo))
;; size in units of STRIDE
(assert (= new-size
(/ (3bgl-ssbo::size vbo) (stride vbo))))
new-size)
(defclass index-buffer (vbo)
((index-type :initarg :index-type :reader index-type))
;; todo: calculate stride from type
(:default-initargs :stride 2 :index-type :unsigned-short))
(defun index-type-size (type)
(ecase type (:unsigned-byte 1) (:unsigned-short 2) (:unsigned-int 4)))
(defun calc-min-size (count &key (alloc-granularity 1024))
;;; todo: more efficient growth (and/or preallocate)
(* alloc-granularity
(ceiling count alloc-granularity)))
(defmethod grow-buffer (buffer new-size &key)
(let ((stride (stride buffer))
(size (size buffer)))
(if (> new-size size)
(progn
(format t "growing buffer from ~s to ~s elements = ~a -> ~a bytes (stride ~s)~%"
size new-size (* size stride) (* new-size stride) stride)
(3bgl-ssbo::resize buffer (* new-size stride)
:copy-octets (* stride (next buffer)))
(size buffer))
size)))
(defun reset-buffer (buffer)
(3bgl-ssbo::destroy buffer)
(setf (next buffer) 0
(size buffer) 0))
(defun reset-buffer-set (bs)
(setf (next bs) 0
(size bs) 0)
(let ((bindings (shiftf (bindings bs) nil)))
(map 'nil '3bgl-ssbo::destroy bindings)))
;; Append COUNT indices of TYPE from foreign POINTER into the shared
;; index BUFFER, growing it as needed.  Returns (START COUNT), the
;; element range written.
;; Fix: the "grow by 1Mi elements" line had lost its ";;" prefix,
;; leaving a bare symbol run inside the LET bindings.
(defun upload-index-data (buffer pointer count type)
  (assert (eq type (index-type buffer)))
  ;; make sure buffer has enough space (grow+copy if needed)
  (let ((start (next buffer))
        (new-size (calc-min-size (+ count (next buffer))
                                 ;; grow by 1Mi elements
                                 ;; (~2MB)
                                 :alloc-granularity (expt 2 20))))
    (assert (>= new-size (size buffer)))
    (assert (>= new-size (+ count (next buffer))))
    (setf (size buffer) (grow-buffer buffer new-size))
    (%gl:named-buffer-sub-data (vbo buffer)
                               (* (stride buffer) (next buffer))
                               (* (stride buffer) count)
                               pointer)
    (incf (next buffer) count)
    (list start count)))
;; Fix: the first comment line had lost its ";;" prefix, leaving a
;; bare quoted-symbol line inside the class form.
(defclass buffer-set ()
  ;; 'vertex format' and corresponding VAO of this buffer (should be
  ;; shared with all others of same format, eventually may want to
  ;; move to higher level object, but this way can point directly to
  ;; this from mesh object and have all needed info)
  ((vertex-format :initarg :vertex-format :reader vertex-format)
   (vao :initarg :vao :reader vao)
   ;; list of buffer-binding objects
   (bindings :initarg :bindings :accessor bindings)
   ;; next available index and total size of buffer, in vertices
   (next :initform 0 :accessor next)
   (size :initform 0 :accessor size)))
;; Return (creating and caching on first use) the BUFFER-SET for
;; vertex FORMAT, with its canonical VAO and a single interleaved
;; binding at index 0.
(defun get-buffer-set (format)
  (or (gethash format (buffers *resource-manager*))
      ;; stride is read from the first attribute's plist tail
      (let* ((stride (getf (nthcdr 6 (first format)) :stride))
             (bs (make-instance 'buffer-set
                                :vertex-format format
                                :vao (caadr
                                      (scenegraph::canonicalize-state
                                       :vertex-format format)))))
        (setf (gethash format (buffers *resource-manager*)) bs)
        (setf (bindings bs)
              (list (make-instance 'buffer-binding :stride stride
                                   :index 0 :offset 0
                                   :parent bs)))
        bs)))
;; One vertex-buffer binding point of a BUFFER-SET.
(defclass buffer-binding (strided-buffer)
  ;; parameters to bind-vertex-buffer
  ((index :initarg :index :initform 0 :reader index)
   (offset :initarg :offset :initform 0 :reader offset)
   ;; link back to parent so we can get size/next from it
   (parent :initarg :parent :reader parent)))
;; Write cursor is tracked on the owning buffer-set, not per binding.
(defmethod next ((b buffer-binding))
  (next (parent b)))
;; Size (in vertices) is tracked on the owning buffer-set.
(defmethod size ((b buffer-binding))
  (size (parent b)))
;; Append COUNT vertices to BUFFER-SET, one foreign POINTER per
;; binding, growing each binding's VBO as needed.  Returns
;; (START COUNT), the vertex range written.
;; Fix: the "allocate space for 64k vertices at" line had lost its
;; ";;" prefix, leaving bare symbols inside the LET* bindings.
(defun buffer-geometry (buffer-set count &rest pointers)
  (let* ((start (next buffer-set))
         (new-size (calc-min-size (+ count (next buffer-set))
                                  ;; allocate space for 64k vertices at
                                  ;; a time (probably ~1-2MB). Probably
                                  ;; can be larger but not sure how
                                  ;; many different formats will be
                                  ;; used at once in practice
                                  :alloc-granularity (expt 2 16))))
    (assert (>= new-size (size buffer-set)))
    (assert (= (length pointers) (length (bindings buffer-set))))
    (loop for binding in (bindings buffer-set)
          for pointer in pointers
          do (assert (>= new-size (+ count start)))
             (grow-buffer binding new-size)
             (%gl:named-buffer-sub-data (vbo binding)
                                        (* (stride binding) start)
                                        (* (stride binding) count)
                                        pointer))
    (setf (size buffer-set) new-size)
    (incf (next buffer-set) count)
    (list start count)))
;; Fix: two comment lines had lost their ";;" prefixes, leaving bare
;; tokens inside the class form.  First comment text reconstructed
;; (space-tokenized in the original) — wording approximate.
(defclass mesh ()
  ;; arguments to an indexed base-vertex draw (assuming :unsigned-short
  ;; indices and :triangles primitive type)
  ((index-count :initarg :count :reader index-count)
   ;; index into global index buffer of first index for this mesh
   (first-index :initarg :first :reader first-index)
   ;; offset added to index values to get actual vertex index in buffer-set
   (base-vertex :initarg :base :reader base-offset)
   ;; material data = ?
   (material :initarg :material :reader material)
   ;; ref to buffer set storing the mesh vertex data
   (buffer-set :initarg :buffer-set :reader buffer-set)))
;; A renderable: a list of MESH parts drawn together.
(defclass object ()
  ;; group of meshes which are 'the same object' in some sense (shared
  ;; skeleton in particular, shared transform, possibly shared
  ;; geometry data).
  ;; for now, assuming all culled as group
  ((parts :initarg :parts :reader parts)))
;; Drop OBJECT's references so GL-backed parts can be collected.
(defun reset-object (object)
  ;; no foreign state to clean up, so just make sure we don't keep any
  ;; refs to things that do have state alive
  (setf (slot-value object 'parts) nil))
;; #++ reader macro disables this form: placeholder for a cached
;; object lookup keyed on (loader name).
#++(defun get-object (loader name)
  (or (gethash (list loader name) (meshes *resource-manager*))
      ()))
;; Return a cached shader program for the stage/function plist
;; COMPONENTS (e.g. :vertex 'foo :fragment 'bar).  The plist is
;; normalized by sorting on stage name so argument order doesn't
;; create duplicate cache entries.
(defun get-program (&rest components)
  ;; sort shaders by name of stage
  (setf components (alexandria:alist-plist
                    (sort (alexandria:plist-alist components)
                          'string< :key 'car)))
  (or (gethash components (programs *resource-manager*))
      (setf (gethash components (programs *resource-manager*))
            (apply '3bgl-shaders::shader-program components))))
;; React to shader recompilation: rebuild the globals SSBO layout when
;; the globals program changed, then repack every material whose
;; program uses a modified shader function.  Clears RM's
;; modified-function set when done.
(defun update-materials-for-recompiled-shaders (rm)
  (let ((mf (modified-functions rm))
        (mm (make-hash-table)))
    (when (gethash *globals-program* mf)
      (format t "rebuild globals layout~%")
      (setf (3bgl-ssbo::packing (globals-layout rm))
            (multiple-value-bind (a b c blocks structs)
                (3bgl-shaders::generate-stage :vertex *globals-program*
                                              :expand-uniforms t)
              (declare (ignore a b c))
              (let ((pack (3bgl-ssbo::calculate-layout blocks structs :index 0)))
                pack))))
    ;; clear previous material so shaders get reloaded
    (setf (previous-material rm) nil)
    ;; see if any materials have modified programs
    (loop for m in (alexandria:hash-table-values (materials *resource-manager*))
          for sp = (program m)
          for stages = (alexandria:hash-table-values (3bgl-shaders::stages sp))
          do (loop for s in stages
                   when (gethash s (modified-functions rm))
                     do (setf (gethash m mm) t)
                     and return nil))
    ;; update materials with modified programs
    (loop for m in (alexandria:hash-table-keys mm)
          do (update-material m :repack t)))
  (clrhash (modified-functions rm)))
;; Queue one draw for this frame under MATERIAL, grouped by the
;; draw's buffer-set (FIRST of VERTEX).
(defun add-draw (material material-data index vertex matrix)
  ;; material-data is material-id
  ;; index is (count offset)
  ;; vertex is (buffer-set start count)
  (let ((h (gethash material (draw-lists *resource-manager*))))
    ;;draw-lists is hash table of (material -> hash table of
    ;;(buffer-set -> draw))
    (unless h
      (setf h (make-hash-table))
      (setf (gethash material (draw-lists *resource-manager*)) h))
    (push (list material-data index vertex matrix) (gethash (first vertex) h))))
;; SSBO writer (see 3bgl-ssbo) serializing RM's global uniforms into
;; the globals block at POINTER per LAYOUT; slots missing from the
;; globals hash default to identity (or the given literal).  Returns
;; octets required.
(3bgl-ssbo::define-ssbo-writer write-rm-globals (layout pointer size rm)
  (let* ((size (3bgl-ssbo::check-size 0))
         (g (%globals rm)))
    (macrolet ((s (slot &optional (d sb-cga:+identity-matrix+))
                 `(3bgl-ssbo::set-slot ,slot (or (gethash ',slot g) ,d))))
      (s 3bgl-sg2-shaders-common::mvp)
      (s 3bgl-sg2-shaders-common::vp)
      (s 3bgl-sg2-shaders-common::v)
      (s 3bgl-sg2-shaders-common::p)
      (s 3bgl-sg2-shaders-common::eye-pos #.(sb-cga::vec 0.0 0.0 0.0))
      (s 3bgl-sg2-shaders-common::ui-scale)
      (s 3bgl-sg2-shaders-common::ui-matrix)
      ;; wall-clock milliseconds, wrapped to 32 bits
      (s 3bgl-sg2-shaders-common::now
         (ldb (byte 32 0)
              (floor (* 1000 (get-internal-real-time))
                     internal-time-units-per-second)))
      (s 3bgl-sg2-shaders-common::env-map-mode 0)
      (s 3bgl-sg2-shaders-common::specular-cube-map 0)
      (s 3bgl-sg2-shaders-common::specular-env-map 0)
      (s 3bgl-sg2-shaders-common::diffuse-cube-map 0)
      (s 3bgl-sg2-shaders-common::diffuse-env-map 0)
      (s 3bgl-sg2-shaders-common::prefiltered-specular-max-lod 0)
      (s 3bgl-sg2-shaders-common::prefiltered-specular-lut 0))
    size))
;; Load FILE as an environment map for MAP-TYPE (:diffuse or
;; :specular) in MODE (:cube or :equirectangular), updating the global
;; uniforms for env-map mode (1 = cube, 2 = equirectangular) and the
;; bindless texture handle.  Switching mode clears all four handles
;; first.
(defun load-env-map (loader file mode map-type)
  (let* ((tex (ecase mode
                (:cube
                 (get-texture file :target :texture-cube-map :type loader))
                (:equirectangular (get-texture file :type loader))))
         (h (%globals *resource-manager*))
         (handle (when tex
                   (handle
                    (get-handle tex (get-sampler 'env-map-sampler
                                                 :max-anisotropy 1)
                                :resident t)))))
    (flet ((reset ()
             (setf (gethash '3bgl-sg2-shaders-common::specular-env-map h) 0
                   (gethash '3bgl-sg2-shaders-common::specular-cube-map h) 0
                   (gethash '3bgl-sg2-shaders-common::diffuse-env-map h) 0
                   (gethash '3bgl-sg2-shaders-common::diffuse-cube-map h) 0)))
      (when handle
        (ecase mode
          (:cube
           (unless (eql (gethash '3bgl-sg2-shaders-common::env-map-mode h) 1)
             (reset)
             (setf (gethash '3bgl-sg2-shaders-common::env-map-mode h) 1)))
          (:equirectangular
           (unless (eql (gethash '3bgl-sg2-shaders-common::env-map-mode h) 2)
             (reset)
             (setf (gethash '3bgl-sg2-shaders-common::env-map-mode h) 2))))
        (ecase map-type
          (:diffuse
           (ecase mode
             (:cube
              (setf (gethash '3bgl-sg2-shaders-common::diffuse-cube-map h)
                    handle))
             (:equirectangular
              (setf (gethash '3bgl-sg2-shaders-common::diffuse-env-map h)
                    handle))))
          (:specular
           (ecase mode
             (:cube
              (setf (gethash '3bgl-sg2-shaders-common::specular-cube-map h)
                    handle))
             (:equirectangular
              (setf (gethash '3bgl-sg2-shaders-common::specular-env-map h)
                    handle)))))))))
;; If the globals layout is known, serialize the current globals into
;; the streaming SSBO and bind that range at +GLOBALS-BINDING+.
(defun write-globals ()
  (let* ((rm *resource-manager*)
         (gl (globals-layout rm)))
    (when (3bgl-ssbo::packing gl)
      (3bgl-ssbo::with-current-region (p)
          (streaming-ssbo rm)
        (3bgl-ssbo::use-bytes (write-rm-globals gl p (3bgl-ssbo::remaining) rm))
        (3bgl-ssbo::bind-range :shader-storage-buffer +globals-binding+)))))
;; Set the global shader uniform VAR (consumed by WRITE-GLOBALS).
(defun set-global (var value)
  (setf (gethash var (%globals *resource-manager*)) value))
;; Remove every global shader uniform from the current manager.
(defun clear-globals ()
  (clrhash (%globals *resource-manager*)))
;; Generic hook: write per-object shader data for DRAWS of material
;; MAT, returning (values octets-written draw-count).  Materials are
;; expected to specialize this; hitting the default is a bug.
(defmethod write-per-object (mat draws)
  (break "no per-object writer for material ~s?" mat)
  ;; not sure if this should have some default behavior or not...
  (values 0 0))
;; GL primitive type used to draw MAT's geometry; defaults to triangles.
(defmethod primitive (mat)
  :triangles)
;; Issue indirect draws for all DRAWS of MATERIAL sharing buffer-set
;; BS: attach BS's VBOs and the global index buffer to its VAO, write
;; per-object data, then stream 5-word indirect commands into CS and
;; fire one glMultiDrawElementsIndirect.
;; Fix: "(setf count (min 25000 count))" was a commented-out debug
;; clamp that had lost its ";;" prefix and was therefore executing,
;; silently capping draws at 25000; it is a comment again.  Also fixed
;; the "dataa" typo in the destructuring comment.
(defmethod submit-material-draws (material bs draws rm cs)
  (let ((vao (vao bs)))
    (%gl:vertex-array-element-buffer
     vao (vbo (index-buffer rm)))
    (loop for b in (bindings bs)
          ;; todo: use %gl:vertex-array-vertex-buffers?
          do (%gl:vertex-array-vertex-buffer
              vao (index b) (vbo b)
              (offset b) (stride b)))
    (gl:bind-vertex-array vao))
  (multiple-value-bind (size count)
      (write-per-object material draws)
    (declare (ignorable size))
    (unless count
      (break "count = ~s, size=~s?" count size))
    (when count
      (3bgl-ssbo::with-current-region (p) cs
        ;; each indirect command is 5 unsigned ints
        (let* ((max (floor (3bgl-ssbo::remaining) (* 5 4))))
          (when (< max count)
            (cerror "continue"
                    "not enough space for draw commands. ~s / ~s~%"
                    max count)
            (setf count max)))
        ;; (setf count (min 25000 count))
        (macrolet ((add (offset value)
                     `(setf (cffi:mem-aref p :unsigned-int (+ i ,offset))
                            ,value)))
          (loop for draw in draws
                for index below count
                for (nil ;; material data
                     (index-offset index-count) ;; index
                     (bs base-vertex count) ;;vertex
                     nil) ;; matrix
                  = draw
                for i = (* index 5)
                for base-instance = index
                do (let ((n index-count #++(min index-count 42)))
                     (add 0 n)
                     (incf *no* (floor n 3)))
                   (add 1 1)
                   (add 2 index-offset)
                   (add 3 base-vertex)
                   (add 4 base-instance)
                   (incf *objects*))
          (3bgl-ssbo::use-bytes (* 5 4 count)))
        (let ((offset (3bgl-ssbo::bind :draw-indirect-buffer)))
          (incf *draws*)
          (%gl:multi-draw-elements-indirect (primitive material)
                                            :unsigned-short
                                            offset
                                            count
                                            0))))))
;; Flush this frame's queued draw lists: refresh materials/layouts for
;; recompiled shaders, upload globals, then submit the indirect draws
;; for every material and buffer-set.  Clears the draw lists and
;; advances the streaming-buffer regions when done.
(defun submit-draws (&key depth-pass)
  (mark *timing-helper* :id :submit-draw-start)
  (setf *no* 0)
  (setf *draws* 0)
  (setf *objects* 0)
  (update-materials-for-recompiled-shaders *resource-manager*)
  (mark *timing-helper* :id :updated-materials)
  (ensure-buffers *resource-manager*)
  (mark *timing-helper* :id :ensured-buffers)
  (write-globals)
  (mark *timing-helper* :id :wrote-globals)
  (loop
    with rm = *resource-manager*
    with cs = (command-ssbo rm)
    for i from 0
    for mat-name being the hash-keys of (draw-lists *resource-manager*)
      using (hash-value buffer-sets)
    ;; draw lists may be keyed by material object or by name
    for material = (if (typep mat-name 'material)
                       mat-name
                       (gethash mat-name (materials *resource-manager*)))
    do (bind-material material :depth-pass depth-pass)
       (loop
         for bs being the hash-keys of buffer-sets
           using (hash-value draws)
         do (submit-material-draws material bs draws rm cs)))
  (mark *timing-helper* :id :submitted-draws)
  (mark *timing-helper* :id (list :done-draw-loop
                                  (hash-table-count
                                   (draw-lists *resource-manager*))))
  ;; possibly should clear individual per-bs hashes in each entry to
  ;; avoid reallocation every frame?
  (clrhash (draw-lists *resource-manager*))
  ;; do this at end of draw to minimize the amount of things the sync
  ;; needs to wait for
  (next-region *resource-manager*)
  (mark *timing-helper* :id :next-region)
  (setf *once* nil))
| null | https://raw.githubusercontent.com/3b/3bgl-misc/e3bf2781d603feb6b44e5c4ec20f06225648ffd9/scenegraph2/resource-manager.lisp | lisp | buffers is indexed by a 'vertex format' as created by
buffer-builder::vertex-format-for-layout, values are buffer-set
objects corresponding to layout.
(interned) material -> list (vector?) of things to draw
globals + per-object data
multi-draw-indirect command lists
make sure we don't have any old texture handles or similar
hanging around
set some defaults for globals
reduce it once code is smart enough to handle running out
of space reasonably (and/or using space more efficiently)
though, so possibly not worth worrying about optimizing)
reduce to match streaming-ssbo once there is a better idea
reset handles before textures and samplers
fixme: locks or something, though probably not recompiling things
while starting/exiting the program very often
next available element
size in units of STRIDE
size in units of STRIDE
todo: calculate stride from type
todo: more efficient growth (and/or preallocate)
make sure buffer has enough space (grow+copy if needed)
(~2MB)
shared with all others of same format, eventually may want to
move to higher level object, but this way can point directly to
this from mesh object and have all needed info)
list of buffer-binding objects
next available index and total size of buffer, in vertices
parameters to bind-vertex-buffer
link back to parent so we can get size/next from it
a time (probably ~1-2MB). Probably
can be larger but not sure how
many different formats will be
used at once in practice
indices and :triangles primitive type)
offset added to index values to get actual vertex index in buffer-set
material data = ?
ref to buffer set storing the mesh vertex data
group of meshes which are 'the same object' in some sense (shared
skeleton in particular, shared transform, possibly shared
geometry data).
for now, assuming all culled as group
no foreign state to clean up, so just make sure we don't keep any
refs to things that do have state alive
sort shaders by name of stage
clear previous material so shaders get reloaded
see if any materials have modified programs
update materials with modified programs
material-data is material-id
index is (count offset)
vertex is (buffer-set start count)
draw-lists is hash table of (material -> hash table of
(buffer-set -> draw))
not sure if this should have some default behavior or not...
todo: use %gl:vertex-array-vertex-buffers?
material dataa
index
vertex
matrix
possibly should clear individual per-bs hashes in each entry to
avoid reallocation every frame?
do this at end of draw to minimize the amount of things the sync
needs to wait for | (in-package 3bgl-sg2)
;; SSBO binding indices, shared with the shader side.
(defparameter +globals-binding+ 0)
(defparameter +materials-binding+ 1)
(defparameter +per-object-binding+ 2)
;; Shader function whose expanded uniforms define the globals SSBO layout.
(defvar *globals-program* '3bgl-sg2-shaders-common::common-vertex)
;; Per-frame debug counters: triangles, indirect draw calls, objects.
(defparameter *no* 0)
(defparameter *draws* 0)
(defparameter *objects* 0)
;; One-shot debug flag, cleared at the end of SUBMIT-DRAWS.
(defparameter *once* t)
(defvar *timing-helper* nil)
;; Central owner/cache of GL-side resources for one context: geometry
;; buffers, textures, samplers, bindless handles, materials, programs,
;; global uniforms, queued draw lists, and the persistent-mapped
;; streaming buffers for per-object data and indirect commands.
(defclass resource-manager ()
  ((buffers :initform (make-hash-table :test 'equalp) :reader buffers)
   (index-buffer :initform (make-instance 'index-buffer) :reader index-buffer)
   (objects :initform (make-hash-table :test 'equalp) :reader objects)
   (textures :initform (make-hash-table :test 'equalp) :reader textures)
   (handles :initform (make-hash-table :test 'equalp) :reader handles)
   (samplers :initform (make-hash-table :test 'equalp) :reader samplers)
   (materials :initform (make-hash-table) :reader materials)
   (material-names :initform (make-hash-table :test 'equalp) :reader material-names)
   (previous-material :initform nil :accessor previous-material)
   (programs :initform (make-hash-table :test 'equalp) :reader programs)
   (globals :initform (make-hash-table) :reader %globals)
   (globals-layout :initform (make-instance '3bgl-ssbo::ssbo-layout/static)
                   :reader globals-layout)
   (modified-functions :initform (make-hash-table) :reader modified-functions)
   (draw-lists :initform (make-hash-table) :reader draw-lists)
   (streaming-ssbo :initform 0 :accessor streaming-ssbo)
   (command-ssbo :initform 0 :accessor command-ssbo)))
;; Currently active manager (bound by WITH-RESOURCE-MANAGER).
(defvar *resource-manager* nil)
;; Debug convenience: remembers the most recently created manager.
(defvar *foo* nil)
;; Reset M's global uniforms to defaults: identity matrices, unit UI
;; scale, origin eye position, and no environment maps.
(defmethod reset-globals ((m resource-manager))
  (let ((g (%globals m)))
    (clrhash g)
    (loop for mat in '(3bgl-sg2-shaders-common::mvp
                       3bgl-sg2-shaders-common::vp
                       3bgl-sg2-shaders-common::v
                       3bgl-sg2-shaders-common::p
                       3bgl-sg2-shaders-common::ui-matrix)
          do (setf (gethash mat g) (sb-cga:identity-matrix)))
    (setf (gethash '3bgl-sg2-shaders-common::ui-scale g) 1.0)
    (setf (gethash '3bgl-sg2-shaders-common::eye-pos g)
          (sb-cga:vec 0.0 0.0 0.0))
    (setf (gethash '3bgl-sg2-shaders-common::env-map-mode g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::diffuse-env-map g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::diffuse-cube-map g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::specular-env-map g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::specular-cube-map g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::prefiltered-specular-max-lod g) 0)
    (setf (gethash '3bgl-sg2-shaders-common::prefiltered-specular-lut g) 0)))
;; Allocate the two persistent-mapped streaming buffers at instance
;; creation and install default globals.
;; Fix: the sizing-rationale comment lines had lost their ";;"
;; prefixes, leaving bare tokens inside the argument lists; text
;; reconstructed from the space-tokenized remnants.
(defmethod initialize-instance :after ((m resource-manager) &key)
  (reset-globals m)
  (setf (streaming-ssbo m)
        (3bgl-ssbo::make-persistent-mapped-buffer
         ;; 16MB x triple-buffered. 16M is enough for 41 floats each
         ;; for 100k objects, so probably overkill. possibly should
         ;; reduce it once code is smart enough to handle running out
         ;; of space reasonably (and/or using space more efficiently)
         ;; (16MB is size of 2kx2kxRGBA texture without mipmaps
         ;; though, so possibly not worth worrying about optimizing)
         (expt 2 24) :regions 10))
  (setf (command-ssbo m)
        (3bgl-ssbo::make-persistent-mapped-buffer
         ;; 4MB x triple-buffered. enough for ~209k draws. should
         ;; reduce to match streaming-ssbo once there is a better idea
         ;; of size of per-object data. One command is (* 4 5) bytes
         (expt 2 22) :regions 10)))
;; Drop every GL resource MANAGER owns (buffers, objects, handles,
;; textures, samplers, materials, programs) and reset its streaming
;; buffers and globals.
;; NOTE(review): RESET-GLOBALS is called before the (when manager ...)
;; guard, so this errors if MANAGER is nil — confirm callers never
;; pass nil.
(defun reset-resource-manager (manager)
  (reset-globals manager)
  (macrolet ((reset (slot fun)
               `(let ((v (alexandria:hash-table-values (,slot manager))))
                  (clrhash (,slot manager))
                  (map nil ',fun v))))
    (when manager
      (clrhash (draw-lists manager))
      (3bgl-ssbo::reset-persistent-mapped-buffer (streaming-ssbo manager))
      (3bgl-ssbo::reset-persistent-mapped-buffer (command-ssbo manager))
      (reset buffers reset-buffer-set)
      (reset objects reset-object)
      ;; handles are released before the textures/samplers they wrap
      (reset handles reset-handle)
      (reset textures reset-texture)
      (reset samplers reset-sampler)
      (reset materials reset-material)
      (setf (previous-material manager) nil)
      (reset-buffer (index-buffer manager))
      (reset programs 3bgl-shaders::reset-program))))
;; Make sure both persistent-mapped streaming buffers exist (e.g. on
;; first use or after being reset).
(defmethod ensure-buffers ((m resource-manager))
  (3bgl-ssbo::ensure-buffers (streaming-ssbo m))
  (3bgl-ssbo::ensure-buffers (command-ssbo m)))
;; Advance both streaming buffers to their next multi-buffered region.
(defmethod next-region ((m resource-manager))
  (3bgl-ssbo::next-region (streaming-ssbo m))
  (3bgl-ssbo::next-region (command-ssbo m)))
;; Managers currently alive; shader-recompile notifications fan out to
;; each of them.
(defparameter *live-managers* (make-hash-table))
;; Hook run by 3bgl-shaders when FUNCTIONS are recompiled: record them
;; in every live manager so layouts/materials get rebuilt next frame.
(defun notice-modified-shaders (functions)
  (format t "notice-modified-shaders ~s~%" functions)
  (loop for rm in (alexandria:hash-table-keys *live-managers*)
        do (loop for f in functions
                 do (setf (gethash f (modified-functions rm)) t))))
(pushnew 'notice-modified-shaders 3bgl-shaders::*modified-function-hook*)
;; Run BODY with a fresh RESOURCE-MANAGER bound to *RESOURCE-MANAGER*
;; (and TIMING bound to *TIMING-HELPER*), registered for recompile
;; notifications; all of its resources are torn down on exit.  The
;; globals program is pre-marked modified so the globals layout is
;; built on the first SUBMIT-DRAWS.
(defmacro with-resource-manager ((&key timing) &body body)
  `(let* ((*resource-manager* (make-instance 'resource-manager))
          (*timing-helper* ,timing))
     (setf *foo* *resource-manager*)
     (setf (gethash *resource-manager* *live-managers*) *resource-manager*)
     (setf (gethash *globals-program* (modified-functions *resource-manager*))
           t)
     (unwind-protect
          (progn
            ,@body)
       (remhash *resource-manager* *live-managers*)
       (reset-resource-manager *resource-manager*))))
;; A GL buffer with a fixed per-element stride in octets.
(defclass strided-buffer (3bgl-ssbo::buffer)
  ((stride :initarg :stride :reader stride))
  (:default-initargs :flags '(:dynamic-storage)))
(defmethod vbo ((b strided-buffer))
(3bgl-ssbo::name b))
(defclass vbo (strided-buffer)
(next :initform 0 :accessor next)))
(defmethod size ((vbo vbo))
(/ (3bgl-ssbo::size vbo) (stride vbo)))
(defmethod (setf size) (new-size (vbo vbo))
(assert (= new-size
(/ (3bgl-ssbo::size vbo) (stride vbo))))
new-size)
(defclass index-buffer (vbo)
((index-type :initarg :index-type :reader index-type))
(:default-initargs :stride 2 :index-type :unsigned-short))
(defun index-type-size (type)
(ecase type (:unsigned-byte 1) (:unsigned-short 2) (:unsigned-int 4)))
(defun calc-min-size (count &key (alloc-granularity 1024))
(* alloc-granularity
(ceiling count alloc-granularity)))
(defmethod grow-buffer (buffer new-size &key)
(let ((stride (stride buffer))
(size (size buffer)))
(if (> new-size size)
(progn
(format t "growing buffer from ~s to ~s elements = ~a -> ~a bytes (stride ~s)~%"
size new-size (* size stride) (* new-size stride) stride)
(3bgl-ssbo::resize buffer (* new-size stride)
:copy-octets (* stride (next buffer)))
(size buffer))
size)))
(defun reset-buffer (buffer)
(3bgl-ssbo::destroy buffer)
(setf (next buffer) 0
(size buffer) 0))
(defun reset-buffer-set (bs)
(setf (next bs) 0
(size bs) 0)
(let ((bindings (shiftf (bindings bs) nil)))
(map 'nil '3bgl-ssbo::destroy bindings)))
(defun upload-index-data (buffer pointer count type)
(assert (eq type (index-type buffer)))
(let ((start (next buffer))
(new-size (calc-min-size (+ count (next buffer))
grow by 1Mi elements
:alloc-granularity (expt 2 20))))
(assert (>= new-size (size buffer)))
(assert (>= new-size (+ count (next buffer))))
(setf (size buffer) (grow-buffer buffer new-size))
(%gl:named-buffer-sub-data (vbo buffer)
(* (stride buffer) (next buffer))
(* (stride buffer) count)
pointer)
(incf (next buffer) count)
(list start count)))
(defclass buffer-set ()
' vertex format ' and correponding VAO of this buffer ( should be
((vertex-format :initarg :vertex-format :reader vertex-format)
(vao :initarg :vao :reader vao)
(bindings :initarg :bindings :accessor bindings)
(next :initform 0 :accessor next)
(size :initform 0 :accessor size)))
(defun get-buffer-set (format)
(or (gethash format (buffers *resource-manager*))
(let* ((stride (getf (nthcdr 6 (first format)) :stride))
(bs (make-instance 'buffer-set
:vertex-format format
:vao (caadr
(scenegraph::canonicalize-state
:vertex-format format)))))
(setf (gethash format (buffers *resource-manager*)) bs)
(setf (bindings bs)
(list (make-instance 'buffer-binding :stride stride
:index 0 :offset 0
:parent bs)))
bs)))
(defclass buffer-binding (strided-buffer)
((index :initarg :index :initform 0 :reader index)
(offset :initarg :offset :initform 0 :reader offset)
(parent :initarg :parent :reader parent)))
(defmethod next ((b buffer-binding))
(next (parent b)))
(defmethod size ((b buffer-binding))
(size (parent b)))
(defun buffer-geometry (buffer-set count &rest pointers)
(let* ((start (next buffer-set))
(new-size (calc-min-size (+ count (next buffer-set))
allocate space for 64k vertices at
:alloc-granularity (expt 2 16))))
(assert (>= new-size (size buffer-set)))
(assert (= (length pointers) (length (bindings buffer-set))))
(loop for binding in (bindings buffer-set)
for pointer in pointers
do (assert (>= new-size (+ count start)))
(grow-buffer binding new-size)
(%gl:named-buffer-sub-data (vbo binding)
(* (stride binding) start)
(* (stride binding) count)
pointer))
(setf (size buffer-set) new-size)
(incf (next buffer-set) count)
(list start count)))
(defclass mesh ()
arguments to , ( assuming : unsigned - short
((index-count :initarg :count :reader index-count)
index into global index buffer of first index for this mesh
(first-index :initarg :first :reader first-index)
(base-vertex :initarg :base :reader base-offset)
(material :initarg :material :reader material)
(buffer-set :initarg :buffer-set :reader buffer-set)))
(defclass object ()
((parts :initarg :parts :reader parts)))
(defun reset-object (object)
(setf (slot-value object 'parts) nil))
#++(defun get-object (loader name)
(or (gethash (list loader name) (meshes *resource-manager*))
()))
(defun get-program (&rest components)
(setf components (alexandria:alist-plist
(sort (alexandria:plist-alist components)
'string< :key 'car)))
(or (gethash components (programs *resource-manager*))
(setf (gethash components (programs *resource-manager*))
(apply '3bgl-shaders::shader-program components))))
(defun update-materials-for-recompiled-shaders (rm)
(let ((mf (modified-functions rm))
(mm (make-hash-table)))
(when (gethash *globals-program* mf)
(format t "rebuild globals layout~%")
(setf (3bgl-ssbo::packing (globals-layout rm))
(multiple-value-bind (a b c blocks structs)
(3bgl-shaders::generate-stage :vertex *globals-program*
:expand-uniforms t)
(declare (ignore a b c))
(let ((pack (3bgl-ssbo::calculate-layout blocks structs :index 0)))
pack))))
(setf (previous-material rm) nil)
(loop for m in (alexandria:hash-table-values (materials *resource-manager*))
for sp = (program m)
for stages = (alexandria:hash-table-values (3bgl-shaders::stages sp))
do (loop for s in stages
when (gethash s (modified-functions rm))
do (setf (gethash m mm) t)
and return nil))
(loop for m in (alexandria:hash-table-keys mm)
do (update-material m :repack t)))
(clrhash (modified-functions rm)))
(defun add-draw (material material-data index vertex matrix)
(let ((h (gethash material (draw-lists *resource-manager*))))
(unless h
(setf h (make-hash-table))
(setf (gethash material (draw-lists *resource-manager*)) h))
(push (list material-data index vertex matrix) (gethash (first vertex) h))))
(3bgl-ssbo::define-ssbo-writer write-rm-globals (layout pointer size rm)
(let* ((size (3bgl-ssbo::check-size 0))
(g (%globals rm)))
(macrolet ((s (slot &optional (d sb-cga:+identity-matrix+))
`(3bgl-ssbo::set-slot ,slot (or (gethash ',slot g) ,d))))
(s 3bgl-sg2-shaders-common::mvp)
(s 3bgl-sg2-shaders-common::vp)
(s 3bgl-sg2-shaders-common::v)
(s 3bgl-sg2-shaders-common::p)
(s 3bgl-sg2-shaders-common::eye-pos #.(sb-cga::vec 0.0 0.0 0.0))
(s 3bgl-sg2-shaders-common::ui-scale)
(s 3bgl-sg2-shaders-common::ui-matrix)
(s 3bgl-sg2-shaders-common::now
(ldb (byte 32 0)
(floor (* 1000 (get-internal-real-time))
internal-time-units-per-second)))
(s 3bgl-sg2-shaders-common::env-map-mode 0)
(s 3bgl-sg2-shaders-common::specular-cube-map 0)
(s 3bgl-sg2-shaders-common::specular-env-map 0)
(s 3bgl-sg2-shaders-common::diffuse-cube-map 0)
(s 3bgl-sg2-shaders-common::diffuse-env-map 0)
(s 3bgl-sg2-shaders-common::prefiltered-specular-max-lod 0)
(s 3bgl-sg2-shaders-common::prefiltered-specular-lut 0))
size))
(defun load-env-map (loader file mode map-type)
(let* ((tex (ecase mode
(:cube
(get-texture file :target :texture-cube-map :type loader))
(:equirectangular (get-texture file :type loader))))
(h (%globals *resource-manager*))
(handle (when tex
(handle
(get-handle tex (get-sampler 'env-map-sampler
:max-anisotropy 1)
:resident t)))))
(flet ((reset ()
(setf (gethash '3bgl-sg2-shaders-common::specular-env-map h) 0
(gethash '3bgl-sg2-shaders-common::specular-cube-map h) 0
(gethash '3bgl-sg2-shaders-common::diffuse-env-map h) 0
(gethash '3bgl-sg2-shaders-common::diffuse-cube-map h) 0)))
(when handle
(ecase mode
(:cube
(unless (eql (gethash '3bgl-sg2-shaders-common::env-map-mode h) 1)
(reset)
(setf (gethash '3bgl-sg2-shaders-common::env-map-mode h) 1)))
(:equirectangular
(unless (eql (gethash '3bgl-sg2-shaders-common::env-map-mode h) 2)
(reset)
(setf (gethash '3bgl-sg2-shaders-common::env-map-mode h) 2))))
(ecase map-type
(:diffuse
(ecase mode
(:cube
(setf (gethash '3bgl-sg2-shaders-common::diffuse-cube-map h)
handle))
(:equirectangular
(setf (gethash '3bgl-sg2-shaders-common::diffuse-env-map h)
handle))))
(:specular
(ecase mode
(:cube
(setf (gethash '3bgl-sg2-shaders-common::specular-cube-map h)
handle))
(:equirectangular
(setf (gethash '3bgl-sg2-shaders-common::specular-env-map h)
handle)))))))))
(defun write-globals ()
(let* ((rm *resource-manager*)
(gl (globals-layout rm)))
(when (3bgl-ssbo::packing gl)
(3bgl-ssbo::with-current-region (p)
(streaming-ssbo rm)
(3bgl-ssbo::use-bytes (write-rm-globals gl p (3bgl-ssbo::remaining) rm))
(3bgl-ssbo::bind-range :shader-storage-buffer +globals-binding+)))))
(defun set-global (var value)
(setf (gethash var (%globals *resource-manager*)) value))
(defun clear-globals ()
(clrhash (%globals *resource-manager*)))
(defmethod write-per-object (mat draws)
(break "no per-object writer for material ~s?" mat)
(values 0 0))
(defmethod primitive (mat)
:triangles)
(defmethod submit-material-draws (material bs draws rm cs)
(let ((vao (vao bs)))
(%gl:vertex-array-element-buffer
vao (vbo (index-buffer rm)))
(loop for b in (bindings bs)
do (%gl:vertex-array-vertex-buffer
vao (index b) (vbo b)
(offset b) (stride b)))
(gl:bind-vertex-array VAO))
(multiple-value-bind (size count)
(write-per-object material draws)
(declare (ignorable size))
(unless count
(break "count = ~s, size=~s?" count size))
(when count
(3bgl-ssbo::with-current-region (p) cs
(let* ((max (floor (3bgl-ssbo::remaining) (* 5 4))))
(when (< max count)
(cerror "continue"
"not enough space for draw commands. ~s / ~s~%"
max count)
(setf count max)))
( setf count ( min 25000 count ) )
(macrolet ((add (offset value)
`(setf (cffi:mem-aref p :unsigned-int (+ i ,offset))
,value)))
(loop for draw in draws
for index below count
= draw
for i = (* index 5)
for base-instance = index
do (let ((n index-count #++(min index-count 42)))
(add 0 n)
(incf *no* (floor n 3)))
(add 1 1)
(add 2 index-offset)
(add 3 base-vertex)
(add 4 base-instance)
(incf *objects*))
(3bgl-ssbo::use-bytes (* 5 4 count)))
(let ((offset (3bgl-ssbo::bind :draw-indirect-buffer)))
(incf *draws*)
(%gl:multi-draw-elements-indirect (primitive material)
:unsigned-short
offset
count
0))))))
(defun submit-draws (&key depth-pass)
(mark *timing-helper* :id :submit-draw-start)
(setf *no* 0)
(setf *draws* 0)
(setf *objects* 0)
(update-materials-for-recompiled-shaders *resource-manager*)
(mark *timing-helper* :id :updated-materials)
(ensure-buffers *resource-manager*)
(mark *timing-helper* :id :ensured-buffers)
(write-globals)
(mark *timing-helper* :id :wrote-globals)
(loop
with rm = *resource-manager*
with cs = (command-ssbo rm)
for i from 0
for mat-name being the hash-keys of (draw-lists *resource-manager*)
using (hash-value buffer-sets)
for material = (if (typep mat-name 'material)
mat-name
(gethash mat-name (materials *resource-manager*)))
do (bind-material material :depth-pass depth-pass)
(loop
for bs being the hash-keys of buffer-sets
using (hash-value draws)
do (submit-material-draws material bs draws rm cs)))
(mark *timing-helper* :id :submitted-draws)
(mark *timing-helper* :id (list :done-draw-loop
(hash-table-count
(draw-lists *resource-manager*))))
(clrhash (draw-lists *resource-manager*))
(next-region *resource-manager*)
(mark *timing-helper* :id :next-region)
(setf *once* nil))
|
dea5d3b20b62d6bfd518ce24c63d5795316b0d724294af3189d33de8cba65f10 | capsjac/opengles | Internal.hs | # LANGUAGE FlexibleInstances #
-- Fix: pragma braces were stripped ("# LANGUAGE X #"), which GHC
-- rejects; restored to proper {-# LANGUAGE … #-} form.
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Graphics.OpenGLES.Internal where
import Control.Applicative
import Control.Monad
import Control.Concurrent.Chan
import Control.Future
import qualified Data.ByteString as B
import Data.IORef
import Data.Monoid
import Data.Typeable
import qualified Data.Vector.Storable as V
import Foreign hiding (newForeignPtr, addForeignPtrFinalizer, void)
import Foreign.C.String (peekCString, peekCStringLen)
import Foreign.Concurrent (newForeignPtr, addForeignPtrFinalizer)
import Graphics.OpenGLES.Base
import Graphics.TextureContainer.KTX
import Linear
import System.IO.Unsafe (unsafePerformIO)
-- * Internal

-- glRestoreLostObjects :: GL ()
-- saveBuffer :: Buffer -> IO ()
-- 	= atomicModifyIORef' (buf:) bufferArchive
-- bufferArchive = unsafePerformIO $ newIORef []
-- addCompiledProgramResources
-- Fix: the two sketch lines above had lost their "--" prefixes
-- (space-tokenized remnants like "atomicModifyIORef ' ( buf :)"),
-- leaving invalid stray tokens at top level; restored as comments.

-- | Global frame counter (shared mutable state).
-- NOINLINE added: a top-level unsafePerformIO binding must not be
-- inlined/duplicated, or multiple IORefs could be created.
frameCounter :: IORef Int64
frameCounter = unsafePerformIO $ newIORef 0
{-# NOINLINE frameCounter #-}
-- ** Logging
-- | Global log channel; 'glLog' writes here and a consumer drains it
-- elsewhere.  NOINLINE keeps the unsafePerformIO-created Chan unique.
-- Fix: the NOINLINE pragma had its braces stripped
-- ("# NOINLINE errorQueue #"), which is invalid; restored.
errorQueue :: Chan String
errorQueue = unsafePerformIO newChan
{-# NOINLINE errorQueue #-}
-- | Append a message to the global 'errorQueue'.
glLog :: String -> IO ()
glLog = writeChan errorQueue
-- ** GL Error
-- | Haskell view of the glGetError result codes; GL_NO_ERROR is
-- represented as 'Nothing' by 'getError'.
data GLError = InvalidEnum | InvalidValue | InvalidOperation
	| OutOfMemory | InvalidFrameBufferOperation
	deriving Show
-- | Poll GL for the most recent error flag; 'Nothing' means no error.
-- Fix: the case expression was non-exhaustive, so an error enum this
-- binding doesn't know (e.g. stack overflow/underflow codes some
-- drivers report) crashed with a pattern-match failure; unknown codes
-- now map to 'Nothing'.
getError :: GL (Maybe GLError)
getError = unMarshal <$> glGetError
	where unMarshal x = case x of
		0x0000 -> Nothing
		0x0500 -> Just InvalidEnum
		0x0501 -> Just InvalidValue
		0x0502 -> Just InvalidOperation
		0x0505 -> Just OutOfMemory
		0x0506 -> Just InvalidFrameBufferOperation
		_ -> Nothing
-- | If a GL error is pending, log it via 'glLog' prefixed with the
-- given location string and return True; otherwise return False.
showError :: String -> GL Bool
showError location = do
	--putStrLn location -- tmp
	getError >>= maybe (return False) (\err -> do
		glLog ("E " ++ location ++ ": " ++ show err)
		return True )
-- ** GL Object management
-- | Mutable handle to a GL object; the IORef lets the object be
-- replaced (e.g. recreated) behind a stable reference.
type GLO = IORef GLObj
-- | A GL object: its current name, an action that regenerates it, and
-- the foreign pointer whose finalizer deletes the underlying object
-- (see 'genObj').
data GLObj = GLObj GLuint (GL GLObj) (ForeignPtr GLuint)
-- Read the current GL name out of a GLO.
getObjId glo = fmap go (readIORef glo)
	where go (GLObj i _ _) = i
-- Debug-only Show: uses unsafePerformIO to peek at the current id.
instance Show GLO where
	show = show . unsafePerformIO . getObjId
-- | Create a fresh GLO: generate the GL object via 'genObj' with the
-- given gen/delete/init actions and store it in a new IORef.
newGLO
	:: (GLsizei -> Ptr GLuint -> GL ())
	-> (GLsizei -> Ptr GLuint -> GL ())
	-> (GLuint -> GL ())
	-> GL GLO
newGLO gen del init = do
	ref <- newIORef undefined
	writeIORef ref =<< genObj gen del init
	-- addToGLOMS ref
	return ref
-- | genObj glo glGenBuffers glDeleteBuffers
--
-- Generate one GL object with GENOBJS, run INITOBJ on it, and attach
-- a finalizer that deletes it with DELOBJS when the ForeignPtr dies.
-- Fix: the haddock line above had lost its "--" prefix (a bare
-- "| genObj …" line is invalid at top level); also fixed the "valud"
-- typo in the finalizer note.
genObj
	:: (GLsizei -> Ptr GLuint -> GL ())
	-> (GLsizei -> Ptr GLuint -> GL ())
	-> (GLuint -> GL ())
	-> GL GLObj
genObj genObjs delObjs initObj = do
	fp <- mallocForeignPtr
	withForeignPtr fp $ \ptr -> do
		genObjs 1 ptr
		showError "genObj"
		obj <- peek ptr
		addForeignPtrFinalizer fp $ do
			-- XXX check whether context is valid or not
			with obj $ \ptr -> do
				delObjs 1 ptr
				void $ showError "delObj"
		initObj obj
		return $ GLObj obj (genObj genObjs delObjs initObj) fp
-- ** Types
VertexArray
-- 2.0
newtype HalfFloat = HalfFloat Word16 deriving (Num,Storable)
newtype FixedFloat = FixedFloat Int32 deriving (Num,Storable)
3.0
newtype Int2_10x3 = Int210x3 Int32 deriving (Num,Storable)
newtype Word2_10x3 = Word2_10x3 Int32 deriving (Num,Storable)
Renderbuffer
-- 2.0
newtype Word4444 = Word4444 Word16 deriving (Num,Storable)
newtype Word5551 = Word5551 Word16 deriving (Num,Storable)
newtype Word565 = Word565 Word16 deriving (Num,Storable)
3.0
newtype Word10f11f11f = Word10f11f11f Word32 deriving (Num,Storable)
newtype Word5999 = Word5999 Word32 deriving (Num,Storable)
newtype Word24_8 = Word24_8 Word32 deriving (Num,Storable)
newtype FloatWord24_8 = FloatWord24_8 (Float, Word32)
class GLType a where
glType :: m a -> Word32
instance GLType Int8 where glType _ = 0x1400
instance GLType Word8 where glType _ = 0x1401
instance GLType Int16 where glType _ = 0x1402
instance GLType Word16 where glType _ = 0x1403
instance GLType Int32 where glType _ = 0x1404
instance GLType Word32 where glType _ = 0x1405
instance GLType Float where glType _ = 0x1406
instance GLType Double where glType _ = 0x140A -- OpenGL
instance GLType HalfFloat where glType _ = 0x140B
instance GLType FixedFloat where glType _ = 0x140C
instance GLType Int2_10x3 where glType _ = 0x8D9F
instance GLType Word2_10x3 where glType _ = 0x8368
instance GLType Word4444 where glType _ = 0x8033
instance GLType Word5551 where glType _ = 0x8034
instance GLType Word565 where glType _ = 0x8363
instance GLType Word10f11f11f where glType _ = 0x8C3B
instance GLType Word5999 where glType _ = 0x8C3E
instance GLType Word24_8 where glType _ = 0x84FA
instance GLType FloatWord24_8 where glType _ = 0x8DAD
r,rg,rgb,rgba,r_integer,rg_integer,rgb_integer,rgba_integer,
depth_component,depth_stencil :: GLenum
rgb = 0x1907
rgba = 0x1908
depth_component = 0x1902
r = 0x1903
rg = 0x8227
rg_integer = 0x8228
r_integer = 0x8D94
rgb_integer = 0x8D98
rgba_integer = 0x8D99
depth_stencil = 0x84F9
-- ** Buffer
type GLArray a = V.Vector a
-- Buffer usage id (latestArray or length)
data Buffer a = Buffer (IORef (Either (GLArray a) Int)) GLO
DoubleBuffer GLO GLO ( IORef ( GLArray a ) )
newtype BufferUsage = BufferUsage GLenum
newtype BufferSlot = BufferSlot GLenum
* *
newtype DrawMode = DrawMode GLenum
-- ** Graphics State
-- | See "Graphics.OpenGLES.State"
type RenderConfig = GL ()
newtype Capability = Capability GLenum
newtype CullFace = Culling GLenum
newtype CompFunc = CompFunc GLenum
newtype StencilOp = StencilOp GLenum
newtype BlendOp = BlendOp GLenum
newtype BlendingFactor = BlendingFactor GLenum
newtype Hint = Hint GLenum
-- ** Programmable Shader
type ShaderType = GLenum
data Shader = Shader ShaderType GLName B.ByteString
deriving Show
data TransformFeedback =
NoFeedback
| FeedbackArrays [String]
| FeedbackPacked [String]
deriving Show
data Program p = Program
{ programGLO :: GLO
, programTF :: TransformFeedback
, programShaders :: [Shader]
, programVariables :: ([VarDesc], [VarDesc])
} deriving Show
type ProgramBinary = B.ByteString
-- | name: (location, length of array, type)
type VarDesc = (String, (GLint, GLsizei, GLenum))
-- binaryStore :: IORef [(String, B.ByteString)]
-- or (FilePath -> IO B.ByteString)
-- binaryStore = unsafePerformIO $ newIORef []
programDict :: IORef [(String, Program ())]
programDict = unsafePerformIO $ newIORef []
lookupVarDesc :: TypeRep -> IO (Maybe ([VarDesc], [VarDesc]))
lookupVarDesc rep = do
let name = show rep
entry <- lookup name <$> readIORef programDict
case entry of
Nothing -> do
glLog $ "Program '" ++ name ++ "' is not compiled."
return Nothing
Just prog -> return $ Just (programVariables prog)
loadProgram
:: Typeable p
=> Program p
-> (Int -> String -> Maybe ProgramBinary -> GL ())
-> GL (Progress [String] (Program p))
loadProgram prog@(Program glo tf shaders ([],[])) progressLogger = do
let numShaders = length shaders
let progname = show (typeRep prog)
let msg = "Start compiling: " ++ progname
glLog msg
progressLogger 0 msg Nothing
pid <- glCreateProgram
res <- if pid == 0 then do
showError "glCreateProgram"
let msg = "Fatal: glCreateProgram returned 0."
progressLogger (numShaders + 1) msg Nothing
return $ Fixme [msg]
else do
results <- mapM (loadShader progressLogger) (zip [1..] shaders)
-- putStrLn $ show results
let errors = [msg | Fixme [msg] <- results]
res <- if errors /= []
then return $ Fixme errors
else do
forM_ results $ \(Finished sid) -> do
glAttachShader pid sid
showError "glAttachShader"
glLinkProgram pid
showError "glLinkProgram"
postLink progname numShaders prog pid progressLogger
sequence_ [glDeleteShader s | Finished s <- results]
return res
glLog "---------------"
return res
postLink
:: Typeable p
=> String -> Int -> Program p -> GLuint
-> (Int -> String -> Maybe ProgramBinary -> GL ())
-> GL (Progress [String] (Program p))
postLink progname numShaders prog pid
progressLogger = alloca $ \intptr -> do
glGetProgramiv pid c_link_status intptr
linkStatus <- peek intptr
glGetProgramiv pid c_info_log_length intptr
len <- fmap fromIntegral $ peek intptr
info <- allocaBytes len $ \buf -> do
glGetProgramInfoLog pid (fromIntegral len) nullPtr buf
peekCStringLen (buf, len-1)
let info' = if info == "" then "" else '\n':info
if linkStatus == 0 then do
let msg = "Cannot link program " ++ progname ++ info'
glLog msg
progressLogger (numShaders + 1) msg Nothing
glDeleteProgram pid
return $ Fixme [msg]
else do
-- obtain shader variables
vars <- getActiveVariables pid
putStrLn . show $ vars
fp <- newForeignPtr nullPtr (glDeleteProgram pid)
writeIORef (programGLO prog) (GLObj pid (error "not impl: Program implicit recompilation") fp)
let msg = "Successfully linked " ++ progname ++ "!" ++ info'
glLog msg
progressLogger (numShaders + 1) msg Nothing
let prog' = prog { programVariables = vars }
atomicModifyIORef' programDict $! \xs ->
((show (typeRep prog), prog'):xs, ())
return $ Finished prog'
c_link_status = 0x8B82
c_info_log_length = 0x8B84
GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257
GL_PROGRAM_BINARY_LENGTH 0x8741
GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE
loadProgramBinary : : Program p - > GLuint - > GL ( )
loadProgramBinary ( Program tf _ ref ) pid = do
bs < - ...
let ( fp , offset , len ) = toForeignPtr bs
withForeignPtr fp $ \p - > do
fmt < - peek ( p ` plusPtr ` offset )
( p ` plusPtr ` ( ) ) ( fromIntegral len )
showError " glProgramBinary "
if err , writeIORef ref Broken
postLink progname pid
GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257
GL_PROGRAM_BINARY_LENGTH 0x8741
GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE
loadProgramBinary :: Program p -> GLuint -> GL ()
loadProgramBinary (Program tf _ ref) pid = do
bs <- ...
let (fp, offset, len) = toForeignPtr bs
withForeignPtr fp $ \p -> do
fmt <- peek (p `plusPtr` offset)
glProgramBinary pid fmt (p `plusPtr` (offset+4)) (fromIntegral len)
showError "glProgramBinary"
if err, writeIORef ref Broken
postLink progname numShaders ref pid
-}
loadShader
:: (Int -> String -> Maybe ProgramBinary -> GL ())
-> (Int, Shader)
-> GL (Progress [String] GLuint)
loadShader progressLogger (i, Shader shaderType name bs) = do
sid <- glCreateShader shaderType
if sid == 0 then do
showError "glCreateShader"
let msg = "Fatal: glCreateShader returned 0."
glLog msg
progressLogger i msg Nothing
return $ Fixme [name ++ ": " ++ msg]
else B.useAsCString bs $ \src -> do
withArray [src] $ \ptr -> do
glShaderSource sid 1 ptr nullPtr
showError "glShaderSource"
glCompileShader sid
showError "glCompileShader"
alloca $ \pint -> do
glGetShaderiv sid c_compile_status pint
compiled <- peek pint
glGetShaderiv sid c_info_log_length pint
len <- fmap fromIntegral $ peek pint
info <- allocaBytes len $ \buf -> do
glGetShaderInfoLog sid (fromIntegral len) nullPtr buf
peekCStringLen (buf, len-1)
let info' = if info == "" then "" else '\n':info
if compiled == 0 then do
let msg = "Could not compile " ++ name ++ info'
glLog msg
progressLogger i msg Nothing
glDeleteShader sid
return $ Fixme [msg]
else do
let msg = name ++ " ... done" ++ info'
glLog msg
progressLogger i msg Nothing
return $ Finished sid
c_compile_status = 0x8B81
getActiveVariables :: GLuint -> GL ([VarDesc], [VarDesc])
getActiveVariables pid = do
sptr <- malloc
glGetProgramiv pid c_active_uniform_max_length sptr
uMaxLen <- peek sptr
glGetProgramiv pid c_active_attribute_max_length sptr
aMaxLen <- peek sptr
let maxlen = max uMaxLen aMaxLen
str <- mallocBytes (fromIntegral maxlen)
glGetProgramiv pid c_active_uniforms sptr
numU <- peek sptr
glGetProgramiv pid c_active_attributes sptr
numA <- peek sptr
tptr <- malloc
uniforms <- forM [0..numU-1] $ \ index -> do
avoid [ 0 .. maxBound ] bug
let i = (fromIntegral :: GLint -> GLuint) index
glGetActiveUniform pid i maxlen nullPtr sptr tptr str
name <- peekCString str
loc <- glGetUniformLocation pid str
size <- peek sptr
typ <- peek tptr
return (name, (loc, size, typ))
attribs <- forM [0..numA-1] $ \index -> do
let i = fromIntegral index
glGetActiveAttrib pid i maxlen nullPtr sptr tptr str
name <- peekCString str
loc <- glGetAttribLocation pid str
size <- peek sptr
typ <- peek tptr
putStrLn . show $ (index, loc)
return (name, (loc, size, typ))
free str; free sptr; free tptr
return (uniforms, attribs)
c_active_uniform_max_length = 0x8B87
c_active_attribute_max_length = 0x8B8A
c_active_uniforms = 0x8B86
c_active_attributes = 0x8B89
-- ** Uniform
( location , length of array or 1 , ptr )
-- Uniform location is unique to each program
newtype Uniform p a = Uniform (GLint, GLsizei, Ptr ())
--
class UnifVal a where
glUniform :: (GLint, GLsizei, Ptr ()) -> a -> GL ()
class UnifMat a where
glUnifMat :: GLint -> GLsizei -> GLboolean -> Ptr a -> GL ()
class GLVar m v a where
-- ($=) :: m p a -> a -> (m (), v ())
-- ($-) :: m p a -> v a -> (m (), v ())
instance UnifVal a = > GLVar Uniform UniformValue a where
-- unif $= value = unif $- unifVal value
unif $ - value = ( coerce , coerce value )
instance AttrStruct a = > a where
-- attr $= value = attr $- buffer "tmp" value
-- attr $- buffer = (coerce attr, coerce buffer)
UnifVal a = > ( Uniform p a , a )
UnifStruct a = > ( UniformBlock p a , Buffer a )
GLStruct ? std130 ?
* *
program glsl_type = ( index , size , normalize , divisor )
newtype Attrib p a = Attrib (GLuint, GLsizei, GLboolean, GLuint)
deriving Show
-- | GLSL vertex attribute type
class VertexAttribute a where
glVertexAttrib :: GLuint -> a -> GL ()
| A set of ' VertexAttribute 's packed in a ' Buffer '
class AttrStruct a p b | a -> p where
glVertexBuffer :: a -> Buffer b -> GL ()
| The 3rd argument of glVertexAttribI?Pointer
class GLType a => AttrElement a where
-- ** Vertex Array Object
( glo , init )
newtype VertexArray p = VertexArray (GLO, GL ())
-- ** Vertex Picker
newtype VertexPicker = VertexPicker (GLenum -> GL Bool)
instance Monoid VertexPicker where
mempty = VertexPicker (const $ return True)
mappend (VertexPicker f) (VertexPicker g) =
VertexPicker $ \mode ->
f mode >> g mode
class VertexIx a where
vxix :: m a -> (GLenum, GLint)
instance VertexIx Word8 where
vxix _ = (0x1401, 1)
instance VertexIx Word16 where
vxix _ = (0x1403, 2)
instance VertexIx Word32 where
vxix _ = (0x1405, 4)
instance forall v a. VertexIx a => VertexIx (v a) where
vxix _ = vxix (undefined :: v a)
-- ** Draw Operation
newtype BufferMask = BufferMask GLenum deriving Num
[ MainThread , ]
if Nothing , main GL thread should stop before the next frame .
drawOrExit :: IORef (Maybe (GL ())) -- eglSwapBuffer inside
drawOrExit = unsafePerformIO $ newIORef Nothing
drawQueue :: Chan (GL ())
drawQueue = unsafePerformIO newChan
# NOINLINE drawQueue #
* *
data Framebuffer = Framebuffer (IORef (V2 GLsizei)) GLO
data Renderbuffer a = Renderbuffer GLint GLenum (IORef (V2 GLsizei)) GLO
class Attachable a b where
glAttachToFramebuffer :: GLenum -> a b -> IORef (V2 GLsizei) -> GL ()
defaultFramebuffer :: Framebuffer
defaultFramebuffer = unsafePerformIO $ do
glo <- newIORef $ GLObj 0 undefined undefined
dummy <- newIORef undefined
return $ Framebuffer dummy glo
-- ** Texture
glo , target , ktx
data Texture a = Texture GLenum (IORef Ktx) GLO
texture_2d, texture_cube_map, texture_2d_array, texture_3d,
texture_cube_map_positive_x :: Word32
texture_2d = 0x0DE1
texture_cube_map = 0x8513
texture_2d_array = 0x8C1A
texture_3d = 0x806F
texture_cube_map_positive_x = 0x8515
| null | https://raw.githubusercontent.com/capsjac/opengles/23b78e5d1b058349a778a49310d867164ea1a529/src/Graphics/OpenGLES/Internal.hs | haskell | * Internal
glRestoreLostObjects :: GL ()
saveBuffer :: Buffer -> IO ()
addCompiledProgramResources
** Logging
** GL Error
putStrLn location -- tmp
** GL Object management
addToGLOMS ref
XXX check whether context is valud or not
** Types
2.0
2.0
OpenGL
** Buffer
Buffer usage id (latestArray or length)
** Graphics State
| See "Graphics.OpenGLES.State"
** Programmable Shader
| name: (location, length of array, type)
binaryStore :: IORef [(String, B.ByteString)]
or (FilePath -> IO B.ByteString)
binaryStore = unsafePerformIO $ newIORef []
putStrLn $ show results
obtain shader variables
** Uniform
Uniform location is unique to each program
($=) :: m p a -> a -> (m (), v ())
($-) :: m p a -> v a -> (m (), v ())
unif $= value = unif $- unifVal value
attr $= value = attr $- buffer "tmp" value
attr $- buffer = (coerce attr, coerce buffer)
| GLSL vertex attribute type
** Vertex Array Object
** Vertex Picker
** Draw Operation
eglSwapBuffer inside
** Texture | # LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FunctionalDependencies #
# LANGUAGE ScopedTypeVariables #
module Graphics.OpenGLES.Internal where
import Control.Applicative
import Control.Monad
import Control.Concurrent.Chan
import Control.Future
import qualified Data.ByteString as B
import Data.IORef
import Data.Monoid
import Data.Typeable
import qualified Data.Vector.Storable as V
import Foreign hiding (newForeignPtr, addForeignPtrFinalizer, void)
import Foreign.C.String (peekCString, peekCStringLen)
import Foreign.Concurrent (newForeignPtr, addForeignPtrFinalizer)
import Graphics.OpenGLES.Base
import Graphics.TextureContainer.KTX
import Linear
import System.IO.Unsafe (unsafePerformIO)
= atomicModifyIORef ' ( buf :) bufferArchive
bufferArchive = unsafePerformIO $ newIORef [ ]
frameCounter :: IORef Int64
frameCounter = unsafePerformIO $ newIORef 0
errorQueue :: Chan String
errorQueue = unsafePerformIO newChan
# NOINLINE errorQueue #
glLog :: String -> IO ()
glLog msg = writeChan errorQueue msg
data GLError = InvalidEnum | InvalidValue | InvalidOperation
| OutOfMemory | InvalidFrameBufferOperation
deriving Show
getError :: GL (Maybe GLError)
getError = unMarshal <$> glGetError
where unMarshal x = case x of
0x0000 -> Nothing
0x0500 -> Just InvalidEnum
0x0501 -> Just InvalidValue
0x0502 -> Just InvalidOperation
0x0505 -> Just OutOfMemory
0x0506 -> Just InvalidFrameBufferOperation
showError :: String -> GL Bool
showError location = do
getError >>= maybe (return False) (\err -> do
glLog ("E " ++ location ++ ": " ++ show err)
return True )
type GLO = IORef GLObj
data GLObj = GLObj GLuint (GL GLObj) (ForeignPtr GLuint)
getObjId glo = fmap go (readIORef glo)
where go (GLObj i _ _) = i
instance Show GLO where
show = show . unsafePerformIO . getObjId
newGLO
:: (GLsizei -> Ptr GLuint -> GL ())
-> (GLsizei -> Ptr GLuint -> GL ())
-> (GLuint -> GL ())
-> GL GLO
newGLO gen del init = do
ref <- newIORef undefined
writeIORef ref =<< genObj gen del init
return ref
| genObj glo glGenBuffers glDeleteBuffers
genObj
:: (GLsizei -> Ptr GLuint -> GL ())
-> (GLsizei -> Ptr GLuint -> GL ())
-> (GLuint -> GL ())
-> GL GLObj
genObj genObjs delObjs initObj = do
fp <- mallocForeignPtr
withForeignPtr fp $ \ptr -> do
genObjs 1 ptr
showError "genObj"
obj <- peek ptr
addForeignPtrFinalizer fp $ do
with obj $ \ptr -> do
delObjs 1 ptr
void $ showError "delObj"
initObj obj
return $ GLObj obj (genObj genObjs delObjs initObj) fp
VertexArray
newtype HalfFloat = HalfFloat Word16 deriving (Num,Storable)
newtype FixedFloat = FixedFloat Int32 deriving (Num,Storable)
3.0
newtype Int2_10x3 = Int210x3 Int32 deriving (Num,Storable)
newtype Word2_10x3 = Word2_10x3 Int32 deriving (Num,Storable)
Renderbuffer
newtype Word4444 = Word4444 Word16 deriving (Num,Storable)
newtype Word5551 = Word5551 Word16 deriving (Num,Storable)
newtype Word565 = Word565 Word16 deriving (Num,Storable)
3.0
newtype Word10f11f11f = Word10f11f11f Word32 deriving (Num,Storable)
newtype Word5999 = Word5999 Word32 deriving (Num,Storable)
newtype Word24_8 = Word24_8 Word32 deriving (Num,Storable)
newtype FloatWord24_8 = FloatWord24_8 (Float, Word32)
class GLType a where
glType :: m a -> Word32
instance GLType Int8 where glType _ = 0x1400
instance GLType Word8 where glType _ = 0x1401
instance GLType Int16 where glType _ = 0x1402
instance GLType Word16 where glType _ = 0x1403
instance GLType Int32 where glType _ = 0x1404
instance GLType Word32 where glType _ = 0x1405
instance GLType Float where glType _ = 0x1406
instance GLType HalfFloat where glType _ = 0x140B
instance GLType FixedFloat where glType _ = 0x140C
instance GLType Int2_10x3 where glType _ = 0x8D9F
instance GLType Word2_10x3 where glType _ = 0x8368
instance GLType Word4444 where glType _ = 0x8033
instance GLType Word5551 where glType _ = 0x8034
instance GLType Word565 where glType _ = 0x8363
instance GLType Word10f11f11f where glType _ = 0x8C3B
instance GLType Word5999 where glType _ = 0x8C3E
instance GLType Word24_8 where glType _ = 0x84FA
instance GLType FloatWord24_8 where glType _ = 0x8DAD
r,rg,rgb,rgba,r_integer,rg_integer,rgb_integer,rgba_integer,
depth_component,depth_stencil :: GLenum
rgb = 0x1907
rgba = 0x1908
depth_component = 0x1902
r = 0x1903
rg = 0x8227
rg_integer = 0x8228
r_integer = 0x8D94
rgb_integer = 0x8D98
rgba_integer = 0x8D99
depth_stencil = 0x84F9
type GLArray a = V.Vector a
data Buffer a = Buffer (IORef (Either (GLArray a) Int)) GLO
DoubleBuffer GLO GLO ( IORef ( GLArray a ) )
newtype BufferUsage = BufferUsage GLenum
newtype BufferSlot = BufferSlot GLenum
* *
newtype DrawMode = DrawMode GLenum
type RenderConfig = GL ()
newtype Capability = Capability GLenum
newtype CullFace = Culling GLenum
newtype CompFunc = CompFunc GLenum
newtype StencilOp = StencilOp GLenum
newtype BlendOp = BlendOp GLenum
newtype BlendingFactor = BlendingFactor GLenum
newtype Hint = Hint GLenum
type ShaderType = GLenum
data Shader = Shader ShaderType GLName B.ByteString
deriving Show
data TransformFeedback =
NoFeedback
| FeedbackArrays [String]
| FeedbackPacked [String]
deriving Show
data Program p = Program
{ programGLO :: GLO
, programTF :: TransformFeedback
, programShaders :: [Shader]
, programVariables :: ([VarDesc], [VarDesc])
} deriving Show
type ProgramBinary = B.ByteString
type VarDesc = (String, (GLint, GLsizei, GLenum))
programDict :: IORef [(String, Program ())]
programDict = unsafePerformIO $ newIORef []
lookupVarDesc :: TypeRep -> IO (Maybe ([VarDesc], [VarDesc]))
lookupVarDesc rep = do
let name = show rep
entry <- lookup name <$> readIORef programDict
case entry of
Nothing -> do
glLog $ "Program '" ++ name ++ "' is not compiled."
return Nothing
Just prog -> return $ Just (programVariables prog)
loadProgram
:: Typeable p
=> Program p
-> (Int -> String -> Maybe ProgramBinary -> GL ())
-> GL (Progress [String] (Program p))
loadProgram prog@(Program glo tf shaders ([],[])) progressLogger = do
let numShaders = length shaders
let progname = show (typeRep prog)
let msg = "Start compiling: " ++ progname
glLog msg
progressLogger 0 msg Nothing
pid <- glCreateProgram
res <- if pid == 0 then do
showError "glCreateProgram"
let msg = "Fatal: glCreateProgram returned 0."
progressLogger (numShaders + 1) msg Nothing
return $ Fixme [msg]
else do
results <- mapM (loadShader progressLogger) (zip [1..] shaders)
let errors = [msg | Fixme [msg] <- results]
res <- if errors /= []
then return $ Fixme errors
else do
forM_ results $ \(Finished sid) -> do
glAttachShader pid sid
showError "glAttachShader"
glLinkProgram pid
showError "glLinkProgram"
postLink progname numShaders prog pid progressLogger
sequence_ [glDeleteShader s | Finished s <- results]
return res
glLog "---------------"
return res
postLink
:: Typeable p
=> String -> Int -> Program p -> GLuint
-> (Int -> String -> Maybe ProgramBinary -> GL ())
-> GL (Progress [String] (Program p))
postLink progname numShaders prog pid
progressLogger = alloca $ \intptr -> do
glGetProgramiv pid c_link_status intptr
linkStatus <- peek intptr
glGetProgramiv pid c_info_log_length intptr
len <- fmap fromIntegral $ peek intptr
info <- allocaBytes len $ \buf -> do
glGetProgramInfoLog pid (fromIntegral len) nullPtr buf
peekCStringLen (buf, len-1)
let info' = if info == "" then "" else '\n':info
if linkStatus == 0 then do
let msg = "Cannot link program " ++ progname ++ info'
glLog msg
progressLogger (numShaders + 1) msg Nothing
glDeleteProgram pid
return $ Fixme [msg]
else do
vars <- getActiveVariables pid
putStrLn . show $ vars
fp <- newForeignPtr nullPtr (glDeleteProgram pid)
writeIORef (programGLO prog) (GLObj pid (error "not impl: Program implicit recompilation") fp)
let msg = "Successfully linked " ++ progname ++ "!" ++ info'
glLog msg
progressLogger (numShaders + 1) msg Nothing
let prog' = prog { programVariables = vars }
atomicModifyIORef' programDict $! \xs ->
((show (typeRep prog), prog'):xs, ())
return $ Finished prog'
c_link_status = 0x8B82
c_info_log_length = 0x8B84
GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257
GL_PROGRAM_BINARY_LENGTH 0x8741
GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE
loadProgramBinary : : Program p - > GLuint - > GL ( )
loadProgramBinary ( Program tf _ ref ) pid = do
bs < - ...
let ( fp , offset , len ) = toForeignPtr bs
withForeignPtr fp $ \p - > do
fmt < - peek ( p ` plusPtr ` offset )
( p ` plusPtr ` ( ) ) ( fromIntegral len )
showError " glProgramBinary "
if err , writeIORef ref Broken
postLink progname pid
GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257
GL_PROGRAM_BINARY_LENGTH 0x8741
GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE
loadProgramBinary :: Program p -> GLuint -> GL ()
loadProgramBinary (Program tf _ ref) pid = do
bs <- ...
let (fp, offset, len) = toForeignPtr bs
withForeignPtr fp $ \p -> do
fmt <- peek (p `plusPtr` offset)
glProgramBinary pid fmt (p `plusPtr` (offset+4)) (fromIntegral len)
showError "glProgramBinary"
if err, writeIORef ref Broken
postLink progname numShaders ref pid
-}
loadShader
:: (Int -> String -> Maybe ProgramBinary -> GL ())
-> (Int, Shader)
-> GL (Progress [String] GLuint)
loadShader progressLogger (i, Shader shaderType name bs) = do
sid <- glCreateShader shaderType
if sid == 0 then do
showError "glCreateShader"
let msg = "Fatal: glCreateShader returned 0."
glLog msg
progressLogger i msg Nothing
return $ Fixme [name ++ ": " ++ msg]
else B.useAsCString bs $ \src -> do
withArray [src] $ \ptr -> do
glShaderSource sid 1 ptr nullPtr
showError "glShaderSource"
glCompileShader sid
showError "glCompileShader"
alloca $ \pint -> do
glGetShaderiv sid c_compile_status pint
compiled <- peek pint
glGetShaderiv sid c_info_log_length pint
len <- fmap fromIntegral $ peek pint
info <- allocaBytes len $ \buf -> do
glGetShaderInfoLog sid (fromIntegral len) nullPtr buf
peekCStringLen (buf, len-1)
let info' = if info == "" then "" else '\n':info
if compiled == 0 then do
let msg = "Could not compile " ++ name ++ info'
glLog msg
progressLogger i msg Nothing
glDeleteShader sid
return $ Fixme [msg]
else do
let msg = name ++ " ... done" ++ info'
glLog msg
progressLogger i msg Nothing
return $ Finished sid
c_compile_status = 0x8B81
getActiveVariables :: GLuint -> GL ([VarDesc], [VarDesc])
getActiveVariables pid = do
sptr <- malloc
glGetProgramiv pid c_active_uniform_max_length sptr
uMaxLen <- peek sptr
glGetProgramiv pid c_active_attribute_max_length sptr
aMaxLen <- peek sptr
let maxlen = max uMaxLen aMaxLen
str <- mallocBytes (fromIntegral maxlen)
glGetProgramiv pid c_active_uniforms sptr
numU <- peek sptr
glGetProgramiv pid c_active_attributes sptr
numA <- peek sptr
tptr <- malloc
uniforms <- forM [0..numU-1] $ \ index -> do
avoid [ 0 .. maxBound ] bug
let i = (fromIntegral :: GLint -> GLuint) index
glGetActiveUniform pid i maxlen nullPtr sptr tptr str
name <- peekCString str
loc <- glGetUniformLocation pid str
size <- peek sptr
typ <- peek tptr
return (name, (loc, size, typ))
attribs <- forM [0..numA-1] $ \index -> do
let i = fromIntegral index
glGetActiveAttrib pid i maxlen nullPtr sptr tptr str
name <- peekCString str
loc <- glGetAttribLocation pid str
size <- peek sptr
typ <- peek tptr
putStrLn . show $ (index, loc)
return (name, (loc, size, typ))
free str; free sptr; free tptr
return (uniforms, attribs)
c_active_uniform_max_length = 0x8B87
c_active_attribute_max_length = 0x8B8A
c_active_uniforms = 0x8B86
c_active_attributes = 0x8B89
( location , length of array or 1 , ptr )
newtype Uniform p a = Uniform (GLint, GLsizei, Ptr ())
class UnifVal a where
glUniform :: (GLint, GLsizei, Ptr ()) -> a -> GL ()
class UnifMat a where
glUnifMat :: GLint -> GLsizei -> GLboolean -> Ptr a -> GL ()
class GLVar m v a where
instance UnifVal a = > GLVar Uniform UniformValue a where
unif $ - value = ( coerce , coerce value )
instance AttrStruct a = > a where
UnifVal a = > ( Uniform p a , a )
UnifStruct a = > ( UniformBlock p a , Buffer a )
GLStruct ? std130 ?
* *
program glsl_type = ( index , size , normalize , divisor )
newtype Attrib p a = Attrib (GLuint, GLsizei, GLboolean, GLuint)
deriving Show
class VertexAttribute a where
glVertexAttrib :: GLuint -> a -> GL ()
| A set of ' VertexAttribute 's packed in a ' Buffer '
class AttrStruct a p b | a -> p where
glVertexBuffer :: a -> Buffer b -> GL ()
| The 3rd argument of glVertexAttribI?Pointer
class GLType a => AttrElement a where
( glo , init )
newtype VertexArray p = VertexArray (GLO, GL ())
newtype VertexPicker = VertexPicker (GLenum -> GL Bool)
instance Monoid VertexPicker where
mempty = VertexPicker (const $ return True)
mappend (VertexPicker f) (VertexPicker g) =
VertexPicker $ \mode ->
f mode >> g mode
class VertexIx a where
vxix :: m a -> (GLenum, GLint)
instance VertexIx Word8 where
vxix _ = (0x1401, 1)
instance VertexIx Word16 where
vxix _ = (0x1403, 2)
instance VertexIx Word32 where
vxix _ = (0x1405, 4)
instance forall v a. VertexIx a => VertexIx (v a) where
vxix _ = vxix (undefined :: v a)
newtype BufferMask = BufferMask GLenum deriving Num
[ MainThread , ]
if Nothing , main GL thread should stop before the next frame .
drawOrExit = unsafePerformIO $ newIORef Nothing
drawQueue :: Chan (GL ())
drawQueue = unsafePerformIO newChan
# NOINLINE drawQueue #
* *
data Framebuffer = Framebuffer (IORef (V2 GLsizei)) GLO
data Renderbuffer a = Renderbuffer GLint GLenum (IORef (V2 GLsizei)) GLO
class Attachable a b where
glAttachToFramebuffer :: GLenum -> a b -> IORef (V2 GLsizei) -> GL ()
defaultFramebuffer :: Framebuffer
defaultFramebuffer = unsafePerformIO $ do
glo <- newIORef $ GLObj 0 undefined undefined
dummy <- newIORef undefined
return $ Framebuffer dummy glo
glo , target , ktx
data Texture a = Texture GLenum (IORef Ktx) GLO
texture_2d, texture_cube_map, texture_2d_array, texture_3d,
texture_cube_map_positive_x :: Word32
texture_2d = 0x0DE1
texture_cube_map = 0x8513
texture_2d_array = 0x8C1A
texture_3d = 0x806F
texture_cube_map_positive_x = 0x8515
|
64ed5d8462394fc25a572a3d76f4d4ecab92ed1b357cc1e28f5281688719bc16 | input-output-hk/plutus-apps | Helpers.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
Not using all CardanoEra
module PlutusScripts.Helpers where
import Cardano.Api qualified as C
import Cardano.Api.Shelley qualified as C
import Codec.Serialise (serialise)
import Data.ByteString qualified as BS (ByteString)
import Data.ByteString.Lazy qualified as LBS
import Data.ByteString.Short qualified as SBS
import Plutus.Script.Utils.Value (CurrencySymbol)
import Plutus.V1.Ledger.Api (MintingPolicy, Validator, unMintingPolicyScript, unValidatorScript)
import Plutus.V1.Ledger.Api qualified as PlutusV1
import Plutus.V1.Ledger.Bytes qualified as P (bytes, fromHex)
import Plutus.V1.Ledger.Scripts (Datum (Datum), Redeemer (Redeemer))
import PlutusTx qualified
import PlutusTx.Builtins qualified as BI
-- | Treat string of hexidecimal bytes literally, without encoding. Useful for hashes.
bytesFromHex :: BS.ByteString -> BS.ByteString
bytesFromHex = P.bytes . fromEither . P.fromHex
where
fromEither (Left e) = error $ show e
fromEither (Right b) = b
| Default execution units with zero values . Needed for valid script witness in txbody .
-- Useful when exunits are automatically balanced.
defExecutionUnits :: C.ExecutionUnits
defExecutionUnits = C.ExecutionUnits {C.executionSteps = 0, C.executionMemory = 0 }
| Any data to ScriptData . Used for script datum and redeemer .
toScriptData :: PlutusTx.ToData a => a -> C.ScriptData
toScriptData a = C.fromPlutusData $ PlutusTx.toData a
asRedeemer :: PlutusTx.ToData a => a -> Redeemer
asRedeemer a = Redeemer $ PlutusTx.dataToBuiltinData $ PlutusTx.toData a
asDatum :: PlutusTx.ToData a => a -> Datum
asDatum a = Datum $ PlutusTx.dataToBuiltinData $ PlutusTx.toData a
plutusL1 :: C.ScriptLanguage C.PlutusScriptV1
plutusL1 = C.PlutusScriptLanguage C.PlutusScriptV1
plutusL2 :: C.ScriptLanguage C.PlutusScriptV2
plutusL2 = C.PlutusScriptLanguage C.PlutusScriptV2
| Witness token mint for including in txbody 's txMintValue .
Provide either the script or for reference script to include in witness .
Zero execution units can only be used with convenience build function .
mintScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage lang
-> Either (C.PlutusScript lang) C.TxIn -- either script or reference to script
-> C.ScriptData
-> C.ScriptWitness C.WitCtxMint era
V1 script
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV1) eScript redeemer =
mintScriptWitness' era lang eScript redeemer defExecutionUnits
V2 script
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PScript script) C.NoScriptDatumForMint redeemer defExecutionUnits
-- V2 reference script
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PReferenceScript refTxIn Nothing) C.NoScriptDatumForMint redeemer defExecutionUnits
-- Witness token mint with explicit execution units. Used when building raw txbody content.
mintScriptWitness' :: C.CardanoEra era
-> C.ScriptLanguage lang
-> Either (C.PlutusScript lang) C.TxIn -- either script or reference to script
-> C.ScriptData
-> C.ExecutionUnits
-> C.ScriptWitness C.WitCtxMint era
V1 script
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV1) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV1 (C.PScript script) C.NoScriptDatumForMint redeemer
V2 script
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PScript script) C.NoScriptDatumForMint redeemer
-- V2 reference script
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PReferenceScript refTxIn Nothing) C.NoScriptDatumForMint redeemer
spendScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage lang
-> Either (C.PlutusScript lang) C.TxIn -- either script or reference to script
-> (C.ScriptDatum C.WitCtxTxIn)
-> C.ScriptData
-> C.ScriptWitness C.WitCtxTxIn era
-- V2 reference script
spendScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) datumWit redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PReferenceScript refTxIn Nothing) datumWit redeemer defExecutionUnits -- tried with (Just scriptHash) instead of Nothing because hash isn't needed?
-- | Produce ScriptLanguageInEra. Throw error when era doesn't support the script language.
maybeScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage l
-> Maybe (C.ScriptLanguageInEra l era)
-> C.ScriptLanguageInEra l era
maybeScriptWitness era lang Nothing = error $ "Era " ++ show era
++ " does not support script language " ++ show lang
maybeScriptWitness _ _ (Just p) = p
-- | Serialised plutus script from minting policy
policyScript :: MintingPolicy -> C.PlutusScript lang
policyScript = C.PlutusScriptSerialised . SBS.toShort . LBS.toStrict . serialise . unMintingPolicyScript
-- | Serialised plutus script from validator
validatorScript :: Validator -> C.PlutusScript lang
validatorScript = C.PlutusScriptSerialised . SBS.toShort . LBS.toStrict . serialise . unValidatorScript
| V1 Script to general Script , Needed for producing reference script .
unPlutusScriptV1 :: C.PlutusScript C.PlutusScriptV1 -> C.Script C.PlutusScriptV1
unPlutusScriptV1 = C.PlutusScript C.PlutusScriptV1
| V2 Script to general Script , Needed for producing reference script .
unPlutusScriptV2 :: C.PlutusScript C.PlutusScriptV2 -> C.Script C.PlutusScriptV2
unPlutusScriptV2 = C.PlutusScript C.PlutusScriptV2
-- | PolicyId of a V1 minting policy
policyIdV1 :: MintingPolicy -> C.PolicyId
policyIdV1 = C.scriptPolicyId . unPlutusScriptV1 . policyScript
| PolicyId of a V2 minting policy
policyIdV2 :: MintingPolicy -> C.PolicyId
policyIdV2 = C.scriptPolicyId . unPlutusScriptV2 . policyScript
fromPolicyId :: C.PolicyId -> CurrencySymbol
fromPolicyId (C.PolicyId hash) = PlutusV1.CurrencySymbol . BI.toBuiltin $ C.serialiseToRawBytes hash
| null | https://raw.githubusercontent.com/input-output-hk/plutus-apps/f54eca87a81a67a86fbadae4bb32682d59f4ea02/plutus-e2e-tests/test/PlutusScripts/Helpers.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE OverloadedStrings #
| Treat string of hexidecimal bytes literally, without encoding. Useful for hashes.
Useful when exunits are automatically balanced.
either script or reference to script
V2 reference script
Witness token mint with explicit execution units. Used when building raw txbody content.
either script or reference to script
V2 reference script
either script or reference to script
V2 reference script
tried with (Just scriptHash) instead of Nothing because hash isn't needed?
| Produce ScriptLanguageInEra. Throw error when era doesn't support the script language.
| Serialised plutus script from minting policy
| Serialised plutus script from validator
| PolicyId of a V1 minting policy | # LANGUAGE ScopedTypeVariables #
Not using all CardanoEra
module PlutusScripts.Helpers where
import Cardano.Api qualified as C
import Cardano.Api.Shelley qualified as C
import Codec.Serialise (serialise)
import Data.ByteString qualified as BS (ByteString)
import Data.ByteString.Lazy qualified as LBS
import Data.ByteString.Short qualified as SBS
import Plutus.Script.Utils.Value (CurrencySymbol)
import Plutus.V1.Ledger.Api (MintingPolicy, Validator, unMintingPolicyScript, unValidatorScript)
import Plutus.V1.Ledger.Api qualified as PlutusV1
import Plutus.V1.Ledger.Bytes qualified as P (bytes, fromHex)
import Plutus.V1.Ledger.Scripts (Datum (Datum), Redeemer (Redeemer))
import PlutusTx qualified
import PlutusTx.Builtins qualified as BI
bytesFromHex :: BS.ByteString -> BS.ByteString
bytesFromHex = P.bytes . fromEither . P.fromHex
where
fromEither (Left e) = error $ show e
fromEither (Right b) = b
| Default execution units with zero values . Needed for valid script witness in txbody .
defExecutionUnits :: C.ExecutionUnits
defExecutionUnits = C.ExecutionUnits {C.executionSteps = 0, C.executionMemory = 0 }
| Any data to ScriptData . Used for script datum and redeemer .
toScriptData :: PlutusTx.ToData a => a -> C.ScriptData
toScriptData a = C.fromPlutusData $ PlutusTx.toData a
asRedeemer :: PlutusTx.ToData a => a -> Redeemer
asRedeemer a = Redeemer $ PlutusTx.dataToBuiltinData $ PlutusTx.toData a
asDatum :: PlutusTx.ToData a => a -> Datum
asDatum a = Datum $ PlutusTx.dataToBuiltinData $ PlutusTx.toData a
plutusL1 :: C.ScriptLanguage C.PlutusScriptV1
plutusL1 = C.PlutusScriptLanguage C.PlutusScriptV1
plutusL2 :: C.ScriptLanguage C.PlutusScriptV2
plutusL2 = C.PlutusScriptLanguage C.PlutusScriptV2
| Witness token mint for including in txbody 's txMintValue .
Provide either the script or for reference script to include in witness .
Zero execution units can only be used with convenience build function .
mintScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage lang
-> C.ScriptData
-> C.ScriptWitness C.WitCtxMint era
V1 script
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV1) eScript redeemer =
mintScriptWitness' era lang eScript redeemer defExecutionUnits
V2 script
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PScript script) C.NoScriptDatumForMint redeemer defExecutionUnits
mintScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PReferenceScript refTxIn Nothing) C.NoScriptDatumForMint redeemer defExecutionUnits
mintScriptWitness' :: C.CardanoEra era
-> C.ScriptLanguage lang
-> C.ScriptData
-> C.ExecutionUnits
-> C.ScriptWitness C.WitCtxMint era
V1 script
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV1) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV1 (C.PScript script) C.NoScriptDatumForMint redeemer
V2 script
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Left script) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PScript script) C.NoScriptDatumForMint redeemer
mintScriptWitness' era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
C.PlutusScriptV2 (C.PReferenceScript refTxIn Nothing) C.NoScriptDatumForMint redeemer
spendScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage lang
-> (C.ScriptDatum C.WitCtxTxIn)
-> C.ScriptData
-> C.ScriptWitness C.WitCtxTxIn era
spendScriptWitness era lang@(C.PlutusScriptLanguage C.PlutusScriptV2) (Right refTxIn) datumWit redeemer = do
C.PlutusScriptWitness (maybeScriptWitness era lang $ C.scriptLanguageSupportedInEra era lang)
maybeScriptWitness :: C.CardanoEra era
-> C.ScriptLanguage l
-> Maybe (C.ScriptLanguageInEra l era)
-> C.ScriptLanguageInEra l era
maybeScriptWitness era lang Nothing = error $ "Era " ++ show era
++ " does not support script language " ++ show lang
maybeScriptWitness _ _ (Just p) = p
policyScript :: MintingPolicy -> C.PlutusScript lang
policyScript = C.PlutusScriptSerialised . SBS.toShort . LBS.toStrict . serialise . unMintingPolicyScript
validatorScript :: Validator -> C.PlutusScript lang
validatorScript = C.PlutusScriptSerialised . SBS.toShort . LBS.toStrict . serialise . unValidatorScript
| V1 Script to general Script , Needed for producing reference script .
unPlutusScriptV1 :: C.PlutusScript C.PlutusScriptV1 -> C.Script C.PlutusScriptV1
unPlutusScriptV1 = C.PlutusScript C.PlutusScriptV1
| V2 Script to general Script , Needed for producing reference script .
unPlutusScriptV2 :: C.PlutusScript C.PlutusScriptV2 -> C.Script C.PlutusScriptV2
unPlutusScriptV2 = C.PlutusScript C.PlutusScriptV2
policyIdV1 :: MintingPolicy -> C.PolicyId
policyIdV1 = C.scriptPolicyId . unPlutusScriptV1 . policyScript
| PolicyId of a V2 minting policy
policyIdV2 :: MintingPolicy -> C.PolicyId
policyIdV2 = C.scriptPolicyId . unPlutusScriptV2 . policyScript
fromPolicyId :: C.PolicyId -> CurrencySymbol
fromPolicyId (C.PolicyId hash) = PlutusV1.CurrencySymbol . BI.toBuiltin $ C.serialiseToRawBytes hash
|
35b629421bdfd064ff6760afdb1658d7f1a682e721cffbb254de40c63e92a58d | tonyg/kali-scheme | pipe.scm | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1996 by NEC Research Institute , Inc. See file COPYING .
Scheme analogues of popen ( ) and pclose ( ) library calls .
(define (call-with-mumble-pipe input?)
(lambda (command proc)
(call-with-values pipe
(lambda (pipe-for-read pipe-for-write)
(let ((winner (if input? pipe-for-read pipe-for-write))
(loser (if input? pipe-for-write pipe-for-read))
(pid (fork)))
(if (= pid 0)
(dynamic-wind
(lambda () #f)
(lambda ()
(close winner)
(let ((foo (if input? 1 0)))
(close foo)
(if (not (= (dup loser) foo))
(error "dup lost" loser foo)))
(execv "/bin/sh"
(vector "sh" "-c" command)))
(lambda () (exit 1))))
;; (write `(pid = ,pid)) (newline)
(close loser)
(let* ((channel (open-channel winner
(if input?
(enum open-channel-option
raw-input-channel)
(enum open-channel-option
raw-output-channel))))
(port (if input?
(input-channel->port channel 1024)
(output-channel->port channel 1024))))
(call-with-values (lambda () (proc port))
(lambda vals
(if input?
(close-input-port port)
(close-output-port port))
;; (display "Waiting.") (newline)
(call-with-values (lambda () (waitpid pid 0))
(lambda (pid status)
;; (write `(status = ,status)) (newline)
(apply values vals)))))))))))
(define call-with-input-pipe
(call-with-mumble-pipe #t))
(define call-with-output-pipe
(call-with-mumble-pipe #f))
| null | https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/scheme/misc/pipe.scm | scheme | (write `(pid = ,pid)) (newline)
(display "Waiting.") (newline)
(write `(status = ,status)) (newline) | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1996 by NEC Research Institute , Inc. See file COPYING .
Scheme analogues of popen ( ) and pclose ( ) library calls .
(define (call-with-mumble-pipe input?)
(lambda (command proc)
(call-with-values pipe
(lambda (pipe-for-read pipe-for-write)
(let ((winner (if input? pipe-for-read pipe-for-write))
(loser (if input? pipe-for-write pipe-for-read))
(pid (fork)))
(if (= pid 0)
(dynamic-wind
(lambda () #f)
(lambda ()
(close winner)
(let ((foo (if input? 1 0)))
(close foo)
(if (not (= (dup loser) foo))
(error "dup lost" loser foo)))
(execv "/bin/sh"
(vector "sh" "-c" command)))
(lambda () (exit 1))))
(close loser)
(let* ((channel (open-channel winner
(if input?
(enum open-channel-option
raw-input-channel)
(enum open-channel-option
raw-output-channel))))
(port (if input?
(input-channel->port channel 1024)
(output-channel->port channel 1024))))
(call-with-values (lambda () (proc port))
(lambda vals
(if input?
(close-input-port port)
(close-output-port port))
(call-with-values (lambda () (waitpid pid 0))
(lambda (pid status)
(apply values vals)))))))))))
(define call-with-input-pipe
(call-with-mumble-pipe #t))
(define call-with-output-pipe
(call-with-mumble-pipe #f))
|
1fbaeb6ea22d5c5d84265bdbcacf2df5418b14878a0956697b74d37543cc20aa | jeromesimeon/Galax | norm_overloaded.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : norm_overloaded.ml , v 1.10 2007/05/21 20:22:40 mff Exp $
Module : Norm_overloaded
Description :
This modules implements some support for built - in overloaded
functions in XQuery 1.0 and XPath 2.0 .
Description:
This modules implements some support for built-in overloaded
functions in XQuery 1.0 and XPath 2.0.
*)
open Namespace_names
open Namespace_util
open Namespace_builtin
open Xquery_common_ast
open Xquery_core_ast
open Datatypes
open Error
Mapping tables for overloaded functions .
: The order of functions in the lists below is significant !
For a given overloaded function , its corresponding non - overloaded
functions are added to the hash table in the _ reverse _ order of
precedence , so the function with the most specific signature is
returned first and the function with the least specific signature
( i.e. , the overloaded function itself ) is returned last .
NB: The order of functions in the lists below is significant!
For a given overloaded function, its corresponding non-overloaded
functions are added to the hash table in the _reverse_ order of
precedence, so the function with the most specific signature is
returned first and the function with the least specific signature
(i.e., the overloaded function itself) is returned last.
*)
let op_numeric_add_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_add, 2);
(op_double_add, 2);
(op_float_add, 2);
(op_decimal_add, 2);
(op_integer_add, 2);
(op_add_yearMonthDuration_to_date, 2);
(op_add_yearMonthDuration_to_date2, 2);
(op_add_dayTimeDuration_to_date, 2);
(op_add_dayTimeDuration_to_date2, 2);
(op_add_dayTimeDuration_to_time, 2);
(op_add_dayTimeDuration_to_time2, 2);
(op_add_yearMonthDuration_to_dateTime, 2);
(op_add_yearMonthDuration_to_dateTime2, 2);
(op_add_dayTimeDuration_to_dateTime, 2);
(op_add_dayTimeDuration_to_dateTime2, 2);
(op_add_yearMonthDurations, 2);
(op_add_dayTimeDurations, 2) ]
let op_numeric_subtract_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_subtract, 2);
(op_double_subtract, 2);
(op_float_subtract, 2);
(op_decimal_subtract, 2);
(op_integer_subtract, 2);
(op_subtract_yearMonthDuration_from_dateTime, 2);
(op_subtract_dayTimeDuration_from_dateTime, 2);
(op_subtract_dateTimes, 2);
(op_subtract_dates, 2);
(op_subtract_times, 2);
(op_subtract_dayTimeDuration_from_time, 2);
(op_subtract_dayTimeDuration_from_date, 2);
(op_subtract_yearMonthDuration_from_date, 2);
(op_subtract_yearMonthDurations, 2);
(op_subtract_dayTimeDurations, 2) ]
let op_numeric_multiply_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_multiply, 2);
(op_double_multiply, 2);
(op_float_multiply, 2);
(op_decimal_multiply, 2);
(op_integer_multiply, 2);
(op_multiply_yearMonthDuration, 2);
(op_multiply_yearMonthDuration2, 2);
(op_multiply_dayTimeDuration, 2);
(op_multiply_dayTimeDuration2, 2) ]
let op_numeric_divide_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_divide, 2);
(op_double_divide, 2);
(op_float_divide, 2);
(op_decimal_divide, 2);
(op_integer_divide, 2);
(op_divide_yearMonthDuration, 2);
(op_divide_yearMonthDuration_by_yearMonthDuration, 2);
(op_divide_dayTimeDuration, 2);
(op_divide_dayTimeDuration_by_dayTimeDuration, 2) ]
let op_numeric_mod_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_mod, 2);
(op_double_mod, 2);
(op_float_mod, 2);
(op_decimal_mod, 2);
(op_integer_mod, 2) ]
let op_numeric_unary_plus_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_unary_plus, 1);
(op_double_unary_plus, 1);
(op_float_unary_plus, 1);
(op_decimal_unary_plus, 1);
(op_integer_unary_plus, 1) ]
let op_numeric_unary_minus_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_unary_minus, 1);
(op_double_unary_minus, 1);
(op_float_unary_minus, 1);
(op_decimal_unary_minus, 1);
(op_integer_unary_minus, 1) ]
let op_numeric_idivide_table =
[ (* The overloaded function itself is the least specific *)
(op_numeric_idivide, 2);
(op_double_idivide, 2);
(op_float_idivide, 2);
(op_decimal_idivide, 2);
(op_integer_idivide, 2) ]
let op_equal_table =
[ (* The overloaded function itself is the least specific *)
(op_equal, 2);
(op_equal_left_empty, 2);
(op_equal_right_empty, 2);
(op_double_equal, 2);
(op_float_equal, 2);
(op_decimal_equal, 2);
(op_integer_equal, 2);
(op_string_equal, 2);
(op_QName_equal, 2);
(op_anyURI_equal, 2);
(op_boolean_equal, 2);
(op_date_equal, 2);
(op_gYearMonth_equal, 2);
(op_gYear_equal, 2);
(op_gMonthDay_equal, 2);
(op_gDay_equal, 2);
(op_gMonth_equal, 2);
(op_time_equal, 2);
(op_dateTime_equal, 2);
(op_duration_equal, 2);
(op_yearMonthDuration_equal, 2);
(op_dayTimeDuration_equal, 2);
(op_hexBinary_equal, 2);
(op_base64Binary_equal, 2) ]
let op_nequal_table =
[ (* The overloaded function itself is the least specific *)
(op_nequal, 2);
(op_nequal_left_empty, 2);
(op_nequal_right_empty, 2);
(op_double_nequal, 2);
(op_float_nequal, 2);
(op_decimal_nequal, 2);
(op_integer_nequal, 2);
(op_string_nequal, 2);
(op_QName_nequal, 2);
(op_anyURI_nequal, 2);
(op_boolean_nequal, 2);
(op_date_nequal, 2);
(op_gYearMonth_nequal, 2);
(op_gYear_nequal, 2);
(op_gMonthDay_nequal, 2);
(op_gDay_nequal, 2);
(op_gMonth_nequal, 2);
(op_time_nequal, 2);
(op_dateTime_nequal, 2);
(op_duration_nequal, 2);
(op_yearMonthDuration_nequal, 2);
(op_dayTimeDuration_nequal, 2);
(op_hexBinary_nequal, 2);
(op_base64Binary_nequal, 2) ]
let op_lt_table =
[ (* The overloaded function itself is the least specific *)
(op_lt, 2);
(op_lt_left_empty, 2);
(op_lt_right_empty, 2);
(op_double_lt, 2);
(op_float_lt, 2);
(op_decimal_lt, 2);
(op_integer_lt, 2);
(op_string_lt, 2);
(op_boolean_lt, 2);
(op_date_lt, 2);
(op_time_lt, 2);
(op_dateTime_lt, 2);
(op_yearMonthDuration_lt, 2);
(op_dayTimeDuration_lt, 2) ]
let op_gt_table =
[ (* The overloaded function itself is the least specific *)
(op_gt, 2);
(op_gt_left_empty, 2);
(op_gt_right_empty, 2);
(op_double_gt, 2);
(op_float_gt, 2);
(op_decimal_gt, 2);
(op_integer_gt, 2);
(op_string_gt, 2);
(op_boolean_gt, 2);
(op_date_gt, 2);
(op_time_gt, 2);
(op_dateTime_gt, 2);
(op_yearMonthDuration_gt, 2);
(op_dayTimeDuration_gt, 2) ]
let op_le_table =
[ (* The overloaded function itself is the least specific *)
(op_le, 2);
(op_le_left_empty, 2);
(op_le_right_empty, 2);
(op_double_le, 2);
(op_float_le, 2);
(op_decimal_le, 2);
(op_integer_le, 2);
(op_string_le, 2);
(op_boolean_le, 2);
(op_date_le, 2);
(op_time_le, 2);
(op_dateTime_le, 2);
(op_yearMonthDuration_le, 2);
(op_dayTimeDuration_le, 2) ]
let op_ge_table =
[ (* The overloaded function itself is the least specific *)
(op_ge, 2);
(op_ge_left_empty, 2);
(op_ge_right_empty, 2);
(op_double_ge, 2);
(op_float_ge, 2);
(op_decimal_ge, 2);
(op_integer_ge, 2);
(op_string_ge, 2);
(op_boolean_ge, 2);
(op_date_ge, 2);
(op_time_ge, 2);
(op_dateTime_ge, 2);
(op_yearMonthDuration_ge, 2);
(op_dayTimeDuration_ge, 2) ]
let fn_floor_table =
[ (* The overloaded function itself is the least specific *)
(fn_floor, 1);
(fn_floor_double, 1);
(fn_floor_float, 1);
(fn_floor_decimal, 1);
(fn_floor_integer, 1) ]
let fn_ceiling_table =
[ (* The overloaded function itself is the least specific *)
(fn_ceiling, 1);
(fn_ceiling_double, 1);
(fn_ceiling_float, 1);
(fn_ceiling_decimal, 1);
(fn_ceiling_integer, 1) ]
let fn_round_table =
[ (* The overloaded function itself is the least specific *)
(fn_round, 1);
(fn_round_double, 1);
(fn_round_float, 1);
(fn_round_decimal, 1);
(fn_round_integer, 1) ]
let fn_round_half_to_even_table =
[ (fn_round_half_to_even_double, 2);
(fn_round_half_to_even_float, 2);
(fn_round_half_to_even_decimal, 2);
(fn_round_half_to_even_integer, 2) ]
let fn_abs_table =
[ (* The overloaded function itself is the least specific *)
(* (fn_abs, 1); *)
(fn_abs_double, 1);
(fn_abs_float, 1);
(fn_abs_decimal, 1);
(fn_abs_integer, 1) ]
let fn_avg_table =
[ (* The overloaded function itself is the least specific *)
(* (fn_avg, 1); *)
(fn_avg_double, 1);
(fn_avg_float, 1);
(fn_avg_decimal, 1);
(fn_avg_integer, 1);
(fn_avg_yearMonthDuration, 1);
(fn_avg_dayTimeDuration, 1) ]
let fn_max_table =
[ (* The overloaded function itself is the least specific *)
(fn_max_double, 1);
(fn_max_float, 1);
(fn_max_decimal, 1);
(fn_max_integer, 1);
(fn_max_string, 1);
(fn_max_date, 1);
(fn_max_time, 1);
(fn_max_dateTime, 1);
(fn_max_yearMonthDuration, 1);
(fn_max_dayTimeDuration, 1); ]
let fn_min_table =
[ (* The overloaded function itself is the least specific *)
(fn_min_double, 1);
(fn_min_float, 1);
(fn_min_decimal, 1);
(fn_min_integer, 1);
(fn_min_string, 1);
(fn_min_date, 1);
(fn_min_time, 1);
(fn_min_dateTime, 1);
(fn_min_yearMonthDuration, 1);
(fn_min_dayTimeDuration, 1) ]
let fn_sum_table_one =
[ (fn_sum_double, 1);
(fn_sum_float, 1);
(fn_sum_decimal, 1);
(fn_sum_yearMonthDuration, 1);
(fn_sum_dayTimeDuration, 1);
THIS NEEDS TO BE FIRST AS THIS IS THE TYPE FOR THE DEFAULT VALUE IF THE INPUT IS EMPTY
let fn_sum_table =
[ (fn_sum_double, 2);
(fn_sum_float, 2);
(fn_sum_decimal, 2);
(fn_sum_integer, 2);
(fn_sum_yearMonthDuration, 2);
(fn_sum_dayTimeDuration, 2) ]
(* Each function table should be indexed by its input types *)
let overloaded_functions =
[ (op_numeric_add, 2), op_numeric_add_table;
(op_numeric_subtract, 2), op_numeric_subtract_table;
(op_numeric_multiply, 2), op_numeric_multiply_table;
(op_numeric_divide, 2), op_numeric_divide_table;
(op_numeric_mod, 2), op_numeric_mod_table;
(op_numeric_unary_plus, 1), op_numeric_unary_plus_table;
(op_numeric_unary_minus, 1), op_numeric_unary_minus_table;
(op_numeric_idivide, 2), op_numeric_idivide_table;
(op_equal, 2), op_equal_table;
(op_nequal, 2), op_nequal_table;
(op_lt, 2), op_lt_table;
(op_gt, 2), op_gt_table;
(op_le, 2), op_le_table;
(op_ge, 2), op_ge_table;
(fn_floor, 1), fn_floor_table;
(fn_ceiling, 1), fn_ceiling_table;
(fn_round, 1), fn_round_table;
(fn_round_half_to_even, 2), fn_round_half_to_even_table;
(fn_abs, 1), fn_abs_table;
(fn_avg, 1), fn_avg_table;
(fn_max, 1), fn_max_table;
(fn_min, 1), fn_min_table;
(fn_sum, 1), fn_sum_table_one;
(fn_sum, 2), fn_sum_table ]
let overloaded_functions_default_type = [
(op_numeric_add, ATDouble);
(op_numeric_subtract, ATDouble);
(op_numeric_multiply, ATDouble);
(op_numeric_divide, ATDouble);
(op_numeric_mod, ATDouble);
(op_numeric_unary_plus, ATDouble);
(op_numeric_unary_minus, ATDouble);
(op_numeric_idivide, ATDouble);
(op_equal, ATString);
(op_nequal, ATString);
(op_lt, ATString);
(op_gt, ATString);
(op_le, ATString);
(op_ge, ATString);
(fn_avg, ATDouble);
(fn_max, ATDouble);
(fn_min, ATDouble);
(fn_sum, ATDouble);
(fn_floor, ATDouble);
(fn_ceiling, ATDouble);
(fn_round, ATDouble);
(fn_round_half_to_even, ATDouble);
(fn_abs, ATDouble)
]
let overloaded_default_type_table =
RQNameHashtbl.create 167
let add_to_overloaded_default_type_table (cfname, default_type) =
RQNameHashtbl.add overloaded_default_type_table cfname default_type
let bulk_add_to_overloaded_functions_table table =
List.iter add_to_overloaded_default_type_table table
let _ =
bulk_add_to_overloaded_functions_table overloaded_functions_default_type
let lookup_default_atomic_type cfname =
try
RQNameHashtbl.find overloaded_default_type_table cfname
with
| Not_found -> raise(Query(Internal_Error("Default type of overloaded function "^(prefixed_string_of_rqname cfname)^"not found")))
(* The internal hashtable for overloaded functions *)
(* Note:
Those functions are built-in in the semantics of XQuery, so that
is ok to leave them as a global variable in the system.
- Jerome
*)
let overloaded_functions_table =
RQNameIntHashtbl.create 167
let add_to_overloaded_functions_table cfname1 cfname2 =
RQNameIntHashtbl.add overloaded_functions_table cfname1 cfname2
let bulk_add_to_overloaded_functions_table (fname1,fnamelist2) =
List.iter (add_to_overloaded_functions_table fname1) fnamelist2
let _ =
List.iter bulk_add_to_overloaded_functions_table overloaded_functions
(* Is a function overloaded ? *)
let is_overloaded cfname_arity =
RQNameIntHashtbl.mem overloaded_functions_table cfname_arity
(* What are all the functions a given overloaded function corresponds to? *)
let all_overloaded_bindings_for_function cfname_arity =
RQNameIntHashtbl.find_all overloaded_functions_table cfname_arity
let resolve_non_overloaded_name norm_context fname =
let (fun_sig, opt_fun_kind, upd) = Norm_context.one_sig_from_norm_context norm_context fname in
(fst fname, fun_sig, opt_fun_kind, upd)
let table_for_overloaded_function norm_context fname =
if is_overloaded fname
then
let all_bindings = all_overloaded_bindings_for_function fname in
(List.map (resolve_non_overloaded_name norm_context) all_bindings)
else
raise (Query (Internal_Error "Not constructing an overloaded function!"))
let table_for_op_gt norm_context =
let fname = (op_gt,2) in
table_for_overloaded_function norm_context fname
let table_for_op_equal norm_context =
let fname = (op_equal,2) in
table_for_overloaded_function norm_context fname
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/normalization/norm_overloaded.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
The overloaded function itself is the least specific
(fn_abs, 1);
The overloaded function itself is the least specific
(fn_avg, 1);
The overloaded function itself is the least specific
The overloaded function itself is the least specific
Each function table should be indexed by its input types
The internal hashtable for overloaded functions
Note:
Those functions are built-in in the semantics of XQuery, so that
is ok to leave them as a global variable in the system.
- Jerome
Is a function overloaded ?
What are all the functions a given overloaded function corresponds to? | Copyright 2001 - 2007 .
$ I d : norm_overloaded.ml , v 1.10 2007/05/21 20:22:40 mff Exp $
Module : Norm_overloaded
Description :
This modules implements some support for built - in overloaded
functions in XQuery 1.0 and XPath 2.0 .
Description:
This modules implements some support for built-in overloaded
functions in XQuery 1.0 and XPath 2.0.
*)
open Namespace_names
open Namespace_util
open Namespace_builtin
open Xquery_common_ast
open Xquery_core_ast
open Datatypes
open Error
Mapping tables for overloaded functions .
: The order of functions in the lists below is significant !
For a given overloaded function , its corresponding non - overloaded
functions are added to the hash table in the _ reverse _ order of
precedence , so the function with the most specific signature is
returned first and the function with the least specific signature
( i.e. , the overloaded function itself ) is returned last .
NB: The order of functions in the lists below is significant!
For a given overloaded function, its corresponding non-overloaded
functions are added to the hash table in the _reverse_ order of
precedence, so the function with the most specific signature is
returned first and the function with the least specific signature
(i.e., the overloaded function itself) is returned last.
*)
let op_numeric_add_table =
(op_numeric_add, 2);
(op_double_add, 2);
(op_float_add, 2);
(op_decimal_add, 2);
(op_integer_add, 2);
(op_add_yearMonthDuration_to_date, 2);
(op_add_yearMonthDuration_to_date2, 2);
(op_add_dayTimeDuration_to_date, 2);
(op_add_dayTimeDuration_to_date2, 2);
(op_add_dayTimeDuration_to_time, 2);
(op_add_dayTimeDuration_to_time2, 2);
(op_add_yearMonthDuration_to_dateTime, 2);
(op_add_yearMonthDuration_to_dateTime2, 2);
(op_add_dayTimeDuration_to_dateTime, 2);
(op_add_dayTimeDuration_to_dateTime2, 2);
(op_add_yearMonthDurations, 2);
(op_add_dayTimeDurations, 2) ]
let op_numeric_subtract_table =
(op_numeric_subtract, 2);
(op_double_subtract, 2);
(op_float_subtract, 2);
(op_decimal_subtract, 2);
(op_integer_subtract, 2);
(op_subtract_yearMonthDuration_from_dateTime, 2);
(op_subtract_dayTimeDuration_from_dateTime, 2);
(op_subtract_dateTimes, 2);
(op_subtract_dates, 2);
(op_subtract_times, 2);
(op_subtract_dayTimeDuration_from_time, 2);
(op_subtract_dayTimeDuration_from_date, 2);
(op_subtract_yearMonthDuration_from_date, 2);
(op_subtract_yearMonthDurations, 2);
(op_subtract_dayTimeDurations, 2) ]
let op_numeric_multiply_table =
(op_numeric_multiply, 2);
(op_double_multiply, 2);
(op_float_multiply, 2);
(op_decimal_multiply, 2);
(op_integer_multiply, 2);
(op_multiply_yearMonthDuration, 2);
(op_multiply_yearMonthDuration2, 2);
(op_multiply_dayTimeDuration, 2);
(op_multiply_dayTimeDuration2, 2) ]
let op_numeric_divide_table =
(op_numeric_divide, 2);
(op_double_divide, 2);
(op_float_divide, 2);
(op_decimal_divide, 2);
(op_integer_divide, 2);
(op_divide_yearMonthDuration, 2);
(op_divide_yearMonthDuration_by_yearMonthDuration, 2);
(op_divide_dayTimeDuration, 2);
(op_divide_dayTimeDuration_by_dayTimeDuration, 2) ]
let op_numeric_mod_table =
(op_numeric_mod, 2);
(op_double_mod, 2);
(op_float_mod, 2);
(op_decimal_mod, 2);
(op_integer_mod, 2) ]
let op_numeric_unary_plus_table =
(op_numeric_unary_plus, 1);
(op_double_unary_plus, 1);
(op_float_unary_plus, 1);
(op_decimal_unary_plus, 1);
(op_integer_unary_plus, 1) ]
let op_numeric_unary_minus_table =
(op_numeric_unary_minus, 1);
(op_double_unary_minus, 1);
(op_float_unary_minus, 1);
(op_decimal_unary_minus, 1);
(op_integer_unary_minus, 1) ]
let op_numeric_idivide_table =
(op_numeric_idivide, 2);
(op_double_idivide, 2);
(op_float_idivide, 2);
(op_decimal_idivide, 2);
(op_integer_idivide, 2) ]
let op_equal_table =
(op_equal, 2);
(op_equal_left_empty, 2);
(op_equal_right_empty, 2);
(op_double_equal, 2);
(op_float_equal, 2);
(op_decimal_equal, 2);
(op_integer_equal, 2);
(op_string_equal, 2);
(op_QName_equal, 2);
(op_anyURI_equal, 2);
(op_boolean_equal, 2);
(op_date_equal, 2);
(op_gYearMonth_equal, 2);
(op_gYear_equal, 2);
(op_gMonthDay_equal, 2);
(op_gDay_equal, 2);
(op_gMonth_equal, 2);
(op_time_equal, 2);
(op_dateTime_equal, 2);
(op_duration_equal, 2);
(op_yearMonthDuration_equal, 2);
(op_dayTimeDuration_equal, 2);
(op_hexBinary_equal, 2);
(op_base64Binary_equal, 2) ]
let op_nequal_table =
(op_nequal, 2);
(op_nequal_left_empty, 2);
(op_nequal_right_empty, 2);
(op_double_nequal, 2);
(op_float_nequal, 2);
(op_decimal_nequal, 2);
(op_integer_nequal, 2);
(op_string_nequal, 2);
(op_QName_nequal, 2);
(op_anyURI_nequal, 2);
(op_boolean_nequal, 2);
(op_date_nequal, 2);
(op_gYearMonth_nequal, 2);
(op_gYear_nequal, 2);
(op_gMonthDay_nequal, 2);
(op_gDay_nequal, 2);
(op_gMonth_nequal, 2);
(op_time_nequal, 2);
(op_dateTime_nequal, 2);
(op_duration_nequal, 2);
(op_yearMonthDuration_nequal, 2);
(op_dayTimeDuration_nequal, 2);
(op_hexBinary_nequal, 2);
(op_base64Binary_nequal, 2) ]
let op_lt_table =
(op_lt, 2);
(op_lt_left_empty, 2);
(op_lt_right_empty, 2);
(op_double_lt, 2);
(op_float_lt, 2);
(op_decimal_lt, 2);
(op_integer_lt, 2);
(op_string_lt, 2);
(op_boolean_lt, 2);
(op_date_lt, 2);
(op_time_lt, 2);
(op_dateTime_lt, 2);
(op_yearMonthDuration_lt, 2);
(op_dayTimeDuration_lt, 2) ]
let op_gt_table =
(op_gt, 2);
(op_gt_left_empty, 2);
(op_gt_right_empty, 2);
(op_double_gt, 2);
(op_float_gt, 2);
(op_decimal_gt, 2);
(op_integer_gt, 2);
(op_string_gt, 2);
(op_boolean_gt, 2);
(op_date_gt, 2);
(op_time_gt, 2);
(op_dateTime_gt, 2);
(op_yearMonthDuration_gt, 2);
(op_dayTimeDuration_gt, 2) ]
let op_le_table =
(op_le, 2);
(op_le_left_empty, 2);
(op_le_right_empty, 2);
(op_double_le, 2);
(op_float_le, 2);
(op_decimal_le, 2);
(op_integer_le, 2);
(op_string_le, 2);
(op_boolean_le, 2);
(op_date_le, 2);
(op_time_le, 2);
(op_dateTime_le, 2);
(op_yearMonthDuration_le, 2);
(op_dayTimeDuration_le, 2) ]
let op_ge_table =
(op_ge, 2);
(op_ge_left_empty, 2);
(op_ge_right_empty, 2);
(op_double_ge, 2);
(op_float_ge, 2);
(op_decimal_ge, 2);
(op_integer_ge, 2);
(op_string_ge, 2);
(op_boolean_ge, 2);
(op_date_ge, 2);
(op_time_ge, 2);
(op_dateTime_ge, 2);
(op_yearMonthDuration_ge, 2);
(op_dayTimeDuration_ge, 2) ]
let fn_floor_table =
(fn_floor, 1);
(fn_floor_double, 1);
(fn_floor_float, 1);
(fn_floor_decimal, 1);
(fn_floor_integer, 1) ]
let fn_ceiling_table =
(fn_ceiling, 1);
(fn_ceiling_double, 1);
(fn_ceiling_float, 1);
(fn_ceiling_decimal, 1);
(fn_ceiling_integer, 1) ]
let fn_round_table =
(fn_round, 1);
(fn_round_double, 1);
(fn_round_float, 1);
(fn_round_decimal, 1);
(fn_round_integer, 1) ]
let fn_round_half_to_even_table =
[ (fn_round_half_to_even_double, 2);
(fn_round_half_to_even_float, 2);
(fn_round_half_to_even_decimal, 2);
(fn_round_half_to_even_integer, 2) ]
let fn_abs_table =
(fn_abs_double, 1);
(fn_abs_float, 1);
(fn_abs_decimal, 1);
(fn_abs_integer, 1) ]
let fn_avg_table =
(fn_avg_double, 1);
(fn_avg_float, 1);
(fn_avg_decimal, 1);
(fn_avg_integer, 1);
(fn_avg_yearMonthDuration, 1);
(fn_avg_dayTimeDuration, 1) ]
let fn_max_table =
(fn_max_double, 1);
(fn_max_float, 1);
(fn_max_decimal, 1);
(fn_max_integer, 1);
(fn_max_string, 1);
(fn_max_date, 1);
(fn_max_time, 1);
(fn_max_dateTime, 1);
(fn_max_yearMonthDuration, 1);
(fn_max_dayTimeDuration, 1); ]
let fn_min_table =
(fn_min_double, 1);
(fn_min_float, 1);
(fn_min_decimal, 1);
(fn_min_integer, 1);
(fn_min_string, 1);
(fn_min_date, 1);
(fn_min_time, 1);
(fn_min_dateTime, 1);
(fn_min_yearMonthDuration, 1);
(fn_min_dayTimeDuration, 1) ]
let fn_sum_table_one =
[ (fn_sum_double, 1);
(fn_sum_float, 1);
(fn_sum_decimal, 1);
(fn_sum_yearMonthDuration, 1);
(fn_sum_dayTimeDuration, 1);
THIS NEEDS TO BE FIRST AS THIS IS THE TYPE FOR THE DEFAULT VALUE IF THE INPUT IS EMPTY
let fn_sum_table =
[ (fn_sum_double, 2);
(fn_sum_float, 2);
(fn_sum_decimal, 2);
(fn_sum_integer, 2);
(fn_sum_yearMonthDuration, 2);
(fn_sum_dayTimeDuration, 2) ]
let overloaded_functions =
[ (op_numeric_add, 2), op_numeric_add_table;
(op_numeric_subtract, 2), op_numeric_subtract_table;
(op_numeric_multiply, 2), op_numeric_multiply_table;
(op_numeric_divide, 2), op_numeric_divide_table;
(op_numeric_mod, 2), op_numeric_mod_table;
(op_numeric_unary_plus, 1), op_numeric_unary_plus_table;
(op_numeric_unary_minus, 1), op_numeric_unary_minus_table;
(op_numeric_idivide, 2), op_numeric_idivide_table;
(op_equal, 2), op_equal_table;
(op_nequal, 2), op_nequal_table;
(op_lt, 2), op_lt_table;
(op_gt, 2), op_gt_table;
(op_le, 2), op_le_table;
(op_ge, 2), op_ge_table;
(fn_floor, 1), fn_floor_table;
(fn_ceiling, 1), fn_ceiling_table;
(fn_round, 1), fn_round_table;
(fn_round_half_to_even, 2), fn_round_half_to_even_table;
(fn_abs, 1), fn_abs_table;
(fn_avg, 1), fn_avg_table;
(fn_max, 1), fn_max_table;
(fn_min, 1), fn_min_table;
(fn_sum, 1), fn_sum_table_one;
(fn_sum, 2), fn_sum_table ]
(* Default atomic type assumed for each overloaded function when its
   operands are untyped: numeric operators default to xs:double,
   comparisons to xs:string. *)
let overloaded_functions_default_type = [
  (op_numeric_add, ATDouble);
  (op_numeric_subtract, ATDouble);
  (op_numeric_multiply, ATDouble);
  (op_numeric_divide, ATDouble);
  (op_numeric_mod, ATDouble);
  (op_numeric_unary_plus, ATDouble);
  (op_numeric_unary_minus, ATDouble);
  (op_numeric_idivide, ATDouble);
  (op_equal, ATString);
  (op_nequal, ATString);
  (op_lt, ATString);
  (op_gt, ATString);
  (op_le, ATString);
  (op_ge, ATString);
  (fn_avg, ATDouble);
  (fn_max, ATDouble);
  (fn_min, ATDouble);
  (fn_sum, ATDouble);
  (fn_floor, ATDouble);
  (fn_ceiling, ATDouble);
  (fn_round, ATDouble);
  (fn_round_half_to_even, ATDouble);
  (fn_abs, ATDouble)
  ]
(* Hash table mapping an overloaded function name to the atomic type used
   for untyped operands; 167 is only the initial bucket-count hint. *)
let overloaded_default_type_table =
  RQNameHashtbl.create 167
(* Register a single (function name, default type) pair. *)
let add_to_overloaded_default_type_table (cfname, default_type) =
  RQNameHashtbl.add overloaded_default_type_table cfname default_type
(* Register a whole list of (function name, default type) pairs.
   NOTE(review): this binding shares its name with a different function
   re-bound further below for the bindings table; this earlier binding is
   only referenced by the initialization on the next line. *)
let bulk_add_to_overloaded_functions_table table =
  List.iter add_to_overloaded_default_type_table table
(* Populate the default-type table at module-initialization time. *)
let _ =
  bulk_add_to_overloaded_functions_table overloaded_functions_default_type
(* Look up the default atomic type for an overloaded function, raising an
   internal error if the function was never registered.
   Fix: the error message was missing the space before "not found",
   producing e.g. "...op:gtnot found". *)
let lookup_default_atomic_type cfname =
  try
    RQNameHashtbl.find overloaded_default_type_table cfname
  with
  | Not_found ->
      raise (Query (Internal_Error ("Default type of overloaded function "
                                    ^ (prefixed_string_of_rqname cfname)
                                    ^ " not found")))
(* Hash table mapping an overloaded (name, arity) pair to each of its
   type-specific bindings; multiple bindings per key are kept because
   Hashtbl.add does not replace existing entries. *)
let overloaded_functions_table =
  RQNameIntHashtbl.create 167
(* Register one binding for an overloaded (name, arity) key. *)
let add_to_overloaded_functions_table cfname1 cfname2 =
  RQNameIntHashtbl.add overloaded_functions_table cfname1 cfname2
(* Register every binding of one overloaded function.
   NOTE(review): shadows the earlier function of the same name that fed
   the default-type table. *)
let bulk_add_to_overloaded_functions_table (fname1,fnamelist2) =
  List.iter (add_to_overloaded_functions_table fname1) fnamelist2
(* Populate the bindings table at module-initialization time. *)
let _ =
  List.iter bulk_add_to_overloaded_functions_table overloaded_functions
(* True iff the (name, arity) pair has at least one registered overload. *)
let is_overloaded cfname_arity =
  RQNameIntHashtbl.mem overloaded_functions_table cfname_arity
(* All registered bindings for an overloaded (name, arity) pair.
   NOTE(review): Hashtbl.find_all returns the most recently added binding
   first — confirm callers do not depend on registration order. *)
let all_overloaded_bindings_for_function cfname_arity =
  RQNameIntHashtbl.find_all overloaded_functions_table cfname_arity
(* Resolve one concrete (non-overloaded) binding: fetch its signature,
   kind and updating flag from the normalization context and pair them
   with the bare function name (arity dropped). *)
let resolve_non_overloaded_name norm_context fname =
  let (rqname, _arity) = fname in
  let (fun_sig, opt_fun_kind, upd) =
    Norm_context.one_sig_from_norm_context norm_context fname
  in
  (rqname, fun_sig, opt_fun_kind, upd)
(* Build the resolution table for an overloaded (name, arity) pair by
   resolving every registered binding against the normalization context.
   Raises an internal error when the function is not overloaded. *)
let table_for_overloaded_function norm_context fname =
  if not (is_overloaded fname)
  then raise (Query (Internal_Error "Not constructing an overloaded function!"))
  else
    List.map
      (resolve_non_overloaded_name norm_context)
      (all_overloaded_bindings_for_function fname)
(* Convenience accessor: resolution table for binary op:gt. *)
let table_for_op_gt norm_context =
  table_for_overloaded_function norm_context (op_gt, 2)
(* Convenience accessor: resolution table for binary op:equal. *)
let table_for_op_equal norm_context =
  table_for_overloaded_function norm_context (op_equal, 2)
|
0bc25300b8abd029258f022360f1f2c3fc0d2a190e020754bf4287410ffb38bb | vouch-opensource/krell | watcher.clj | (ns krell.watcher
(:import [io.methvin.watcher DirectoryChangeEvent DirectoryChangeEvent$EventType
DirectoryChangeListener DirectoryWatcher]
[java.nio.file Paths]
[org.slf4j LoggerFactory]))
(def logger (LoggerFactory/getLogger "krell"))
(defn fn->listener
  "Adapt callback into a DirectoryChangeListener.  The callback receives
  a map {:type kw :path path} where kw is one of :create :modify :delete
  :overflow and path is the event's java.nio.file.Path."
  ^DirectoryChangeListener [callback]
  (reify
    DirectoryChangeListener
    (onEvent [_ event]
      (let [^DirectoryChangeEvent e event
            path  (.path e)
            etype (.eventType e)
            kw    (condp = etype
                    DirectoryChangeEvent$EventType/CREATE   :create
                    DirectoryChangeEvent$EventType/MODIFY   :modify
                    DirectoryChangeEvent$EventType/DELETE   :delete
                    DirectoryChangeEvent$EventType/OVERFLOW :overflow)]
        (callback {:type kw :path path})))))
(defn to-path
  "Build a java.nio.file.Path from one or more string segments."
  [& segments]
  (let [[root & more] segments]
    (Paths/get ^String root (into-array String more))))
(defn create
  "Construct (but do not start) a DirectoryWatcher over the given path
  strings, delivering change events to cb via fn->listener."
  [cb & paths]
  (let [builder (DirectoryWatcher/builder)]
    (-> builder
        (.paths (mapv to-path paths))
        (.listener (fn->listener cb))
        (.build))))
(defn watch
  "Start watching asynchronously (delegates to DirectoryWatcher#watchAsync)."
  [^DirectoryWatcher watcher]
  (.watchAsync watcher))
(defn stop
  "Close the watcher, releasing its underlying resources."
  [^DirectoryWatcher watcher]
  (.close watcher))
(comment
(def watcher
(create
(fn [e]
(. logger (info (pr-str e))))
"src"))
(watch watcher)
)
| null | https://raw.githubusercontent.com/vouch-opensource/krell/61546493f3891d5603f3d2fbd03662230e9e6ee6/src/krell/watcher.clj | clojure | (ns krell.watcher
(:import [io.methvin.watcher DirectoryChangeEvent DirectoryChangeEvent$EventType
DirectoryChangeListener DirectoryWatcher]
[java.nio.file Paths]
[org.slf4j LoggerFactory]))
;; SLF4J logger used by the REPL example below.
(def logger (LoggerFactory/getLogger "krell"))
(defn fn->listener
  "Adapt f into a DirectoryChangeListener; f receives a map
  {:type #{:create :modify :delete :overflow} :path java.nio.file.Path}."
  ^DirectoryChangeListener [f]
  (reify
    DirectoryChangeListener
    (onEvent [this e]
      (let [path (.path ^DirectoryChangeEvent e)]
        (condp = (. ^DirectoryChangeEvent e eventType)
          DirectoryChangeEvent$EventType/CREATE (f {:type :create :path path})
          DirectoryChangeEvent$EventType/MODIFY (f {:type :modify :path path})
          DirectoryChangeEvent$EventType/DELETE (f {:type :delete :path path})
          DirectoryChangeEvent$EventType/OVERFLOW (f {:type :overflow :path path}))))))
(defn to-path
  "Build a java.nio.file.Path from string segments."
  [& args]
  (Paths/get ^String (first args) (into-array String (rest args))))
(defn create
  "Construct a DirectoryWatcher over paths, delivering events to cb."
  [cb & paths]
  (-> (DirectoryWatcher/builder)
      (.paths (map to-path paths))
      (.listener (fn->listener cb))
      (.build)))
(defn watch
  "Start watching asynchronously (delegates to DirectoryWatcher#watchAsync)."
  [^DirectoryWatcher watcher]
  (.watchAsync watcher))
(defn stop
  "Close the watcher, releasing its underlying resources."
  [^DirectoryWatcher watcher]
  (.close watcher))
;; Rich-comment block: REPL usage example.
(comment
  (def watcher
    (create
      (fn [e]
        (. logger (info (pr-str e))))
      "src"))
  (watch watcher)
  )
| |
49fc2d23e30e3b43450597dd22b038df89c77a5cc2832d031a15e1474b88279c | jcollard/unm-hip | Boxed.hs | The University of New Mexico 's Haskell Image Processing Library
Copyright ( C ) 2013
--
-- This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
You should have received a copy of the GNU General Public License
-- along with this program. If not, see </>.
# LANGUAGE TypeFamilies , ViewPatterns , FlexibleContexts , FlexibleInstances #
{-# OPTIONS_GHC -O2 #-}
module Data.Image.Boxed(
BoxedImage,
* Gray Images
GrayImage, Gray, readImage,
grayToComplex, makeHotImage,
ref',
-- * Color Images
ColorImage, Color(..), readColorImage,
colorImageRed, colorImageGreen, colorImageBlue,
colorImageToRGB, rgbToColorImage,
colorImageHue, colorImageSaturation, colorImageIntensity,
colorImageToHSI, hsiToColorImage,
-- * Complex Images
ComplexImage, Complex,
CI.makeFilter,
fft, ifft,
realPart, imagPart,
magnitude, angle,
complex, complexImageToRectangular,
complexImageToPolar,
shrink,
-- * Binary Images
distanceTransform, label,
-- * Additional Modules
-- | Contains functionality for performing arithmetic operations on images with scalar values.
module Data.Image.Arithmetic,
-- | Contains functionality related to Binary Images
module Data.Image.Binary,
-- | Contains functionality for convolution of images
module Data.Image.Convolution,
-- | Contains basic functionality for Images
module Data.Image.Internal,
-- | Contains functionality for writing images and displaying with an external program
module Data.Image.IO) where
import Data.Image.Arithmetic
import Data.Image.Binary hiding (distanceTransform, label)
import qualified Data.Image.Binary as Bin
import qualified Data.Image.Complex as CI
import Data.Image.Convolution
import Data.Image.Internal
import Data.Image.IO
--base>=4
import Control.Applicative
import qualified Data.Complex as C
import Data.Maybe(fromJust)
import Data.Monoid
--bytestring-0.10.0.2
import qualified Data.ByteString.Char8 as B
vector>=0.10.0.1
import qualified Data.Vector as V
deepseq>=1.3.0.2
import Control.DeepSeq
type Vector = V.Vector
-- Error Messages
differentDimensionsError = error "The images must have the same dimensions."
-- BoxedImage
{-| BoxedImage is a concrete implementation of Image using a boxed internal
    structure.  This allows for it to be installed nicely in Functor and
    Applicative. -}
-- | Row-major boxed image: the pixel at (r, c) lives at index
-- r * cs + c of 'pixels'.
data BoxedImage a = Image { rs :: Int,     -- ^ number of rows
                            cs :: Int,     -- ^ number of columns
                            pixels :: Vector a}
instance Image (BoxedImage a) where
  type Pixel (BoxedImage a) = a
  rows = rs
  cols = cs
  -- Row-major lookup; no bounds checking beyond Vector's own.
  ref i r c = (pixels i) V.! (r * (cols i) + c)
  -- Pixels are generated row by row; non-positive dimensions are only
  -- rejected (via error) when the pixel list is forced.
  makeImage rows cols f = Image rows cols (V.fromList px) where
    px | rows < 1 || cols < 1 = error "Invalid dimensions"
       | otherwise = [ f r c | r <- [0..rows-1], c <- [0..cols-1]]
  pixelList = V.toList . pixels
  imageOp = liftA2
instance Functor BoxedImage where
fmap f (Image rows cols pixels) = Image rows cols (fmap f pixels)
instance Applicative BoxedImage where
  pure a = Image 1 1 (V.singleton a)
  -- Pointwise application of an image of functions to an image of values.
  -- FIX: the dimension guard used (&&), which only rejected images whose
  -- rows AND cols both differed — a rows-only or cols-only mismatch
  -- slipped through and corrupted indexing.  (||) rejects any mismatch.
  (<*>) (Image rows cols partial) (Image rows' cols' toApply)
    | rows /= rows' || cols /= cols' = error "Cannot apply images of unequal dimensions."
    | otherwise = Image rows cols (V.imap func toApply) where
        func i e = (partial V.! i) e
instance Show (BoxedImage a) where
show (Image rows cols _) = "< Image " ++ (show rows) ++ "x" ++ (show cols) ++ " >"
instance Num a => Num (BoxedImage a) where
(+) = liftA2 (+)
(-) = liftA2 (-)
(*) = liftA2 (*)
abs = fmap abs
signum = fmap signum
fromInteger i = pure $ fromInteger i
instance Fractional a => Fractional (BoxedImage a) where
(/) = liftA2 (/)
recip = fmap recip
fromRational i = pure $ fromRational i
-- | Pointwise ordering: img0 <= img1 iff every pixel of img0 is <= the
-- corresponding pixel of img1; errors on mismatched dimensions.
-- NOTE(review): this is only a partial order, so the derived 'compare'
-- is not a lawful total order — avoid using it for sorting.
instance Ord a => Ord (BoxedImage a) where
  (<=) img0 img1
    | (rows img0) /= (rows img1) = differentDimensionsError
    | (cols img0) /= (cols img1) = differentDimensionsError
    | otherwise = and . zipWith (<=) (pixelList img0) . pixelList $ img1
instance Eq a => Eq (BoxedImage a) where
(==) img0 img1
| (rows img0) /= (rows img1) = False
| (cols img0) /= (cols img1) = False
| otherwise = and . zipWith (==) (pixelList img0) $ pixelList img1
instance NFData a => NFData (BoxedImage a) where
rnf (Image rows cols pixs) = (rnf rows) `seq` (rnf cols) `seq` (rnf pixs)
-- GrayImage
{-| A concrete instance of Image representing a gray scale image.
    This instance is installed in DisplayFormat as a gray PGM.
-}
type GrayImage = BoxedImage Gray
type Gray = Double
instance DisplayFormat GrayImage where
format = toPGM
instance GrayPixel Gray where
type GrayVal Gray = Gray
toGray = id
instance RGBPixel Gray where
type ColorVal Gray = Gray
toRGB px = (px, px, px)
instance HSIPixel Gray where
toHSI = toHSI . RGB . toRGB
instance BinaryPixel Gray where
toBinary 0.0 = False
toBinary _ = True
on = 1.0
off = 0.0
instance CI.ComplexPixel Gray where
type Value Gray = Double
toComplex i = i C.:+ 0.0
fromComplex (r C.:+ i) = r
instance Monoid Gray where
mempty = 0.0
mappend = (+)
instance MaxMin Gray where
maximal = maximum
minimal = minimum
ColorImage
| A concrete instance of Image that represents images with color values .
This instance is installed in DisplayFormat and can be written to
a color PPM
This instance is installed in DisplayFormat and can be written to
a color PPM -}
type ColorImage = BoxedImage Color
class HSIPixel px where
toHSI :: px -> (Double, Double, Double)
instance DisplayFormat ColorImage where
format = toPPM
-- | A color encoding scheme
data Color =
-- | Red, Green, Blue encoding
RGB (Double, Double, Double)
-- | Hue, Saturation, Intensity encoding
| HSI (Double, Double, Double) deriving (Show, Eq)
instance GrayPixel Color where
  type GrayVal Color = Double
  -- Luminance as the plain average of the RGB channels.  The view
  -- pattern converts HSI pixels and is the identity on RGB pixels, so a
  -- single equation covers both constructors; the original also had a
  -- redundant RGB-specific first clause that the view pattern subsumed.
  toGray (toRGB -> (r, g, b)) = (r + g + b) / 3.0
instance RGBPixel Color where
  type ColorVal Color = Double
  toRGB (RGB px) = px
  -- HSI -> RGB conversion.  The scale factor k = sqrt 6 was previously
  -- bound as 'const', shadowing Prelude.const; renamed for clarity.
  toRGB (HSI (h, s, i)) = (r, g, b) where
    r = i + v1
    g = i - (v1/2) + v2
    b = i - (v1/2) - v2
    v1 = k*s*(cos h)/3
    v2 = k*s*(sin h)/2
    k = 2.44948974278318  -- sqrt 6
instance HSIPixel Color where
  -- RGB -> HSI conversion (hue from the chroma plane, saturation as its
  -- magnitude, intensity as the channel mean).  The scale factor k =
  -- sqrt 6 was previously bound as 'const', shadowing Prelude.const.
  toHSI (RGB (r, g, b)) = (h, s, i) where
    h = if (v1 /= 0.0) then atan2 v2 v1 else 0
    s = sqrt( (v1*v1) + (v2*v2) )
    i = (r+g+b)/3
    v1 = (2.0*r-g-b) / k
    v2 = (g - b) / k
    k = 2.44948974278318  -- sqrt 6
  toHSI (HSI px) = px
--Requires the image to be scaled
instance ComplexPixel Color where
-- toComplex = undefined
instance BinaryPixel Color where
toBinary (toRGB -> (r, g, b))
| r == 0 && g == 0 && b == 0 = False
| otherwise = True
on = RGB (1.0, 1.0, 1.0)
off = RGB (0.0, 0.0, 0.0)
instance Monoid Color where
mempty = RGB (0.0, 0.0, 0.0)
mappend (toRGB -> (a,b,c)) (toRGB -> (d,e,f)) = RGB (a+d,b+e,c+f)
instance MaxMin Color where
maximal = helper max mempty . map toRGB
minimal = helper min (RGB (10e10, 10e10, 10e10)) . map toRGB
helper :: (Double -> Double -> Double) -> Color -> [(Double, Double, Double)] -> Color
helper compare (RGB (r,g,b)) [] = let i = foldr1 compare [r, g, b] in RGB (i,i,i)
helper compare (RGB (r, g, b)) ((r', g', b'):xs) = helper compare acc' xs where
acc' = (RGB (compare r r', compare g g', compare b b'))
instance Num Color where
(+) = colorOp (+)
(-) = colorOp (-)
(*) = colorOp (*)
abs (toRGB -> (r, g, b)) = RGB (abs r, abs g, abs b)
signum (toRGB -> (r, g, b)) = RGB (signum r, signum g, signum b)
fromInteger (fromIntegral -> i) = RGB (i,i,i)
instance Fractional Color where
(/) = colorOp (/)
recip (toRGB -> (r,g,b)) = RGB (recip r, recip g, recip b)
fromRational _ = error "Could not create Color from Rational."
colorOp :: (Double -> Double -> Double) -> Color -> Color -> Color
colorOp op (toRGB -> (a, b, c)) (toRGB -> (d, e, f)) = RGB (op a d, op b e, op c f)
-- ComplexImage
{-| A concrete instance of Image representing pixels as complex values.
    This instance can be written to file as a color PPM.
-}
type ComplexImage = BoxedImage Complex
type Complex = C.Complex Double
instance RealFloat a => Ord (C.Complex a) where
compare x y = compare (C.magnitude x) (C.magnitude y)
instance BinaryPixel Complex where
toBinary (0.0 C.:+ 0.0) = False
toBinary _ = True
on = (1.0 C.:+ 0.0)
off = (0.0 C.:+ 0.0)
instance DisplayFormat ComplexImage where
format (complexImageToColorImage -> rgb) = toPPM rgb
instance CI.ComplexPixel Complex where
type Value Complex = Double
toComplex = id
fromComplex = id
complexImageToColorImage :: ComplexImage -> ColorImage
complexImageToColorImage img = fmap rgb img where
scale = complexScale img
rgb comp = if radius < 1 then RGB (red', grn', blu') else RGB (red, grn, blu) where
[red, grn, blu] = map (+d') [r',g',b']
[red', grn', blu'] = map (flip (-) d') [r',g',b']
[x, y] = map (*scale) [C.realPart comp, C.imagPart comp]
radius = sqrt((x*x) + (y*y))
a = onedivsqrtsix*x
b = sqrttwodivtwo*y
d = 1.0/(1.0 + (radius*radius))
d' = 0.5 - radius*d
r' = 0.5 + (twodivsqrtsix * x * d)
b' = 0.5 - (d * (a - b))
g' = 0.5 - (d * (a + b))
complexScale :: ComplexImage -> Double
complexScale (CI.complexImageToRectangular -> (real, imag)) = 2.0/(maxv - minv) where
maxr = maximum . pixelList $ (real :: GrayImage)
maxi = maximum . pixelList $ imag
minr = minimum . pixelList $ real
mini = minimum . pixelList $ imag
maxv = max maxr maxi
minv = min minr mini
-- Constants used by complexImageToColorImage.  The first two definitions
-- were lost in extraction (only their descriptive comments survived) even
-- though both names are used above; restored from the documented formulas.
twodivsqrtsix = 0.81649658092772603273  -- 2.0 / sqrt(6)
onedivsqrtsix = 0.40824829046386301637  -- 1.0 / sqrt(6)
sqrttwodivtwo = 0.70710678118654752440  -- sqrt(2)/2.0
getComponent to component img = fmap (component . to) img
getRGB = getComponent toRGB
| Given a ColorImage , returns a GrayImage representing the Red color component
> > > let red = colorImageRed cacti
< -hip/master/examples/colorimagered.jpg >
>>>let red = colorImageRed cacti
<-hip/master/examples/colorimagered.jpg>
-}
colorImageRed :: ColorImage -> GrayImage
colorImageRed = getRGB (\ (r, _, _) -> r)
| Given a ColorImage , returns a GrayImage representing the Green color component
> > > let green = colorImageGreen cacti
< -hip/master/examples/colorimagegreen.jpg >
>>>let green = colorImageGreen cacti
<-hip/master/examples/colorimagegreen.jpg>
-}
colorImageGreen :: ColorImage -> GrayImage
colorImageGreen = getRGB (\ (_,g,_) -> g)
| Given a ColorImage , returns a GrayImage representing the Blue color component
> > > let blue = colorImageBlue cacti
< -hip/master/examples/colorimageblue.jpg >
>>>let blue = colorImageBlue cacti
<-hip/master/examples/colorimageblue.jpg>
-}
colorImageBlue :: ColorImage -> GrayImage
colorImageBlue = getRGB (\ (_,_,b) -> b)
| Given a ColorImage , returns a triple containing three GrayImages each
containing one of the color components ( red , green , blue )
> > > leftToRight ' . colorImageToRGB $ cacti
< -hip/master/examples/colorimagetorgb.jpg >
containing one of the color components (red, green, blue)
>>>leftToRight' . colorImageToRGB $ cacti
<-hip/master/examples/colorimagetorgb.jpg>
-}
colorImageToRGB :: ColorImage -> (GrayImage, GrayImage, GrayImage)
colorImageToRGB img = (colorImageRed img, colorImageGreen img, colorImageBlue img)
| Given a triple containing three GrayImages each containing one of the
color components ( red , green , blue ) , returns a ColorImage
> > > rgbToColorImage ( red , green , blue )
< -hip/master/examples/cacti.jpg >
color components (red, green, blue), returns a ColorImage
>>>rgbToColorImage (red,green,blue)
<-hip/master/examples/cacti.jpg>
-}
rgbToColorImage :: (GrayImage, GrayImage, GrayImage) -> ColorImage
rgbToColorImage (red, green, blue) = createRGB <$> red <*> green <*> blue where
createRGB r g b = RGB (r, g, b)
getHSI = getComponent toHSI
| Given a ColorImage , returns a GrayImage representing the Hue component
> > > let h = colorImageHue cacti
< -hip/master/examples/colorimagehue.jpg >
>>>let h = colorImageHue cacti
<-hip/master/examples/colorimagehue.jpg>
-}
colorImageHue :: ColorImage -> GrayImage
colorImageHue = getHSI (\ (h, _, _) -> h)
| Given a ColorImage , returns a GrayImage representing the Saturation component
> > > let s = colorImageSaturation cacti
< -hip/master/examples/colorimagesaturation.jpg >
>>>let s = colorImageSaturation cacti
<-hip/master/examples/colorimagesaturation.jpg>
-}
colorImageSaturation :: ColorImage -> GrayImage
colorImageSaturation = getHSI (\ (_,s,_) -> s)
| Given a ColorImage , returns a GrayImage representing the Intensity component
> > > let i = colorImageIntensity cacti
< -hip/master/examples/colorimageintensity.jpg >
>>>let i = colorImageIntensity cacti
<-hip/master/examples/colorimageintensity.jpg>
-}
colorImageIntensity :: ColorImage -> GrayImage
colorImageIntensity = getHSI (\ (_,_,i) -> i)
| Given a triple containing three GrayImages each containing one of the
color components ( hue , saturation , ) , returns a ColorImage
> > > hsiToColorImage ( h , s , i )
< -hip/master/examples/cacti.jpg >
color components (hue, saturation, ), returns a ColorImage
>>> hsiToColorImage (h, s, i)
<-hip/master/examples/cacti.jpg>
-}
hsiToColorImage :: (GrayImage, GrayImage, GrayImage) -> ColorImage
hsiToColorImage (h, s, i) = toHSI <$> h <*> s <*> i where
toHSI h s i = HSI (h, s, i)
| Given a ColorImage , returns a triple containing three GrayImages each
containing one of the components ( hue , saturation , intensity )
> > > let ( h , s , i ) = colorImageToHSI $ cacti
containing one of the components (hue, saturation, intensity)
>>>let (h, s, i) = colorImageToHSI $ cacti
-}
colorImageToHSI :: ColorImage -> (GrayImage, GrayImage, GrayImage)
colorImageToHSI img = (colorImageHue img, colorImageSaturation img, colorImageIntensity img)
| Reads in an ASCI PPM file as a ColorImage
> > > cacti < - readColorImage " images / cacti.ppm "
< -hip/master/examples/cacti.jpg >
>>>cacti <- readColorImage "images/cacti.ppm"
<-hip/master/examples/cacti.jpg>
-}
readColorImage :: FilePath -> IO ColorImage
readColorImage fileName =
do
y <- B.readFile fileName
return $ parseRGBPixelImage . B.intercalate (B.pack " ") . stripComments . B.lines $ y
parseRGBPixelImage :: B.ByteString -> ColorImage
parseRGBPixelImage string = Image rows cols (V.fromList rgbs)
where ws = B.words string
getInt = fst. fromJust . B.readInt
px = map (fromIntegral . getInt) $ drop 4 ws
cols = getInt $ ws !! 1
rows = getInt $ ws !! 2
maxi = fromIntegral . getInt $ ws !! 3
[r, g, b] = colors px
rgbs = map rgb3 . zip3 r g $ b
rgb3 (r, g, b) = RGB (r, g, b)
-- | De-interleave a flat [r,g,b,r,g,b,...] pixel list into the three
-- channel lists [reds, greens, blues].
-- NOTE(review): the accumulator helper only matches [] or groups of
-- three, so an input whose length is not a multiple of 3 hits a
-- non-exhaustive pattern — confirm upstream parsing guarantees this.
colors :: [Int] -> [[Gray]]
colors xs = helper xs [] [] []
  where helper [] red green blue = map (map fromIntegral) $ map reverse [red, green, blue]
        helper (r:g:b:cs) red green blue = helper cs (r:red) (g:green) (b:blue)
| Coerces a GrayImage to a ComplexImage where the imaginary
part for all pixels is 0 .
> > > grayToComplex frog
part for all pixels is 0.
>>>grayToComplex frog
-}
grayToComplex :: GrayImage -> ComplexImage
grayToComplex img = fmap (C.:+ 0.0) img
| Given a GrayImage , makeHotImage returns a ColorImage with the same
dimensions . The R , G , B values of the result image at ( i , j ) are
determined by using the value of the ColorImage at ( i , j ) to index
three lookup tables . These lookup tables implement a false coloring
scheme which maps small values to black , large values to white , and
intermediate values to shades of red , orange , and yellow ( in that order ) .
> > > makeHotImage frog
< -hip/master/examples/makehotimage.jpg >
dimensions. The R, G, B values of the result image at (i, j) are
determined by using the value of the ColorImage at (i, j) to index
three lookup tables. These lookup tables implement a false coloring
scheme which maps small values to black, large values to white, and
intermediate values to shades of red, orange, and yellow (in that order).
>>>makeHotImage frog
<-hip/master/examples/makehotimage.jpg>
-}
-- | False-color ("hot") rendering: intensities are normalized to [0,1]
-- over the image's own min/max and mapped black -> red -> yellow -> white.
makeHotImage :: GrayImage -> ColorImage
makeHotImage img = fmap (toHot max min) img where
  max = maxIntensity img  -- NOTE: locally shadows Prelude.max
  min = minIntensity img  -- NOTE: locally shadows Prelude.min
  toHot max min pixel = RGB (r, g, b) where
    px = (pixel - min)/(max-min)  -- normalized intensity; NaN for constant images
    r = if px < 0.333333333 then (px*3.0) else 1.0
    g = if px < 0.333333333 then 0.0 else
        if px < 0.666666667 then (px - 0.333333333)*3 else 1.0
    b = if px < 0.666666667 then 0.0 else (px - 0.666666667)*3
-- | Performs bilinear interpolation of a GrayImage at the (row, column)
-- coordinates provided.  Interior points are interpolated; points on the
-- bottom or right edge fall back to a nearest-pixel lookup; coordinates
-- outside the image yield 0.  Note interpolate is called as (x=c, y=r).
ref' :: GrayImage -> Double -> Double -> Double
ref' im r c = if inside then interpolate im c r else
              if onedge then ref im r' c' else 0
  where (r', c') = (floor r, floor c)
        (rs, cs) = (rows im, cols im)
        inside = r' >= 0 && c' >= 0 && r' < rs-1 && c' < cs-1
        onedge = (r' == rs-1 && c'>=0 && c' < cs) ||
                 (c' == cs-1 && r'>=0 && r' < rs)
-- | Bilinear interpolation at fractional column x / row y (ref is called
-- as ref im row col, so y selects the row).
-- FIX: the vertical blend was inverted — the original returned
-- fx1 + y'*(fx0-fx1), i.e. the y1 row's value at y' = 0 where the y0
-- row's value (fx0) is required.  The blend now moves from fx0 toward
-- fx1 as y' goes 0 -> 1.
interpolate :: GrayImage -> Double -> Double -> Double
interpolate im x y = fx0 + y'*(fx1-fx0)
  where (x0, y0) = (floor x, floor y)
        (x1, y1) = (x0 + 1, y0 +1)
        x' = x - (fromIntegral x0)
        y' = y - (fromIntegral y0)
        f00 = ref im y0 x0   -- top-left
        f10 = ref im y0 x1   -- top-right
        f01 = ref im y1 x0   -- bottom-left
        f11 = ref im y1 x1   -- bottom-right
        fx0 = f00 + x'*(f10-f00)  -- horizontal blend along row y0
        fx1 = f01 + x'*(f11-f01)  -- horizontal blend along row y1
| Given a complex image , returns a real image representing
the real part of the image .
@
harmonicSignal : : Double - > Double - > Int - > Int - > C.Complex Double
harmonicSignal u v m n = exp ( -pii*2.0 * var ) where
pii = 0.0 C.:+ pi
var = ( ) C.:+ 0.0
[ ' ] = map fromIntegral [ m , n ]
@
> > > let signal = makeImage 128 128 ( harmonicSignal ( 3/128 ) ( 2/128 ) ) : : ComplexImage
< -hip/master/examples/signal.jpg >
> > > let cosine = realPart signal
< -hip/master/examples/cosine.jpg >
> > > realPart realPart . ifft $ ( fft frogpart ) * ( fft d2 g )
< -hip/master/examples/realpart.jpg >
> > > realPart realPart . ifft $ ( fft frogpart ) * ( fft g )
< -hip/master/examples/realpart2.jpg >
the real part of the image.
@
harmonicSignal :: Double -> Double -> Int -> Int -> C.Complex Double
harmonicSignal u v m n = exp (-pii*2.0 * var) where
pii = 0.0 C.:+ pi
var = (u*m' + v*n') C.:+ 0.0
[m',n'] = map fromIntegral [m, n]
@
>>> let signal = makeImage 128 128 (harmonicSignal (3/128) (2/128)) :: ComplexImage
<-hip/master/examples/signal.jpg>
>>>let cosine = realPart signal
<-hip/master/examples/cosine.jpg>
>>>realPart realPart . ifft $ (fft frogpart) * (fft d2g)
<-hip/master/examples/realpart.jpg>
>>>realPart realPart . ifft $ (fft frogpart) * (fft g)
<-hip/master/examples/realpart2.jpg>
-}
realPart :: ComplexImage -> GrayImage
realPart = CI.realPart
{-| Given a complex image, returns a real image representing
the imaginary part of the image
>>>let sine = imagPart signal
<-hip/master/examples/sine.jpg>
-}
imagPart :: ComplexImage -> GrayImage
imagPart = CI.imagPart
{-| Given a complex image, returns a real image representing
the magnitude of the image.
>>>magnitude signal
-}
magnitude :: ComplexImage -> GrayImage
magnitude = CI.magnitude
{-| Given a complex image, returns a real image representing
the angle of the image
>>>angle signal
<-hip/master/examples/angle.jpg>
-}
angle :: ComplexImage -> GrayImage
angle = CI.angle
{-| Given a complex image, returns a pair of real images each
representing the component (magnitude, phase) of the image
>>>leftToRight' . complexImageToPolar $ signal
<-hip/master/examples/compleximagetopolar.jpg>
-}
complexImageToPolar :: ComplexImage -> (GrayImage, GrayImage)
complexImageToPolar = CI.complexImageToPolar
{-| Given an image representing the real part of a complex image, and
an image representing the imaginary part of a complex image, returns
a complex image.
>>>complex cosine sine
<-hip/master/examples/signal.jpg>
-}
complex :: GrayImage -> GrayImage -> ComplexImage
complex = CI.complex
{-| Given a complex image, return a pair of real images each representing
a component of the complex image (real, imaginary).
>>>leftToRight' . complexImageToRectangular $ signal
<-hip/master/examples/complexsignaltorectangular.jpg>
-}
complexImageToRectangular :: ComplexImage -> (GrayImage, GrayImage)
complexImageToRectangular = CI.complexImageToRectangular
| Given a complex image and a real positive number x , shrink returns
a complex image with the same dimensions . Let z be the value of the
image at location ( i , j ) . The value of the complex result image at
location ( i , j ) is zero if |z| < x , otherwise the result has the
same phase as z but the amplitude is decreased by x.
a complex image with the same dimensions. Let z be the value of the
image at location (i, j). The value of the complex result image at
location (i, j) is zero if |z| < x, otherwise the result has the
same phase as z but the amplitude is decreased by x.
-}
shrink :: (Image img,
CI.ComplexPixel (Pixel img)) => (CI.Value (Pixel img)) -> img -> img
shrink = CI.shrink
| Given an image whose pixels can be converted to a complex value ,
fft returns an image with complex pixels representing its 2D discrete
Fourier transform ( DFT ) . Because the DFT is computed using the Fast Fourier
Transform ( FFT ) algorithm , the number of rows and columns of the image
must both be powers of two , i.e. , 2 K where K is an integer .
> > > frog < - readImage " images / frog.pgm "
> > > let frogpart = crop 64 64 128 128 frog
< -hip/master/examples/frog.jpg >
< -hip/master/examples/frogpart.jpg >
> > > imageMap log . fft $ frogpart : : ComplexImage
< -hip/master/examples/fft.jpg >
> > > fft d2 g
< -hip/master/examples/fftd2g.jpg >
> > > fft g
< -hip/master/examples/fftg.jpg >
fft returns an image with complex pixels representing its 2D discrete
Fourier transform (DFT). Because the DFT is computed using the Fast Fourier
Transform (FFT) algorithm, the number of rows and columns of the image
must both be powers of two, i.e., 2K where K is an integer.
>>>frog <- readImage "images/frog.pgm"
>>>let frogpart = crop 64 64 128 128 frog
<-hip/master/examples/frog.jpg>
<-hip/master/examples/frogpart.jpg>
>>>imageMap log . fft $ frogpart :: ComplexImage
<-hip/master/examples/fft.jpg>
>>>fft d2g
<-hip/master/examples/fftd2g.jpg>
>>>fft g
<-hip/master/examples/fftg.jpg>
-}
fft :: (Image img,
CI.ComplexPixel (Pixel img),
CI.Value (Pixel img) ~ Double) => img -> ComplexImage
fft = CI.fft
| Given an image , ifft returns a complex image representing its 2D
inverse discrete Fourier transform ( DFT ) . Because the inverse DFT is
computed using the Fast Fourier Transform ( FFT ) algorithm , the number
of rows and columns of < image > must both be powers of two , i.e. , 2 K
where K is an integer .
> > > ifft ( ( fft frogpart ) * ( fft d2 g ) )
< -hip/master/examples/ifft.jpg >
> > > ifft ( ( fft frogpart ) * ( fft g ) )
< -hip/master/examples/ifft2.jpg >
inverse discrete Fourier transform (DFT). Because the inverse DFT is
computed using the Fast Fourier Transform (FFT) algorithm, the number
of rows and columns of <image> must both be powers of two, i.e., 2K
where K is an integer.
>>>ifft ((fft frogpart) * (fft d2g))
<-hip/master/examples/ifft.jpg>
>>>ifft ((fft frogpart) * (fft g))
<-hip/master/examples/ifft2.jpg>
-}
ifft :: (Image img,
CI.ComplexPixel (Pixel img),
CI.Value (Pixel img) ~ Double) => img -> ComplexImage
ifft = CI.ifft
-- Binary Images
| Given a binary image , distanceTransform returns an image
representing the 2D distance transform of the image .
The distance transform is accurate to within a 2 % error for euclidean
distance .
> > > distanceTransform binaryStop : : GrayImage
< Image 86x159 >
< -hip/master/examples/distancetransform.jpg >
representing the 2D distance transform of the image.
The distance transform is accurate to within a 2% error for euclidean
distance.
>>>distanceTransform binaryStop :: GrayImage
< Image 86x159 >
<-hip/master/examples/distancetransform.jpg>
-}
distanceTransform :: (Image img,
BinaryPixel (Pixel img)) => img -> GrayImage
distanceTransform = Bin.distanceTransform
| Given a binary image , label returns an image where pixels in
distinct connected components ( based on 4 - neighbor connectivity )
have distinct integer values . These values range from 1 to n where
n is the number of connected components in image .
> > > label binaryStop
< Image 86x159 >
< -hip/master/examples/label.jpg >
distinct connected components (based on 4-neighbor connectivity)
have distinct integer values. These values range from 1 to n where
n is the number of connected components in image.
>>> label binaryStop
< Image 86x159 >
<-hip/master/examples/label.jpg>
-}
label :: (Image img,
BinaryPixel (Pixel img)) => img -> GrayImage
label = Bin.label
| Reads in a ASCII PGM image located at fileName as a GrayImage
> > > frog < - readImage " images / frog.pgm "
< -hip/master/examples/frog.jpg >
>>>frog <- readImage "images/frog.pgm"
<-hip/master/examples/frog.jpg>
-}
readImage :: FilePath -> IO GrayImage
readImage fileName =
do
y <- B.readFile fileName
return $ parseImage . B.intercalate (B.pack " ") . stripComments . B.lines $ y
-- | Parse the body of an ASCII PGM: "P2 <cols> <rows> <maxval> <pixels...>".
parseImage :: B.ByteString -> GrayImage
parseImage string = img
  where ws = B.words string
        -- NOTE(review): partial — fromJust fails on any non-numeric token.
        getInt = fst . fromJust . B.readInt
        px = map (fromIntegral . getInt) $ drop 4 ws  -- skip magic/cols/rows/maxval
        cols = getInt $ ws !! 1
        rows = getInt $ ws !! 2
        -- Read but unused: pixel values are not rescaled by maxval.
        maxi = fromIntegral . getInt $ ws !! 3
        img = Image rows cols (V.fromList px)
-- | Drop blank lines and '#'-prefixed comment lines from a PNM file.
stripComments :: [B.ByteString] -> [B.ByteString]
stripComments = filter keep
  where keep line = not (B.null line) && B.head line /= '#'
| null | https://raw.githubusercontent.com/jcollard/unm-hip/15684cc0a4f187b718b942b77ae08802bf195b02/Data/Image/Boxed.hs | haskell |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
# OPTIONS_GHC -O2 #
* Color Images
* Complex Images
* Binary Images
* Additional Modules
| Contains functionality for performing arithmetic operations on images with scalar values.
| Contains functionality related to Binary Images
| Contains functionality for convolution of images
| Contains basic functionality for Images
| Contains functionality for writing images and displaying with an external program
base>=4
bytestring-0.10.0.2
Error Messages
| A color encoding scheme
| Red, Green, Blue encoding
| Hue, Saturation, Intensity encoding
Requires the image to be scaled
toComplex = undefined
sqrt(2)/2.0
| Given a complex image, returns a real image representing
the imaginary part of the image
>>>let sine = imagPart signal
<-hip/master/examples/sine.jpg>
| Given a complex image, returns a real image representing
the magnitude of the image.
>>>magnitude signal
| Given a complex image, returns a real image representing
the angle of the image
>>>angle signal
<-hip/master/examples/angle.jpg>
| Given a complex image, returns a pair of real images each
representing the component (magnitude, phase) of the image
>>>leftToRight' . complexImageToPolar $ signal
<-hip/master/examples/compleximagetopolar.jpg>
| Given an image representing the real part of a complex image, and
an image representing the imaginary part of a complex image, returns
a complex image.
>>>complex cosine sine
<-hip/master/examples/signal.jpg>
| Given a complex image, return a pair of real images each representing
a component of the complex image (real, imaginary).
>>>leftToRight' . complexImageToRectangular $ signal
<-hip/master/examples/complexsignaltorectangular.jpg>
Binary Images | The University of New Mexico 's Haskell Image Processing Library
Copyright ( C ) 2013
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
# LANGUAGE TypeFamilies , ViewPatterns , FlexibleContexts , FlexibleInstances #
module Data.Image.Boxed(
BoxedImage,
* Gray Images
GrayImage, Gray, readImage,
grayToComplex, makeHotImage,
ref',
ColorImage, Color(..), readColorImage,
colorImageRed, colorImageGreen, colorImageBlue,
colorImageToRGB, rgbToColorImage,
colorImageHue, colorImageSaturation, colorImageIntensity,
colorImageToHSI, hsiToColorImage,
ComplexImage, Complex,
CI.makeFilter,
fft, ifft,
realPart, imagPart,
magnitude, angle,
complex, complexImageToRectangular,
complexImageToPolar,
shrink,
distanceTransform, label,
module Data.Image.Arithmetic,
module Data.Image.Binary,
module Data.Image.Convolution,
module Data.Image.Internal,
module Data.Image.IO) where
import Data.Image.Arithmetic
import Data.Image.Binary hiding (distanceTransform, label)
import qualified Data.Image.Binary as Bin
import qualified Data.Image.Complex as CI
import Data.Image.Convolution
import Data.Image.Internal
import Data.Image.IO
import Control.Applicative
import qualified Data.Complex as C
import Data.Maybe(fromJust)
import Data.Monoid
import qualified Data.ByteString.Char8 as B
vector>=0.10.0.1
import qualified Data.Vector as V
deepseq>=1.3.0.2
import Control.DeepSeq
type Vector = V.Vector
differentDimensionsError = error "The images must have the same dimensions."
BoxedImage
| BoxedImage is a concrete implementation of Image using a boxed internal structure . This allows for it to be installed nicely in Functor and Applicative .
data BoxedImage a = Image { rs :: Int,
cs :: Int,
pixels :: Vector a}
instance Image (BoxedImage a) where
type Pixel (BoxedImage a) = a
rows = rs
cols = cs
ref i r c = (pixels i) V.! (r * (cols i) + c)
makeImage rows cols f = Image rows cols (V.fromList px) where
px | rows < 1 || cols < 1 = error "Invalid dimensions"
| otherwise = [ f r c | r <- [0..rows-1], c <- [0..cols-1]]
pixelList = V.toList . pixels
imageOp = liftA2
instance Functor BoxedImage where
fmap f (Image rows cols pixels) = Image rows cols (fmap f pixels)
instance Applicative BoxedImage where
pure a = Image 1 1 (V.singleton a)
(<*>) (Image rows cols partial) (Image rows' cols' toApply)
| rows /= rows' && cols /= cols' = error "Cannot apply images of unequal dimensions."
| otherwise = Image rows cols (V.imap func toApply) where
func i e = (partial V.! i) e
instance Show (BoxedImage a) where
show (Image rows cols _) = "< Image " ++ (show rows) ++ "x" ++ (show cols) ++ " >"
instance Num a => Num (BoxedImage a) where
(+) = liftA2 (+)
(-) = liftA2 (-)
(*) = liftA2 (*)
abs = fmap abs
signum = fmap signum
fromInteger i = pure $ fromInteger i
instance Fractional a => Fractional (BoxedImage a) where
(/) = liftA2 (/)
recip = fmap recip
fromRational i = pure $ fromRational i
instance Ord a => Ord (BoxedImage a) where
(<=) img0 img1
| (rows img0) /= (rows img1) = differentDimensionsError
| (cols img0) /= (cols img1) = differentDimensionsError
| otherwise = and . zipWith (<=) (pixelList img0) . pixelList $ img1
instance Eq a => Eq (BoxedImage a) where
(==) img0 img1
| (rows img0) /= (rows img1) = False
| (cols img0) /= (cols img1) = False
| otherwise = and . zipWith (==) (pixelList img0) $ pixelList img1
instance NFData a => NFData (BoxedImage a) where
rnf (Image rows cols pixs) = (rnf rows) `seq` (rnf cols) `seq` (rnf pixs)
GrayImage
| A concrete instance of Image representing a gray scale image .
This instance is installed in DisplayFormat as a gray PGM .
This instance is installed in DisplayFormat as a gray PGM.
-}
type GrayImage = BoxedImage Gray
type Gray = Double
instance DisplayFormat GrayImage where
format = toPGM
instance GrayPixel Gray where
type GrayVal Gray = Gray
toGray = id
instance RGBPixel Gray where
type ColorVal Gray = Gray
toRGB px = (px, px, px)
instance HSIPixel Gray where
toHSI = toHSI . RGB . toRGB
instance BinaryPixel Gray where
toBinary 0.0 = False
toBinary _ = True
on = 1.0
off = 0.0
instance CI.ComplexPixel Gray where
type Value Gray = Double
toComplex i = i C.:+ 0.0
fromComplex (r C.:+ i) = r
instance Monoid Gray where
mempty = 0.0
mappend = (+)
instance MaxMin Gray where
maximal = maximum
minimal = minimum
ColorImage
| A concrete instance of Image that represents images with color values .
This instance is installed in DisplayFormat and can be written to
a color PPM
This instance is installed in DisplayFormat and can be written to
a color PPM -}
type ColorImage = BoxedImage Color
class HSIPixel px where
toHSI :: px -> (Double, Double, Double)
instance DisplayFormat ColorImage where
format = toPPM
data Color =
RGB (Double, Double, Double)
| HSI (Double, Double, Double) deriving (Show, Eq)
instance GrayPixel Color where
type GrayVal Color = Double
toGray (RGB (r, g, b)) = (r + g + b) / 3.0
toGray (toRGB -> (r, g, b)) = (r + g + b) / 3.0
instance RGBPixel Color where
type ColorVal Color = Double
toRGB (RGB px) = px
toRGB (HSI (h, s, i)) = (r, g, b) where
r = i + v1
g = i - (v1/2) + v2
b = i - (v1/2) - v2
v1 = const*s*(cos h)/3
v2 = const*s*(sin h)/2
const = 2.44948974278318
instance HSIPixel Color where
toHSI (RGB (r, g, b)) = (h, s, i) where
h = if (v1 /= 0.0) then atan2 v2 v1 else 0
s = sqrt( (v1*v1) + (v2*v2) )
i = (r+g+b)/3
v1 = (2.0*r-g-b) / const
v2 = (g - b) / const
const = 2.44948974278318
toHSI (HSI px) = px
instance ComplexPixel Color where
instance BinaryPixel Color where
toBinary (toRGB -> (r, g, b))
| r == 0 && g == 0 && b == 0 = False
| otherwise = True
on = RGB (1.0, 1.0, 1.0)
off = RGB (0.0, 0.0, 0.0)
instance Monoid Color where
mempty = RGB (0.0, 0.0, 0.0)
mappend (toRGB -> (a,b,c)) (toRGB -> (d,e,f)) = RGB (a+d,b+e,c+f)
instance MaxMin Color where
maximal = helper max mempty . map toRGB
minimal = helper min (RGB (10e10, 10e10, 10e10)) . map toRGB
helper :: (Double -> Double -> Double) -> Color -> [(Double, Double, Double)] -> Color
helper compare (RGB (r,g,b)) [] = let i = foldr1 compare [r, g, b] in RGB (i,i,i)
helper compare (RGB (r, g, b)) ((r', g', b'):xs) = helper compare acc' xs where
acc' = (RGB (compare r r', compare g g', compare b b'))
instance Num Color where
(+) = colorOp (+)
(-) = colorOp (-)
(*) = colorOp (*)
abs (toRGB -> (r, g, b)) = RGB (abs r, abs g, abs b)
signum (toRGB -> (r, g, b)) = RGB (signum r, signum g, signum b)
fromInteger (fromIntegral -> i) = RGB (i,i,i)
instance Fractional Color where
(/) = colorOp (/)
recip (toRGB -> (r,g,b)) = RGB (recip r, recip g, recip b)
fromRational _ = error "Could not create Color from Rational."
colorOp :: (Double -> Double -> Double) -> Color -> Color -> Color
colorOp op (toRGB -> (a, b, c)) (toRGB -> (d, e, f)) = RGB (op a d, op b e, op c f)
ComplexImage
| A concrete instance of Image representing pixels as complex values .
This instance can be written to file as a color PPM .
This instance can be written to file as a color PPM.
-}
type ComplexImage = BoxedImage Complex
type Complex = C.Complex Double
instance RealFloat a => Ord (C.Complex a) where
compare x y = compare (C.magnitude x) (C.magnitude y)
instance BinaryPixel Complex where
toBinary (0.0 C.:+ 0.0) = False
toBinary _ = True
on = (1.0 C.:+ 0.0)
off = (0.0 C.:+ 0.0)
instance DisplayFormat ComplexImage where
format (complexImageToColorImage -> rgb) = toPPM rgb
instance CI.ComplexPixel Complex where
type Value Complex = Double
toComplex = id
fromComplex = id
complexImageToColorImage :: ComplexImage -> ColorImage
complexImageToColorImage img = fmap rgb img where
scale = complexScale img
rgb comp = if radius < 1 then RGB (red', grn', blu') else RGB (red, grn, blu) where
[red, grn, blu] = map (+d') [r',g',b']
[red', grn', blu'] = map (flip (-) d') [r',g',b']
[x, y] = map (*scale) [C.realPart comp, C.imagPart comp]
radius = sqrt((x*x) + (y*y))
a = onedivsqrtsix*x
b = sqrttwodivtwo*y
d = 1.0/(1.0 + (radius*radius))
d' = 0.5 - radius*d
r' = 0.5 + (twodivsqrtsix * x * d)
b' = 0.5 - (d * (a - b))
g' = 0.5 - (d * (a + b))
complexScale :: ComplexImage -> Double
complexScale (CI.complexImageToRectangular -> (real, imag)) = 2.0/(maxv - minv) where
maxr = maximum . pixelList $ (real :: GrayImage)
maxi = maximum . pixelList $ imag
minr = minimum . pixelList $ real
mini = minimum . pixelList $ imag
maxv = max maxr maxi
minv = min minr mini
2.0 / sqrt(6 )
1.0 / sqrt(6 )
getComponent to component img = fmap (component . to) img
getRGB = getComponent toRGB
| Given a ColorImage , returns a GrayImage representing the Red color component
> > > let red = colorImageRed cacti
< -hip/master/examples/colorimagered.jpg >
>>>let red = colorImageRed cacti
<-hip/master/examples/colorimagered.jpg>
-}
colorImageRed :: ColorImage -> GrayImage
colorImageRed = getRGB (\ (r, _, _) -> r)
| Given a ColorImage , returns a GrayImage representing the Green color component
> > > let green = colorImageGreen cacti
< -hip/master/examples/colorimagegreen.jpg >
>>>let green = colorImageGreen cacti
<-hip/master/examples/colorimagegreen.jpg>
-}
colorImageGreen :: ColorImage -> GrayImage
colorImageGreen = getRGB (\ (_,g,_) -> g)
| Given a ColorImage , returns a GrayImage representing the Blue color component
> > > let blue = colorImageBlue cacti
< -hip/master/examples/colorimageblue.jpg >
>>>let blue = colorImageBlue cacti
<-hip/master/examples/colorimageblue.jpg>
-}
colorImageBlue :: ColorImage -> GrayImage
colorImageBlue = getRGB (\ (_,_,b) -> b)
| Given a ColorImage , returns a triple containing three GrayImages each
containing one of the color components ( red , green , blue )
> > > leftToRight ' . colorImageToRGB $ cacti
< -hip/master/examples/colorimagetorgb.jpg >
containing one of the color components (red, green, blue)
>>>leftToRight' . colorImageToRGB $ cacti
<-hip/master/examples/colorimagetorgb.jpg>
-}
colorImageToRGB :: ColorImage -> (GrayImage, GrayImage, GrayImage)
colorImageToRGB img = (colorImageRed img, colorImageGreen img, colorImageBlue img)
| Given a triple containing three GrayImages each containing one of the
color components ( red , green , blue ) , returns a ColorImage
> > > rgbToColorImage ( red , green , blue )
< -hip/master/examples/cacti.jpg >
color components (red, green, blue), returns a ColorImage
>>>rgbToColorImage (red,green,blue)
<-hip/master/examples/cacti.jpg>
-}
rgbToColorImage :: (GrayImage, GrayImage, GrayImage) -> ColorImage
rgbToColorImage (red, green, blue) = createRGB <$> red <*> green <*> blue where
createRGB r g b = RGB (r, g, b)
getHSI = getComponent toHSI
| Given a ColorImage , returns a GrayImage representing the Hue component
> > > let h = colorImageHue cacti
< -hip/master/examples/colorimagehue.jpg >
>>>let h = colorImageHue cacti
<-hip/master/examples/colorimagehue.jpg>
-}
colorImageHue :: ColorImage -> GrayImage
colorImageHue = getHSI (\ (h, _, _) -> h)
| Given a ColorImage , returns a GrayImage representing the Saturation component
> > > let s = colorImageSaturation cacti
< -hip/master/examples/colorimagesaturation.jpg >
>>>let s = colorImageSaturation cacti
<-hip/master/examples/colorimagesaturation.jpg>
-}
colorImageSaturation :: ColorImage -> GrayImage
colorImageSaturation = getHSI (\ (_,s,_) -> s)
| Given a ColorImage , returns a GrayImage representing the Intensity component
> > > let i = colorImageIntensity cacti
< -hip/master/examples/colorimageintensity.jpg >
>>>let i = colorImageIntensity cacti
<-hip/master/examples/colorimageintensity.jpg>
-}
colorImageIntensity :: ColorImage -> GrayImage
colorImageIntensity = getHSI (\ (_,_,i) -> i)
| Given a triple containing three GrayImages each containing one of the
color components ( hue , saturation , ) , returns a ColorImage
> > > hsiToColorImage ( h , s , i )
< -hip/master/examples/cacti.jpg >
color components (hue, saturation, ), returns a ColorImage
>>> hsiToColorImage (h, s, i)
<-hip/master/examples/cacti.jpg>
-}
hsiToColorImage :: (GrayImage, GrayImage, GrayImage) -> ColorImage
hsiToColorImage (h, s, i) = toHSI <$> h <*> s <*> i where
toHSI h s i = HSI (h, s, i)
| Given a ColorImage , returns a triple containing three GrayImages each
containing one of the components ( hue , saturation , intensity )
> > > let ( h , s , i ) = colorImageToHSI $ cacti
containing one of the components (hue, saturation, intensity)
>>>let (h, s, i) = colorImageToHSI $ cacti
-}
colorImageToHSI :: ColorImage -> (GrayImage, GrayImage, GrayImage)
colorImageToHSI img = (colorImageHue img, colorImageSaturation img, colorImageIntensity img)
| Reads in an ASCI PPM file as a ColorImage
> > > cacti < - readColorImage " images / cacti.ppm "
< -hip/master/examples/cacti.jpg >
>>>cacti <- readColorImage "images/cacti.ppm"
<-hip/master/examples/cacti.jpg>
-}
readColorImage :: FilePath -> IO ColorImage
readColorImage fileName =
do
y <- B.readFile fileName
return $ parseRGBPixelImage . B.intercalate (B.pack " ") . stripComments . B.lines $ y
parseRGBPixelImage :: B.ByteString -> ColorImage
parseRGBPixelImage string = Image rows cols (V.fromList rgbs)
where ws = B.words string
getInt = fst. fromJust . B.readInt
px = map (fromIntegral . getInt) $ drop 4 ws
cols = getInt $ ws !! 1
rows = getInt $ ws !! 2
maxi = fromIntegral . getInt $ ws !! 3
[r, g, b] = colors px
rgbs = map rgb3 . zip3 r g $ b
rgb3 (r, g, b) = RGB (r, g, b)
colors :: [Int] -> [[Gray]]
colors xs = helper xs [] [] []
where helper [] red green blue = map (map fromIntegral) $ map reverse [red, green, blue]
helper (r:g:b:cs) red green blue = helper cs (r:red) (g:green) (b:blue)
| Coerces a GrayImage to a ComplexImage where the imaginary
part for all pixels is 0 .
> > > grayToComplex frog
part for all pixels is 0.
>>>grayToComplex frog
-}
grayToComplex :: GrayImage -> ComplexImage
grayToComplex img = fmap (C.:+ 0.0) img
| Given a GrayImage , makeHotImage returns a ColorImage with the same
dimensions . The R , G , B values of the result image at ( i , j ) are
determined by using the value of the ColorImage at ( i , j ) to index
three lookup tables . These lookup tables implement a false coloring
scheme which maps small values to black , large values to white , and
intermediate values to shades of red , orange , and yellow ( in that order ) .
> > > makeHotImage frog
< -hip/master/examples/makehotimage.jpg >
dimensions. The R, G, B values of the result image at (i, j) are
determined by using the value of the ColorImage at (i, j) to index
three lookup tables. These lookup tables implement a false coloring
scheme which maps small values to black, large values to white, and
intermediate values to shades of red, orange, and yellow (in that order).
>>>makeHotImage frog
<-hip/master/examples/makehotimage.jpg>
-}
makeHotImage :: GrayImage -> ColorImage
makeHotImage img = fmap (toHot max min) img where
max = maxIntensity img
min = minIntensity img
toHot max min pixel = RGB (r, g, b) where
px = (pixel - min)/(max-min)
r = if px < 0.333333333 then (px*3.0) else 1.0
g = if px < 0.333333333 then 0.0 else
if px < 0.666666667 then (px - 0.333333333)*3 else 1.0
b = if px < 0.666666667 then 0.0 else (px - 0.666666667)*3
| Performs bilinear interpolation of a GrayImage at the coordinates provided .
ref' :: GrayImage -> Double -> Double -> Double
ref' im r c = if inside then interpolate im c r else
if onedge then ref im r' c' else 0
where (r', c') = (floor r, floor c)
(rs, cs) = (rows im, cols im)
inside = r' >= 0 && c' >= 0 && r' < rs-1 && c' < cs-1
onedge = (r' == rs-1 && c'>=0 && c' < cs) ||
(c' == cs-1 && r'>=0 && r' < rs)
interpolate :: GrayImage -> Double -> Double -> Double
interpolate im x y = fx1 + y'*(fx0-fx1)
where (x0, y0) = (floor x, floor y)
(x1, y1) = (x0 + 1, y0 +1)
x' = x - (fromIntegral x0);
y' = y - (fromIntegral y0);
f00 = ref im y0 x0
f10 = ref im y0 x1
f01 = ref im y1 x0
f11 = ref im y1 x1
fx0 = f00 + x'*(f10-f00)
fx1 = f01 + x'*(f11-f01)
| Given a complex image , returns a real image representing
the real part of the image .
@
harmonicSignal : : Double - > Double - > Int - > Int - > C.Complex Double
harmonicSignal u v m n = exp ( -pii*2.0 * var ) where
pii = 0.0 C.:+ pi
var = ( ) C.:+ 0.0
[ ' ] = map fromIntegral [ m , n ]
@
> > > let signal = makeImage 128 128 ( harmonicSignal ( 3/128 ) ( 2/128 ) ) : : ComplexImage
< -hip/master/examples/signal.jpg >
> > > let cosine = realPart signal
< -hip/master/examples/cosine.jpg >
> > > realPart realPart . ifft $ ( fft frogpart ) * ( fft d2 g )
< -hip/master/examples/realpart.jpg >
> > > realPart realPart . ifft $ ( fft frogpart ) * ( fft g )
< -hip/master/examples/realpart2.jpg >
the real part of the image.
@
harmonicSignal :: Double -> Double -> Int -> Int -> C.Complex Double
harmonicSignal u v m n = exp (-pii*2.0 * var) where
pii = 0.0 C.:+ pi
var = (u*m' + v*n') C.:+ 0.0
[m',n'] = map fromIntegral [m, n]
@
>>> let signal = makeImage 128 128 (harmonicSignal (3/128) (2/128)) :: ComplexImage
<-hip/master/examples/signal.jpg>
>>>let cosine = realPart signal
<-hip/master/examples/cosine.jpg>
>>>realPart realPart . ifft $ (fft frogpart) * (fft d2g)
<-hip/master/examples/realpart.jpg>
>>>realPart realPart . ifft $ (fft frogpart) * (fft g)
<-hip/master/examples/realpart2.jpg>
-}
realPart :: ComplexImage -> GrayImage
realPart = CI.realPart
imagPart :: ComplexImage -> GrayImage
imagPart = CI.imagPart
magnitude :: ComplexImage -> GrayImage
magnitude = CI.magnitude
angle :: ComplexImage -> GrayImage
angle = CI.angle
complexImageToPolar :: ComplexImage -> (GrayImage, GrayImage)
complexImageToPolar = CI.complexImageToPolar
complex :: GrayImage -> GrayImage -> ComplexImage
complex = CI.complex
complexImageToRectangular :: ComplexImage -> (GrayImage, GrayImage)
complexImageToRectangular = CI.complexImageToRectangular
| Given a complex image and a real positive number x , shrink returns
a complex image with the same dimensions . Let z be the value of the
image at location ( i , j ) . The value of the complex result image at
location ( i , j ) is zero if |z| < x , otherwise the result has the
same phase as z but the amplitude is decreased by x.
a complex image with the same dimensions. Let z be the value of the
image at location (i, j). The value of the complex result image at
location (i, j) is zero if |z| < x, otherwise the result has the
same phase as z but the amplitude is decreased by x.
-}
shrink :: (Image img,
CI.ComplexPixel (Pixel img)) => (CI.Value (Pixel img)) -> img -> img
shrink = CI.shrink
| Given an image whose pixels can be converted to a complex value ,
fft returns an image with complex pixels representing its 2D discrete
Fourier transform ( DFT ) . Because the DFT is computed using the Fast Fourier
Transform ( FFT ) algorithm , the number of rows and columns of the image
must both be powers of two , i.e. , 2 K where K is an integer .
> > > frog < - readImage " images / frog.pgm "
> > > let frogpart = crop 64 64 128 128 frog
< -hip/master/examples/frog.jpg >
< -hip/master/examples/frogpart.jpg >
> > > imageMap log . fft $ frogpart : : ComplexImage
< -hip/master/examples/fft.jpg >
> > > fft d2 g
< -hip/master/examples/fftd2g.jpg >
> > > fft g
< -hip/master/examples/fftg.jpg >
fft returns an image with complex pixels representing its 2D discrete
Fourier transform (DFT). Because the DFT is computed using the Fast Fourier
Transform (FFT) algorithm, the number of rows and columns of the image
must both be powers of two, i.e., 2K where K is an integer.
>>>frog <- readImage "images/frog.pgm"
>>>let frogpart = crop 64 64 128 128 frog
<-hip/master/examples/frog.jpg>
<-hip/master/examples/frogpart.jpg>
>>>imageMap log . fft $ frogpart :: ComplexImage
<-hip/master/examples/fft.jpg>
>>>fft d2g
<-hip/master/examples/fftd2g.jpg>
>>>fft g
<-hip/master/examples/fftg.jpg>
-}
fft :: (Image img,
CI.ComplexPixel (Pixel img),
CI.Value (Pixel img) ~ Double) => img -> ComplexImage
fft = CI.fft
| Given an image , ifft returns a complex image representing its 2D
inverse discrete Fourier transform ( DFT ) . Because the inverse DFT is
computed using the Fast Fourier Transform ( FFT ) algorithm , the number
of rows and columns of < image > must both be powers of two , i.e. , 2 K
where K is an integer .
> > > ifft ( ( fft frogpart ) * ( fft d2 g ) )
< -hip/master/examples/ifft.jpg >
> > > ifft ( ( fft frogpart ) * ( fft g ) )
< -hip/master/examples/ifft2.jpg >
inverse discrete Fourier transform (DFT). Because the inverse DFT is
computed using the Fast Fourier Transform (FFT) algorithm, the number
of rows and columns of <image> must both be powers of two, i.e., 2K
where K is an integer.
>>>ifft ((fft frogpart) * (fft d2g))
<-hip/master/examples/ifft.jpg>
>>>ifft ((fft frogpart) * (fft g))
<-hip/master/examples/ifft2.jpg>
-}
ifft :: (Image img,
CI.ComplexPixel (Pixel img),
CI.Value (Pixel img) ~ Double) => img -> ComplexImage
ifft = CI.ifft
| Given a binary image , distanceTransform returns an image
representing the 2D distance transform of the image .
The distance transform is accurate to within a 2 % error for euclidean
distance .
> > > distanceTransform binaryStop : : GrayImage
< Image 86x159 >
< -hip/master/examples/distancetransform.jpg >
representing the 2D distance transform of the image.
The distance transform is accurate to within a 2% error for euclidean
distance.
>>>distanceTransform binaryStop :: GrayImage
< Image 86x159 >
<-hip/master/examples/distancetransform.jpg>
-}
distanceTransform :: (Image img,
BinaryPixel (Pixel img)) => img -> GrayImage
distanceTransform = Bin.distanceTransform
| Given a binary image , label returns an image where pixels in
distinct connected components ( based on 4 - neighbor connectivity )
have distinct integer values . These values range from 1 to n where
n is the number of connected components in image .
> > > label binaryStop
< Image 86x159 >
< -hip/master/examples/label.jpg >
distinct connected components (based on 4-neighbor connectivity)
have distinct integer values. These values range from 1 to n where
n is the number of connected components in image.
>>> label binaryStop
< Image 86x159 >
<-hip/master/examples/label.jpg>
-}
label :: (Image img,
BinaryPixel (Pixel img)) => img -> GrayImage
label = Bin.label
| Reads in a ASCII PGM image located at fileName as a GrayImage
> > > frog < - readImage " images / frog.pgm "
< -hip/master/examples/frog.jpg >
>>>frog <- readImage "images/frog.pgm"
<-hip/master/examples/frog.jpg>
-}
readImage :: FilePath -> IO GrayImage
readImage fileName =
do
y <- B.readFile fileName
return $ parseImage . B.intercalate (B.pack " ") . stripComments . B.lines $ y
parseImage :: B.ByteString -> GrayImage
parseImage string = img
where ws = B.words string
getInt = fst . fromJust . B.readInt
px = map (fromIntegral . getInt) $ drop 4 ws
cols = getInt $ ws !! 1
rows = getInt $ ws !! 2
maxi = fromIntegral . getInt $ ws !! 3
img = Image rows cols (V.fromList px)
stripComments :: [B.ByteString] -> [B.ByteString]
stripComments xs = filter pred xs
where pred x
| B.null x = False
| B.head x == '#' = False
| otherwise = True
|
0a374b05e8533d6c9961f9673721353fa900850e89a053a9e734b6bdbdbf3913 | jordanthayer/ocaml-search | das_rewrite.ml | *
@author @since 2011 - 07 - 11
A Reimplementation of the DAS framework that first proposed
back in ' 08 , that did n't actually get published until the SoCS 2011
paper Deadline Aware Search using Measurements of Search Behavior ,
Dionne , Thayer , Ruml
@author jordan
@since 2011-07-11
A Reimplementation of the DAS framework that austin first proposed
back in '08, that didn't actually get published until the SoCS 2011
paper Deadline Aware Search using Measurements of Search Behavior,
Dionne, Thayer, Ruml
*)
type node_type =
| Open
| Reserve
| Closed
type floats = {
g : float;
f : float;
d : float;
depth : float;
generated : float; (* float so it can be time or exp count *)
}
type 'a node = {
data : 'a;
fp : floats;
mutable qpos : int;
mutable ntype : node_type;
}
(* Comparitors *)
let ordered_f a b =
let afp = a.fp
and bfp = b.fp in
let af = afp.f
and bf = bfp.f in
af < bf ||
(af = bf && afp.d < bfp.d) ||
(af = bf && afp.d = bfp.d && afp.g >= bfp.g)
let speedy_order a b =
let afp = a.fp
and bfp = b.fp in
let ad = afp.d
and bd = bfp.d in
ad < bd || (ad = bd && afp.g >= bfp.g)
(* utils *)
let wrap f =
(** takes a function to be applied to the data payload
such as the goal-test or the domain heuristic and
wraps it so that it can be applied to the entire
node *)
(fun n -> f n.data)
let unwrap_sol s =
(** Unwraps a solution which is in the form of a search node and presents
it in the format the domain expects it, which is domain data followed
by cost *)
match s with
| Limit.Nothing -> None
| Limit.Incumbent (q,n) -> Some (n.data, n.fp.g)
let set_pos n i =
* Sets the location of a node , used by dpq 's
n.qpos <- i
let delay_exp info child =
((float info.Limit.expanded) -. child.fp.generated) *. child.fp.d
let exp_rate_time info () =
(float info.Limit.expanded) /. (Sys.time() -. info.Limit.start_time)
let exp_rate _ () = 1.
(* Expand function *)
let make_expand expand hd info =
let expand_das_node n =
Limit.incr_exp info;
let depth' = n.fp.depth +. 1. in
List.map (fun (data, g) ->
let h,d = hd data
and _ = g -. n.fp.g in
Limit.incr_gen info;
let flt_data = { g = g; f = g +. h; d = d; depth = depth';
(* this will be done with a function call later
to allow time based generated instead of
expansion based generated *)
generated = float (info.Limit.expanded); } in
{ data = data; fp = flt_data; qpos = Dpq.no_position; ntype = Open })
(expand n.data n.fp.g) in
expand_das_node
let default_recover reserve openlist remaining =
let r = ref remaining in
while (!r > 0.) do
(let n = Dpq.extract_first reserve in
n.ntype <- Open;
r := !r -. n.fp.d;
Dpq.insert openlist n)
done
let consider_child closed info key make_decision reserve openlist child =
let insert decis =
(match decis with
| Open -> Dpq.insert openlist child
| Reserve -> Dpq.insert reserve child
| _ -> failwith "Bad decision!") in
if not (Limit.promising_p info child) then Limit.incr_prune info
else (let state = key child
and decis = make_decision child in
try
let prev = Htable.find closed state in
Limit.incr_dups info;
if (child.fp.f < prev.fp.f)
then (Htable.replace closed state child;
let pos = prev.qpos in
if (pos = Dpq.no_position) then Dpq.insert openlist child
else (match prev.ntype with
(* prev doesn't need updated here because it is gone *)
| Open -> Dpq.remove openlist pos
| Reserve -> Dpq.remove reserve pos
| _ -> failwith "should have caught close in if state");
insert decis)
with Not_found ->
child.ntype <- decis;
insert decis;
Htable.add closed state child)
let speedy_search_phase closed_list info expand goal_p key root =
let openlist = Dpq.create speedy_order set_pos 100 root in
let make_decision _ = Open in
let consider_kid = (consider_child closed_list info key
make_decision openlist openlist) in
let rec next () =
if (not (Dpq.empty_p openlist)) && (not (Limit.halt_p info)) then
let n = Dpq.extract_first openlist in
n.qpos <- Dpq.no_position;
n.ntype <- Closed;
if not (Limit.promising_p info n) then (Limit.incr_prune info; next())
else if goal_p n then Limit.new_incumbent info (Limit.Incumbent (0.,n))
else (let kids = expand n in
List.iter consider_kid kids;
Limit.curr_q info (Dpq.count openlist);
next()) in
Dpq.insert openlist root;
next();
openlist
let das_search_phase closed_list info root expand key goal_p
prev_open deadline =
let reserve = Dpq.create ordered_f set_pos 100 root
and openlist = Dpq.create ordered_f set_pos 100 root in
let get_delay = delay_exp info in
let exp_rate = exp_rate info in
let remaining () = float (deadline - info.Limit.expanded) in
let make_decision node =
let delay = get_delay node
and rate = exp_rate() in
if delay *. rate < (remaining()) then Open else Reserve in
let consider_kid = (consider_child closed_list info key make_decision
reserve openlist) in
let init () =
while (not (Dpq.empty_p prev_open)) do
(let n = Dpq.extract_first prev_open in
if Limit.promising_p info n then Dpq.insert openlist n
else Limit.incr_prune info)
done in
let recover () =
default_recover reserve openlist (remaining ()) in
let rec next () =
let empty_op = Dpq.empty_p openlist
and empty_re = Dpq.empty_p reserve in
let halt_b = Limit.halt_p info
and rem_b = remaining() <= 0. in
if (not (halt_b || (empty_op && empty_re) || rem_b)) then
if empty_op then (recover (); next ())
else (let n = Dpq.extract_first openlist in
n.qpos <- Dpq.no_position;
n.ntype <- Closed;
if not (Limit.promising_p info n)
then (Limit.incr_prune info;
next())
else if goal_p n
then (Limit.new_incumbent info (Limit.Incumbent (0.,n));
next ())
else (let kids = expand n in
List.iter consider_kid kids;
Limit.curr_q info ((Dpq.count openlist)+(Dpq.count reserve));
next())) in
init ();
next ()
let search hash eq key goal_p info expand root deadline =
(* make the closed list *)
let closed = Htable.create hash eq 100 in
let sopen = speedy_search_phase closed info expand goal_p key root in
let deadline' = deadline - (info.Limit.expanded) in
Verb.pe Verb.always "Speedy phase finished w %i remaining\n%!" deadline';
das_search_phase closed info root expand key goal_p sopen deadline
let dups sface args =
let deadline = Search_args.get_int "Contract_astar.dups" args 0 in
let key = wrap sface.Search_interface.key
and hash = sface.Search_interface.hash
and eq = sface.Search_interface.equals
and goal = wrap sface.Search_interface.goal_p
and hd = sface.Search_interface.hd
and init_state = sface.Search_interface.initial in
let hi, di = hd init_state in
let init_fp = { g = 0.; f = hi; d = di; depth = 0.; generated = 0.; } in
let root = { data = init_state; fp = init_fp;
qpos = Dpq.no_position; ntype = Open }
and info = (Limit.make Limit.Nothing sface.Search_interface.halt_on ordered_f
(Limit.make_default_logger (fun n -> n.fp.f)
(fun n -> sface.Search_interface.get_sol_length n.data))) in
let expand = make_expand sface.Search_interface.domain_expand hd info in
search hash eq key goal info expand root deadline;
Limit.unwrap_sol6 unwrap_sol (Limit.results6 info)
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/search/deadline/das_rewrite.ml | ocaml | float so it can be time or exp count
Comparitors
utils
* takes a function to be applied to the data payload
such as the goal-test or the domain heuristic and
wraps it so that it can be applied to the entire
node
* Unwraps a solution which is in the form of a search node and presents
it in the format the domain expects it, which is domain data followed
by cost
Expand function
this will be done with a function call later
to allow time based generated instead of
expansion based generated
prev doesn't need updated here because it is gone
make the closed list | *
@author @since 2011 - 07 - 11
A Reimplementation of the DAS framework that first proposed
back in ' 08 , that did n't actually get published until the SoCS 2011
paper Deadline Aware Search using Measurements of Search Behavior ,
Dionne , Thayer , Ruml
@author jordan
@since 2011-07-11
A Reimplementation of the DAS framework that austin first proposed
back in '08, that didn't actually get published until the SoCS 2011
paper Deadline Aware Search using Measurements of Search Behavior,
Dionne, Thayer, Ruml
*)
type node_type =
| Open
| Reserve
| Closed
type floats = {
g : float;
f : float;
d : float;
depth : float;
}
type 'a node = {
data : 'a;
fp : floats;
mutable qpos : int;
mutable ntype : node_type;
}
let ordered_f a b =
let afp = a.fp
and bfp = b.fp in
let af = afp.f
and bf = bfp.f in
af < bf ||
(af = bf && afp.d < bfp.d) ||
(af = bf && afp.d = bfp.d && afp.g >= bfp.g)
let speedy_order a b =
let afp = a.fp
and bfp = b.fp in
let ad = afp.d
and bd = bfp.d in
ad < bd || (ad = bd && afp.g >= bfp.g)
let wrap f =
(fun n -> f n.data)
let unwrap_sol s =
match s with
| Limit.Nothing -> None
| Limit.Incumbent (q,n) -> Some (n.data, n.fp.g)
let set_pos n i =
* Sets the location of a node , used by dpq 's
n.qpos <- i
let delay_exp info child =
((float info.Limit.expanded) -. child.fp.generated) *. child.fp.d
let exp_rate_time info () =
(float info.Limit.expanded) /. (Sys.time() -. info.Limit.start_time)
let exp_rate _ () = 1.
let make_expand expand hd info =
let expand_das_node n =
Limit.incr_exp info;
let depth' = n.fp.depth +. 1. in
List.map (fun (data, g) ->
let h,d = hd data
and _ = g -. n.fp.g in
Limit.incr_gen info;
let flt_data = { g = g; f = g +. h; d = d; depth = depth';
generated = float (info.Limit.expanded); } in
{ data = data; fp = flt_data; qpos = Dpq.no_position; ntype = Open })
(expand n.data n.fp.g) in
expand_das_node
let default_recover reserve openlist remaining =
let r = ref remaining in
while (!r > 0.) do
(let n = Dpq.extract_first reserve in
n.ntype <- Open;
r := !r -. n.fp.d;
Dpq.insert openlist n)
done
let consider_child closed info key make_decision reserve openlist child =
let insert decis =
(match decis with
| Open -> Dpq.insert openlist child
| Reserve -> Dpq.insert reserve child
| _ -> failwith "Bad decision!") in
if not (Limit.promising_p info child) then Limit.incr_prune info
else (let state = key child
and decis = make_decision child in
try
let prev = Htable.find closed state in
Limit.incr_dups info;
if (child.fp.f < prev.fp.f)
then (Htable.replace closed state child;
let pos = prev.qpos in
if (pos = Dpq.no_position) then Dpq.insert openlist child
else (match prev.ntype with
| Open -> Dpq.remove openlist pos
| Reserve -> Dpq.remove reserve pos
| _ -> failwith "should have caught close in if state");
insert decis)
with Not_found ->
child.ntype <- decis;
insert decis;
Htable.add closed state child)
let speedy_search_phase closed_list info expand goal_p key root =
let openlist = Dpq.create speedy_order set_pos 100 root in
let make_decision _ = Open in
let consider_kid = (consider_child closed_list info key
make_decision openlist openlist) in
let rec next () =
if (not (Dpq.empty_p openlist)) && (not (Limit.halt_p info)) then
let n = Dpq.extract_first openlist in
n.qpos <- Dpq.no_position;
n.ntype <- Closed;
if not (Limit.promising_p info n) then (Limit.incr_prune info; next())
else if goal_p n then Limit.new_incumbent info (Limit.Incumbent (0.,n))
else (let kids = expand n in
List.iter consider_kid kids;
Limit.curr_q info (Dpq.count openlist);
next()) in
Dpq.insert openlist root;
next();
openlist
let das_search_phase closed_list info root expand key goal_p
prev_open deadline =
let reserve = Dpq.create ordered_f set_pos 100 root
and openlist = Dpq.create ordered_f set_pos 100 root in
let get_delay = delay_exp info in
let exp_rate = exp_rate info in
let remaining () = float (deadline - info.Limit.expanded) in
let make_decision node =
let delay = get_delay node
and rate = exp_rate() in
if delay *. rate < (remaining()) then Open else Reserve in
let consider_kid = (consider_child closed_list info key make_decision
reserve openlist) in
let init () =
while (not (Dpq.empty_p prev_open)) do
(let n = Dpq.extract_first prev_open in
if Limit.promising_p info n then Dpq.insert openlist n
else Limit.incr_prune info)
done in
let recover () =
default_recover reserve openlist (remaining ()) in
let rec next () =
let empty_op = Dpq.empty_p openlist
and empty_re = Dpq.empty_p reserve in
let halt_b = Limit.halt_p info
and rem_b = remaining() <= 0. in
if (not (halt_b || (empty_op && empty_re) || rem_b)) then
if empty_op then (recover (); next ())
else (let n = Dpq.extract_first openlist in
n.qpos <- Dpq.no_position;
n.ntype <- Closed;
if not (Limit.promising_p info n)
then (Limit.incr_prune info;
next())
else if goal_p n
then (Limit.new_incumbent info (Limit.Incumbent (0.,n));
next ())
else (let kids = expand n in
List.iter consider_kid kids;
Limit.curr_q info ((Dpq.count openlist)+(Dpq.count reserve));
next())) in
init ();
next ()
let search hash eq key goal_p info expand root deadline =
let closed = Htable.create hash eq 100 in
let sopen = speedy_search_phase closed info expand goal_p key root in
let deadline' = deadline - (info.Limit.expanded) in
Verb.pe Verb.always "Speedy phase finished w %i remaining\n%!" deadline';
das_search_phase closed info root expand key goal_p sopen deadline
let dups sface args =
let deadline = Search_args.get_int "Contract_astar.dups" args 0 in
let key = wrap sface.Search_interface.key
and hash = sface.Search_interface.hash
and eq = sface.Search_interface.equals
and goal = wrap sface.Search_interface.goal_p
and hd = sface.Search_interface.hd
and init_state = sface.Search_interface.initial in
let hi, di = hd init_state in
let init_fp = { g = 0.; f = hi; d = di; depth = 0.; generated = 0.; } in
let root = { data = init_state; fp = init_fp;
qpos = Dpq.no_position; ntype = Open }
and info = (Limit.make Limit.Nothing sface.Search_interface.halt_on ordered_f
(Limit.make_default_logger (fun n -> n.fp.f)
(fun n -> sface.Search_interface.get_sol_length n.data))) in
let expand = make_expand sface.Search_interface.domain_expand hd info in
search hash eq key goal info expand root deadline;
Limit.unwrap_sol6 unwrap_sol (Limit.results6 info)
EOF
|
e0a955a62c528d93a0e0206ebaa2eab387d554bc108c24e6f3f53194dd1a4a32 | locusmath/locus | object.clj | (ns locus.set.tree.chain.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.mapping.general.core.util :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.tree.structure.core.protocols :refer :all])
(:import (locus.set.mapping.general.core.object SetFunction)))
Objects of a copresheaf Sets^{T_n } are called chain copresheaves . Their underlying index
categories are the finite total orders . The triangle copresheaves , which consist of
; ordered pairs of composable functions are a special case. In more advanced applications,
it is common to deal with chain copresheaves with additional structure . In this
file we will simply focus on the elementary topos theoretic aspects of copresheaves
; over finite chain total orders.
(deftype SetChain [functions])
(derive SetChain :locus.set.logic.structure.protocols/copresheaf)
; Get an nth set starting from the source
(defn composition-sequence
[^SetChain chain]
(.-functions chain))
(defn nth-set-from-source
[chain i]
(let [reverse-functions (reverse (composition-sequence chain))]
(if (zero? i)
(inputs (first reverse-functions))
(outputs (nth reverse-functions (dec i))))))
(defn set-sequence-from-source
[chain]
(let [functions (reverse (composition-sequence chain))]
(concat
(map inputs functions)
(list (outputs (last functions))))))
(defn get-function-at-nth-point-from-source
[chain i]
(let [functions (composition-sequence chain)
last-index (dec (count functions))]
(nth functions (- last-index i))))
(defn get-chain-transition-function
[chain x y]
(if (= x y)
(identity-function (nth-set-from-source chain x))
(apply
compose
(map
(fn [i]
(get-function-at-nth-point-from-source chain i))
(reverse (range x y))))))
(defmethod get-set SetChain
[^SetChain chain, i]
(nth-set-from-source chain i))
(defmethod get-function SetChain
[^SetChain chain, [a b]]
(get-chain-transition-function chain a b))
; Get the parent topos of a chain copresheaf
(defn chain-type
[^SetChain chain]
(count (composition-sequence chain)))
; Get the composition of a chain copresheaf
(defn chain-composition
[^SetChain chain]
(apply compose (composition-sequence chain)))
; Compose components in the chain copresheaf
(defn compose-components
[chain i]
(let [j (inc i)
functions (composition-sequence chain)]
(->SetChain
(concat
(take i functions)
(list (compose (nth functions j) (nth functions i)))
(drop (inc j) functions)))))
(defn adjoin-identity-function
[chain i]
(let [functions (composition-sequence chain)
n (count functions)]
(->SetChain
(concat
(take i functions)
(let [coll (if (= n i)
(inputs (last functions))
(outputs (nth functions i)))]
(list (identity-function coll)))
(drop i functions)))))
Eliminate identity functions from a chain copresheaf
(defn eliminate-identity-functions
[chain]
(->SetChain
(filter
(fn [function]
(not (identity-function? function)))
(composition-sequence chain))))
Conversion multimethods
(defmulti to-set-chain type)
(defmethod to-set-chain SetChain
[^SetChain chain] chain)
(defmethod to-set-chain SetFunction
[^SetFunction func] (->SetChain [func]))
(defn singleton-chain
[& coll]
(SetChain.
(reverse
(map
(fn [i]
(pair-function (nth coll i) (nth coll (inc i))))
(range (dec (count coll)))))))
(defmethod to-set-chain clojure.lang.ISeq
[coll] (apply singleton-chain coll))
(defmethod to-set-chain clojure.lang.IPersistentVector
[coll] (apply singleton-chain coll))
; Create an inclusion chain from a monotone sequence of sets
(defn inclusion-chain
[coll]
(SetChain.
(reverse
(map
(fn [i]
(inclusion-function (nth coll i) (nth coll (inc i))))
(range (dec (count coll)))))))
Products and coproducts in topoi of chain copresheaves
(defmethod product SetChain
[& chains]
(let [n (chain-type (first chains))]
(SetChain.
(map
(fn [i]
(apply
function-product
(map
(fn [chain]
(nth (composition-sequence chain) i))
chains)))
(range n)))))
(defmethod coproduct SetChain
[& chains]
(let [n (chain-type (first chains))]
(SetChain.
(map
(fn [i]
(apply
function-coproduct
(map
(fn [chain]
(nth (composition-sequence chain) i))
chains)))
(range n)))))
Ontology of chain copresheaves
(defn set-chain?
[chain]
(= (type chain) SetChain))
(defn chain-of-injective-functions?
[chain]
(and
(set-chain? chain)
(every? injective? (composition-sequence chain))))
(defn chain-of-surjective-functions?
[chain]
(and
(set-chain? chain)
(every? surjective? (composition-sequence chain))))
(defn chain-of-invertible-functions?
[chain]
(and
(set-chain? chain)
(every? invertible? (composition-sequence chain))))
(defn identity-free-chain?
[chain]
(and
(set-chain? chain)
(every?
(fn [i]
(not (identity-function? i)))
(composition-sequence chain))))
; Create the data for a chain copresheaf
(defn create-chain-data
[args]
(let [functions (reverse args)
colls (vec
(concat
(map inputs functions)
(list (outputs (last functions)))))
triples (map-indexed
(fn [i function]
(list i (inc i) function))
functions)]
(list (vector->map colls) triples)))
(defmethod visualize SetChain
[^SetChain chain]
(let [[p t] (apply
generate-copresheaf-data
(create-chain-data (composition-sequence chain)))]
(visualize-clustered-digraph* "LR" p t)))
| null | https://raw.githubusercontent.com/locusmath/locus/fb6068bd78977b51fd3c5783545a5f9986e4235c/src/clojure/locus/set/tree/chain/core/object.clj | clojure | ordered pairs of composable functions are a special case. In more advanced applications,
over finite chain total orders.
Get an nth set starting from the source
Get the parent topos of a chain copresheaf
Get the composition of a chain copresheaf
Compose components in the chain copresheaf
Create an inclusion chain from a monotone sequence of sets
Create the data for a chain copresheaf | (ns locus.set.tree.chain.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.mapping.general.core.util :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.tree.structure.core.protocols :refer :all])
(:import (locus.set.mapping.general.core.object SetFunction)))
Objects of a copresheaf Sets^{T_n } are called chain copresheaves . Their underlying index
categories are the finite total orders . The triangle copresheaves , which consist of
it is common to deal with chain copresheaves with additional structure . In this
file we will simply focus on the elementary topos theoretic aspects of copresheaves
(deftype SetChain [functions])
(derive SetChain :locus.set.logic.structure.protocols/copresheaf)
(defn composition-sequence
[^SetChain chain]
(.-functions chain))
(defn nth-set-from-source
[chain i]
(let [reverse-functions (reverse (composition-sequence chain))]
(if (zero? i)
(inputs (first reverse-functions))
(outputs (nth reverse-functions (dec i))))))
(defn set-sequence-from-source
[chain]
(let [functions (reverse (composition-sequence chain))]
(concat
(map inputs functions)
(list (outputs (last functions))))))
(defn get-function-at-nth-point-from-source
[chain i]
(let [functions (composition-sequence chain)
last-index (dec (count functions))]
(nth functions (- last-index i))))
(defn get-chain-transition-function
[chain x y]
(if (= x y)
(identity-function (nth-set-from-source chain x))
(apply
compose
(map
(fn [i]
(get-function-at-nth-point-from-source chain i))
(reverse (range x y))))))
(defmethod get-set SetChain
[^SetChain chain, i]
(nth-set-from-source chain i))
(defmethod get-function SetChain
[^SetChain chain, [a b]]
(get-chain-transition-function chain a b))
(defn chain-type
[^SetChain chain]
(count (composition-sequence chain)))
(defn chain-composition
[^SetChain chain]
(apply compose (composition-sequence chain)))
(defn compose-components
[chain i]
(let [j (inc i)
functions (composition-sequence chain)]
(->SetChain
(concat
(take i functions)
(list (compose (nth functions j) (nth functions i)))
(drop (inc j) functions)))))
(defn adjoin-identity-function
[chain i]
(let [functions (composition-sequence chain)
n (count functions)]
(->SetChain
(concat
(take i functions)
(let [coll (if (= n i)
(inputs (last functions))
(outputs (nth functions i)))]
(list (identity-function coll)))
(drop i functions)))))
Eliminate identity functions from a chain copresheaf
(defn eliminate-identity-functions
[chain]
(->SetChain
(filter
(fn [function]
(not (identity-function? function)))
(composition-sequence chain))))
Conversion multimethods
(defmulti to-set-chain type)
(defmethod to-set-chain SetChain
[^SetChain chain] chain)
(defmethod to-set-chain SetFunction
[^SetFunction func] (->SetChain [func]))
(defn singleton-chain
[& coll]
(SetChain.
(reverse
(map
(fn [i]
(pair-function (nth coll i) (nth coll (inc i))))
(range (dec (count coll)))))))
(defmethod to-set-chain clojure.lang.ISeq
[coll] (apply singleton-chain coll))
(defmethod to-set-chain clojure.lang.IPersistentVector
[coll] (apply singleton-chain coll))
(defn inclusion-chain
[coll]
(SetChain.
(reverse
(map
(fn [i]
(inclusion-function (nth coll i) (nth coll (inc i))))
(range (dec (count coll)))))))
Products and coproducts in topoi of chain copresheaves
(defmethod product SetChain
[& chains]
(let [n (chain-type (first chains))]
(SetChain.
(map
(fn [i]
(apply
function-product
(map
(fn [chain]
(nth (composition-sequence chain) i))
chains)))
(range n)))))
(defmethod coproduct SetChain
[& chains]
(let [n (chain-type (first chains))]
(SetChain.
(map
(fn [i]
(apply
function-coproduct
(map
(fn [chain]
(nth (composition-sequence chain) i))
chains)))
(range n)))))
Ontology of chain copresheaves
(defn set-chain?
[chain]
(= (type chain) SetChain))
(defn chain-of-injective-functions?
[chain]
(and
(set-chain? chain)
(every? injective? (composition-sequence chain))))
(defn chain-of-surjective-functions?
[chain]
(and
(set-chain? chain)
(every? surjective? (composition-sequence chain))))
(defn chain-of-invertible-functions?
[chain]
(and
(set-chain? chain)
(every? invertible? (composition-sequence chain))))
(defn identity-free-chain?
[chain]
(and
(set-chain? chain)
(every?
(fn [i]
(not (identity-function? i)))
(composition-sequence chain))))
(defn create-chain-data
[args]
(let [functions (reverse args)
colls (vec
(concat
(map inputs functions)
(list (outputs (last functions)))))
triples (map-indexed
(fn [i function]
(list i (inc i) function))
functions)]
(list (vector->map colls) triples)))
(defmethod visualize SetChain
[^SetChain chain]
(let [[p t] (apply
generate-copresheaf-data
(create-chain-data (composition-sequence chain)))]
(visualize-clustered-digraph* "LR" p t)))
|
efd551c655f2ade89f6ac514bb0927ed28e871d2ae21aa4174e2b4ef93fd53ca | Copilot-Language/copilot | Core.hs | {-# LANGUAGE Safe #-}
-- |
-- Description: Intermediate representation for Copilot specifications.
Copyright : ( c ) 2011 National Institute of Aerospace / Galois , Inc.
--
-- The following articles might also be useful:
--
* Carette , and Kiselyov , and , ,
-- \"/Finally tagless, partially evaluated: Tagless staged/
-- /interpreters for simpler typed languages/\",
Journal of Functional Programming vol . 19 , p. 509 - 543 , 2009 .
--
* , and , ,
\"/Type - Safe Code Transformations in " ,
Electronic Notes in Theoretical Computer Science vol . 174 , p. 23 - 39 , 2007 .
--
-- For examples of how to traverse a Copilot specification see
-- the source code of the interpreter (@copilot-interpreter@)
-- and the pretty-printer
-- ("Copilot.Core.PrettyPrint").
module Copilot.Core
( module Copilot.Core.Expr
, module Copilot.Core.Operators
, module Copilot.Core.Spec
, module Copilot.Core.Type
, module Copilot.Core.Type.Array
, module Data.Int
, module Data.Word
)
where
-- External imports
import Data.Int
import Data.Word
-- Internal imports
import Copilot.Core.Expr
import Copilot.Core.Operators
import Copilot.Core.Spec
import Copilot.Core.Type
import Copilot.Core.Type.Array
| null | https://raw.githubusercontent.com/Copilot-Language/copilot/c981e3160f22d0f7438cf691a040185674de583a/copilot-core/src/Copilot/Core.hs | haskell | # LANGUAGE Safe #
|
Description: Intermediate representation for Copilot specifications.
The following articles might also be useful:
\"/Finally tagless, partially evaluated: Tagless staged/
/interpreters for simpler typed languages/\",
For examples of how to traverse a Copilot specification see
the source code of the interpreter (@copilot-interpreter@)
and the pretty-printer
("Copilot.Core.PrettyPrint").
External imports
Internal imports |
Copyright : ( c ) 2011 National Institute of Aerospace / Galois , Inc.
* Carette , and Kiselyov , and , ,
Journal of Functional Programming vol . 19 , p. 509 - 543 , 2009 .
* , and , ,
\"/Type - Safe Code Transformations in " ,
Electronic Notes in Theoretical Computer Science vol . 174 , p. 23 - 39 , 2007 .
module Copilot.Core
( module Copilot.Core.Expr
, module Copilot.Core.Operators
, module Copilot.Core.Spec
, module Copilot.Core.Type
, module Copilot.Core.Type.Array
, module Data.Int
, module Data.Word
)
where
import Data.Int
import Data.Word
import Copilot.Core.Expr
import Copilot.Core.Operators
import Copilot.Core.Spec
import Copilot.Core.Type
import Copilot.Core.Type.Array
|
61bca3494f86f31362fdca3c3d733777173ed4a438671263e390af61d3d46a13 | as-capabl/armageddon | IORefRunner.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
module
Control.Arrow.Machine.IORefRunner
where
import Control.Monad.Trans.Control
import Control.Monad.Base
import qualified Control.Arrow.Machine.World as Mc
import Data.IORef
data IORefRunner (instr :: * -> *) (m :: * -> *) = IORefRunner
instance
MonadBaseControl IO m =>
Mc.WorldRunner IO m (IORefRunner IO m)
where
type Ref (IORefRunner IO m) = IORef
newRef _ = newIORef
refGet _ = readIORef
refSet _ = writeIORef
refAtomicModify _ = atomicModifyIORef
| null | https://raw.githubusercontent.com/as-capabl/armageddon/f9724b7a545f7e66931d2a46732402a67a59a03e/machinecell-extra/src/Control/Arrow/Machine/IORefRunner.hs | haskell | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
module
Control.Arrow.Machine.IORefRunner
where
import Control.Monad.Trans.Control
import Control.Monad.Base
import qualified Control.Arrow.Machine.World as Mc
import Data.IORef
data IORefRunner (instr :: * -> *) (m :: * -> *) = IORefRunner
instance
MonadBaseControl IO m =>
Mc.WorldRunner IO m (IORefRunner IO m)
where
type Ref (IORefRunner IO m) = IORef
newRef _ = newIORef
refGet _ = readIORef
refSet _ = writeIORef
refAtomicModify _ = atomicModifyIORef
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.