_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
d44ee09112ce81602826e671daa1e8a62f11bac447bcd990344c17e155815ac9 | snoyberg/file-embed | FileEmbed.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
| This module uses template . Following is a simplified explanation of usage for those unfamiliar with calling functions .
--
-- The function @embedFile@ in this modules embeds a file into the executable
that you can use it at runtime . A file is represented as a @ByteString@.
-- However, as you can see below, the type signature indicates a value of type
@Q Exp@ will be returned . In order to convert this into a @ByteString@ , you
must use Template Haskell syntax , e.g. :
--
-- > $(embedFile "myfile.txt")
--
-- This expression will have type @ByteString@. Be certain to enable the
TemplateHaskell language extension , usually by adding the following to the
-- top of your module:
--
-- > {-# LANGUAGE TemplateHaskell #-}
module Data.FileEmbed
( -- * Embed at compile time
embedFile
, embedFileIfExists
, embedOneFileOf
, embedDir
, embedDirListing
, getDir
* Embed as a IsString
, embedStringFile
, embedOneStringFileOf
-- * Inject into an executable
-- $inject
#if MIN_VERSION_template_haskell(2,5,0)
, dummySpace
, dummySpaceWith
#endif
, inject
, injectFile
, injectWith
, injectFileWith
-- * Relative path manipulation
, makeRelativeToProject
, makeRelativeToLocationPredicate
-- * Internal
, stringToBs
, bsToExp
, strToExp
) where
import Language.Haskell.TH.Syntax
( Exp (AppE, ListE, LitE, TupE, SigE, VarE)
, Lit (..)
, Q
, runIO
, qLocation, loc_filename
#if MIN_VERSION_template_haskell(2,7,0)
, Quasi(qAddDependentFile)
#endif
)
#if MIN_VERSION_template_haskell(2,16,0)
import Language.Haskell.TH ( mkBytes, bytesPrimL )
import qualified Data.ByteString.Internal as B
#endif
import System.Directory (doesDirectoryExist, doesFileExist,
getDirectoryContents, canonicalizePath)
import Control.Exception (throw, tryJust, ErrorCall(..))
import Control.Monad (filterM, guard)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import Control.Arrow ((&&&), second)
import Control.Applicative ((<$>))
import Data.ByteString.Unsafe (unsafePackAddressLen)
import System.IO.Error (isDoesNotExistError)
import System.IO.Unsafe (unsafePerformIO)
import System.FilePath ((</>), takeDirectory, takeExtension)
import Data.String (fromString)
import Prelude as P
import Data.List (sortBy)
import Data.Ord (comparing)
-- | Embed a single file in your source code.
--
-- > import qualified Data.ByteString
-- >
> myFile : : Data . ByteString . ByteString
-- > myFile = $(embedFile "dirName/fileName")
embedFile :: FilePath -> Q Exp
embedFile fp =
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp >>
#endif
(runIO $ B.readFile fp) >>= bsToExp
-- | Maybe embed a single file in your source code depending on whether or not file exists.
--
-- Warning: When a build is compiled with the file missing, a recompile when the file exists might not trigger an embed of the file.
-- You might try to fix this by doing a clean build.
--
-- > import qualified Data.ByteString
-- >
> maybeMyFile : : Maybe Data . ByteString . ByteString
-- > maybeMyFile = $(embedFileIfExists "dirName/fileName")
--
@since 0.0.14.0
embedFileIfExists :: FilePath -> Q Exp
embedFileIfExists fp = do
mbs <- runIO maybeFile
case mbs of
Nothing -> [| Nothing |]
Just bs -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp
#endif
[| Just $(bsToExp bs) |]
where
maybeFile :: IO (Maybe B.ByteString)
maybeFile =
either (const Nothing) Just <$>
tryJust (guard . isDoesNotExistError) (B.readFile fp)
-- | Embed a single existing file in your source code
-- out of list a list of paths supplied.
--
-- > import qualified Data.ByteString
-- >
> myFile : : Data . ByteString . ByteString
-- > myFile = $(embedOneFileOf [ "dirName/fileName", "src/dirName/fileName" ])
embedOneFileOf :: [FilePath] -> Q Exp
embedOneFileOf ps =
(runIO $ readExistingFile ps) >>= \ ( path, content ) -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile path
#endif
bsToExp content
where
readExistingFile :: [FilePath] -> IO ( FilePath, B.ByteString )
readExistingFile xs = do
ys <- filterM doesFileExist xs
case ys of
(p:_) -> B.readFile p >>= \ c -> return ( p, c )
_ -> throw $ ErrorCall "Cannot find file to embed as resource"
-- | Embed a directory recursively in your source code.
--
-- > import qualified Data.ByteString
-- >
> myDir : : [ ( FilePath , Data . ByteString . ByteString ) ]
-- > myDir = $(embedDir "dirName")
embedDir :: FilePath -> Q Exp
embedDir fp = do
typ <- [t| [(FilePath, B.ByteString)] |]
e <- ListE <$> ((runIO $ fileList fp) >>= mapM (pairToExp fp))
return $ SigE e typ
-- | Embed a directory listing recursively in your source code.
--
-- > myFiles :: [FilePath]
-- > myFiles = $(embedDirListing "dirName")
--
@since 0.0.11
embedDirListing :: FilePath -> Q Exp
embedDirListing fp = do
typ <- [t| [FilePath] |]
e <- ListE <$> ((runIO $ fmap fst <$> fileList fp) >>= mapM strToExp)
return $ SigE e typ
| Get a directory tree in the IO monad .
--
-- This is the workhorse of 'embedDir'
getDir :: FilePath -> IO [(FilePath, B.ByteString)]
getDir = fileList
pairToExp :: FilePath -> (FilePath, B.ByteString) -> Q Exp
pairToExp _root (path, bs) = do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile $ _root ++ '/' : path
#endif
exp' <- bsToExp bs
return $! TupE
#if MIN_VERSION_template_haskell(2,16,0)
$ map Just
#endif
[LitE $ StringL path, exp']
bsToExp :: B.ByteString -> Q Exp
#if MIN_VERSION_template_haskell(2, 5, 0)
bsToExp bs =
return $ VarE 'unsafePerformIO
`AppE` (VarE 'unsafePackAddressLen
`AppE` LitE (IntegerL $ fromIntegral $ B8.length bs)
#if MIN_VERSION_template_haskell(2, 16, 0)
`AppE` LitE (bytesPrimL (
let B.PS ptr off sz = bs
in mkBytes ptr (fromIntegral off) (fromIntegral sz))))
#elif MIN_VERSION_template_haskell(2, 8, 0)
`AppE` LitE (StringPrimL $ B.unpack bs))
#else
`AppE` LitE (StringPrimL $ B8.unpack bs))
#endif
#else
bsToExp bs = do
helper <- [| stringToBs |]
let chars = B8.unpack bs
return $! AppE helper $! LitE $! StringL chars
#endif
stringToBs :: String -> B.ByteString
stringToBs = B8.pack
-- | Embed a single file in your source code.
--
-- > import Data.String
-- >
> myFile : : a = > a
-- > myFile = $(embedStringFile "dirName/fileName")
--
Since 0.0.9
embedStringFile :: FilePath -> Q Exp
embedStringFile fp =
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp >>
#endif
(runIO $ P.readFile fp) >>= strToExp
-- | Embed a single existing string file in your source code
-- out of list a list of paths supplied.
--
Since 0.0.9
embedOneStringFileOf :: [FilePath] -> Q Exp
embedOneStringFileOf ps =
(runIO $ readExistingFile ps) >>= \ ( path, content ) -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile path
#endif
strToExp content
where
readExistingFile :: [FilePath] -> IO ( FilePath, String )
readExistingFile xs = do
ys <- filterM doesFileExist xs
case ys of
(p:_) -> P.readFile p >>= \ c -> return ( p, c )
_ -> throw $ ErrorCall "Cannot find file to embed as resource"
strToExp :: String -> Q Exp
#if MIN_VERSION_template_haskell(2, 5, 0)
strToExp s =
return $ VarE 'fromString
`AppE` LitE (StringL s)
#else
strToExp s = do
helper <- [| fromString |]
return $! AppE helper $! LitE $! StringL s
#endif
notHidden :: FilePath -> Bool
notHidden ('.':_) = False
notHidden _ = True
fileList :: FilePath -> IO [(FilePath, B.ByteString)]
fileList top = fileList' top ""
fileList' :: FilePath -> FilePath -> IO [(FilePath, B.ByteString)]
fileList' realTop top = do
allContents <- filter notHidden <$> getDirectoryContents (realTop </> top)
let all' = map ((top </>) &&& (\x -> realTop </> top </> x)) allContents
files <- filterM (doesFileExist . snd) all' >>=
mapM (liftPair2 . second B.readFile)
dirs <- filterM (doesDirectoryExist . snd) all' >>=
mapM (fileList' realTop . fst)
return $ sortBy (comparing fst) $ concat $ files : dirs
liftPair2 :: Monad m => (a, m b) -> m (a, b)
liftPair2 (a, b) = b >>= \b' -> return (a, b')
magic :: B.ByteString -> B.ByteString
magic x = B8.concat ["fe", x]
sizeLen :: Int
sizeLen = 20
getInner :: B.ByteString -> B.ByteString
getInner b =
let (sizeBS, rest) = B.splitAt sizeLen b
in case reads $ B8.unpack sizeBS of
(i, _):_ -> B.take i rest
[] -> error "Data.FileEmbed (getInner): Your dummy space has been corrupted."
padSize :: Int -> String
padSize i =
let s = show i
in replicate (sizeLen - length s) '0' ++ s
#if MIN_VERSION_template_haskell(2,5,0)
-- | Allocate the given number of bytes in the generate executable. That space
-- can be filled up with the 'inject' and 'injectFile' functions.
dummySpace :: Int -> Q Exp
dummySpace = dummySpaceWith "MS"
-- | Like 'dummySpace', but takes a postfix for the magic string. In
-- order for this to work, the same postfix must be used by 'inject' /
-- 'injectFile'. This allows an executable to have multiple
' ByteString 's injected into it , without encountering collisions .
--
Since 0.0.8
dummySpaceWith :: B.ByteString -> Int -> Q Exp
dummySpaceWith postfix space = do
let size = padSize space
magic' = magic postfix
start = B8.unpack magic' ++ size
magicLen = B8.length magic'
len = magicLen + sizeLen + space
chars = LitE $ StringPrimL $
#if MIN_VERSION_template_haskell(2,6,0)
map (toEnum . fromEnum) $
#endif
start ++ replicate space '0'
[| getInner (B.drop magicLen (unsafePerformIO (unsafePackAddressLen len $(return chars)))) |]
#endif
-- | Inject some raw data inside a @ByteString@ containing empty, dummy space
( allocated with @dummySpace@ ) . Typically , the original @ByteString@ is an
-- executable read from the filesystem.
inject :: B.ByteString -- ^ bs to inject
^ original BS containing dummy
^ new BS , or Nothing if there is insufficient dummy space
inject = injectWith "MS"
-- | Like 'inject', but takes a postfix for the magic string.
--
Since 0.0.8
injectWith :: B.ByteString -- ^ postfix of magic string
-> B.ByteString -- ^ bs to inject
^ original BS containing dummy
^ new BS , or Nothing if there is insufficient dummy space
injectWith postfix toInj orig =
if toInjL > size
then Nothing
else Just $ B.concat [before, magic', B8.pack $ padSize toInjL, toInj, B8.pack $ replicate (size - toInjL) '0', after]
where
magic' = magic postfix
toInjL = B.length toInj
(before, rest) = B.breakSubstring magic' orig
(sizeBS, rest') = B.splitAt sizeLen $ B.drop (B8.length magic') rest
size = case reads $ B8.unpack sizeBS of
(i, _):_ -> i
[] -> error $ "Data.FileEmbed (inject): Your dummy space has been corrupted. Size is: " ++ show sizeBS
after = B.drop size rest'
-- | Same as 'inject', but instead of performing the injecting in memory, read
-- the contents from the filesystem and write back to a different file on the
-- filesystem.
injectFile :: B.ByteString -- ^ bs to inject
-> FilePath -- ^ template file
-> FilePath -- ^ output file
-> IO ()
injectFile = injectFileWith "MS"
-- | Like 'injectFile', but takes a postfix for the magic string.
--
Since 0.0.8
injectFileWith :: B.ByteString -- ^ postfix of magic string
-> B.ByteString -- ^ bs to inject
-> FilePath -- ^ template file
-> FilePath -- ^ output file
-> IO ()
injectFileWith postfix inj srcFP dstFP = do
src <- B.readFile srcFP
case injectWith postfix inj src of
Nothing -> error "Insufficient dummy space"
Just dst -> B.writeFile dstFP dst
$ inject
The inject system allows arbitrary content to be embedded inside a Haskell
executable , post compilation . Typically , file - embed allows you to read some
contents from the file system at compile time and embed them inside your
executable . Consider a case , instead , where you would want to embed these
contents after compilation . Two real - world examples are :
* You would like to embed a hash of the executable itself , for sanity checking in a network protocol . ( Obviously the hash will change after you embed the hash . )
* You want to create a self - contained web server that has a set of content , but will need to update the content on machines that do not have access to GHC .
The typical workflow use :
* Use ' dummySpace ' or ' dummySpaceWith ' to create some empty space in your executable
* Use ' injectFile ' or ' injectFileWith ' from a separate utility to modify that executable to have the updated content .
The reason for the @With@-variant of the functions is for cases where you wish
to inject multiple different kinds of content , and therefore need control over
the magic key . If you know for certain that there will only be one dummy space
available , you can use the non-@With@ variants .
The inject system allows arbitrary content to be embedded inside a Haskell
executable, post compilation. Typically, file-embed allows you to read some
contents from the file system at compile time and embed them inside your
executable. Consider a case, instead, where you would want to embed these
contents after compilation. Two real-world examples are:
* You would like to embed a hash of the executable itself, for sanity checking in a network protocol. (Obviously the hash will change after you embed the hash.)
* You want to create a self-contained web server that has a set of content, but will need to update the content on machines that do not have access to GHC.
The typical workflow use:
* Use 'dummySpace' or 'dummySpaceWith' to create some empty space in your executable
* Use 'injectFile' or 'injectFileWith' from a separate utility to modify that executable to have the updated content.
The reason for the @With@-variant of the functions is for cases where you wish
to inject multiple different kinds of content, and therefore need control over
the magic key. If you know for certain that there will only be one dummy space
available, you can use the non-@With@ variants.
-}
-- | Take a relative file path and attach it to the root of the current
-- project.
--
The idea here is that , when building with Stack , the build will always be
-- executed with a current working directory of the root of the project (where
-- your .cabal file is located). However, if you load up multiple projects with
-- @stack ghci@, the working directory may be something else entirely.
--
This function looks at the source location of the file calling it ,
finds the first parent directory with a .cabal file , and uses that as the
-- root directory for fixing the relative path.
--
@$(makeRelativeToProject " data / foo.txt " > > = embedFile)@
--
-- @since 0.0.10
makeRelativeToProject :: FilePath -> Q FilePath
makeRelativeToProject = makeRelativeToLocationPredicate $ (==) ".cabal" . takeExtension
-- | Take a predicate to infer the project root and a relative file path, the given file path is then attached to the inferred project root
--
This function looks at the source location of the file calling it ,
finds the first parent directory with a file matching the given predicate , and uses that as the
-- root directory for fixing the relative path.
--
-- @$(makeRelativeToLocationPredicate ((==) ".cabal" . takeExtension) "data/foo.txt" >>= embedFile)@
--
@since 0.0.15.0
makeRelativeToLocationPredicate :: (FilePath -> Bool) -> FilePath -> Q FilePath
makeRelativeToLocationPredicate isTargetFile rel = do
loc <- qLocation
runIO $ do
srcFP <- canonicalizePath $ loc_filename loc
mdir <- findProjectDir srcFP
case mdir of
Nothing -> error $ "Could not find .cabal file for path: " ++ srcFP
Just dir -> return $ dir </> rel
where
findProjectDir x = do
let dir = takeDirectory x
if dir == x
then return Nothing
else do
contents <- getDirectoryContents dir
if any isTargetFile contents
then return (Just dir)
else findProjectDir dir
| null | https://raw.githubusercontent.com/snoyberg/file-embed/548430d2a79bb6f4cb4256768761071f59909aa5/Data/FileEmbed.hs | haskell | # LANGUAGE OverloadedStrings #
The function @embedFile@ in this modules embeds a file into the executable
However, as you can see below, the type signature indicates a value of type
> $(embedFile "myfile.txt")
This expression will have type @ByteString@. Be certain to enable the
top of your module:
> {-# LANGUAGE TemplateHaskell #-}
* Embed at compile time
* Inject into an executable
$inject
* Relative path manipulation
* Internal
| Embed a single file in your source code.
> import qualified Data.ByteString
>
> myFile = $(embedFile "dirName/fileName")
| Maybe embed a single file in your source code depending on whether or not file exists.
Warning: When a build is compiled with the file missing, a recompile when the file exists might not trigger an embed of the file.
You might try to fix this by doing a clean build.
> import qualified Data.ByteString
>
> maybeMyFile = $(embedFileIfExists "dirName/fileName")
| Embed a single existing file in your source code
out of list a list of paths supplied.
> import qualified Data.ByteString
>
> myFile = $(embedOneFileOf [ "dirName/fileName", "src/dirName/fileName" ])
| Embed a directory recursively in your source code.
> import qualified Data.ByteString
>
> myDir = $(embedDir "dirName")
| Embed a directory listing recursively in your source code.
> myFiles :: [FilePath]
> myFiles = $(embedDirListing "dirName")
This is the workhorse of 'embedDir'
| Embed a single file in your source code.
> import Data.String
>
> myFile = $(embedStringFile "dirName/fileName")
| Embed a single existing string file in your source code
out of list a list of paths supplied.
| Allocate the given number of bytes in the generate executable. That space
can be filled up with the 'inject' and 'injectFile' functions.
| Like 'dummySpace', but takes a postfix for the magic string. In
order for this to work, the same postfix must be used by 'inject' /
'injectFile'. This allows an executable to have multiple
| Inject some raw data inside a @ByteString@ containing empty, dummy space
executable read from the filesystem.
^ bs to inject
| Like 'inject', but takes a postfix for the magic string.
^ postfix of magic string
^ bs to inject
| Same as 'inject', but instead of performing the injecting in memory, read
the contents from the filesystem and write back to a different file on the
filesystem.
^ bs to inject
^ template file
^ output file
| Like 'injectFile', but takes a postfix for the magic string.
^ postfix of magic string
^ bs to inject
^ template file
^ output file
| Take a relative file path and attach it to the root of the current
project.
executed with a current working directory of the root of the project (where
your .cabal file is located). However, if you load up multiple projects with
@stack ghci@, the working directory may be something else entirely.
root directory for fixing the relative path.
@since 0.0.10
| Take a predicate to infer the project root and a relative file path, the given file path is then attached to the inferred project root
root directory for fixing the relative path.
@$(makeRelativeToLocationPredicate ((==) ".cabal" . takeExtension) "data/foo.txt" >>= embedFile)@
| # LANGUAGE TemplateHaskell #
# LANGUAGE CPP #
| This module uses template . Following is a simplified explanation of usage for those unfamiliar with calling functions .
that you can use it at runtime . A file is represented as a @ByteString@.
@Q Exp@ will be returned . In order to convert this into a @ByteString@ , you
must use Template Haskell syntax , e.g. :
TemplateHaskell language extension , usually by adding the following to the
module Data.FileEmbed
embedFile
, embedFileIfExists
, embedOneFileOf
, embedDir
, embedDirListing
, getDir
* Embed as a IsString
, embedStringFile
, embedOneStringFileOf
#if MIN_VERSION_template_haskell(2,5,0)
, dummySpace
, dummySpaceWith
#endif
, inject
, injectFile
, injectWith
, injectFileWith
, makeRelativeToProject
, makeRelativeToLocationPredicate
, stringToBs
, bsToExp
, strToExp
) where
import Language.Haskell.TH.Syntax
( Exp (AppE, ListE, LitE, TupE, SigE, VarE)
, Lit (..)
, Q
, runIO
, qLocation, loc_filename
#if MIN_VERSION_template_haskell(2,7,0)
, Quasi(qAddDependentFile)
#endif
)
#if MIN_VERSION_template_haskell(2,16,0)
import Language.Haskell.TH ( mkBytes, bytesPrimL )
import qualified Data.ByteString.Internal as B
#endif
import System.Directory (doesDirectoryExist, doesFileExist,
getDirectoryContents, canonicalizePath)
import Control.Exception (throw, tryJust, ErrorCall(..))
import Control.Monad (filterM, guard)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import Control.Arrow ((&&&), second)
import Control.Applicative ((<$>))
import Data.ByteString.Unsafe (unsafePackAddressLen)
import System.IO.Error (isDoesNotExistError)
import System.IO.Unsafe (unsafePerformIO)
import System.FilePath ((</>), takeDirectory, takeExtension)
import Data.String (fromString)
import Prelude as P
import Data.List (sortBy)
import Data.Ord (comparing)
> myFile : : Data . ByteString . ByteString
embedFile :: FilePath -> Q Exp
embedFile fp =
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp >>
#endif
(runIO $ B.readFile fp) >>= bsToExp
> maybeMyFile : : Maybe Data . ByteString . ByteString
@since 0.0.14.0
embedFileIfExists :: FilePath -> Q Exp
embedFileIfExists fp = do
mbs <- runIO maybeFile
case mbs of
Nothing -> [| Nothing |]
Just bs -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp
#endif
[| Just $(bsToExp bs) |]
where
maybeFile :: IO (Maybe B.ByteString)
maybeFile =
either (const Nothing) Just <$>
tryJust (guard . isDoesNotExistError) (B.readFile fp)
> myFile : : Data . ByteString . ByteString
embedOneFileOf :: [FilePath] -> Q Exp
embedOneFileOf ps =
(runIO $ readExistingFile ps) >>= \ ( path, content ) -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile path
#endif
bsToExp content
where
readExistingFile :: [FilePath] -> IO ( FilePath, B.ByteString )
readExistingFile xs = do
ys <- filterM doesFileExist xs
case ys of
(p:_) -> B.readFile p >>= \ c -> return ( p, c )
_ -> throw $ ErrorCall "Cannot find file to embed as resource"
> myDir : : [ ( FilePath , Data . ByteString . ByteString ) ]
embedDir :: FilePath -> Q Exp
embedDir fp = do
typ <- [t| [(FilePath, B.ByteString)] |]
e <- ListE <$> ((runIO $ fileList fp) >>= mapM (pairToExp fp))
return $ SigE e typ
@since 0.0.11
embedDirListing :: FilePath -> Q Exp
embedDirListing fp = do
typ <- [t| [FilePath] |]
e <- ListE <$> ((runIO $ fmap fst <$> fileList fp) >>= mapM strToExp)
return $ SigE e typ
| Get a directory tree in the IO monad .
getDir :: FilePath -> IO [(FilePath, B.ByteString)]
getDir = fileList
pairToExp :: FilePath -> (FilePath, B.ByteString) -> Q Exp
pairToExp _root (path, bs) = do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile $ _root ++ '/' : path
#endif
exp' <- bsToExp bs
return $! TupE
#if MIN_VERSION_template_haskell(2,16,0)
$ map Just
#endif
[LitE $ StringL path, exp']
bsToExp :: B.ByteString -> Q Exp
#if MIN_VERSION_template_haskell(2, 5, 0)
bsToExp bs =
return $ VarE 'unsafePerformIO
`AppE` (VarE 'unsafePackAddressLen
`AppE` LitE (IntegerL $ fromIntegral $ B8.length bs)
#if MIN_VERSION_template_haskell(2, 16, 0)
`AppE` LitE (bytesPrimL (
let B.PS ptr off sz = bs
in mkBytes ptr (fromIntegral off) (fromIntegral sz))))
#elif MIN_VERSION_template_haskell(2, 8, 0)
`AppE` LitE (StringPrimL $ B.unpack bs))
#else
`AppE` LitE (StringPrimL $ B8.unpack bs))
#endif
#else
bsToExp bs = do
helper <- [| stringToBs |]
let chars = B8.unpack bs
return $! AppE helper $! LitE $! StringL chars
#endif
stringToBs :: String -> B.ByteString
stringToBs = B8.pack
> myFile : : a = > a
Since 0.0.9
embedStringFile :: FilePath -> Q Exp
embedStringFile fp =
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile fp >>
#endif
(runIO $ P.readFile fp) >>= strToExp
Since 0.0.9
embedOneStringFileOf :: [FilePath] -> Q Exp
embedOneStringFileOf ps =
(runIO $ readExistingFile ps) >>= \ ( path, content ) -> do
#if MIN_VERSION_template_haskell(2,7,0)
qAddDependentFile path
#endif
strToExp content
where
readExistingFile :: [FilePath] -> IO ( FilePath, String )
readExistingFile xs = do
ys <- filterM doesFileExist xs
case ys of
(p:_) -> P.readFile p >>= \ c -> return ( p, c )
_ -> throw $ ErrorCall "Cannot find file to embed as resource"
strToExp :: String -> Q Exp
#if MIN_VERSION_template_haskell(2, 5, 0)
strToExp s =
return $ VarE 'fromString
`AppE` LitE (StringL s)
#else
strToExp s = do
helper <- [| fromString |]
return $! AppE helper $! LitE $! StringL s
#endif
notHidden :: FilePath -> Bool
notHidden ('.':_) = False
notHidden _ = True
fileList :: FilePath -> IO [(FilePath, B.ByteString)]
fileList top = fileList' top ""
fileList' :: FilePath -> FilePath -> IO [(FilePath, B.ByteString)]
fileList' realTop top = do
allContents <- filter notHidden <$> getDirectoryContents (realTop </> top)
let all' = map ((top </>) &&& (\x -> realTop </> top </> x)) allContents
files <- filterM (doesFileExist . snd) all' >>=
mapM (liftPair2 . second B.readFile)
dirs <- filterM (doesDirectoryExist . snd) all' >>=
mapM (fileList' realTop . fst)
return $ sortBy (comparing fst) $ concat $ files : dirs
liftPair2 :: Monad m => (a, m b) -> m (a, b)
liftPair2 (a, b) = b >>= \b' -> return (a, b')
magic :: B.ByteString -> B.ByteString
magic x = B8.concat ["fe", x]
sizeLen :: Int
sizeLen = 20
getInner :: B.ByteString -> B.ByteString
getInner b =
let (sizeBS, rest) = B.splitAt sizeLen b
in case reads $ B8.unpack sizeBS of
(i, _):_ -> B.take i rest
[] -> error "Data.FileEmbed (getInner): Your dummy space has been corrupted."
padSize :: Int -> String
padSize i =
let s = show i
in replicate (sizeLen - length s) '0' ++ s
#if MIN_VERSION_template_haskell(2,5,0)
dummySpace :: Int -> Q Exp
dummySpace = dummySpaceWith "MS"
' ByteString 's injected into it , without encountering collisions .
Since 0.0.8
dummySpaceWith :: B.ByteString -> Int -> Q Exp
dummySpaceWith postfix space = do
let size = padSize space
magic' = magic postfix
start = B8.unpack magic' ++ size
magicLen = B8.length magic'
len = magicLen + sizeLen + space
chars = LitE $ StringPrimL $
#if MIN_VERSION_template_haskell(2,6,0)
map (toEnum . fromEnum) $
#endif
start ++ replicate space '0'
[| getInner (B.drop magicLen (unsafePerformIO (unsafePackAddressLen len $(return chars)))) |]
#endif
( allocated with @dummySpace@ ) . Typically , the original @ByteString@ is an
^ original BS containing dummy
^ new BS , or Nothing if there is insufficient dummy space
inject = injectWith "MS"
Since 0.0.8
^ original BS containing dummy
^ new BS , or Nothing if there is insufficient dummy space
injectWith postfix toInj orig =
if toInjL > size
then Nothing
else Just $ B.concat [before, magic', B8.pack $ padSize toInjL, toInj, B8.pack $ replicate (size - toInjL) '0', after]
where
magic' = magic postfix
toInjL = B.length toInj
(before, rest) = B.breakSubstring magic' orig
(sizeBS, rest') = B.splitAt sizeLen $ B.drop (B8.length magic') rest
size = case reads $ B8.unpack sizeBS of
(i, _):_ -> i
[] -> error $ "Data.FileEmbed (inject): Your dummy space has been corrupted. Size is: " ++ show sizeBS
after = B.drop size rest'
-> IO ()
injectFile = injectFileWith "MS"
Since 0.0.8
-> IO ()
injectFileWith postfix inj srcFP dstFP = do
src <- B.readFile srcFP
case injectWith postfix inj src of
Nothing -> error "Insufficient dummy space"
Just dst -> B.writeFile dstFP dst
$ inject
The inject system allows arbitrary content to be embedded inside a Haskell
executable , post compilation . Typically , file - embed allows you to read some
contents from the file system at compile time and embed them inside your
executable . Consider a case , instead , where you would want to embed these
contents after compilation . Two real - world examples are :
* You would like to embed a hash of the executable itself , for sanity checking in a network protocol . ( Obviously the hash will change after you embed the hash . )
* You want to create a self - contained web server that has a set of content , but will need to update the content on machines that do not have access to GHC .
The typical workflow use :
* Use ' dummySpace ' or ' dummySpaceWith ' to create some empty space in your executable
* Use ' injectFile ' or ' injectFileWith ' from a separate utility to modify that executable to have the updated content .
The reason for the @With@-variant of the functions is for cases where you wish
to inject multiple different kinds of content , and therefore need control over
the magic key . If you know for certain that there will only be one dummy space
available , you can use the non-@With@ variants .
The inject system allows arbitrary content to be embedded inside a Haskell
executable, post compilation. Typically, file-embed allows you to read some
contents from the file system at compile time and embed them inside your
executable. Consider a case, instead, where you would want to embed these
contents after compilation. Two real-world examples are:
* You would like to embed a hash of the executable itself, for sanity checking in a network protocol. (Obviously the hash will change after you embed the hash.)
* You want to create a self-contained web server that has a set of content, but will need to update the content on machines that do not have access to GHC.
The typical workflow use:
* Use 'dummySpace' or 'dummySpaceWith' to create some empty space in your executable
* Use 'injectFile' or 'injectFileWith' from a separate utility to modify that executable to have the updated content.
The reason for the @With@-variant of the functions is for cases where you wish
to inject multiple different kinds of content, and therefore need control over
the magic key. If you know for certain that there will only be one dummy space
available, you can use the non-@With@ variants.
-}
The idea here is that , when building with Stack , the build will always be
This function looks at the source location of the file calling it ,
finds the first parent directory with a .cabal file , and uses that as the
@$(makeRelativeToProject " data / foo.txt " > > = embedFile)@
makeRelativeToProject :: FilePath -> Q FilePath
makeRelativeToProject = makeRelativeToLocationPredicate $ (==) ".cabal" . takeExtension
This function looks at the source location of the file calling it ,
finds the first parent directory with a file matching the given predicate , and uses that as the
@since 0.0.15.0
makeRelativeToLocationPredicate :: (FilePath -> Bool) -> FilePath -> Q FilePath
makeRelativeToLocationPredicate isTargetFile rel = do
loc <- qLocation
runIO $ do
srcFP <- canonicalizePath $ loc_filename loc
mdir <- findProjectDir srcFP
case mdir of
Nothing -> error $ "Could not find .cabal file for path: " ++ srcFP
Just dir -> return $ dir </> rel
where
findProjectDir x = do
let dir = takeDirectory x
if dir == x
then return Nothing
else do
contents <- getDirectoryContents dir
if any isTargetFile contents
then return (Just dir)
else findProjectDir dir
|
641945864df459f98131ca0c675eda427ad3941c5f72cf1cfbee4ab9b053c9ce | Jannis/om-next-kanban-demo | lane.cljs | (ns kanban.components.lane
(:require [goog.object :as gobj]
[om.next :as om :refer-macros [defui]]
[om.dom :as dom]
[kanban.components.card :refer [Card card]]))
(defui Lane
static om/Ident
(ident [this props]
[:lane/by-id (:id props)])
static om/IQuery
(query [this]
[:id :name {:cards (om/get-query Card)}])
Object
(render [this]
(let [{:keys [name cards]} (om/props this)
{:keys [card-create-fn card-drag-fns card-edit-fn]}
(om/get-computed this)]
(dom/div #js {:className "lane"
:onDragOver (fn [e] (.preventDefault e))
:onDrop
(fn [e]
(.preventDefault e)
(some-> card-drag-fns :drop
(apply [(om/get-ident this)])))}
(dom/h3 #js {:className "lane-title"}
(dom/span #js {:className "text"} name)
(dom/span #js {:className "count"} (count cards)))
(dom/div #js {:className "add"}
(dom/a #js {:onClick #(some-> card-create-fn
(apply [(om/get-ident this)]))} "+"))
(dom/div #js {:className "cards"}
(let [ref (om/get-ident this)
drag-fns (some->> card-drag-fns
(map (fn [[k f]] [k (partial f ref)]))
(into {}))]
(for [c cards]
(card (om/computed c {:drag-fns drag-fns
:activate-fn card-edit-fn})))))))))
(def lane (om/factory Lane {:keyfn :id}))
| null | https://raw.githubusercontent.com/Jannis/om-next-kanban-demo/84719bfb161d82f1d4405e263f258730277c7783/src/kanban/components/lane.cljs | clojure | (ns kanban.components.lane
(:require [goog.object :as gobj]
[om.next :as om :refer-macros [defui]]
[om.dom :as dom]
[kanban.components.card :refer [Card card]]))
(defui Lane
static om/Ident
(ident [this props]
[:lane/by-id (:id props)])
static om/IQuery
(query [this]
[:id :name {:cards (om/get-query Card)}])
Object
(render [this]
(let [{:keys [name cards]} (om/props this)
{:keys [card-create-fn card-drag-fns card-edit-fn]}
(om/get-computed this)]
(dom/div #js {:className "lane"
:onDragOver (fn [e] (.preventDefault e))
:onDrop
(fn [e]
(.preventDefault e)
(some-> card-drag-fns :drop
(apply [(om/get-ident this)])))}
(dom/h3 #js {:className "lane-title"}
(dom/span #js {:className "text"} name)
(dom/span #js {:className "count"} (count cards)))
(dom/div #js {:className "add"}
(dom/a #js {:onClick #(some-> card-create-fn
(apply [(om/get-ident this)]))} "+"))
(dom/div #js {:className "cards"}
(let [ref (om/get-ident this)
drag-fns (some->> card-drag-fns
(map (fn [[k f]] [k (partial f ref)]))
(into {}))]
(for [c cards]
(card (om/computed c {:drag-fns drag-fns
:activate-fn card-edit-fn})))))))))
(def lane (om/factory Lane {:keyfn :id}))
| |
54839c53afaba17f0adc32ea3d0b75ef13c18193067396d93f0762699dd2b178 | ivan-m/graphviz | Util.hs | # LANGUAGE CPP , OverloadedStrings , PatternGuards #
{-# OPTIONS_HADDOCK hide #-}
|
Module : Data . GraphViz . Internal . Util
Description : Internal utility functions
Copyright : ( c )
License : 3 - Clause BSD - style
Maintainer :
This module defines internal utility functions .
Module : Data.GraphViz.Internal.Util
Description : Internal utility functions
Copyright : (c) Ivan Lazar Miljenovic
License : 3-Clause BSD-style
Maintainer :
This module defines internal utility functions.
-}
module Data.GraphViz.Internal.Util where
import Data.Char (isAsciiLower, isAsciiUpper, isDigit, ord)
import Control.Monad (liftM2)
import Data.Function (on)
import Data.List (groupBy, sortBy)
import Data.Maybe (isJust)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Read as T
#if MIN_VERSION_base(4,8,0)
import Data.Version (Version, makeVersion)
#else
import Data.Version (Version(..))
#endif
-- -----------------------------------------------------------------------------
isIDString :: Text -> Bool
isIDString = maybe False (\(f,os) -> frstIDString f && T.all restIDString os)
. T.uncons
| First character of a non - quoted ' String ' must match this .
frstIDString :: Char -> Bool
frstIDString c = any ($c) [ isAsciiUpper
, isAsciiLower
, (==) '_'
, (\ x -> ord x >= 128)
]
-- | The rest of a non-quoted 'String' must match this.
restIDString :: Char -> Bool
restIDString c = frstIDString c || isDigit c
-- | Determine if this String represents a number. Boolean parameter
-- determines if exponents are considered part of numbers for this.
isNumString :: Bool -> Text -> Bool
isNumString _ "" = False
isNumString _ "-" = False
isNumString allowE str = case T.uncons $ T.toLower str of
Just ('-',str') -> go str'
_ -> go str
where
-- Can't use Data.Text.Lazy.Read.double as it doesn't cover all
-- possible cases
go s = uncurry go' $ T.span isDigit s
go' ds nds
| T.null nds = True
| T.null ds && nds == "." = False
| T.null ds
, Just ('.',nds') <- T.uncons nds
, Just (d,nds'') <- T.uncons nds' = isDigit d && checkEs' nds''
| Just ('.',nds') <- T.uncons nds = checkEs $ T.dropWhile isDigit nds'
| T.null ds = False
| otherwise = checkEs nds
checkEs' s = case T.break ('e' ==) s of
("", _) -> False
(ds,es) -> T.all isDigit ds && checkEs es
checkEs str' = case T.uncons str' of
Nothing -> True
Just ('e',ds) -> allowE && isIntString ds
_ -> False
-- | This assumes that ' isNumString ' is ' True ' .
toDouble : : Text - > Double
toDouble str = case T.uncons $ T.toLower str of
Just ( ' - ' , str ' ) - > toD $ ' - ' ` T.cons ` adj str '
_ - > toD $ adj str
where
adj s = T.cons ' 0 '
$ case T.span ( ' . ' =
( ds , " . " ) | not $ T.null ds - > s ` T.snoc ` ' 0 '
( ds , ds ' ) | Just ( ' ) < - T.uncons ds '
, Just ( ' e',es ' ) < - T.uncons es
- > ds ` T.snoc ` ' . ' ` T.snoc ` ' 0 '
` T.snoc ` ' e ' ` T.snoc ` ' 0 ' ` T.append ` es '
_ - > s
toD = either ( const $ error " Not a Double " ) fst . T.signed T.double
-- | This assumes that 'isNumString' is 'True'.
toDouble :: Text -> Double
toDouble str = case T.uncons $ T.toLower str of
Just ('-', str') -> toD $ '-' `T.cons` adj str'
_ -> toD $ adj str
where
adj s = T.cons '0'
$ case T.span ('.' ==) s of
(ds, ".") | not $ T.null ds -> s `T.snoc` '0'
(ds, ds') | Just ('.',es) <- T.uncons ds'
, Just ('e',es') <- T.uncons es
-> ds `T.snoc` '.' `T.snoc` '0'
`T.snoc` 'e' `T.snoc` '0' `T.append` es'
_ -> s
toD = either (const $ error "Not a Double") fst . T.signed T.double
-}
-- | This assumes that 'isNumString' is 'True'.
toDouble :: Text -> Double
toDouble str = case T.uncons $ T.toLower str of
Just ('-', str') -> toD $ '-' `T.cons` adj str'
_ -> toD $ adj str
where
adj s = T.cons '0'
$ case T.span ('.' ==) s of
(ds, ".") | not $ T.null ds -> s `T.snoc` '0'
(ds, ds') | Just ('.',es) <- T.uncons ds'
, Just ('e',_) <- T.uncons es
-> ds `T.snoc` '.' `T.snoc` '0' `T.append` es
_ -> s
toD = read . T.unpack
isIntString :: Text -> Bool
isIntString = isJust . stringToInt
| Determine if this represents an integer .
stringToInt :: Text -> Maybe Int
stringToInt str = case T.signed T.decimal str of
Right (n, "") -> Just n
_ -> Nothing
-- | Graphviz requires double quotes to be explicitly escaped.
escapeQuotes :: String -> String
escapeQuotes [] = []
escapeQuotes ('"':str) = '\\':'"': escapeQuotes str
escapeQuotes (c:str) = c : escapeQuotes str
-- | Remove explicit escaping of double quotes.
descapeQuotes :: String -> String
descapeQuotes [] = []
descapeQuotes ('\\':'"':str) = '"' : descapeQuotes str
descapeQuotes (c:str) = c : descapeQuotes str
isKeyword :: Text -> Bool
isKeyword = (`Set.member` keywords) . T.toLower
-- | The following are Dot keywords and are not valid as labels, etc. unquoted.
keywords :: Set Text
keywords = Set.fromList [ "node"
, "edge"
, "graph"
, "digraph"
, "subgraph"
, "strict"
]
createVersion :: [Int] -> Version
#if MIN_VERSION_base(4,8,0)
createVersion = makeVersion
#else
createVersion bs = Version { versionBranch = bs, versionTags = []}
#endif
-- -----------------------------------------------------------------------------
uniq :: (Ord a) => [a] -> [a]
uniq = uniqBy id
uniqBy :: (Ord b) => (a -> b) -> [a] -> [a]
uniqBy f = map head . groupSortBy f
groupSortBy :: (Ord b) => (a -> b) -> [a] -> [[a]]
groupSortBy f = groupBy ((==) `on` f) . sortBy (compare `on` f)
groupSortCollectBy :: (Ord b) => (a -> b) -> (a -> c) -> [a] -> [(b,[c])]
groupSortCollectBy f g = map (liftM2 (,) (f . head) (map g)) . groupSortBy f
| Fold over ' 's ; first param is for ' False ' , second for ' True ' .
bool :: a -> a -> Bool -> a
bool f t b = if b
then t
else f
isSingle :: [a] -> Bool
isSingle [_] = True
isSingle _ = False
| null | https://raw.githubusercontent.com/ivan-m/graphviz/42dbb6312d7edf789d7055079de7b4fa099a4acc/Data/GraphViz/Internal/Util.hs | haskell | # OPTIONS_HADDOCK hide #
-----------------------------------------------------------------------------
| The rest of a non-quoted 'String' must match this.
| Determine if this String represents a number. Boolean parameter
determines if exponents are considered part of numbers for this.
Can't use Data.Text.Lazy.Read.double as it doesn't cover all
possible cases
| This assumes that ' isNumString ' is ' True ' .
| This assumes that 'isNumString' is 'True'.
| This assumes that 'isNumString' is 'True'.
| Graphviz requires double quotes to be explicitly escaped.
| Remove explicit escaping of double quotes.
| The following are Dot keywords and are not valid as labels, etc. unquoted.
----------------------------------------------------------------------------- | # LANGUAGE CPP , OverloadedStrings , PatternGuards #
|
Module : Data . GraphViz . Internal . Util
Description : Internal utility functions
Copyright : ( c )
License : 3 - Clause BSD - style
Maintainer :
This module defines internal utility functions .
Module : Data.GraphViz.Internal.Util
Description : Internal utility functions
Copyright : (c) Ivan Lazar Miljenovic
License : 3-Clause BSD-style
Maintainer :
This module defines internal utility functions.
-}
module Data.GraphViz.Internal.Util where
import Data.Char (isAsciiLower, isAsciiUpper, isDigit, ord)
import Control.Monad (liftM2)
import Data.Function (on)
import Data.List (groupBy, sortBy)
import Data.Maybe (isJust)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Read as T
#if MIN_VERSION_base(4,8,0)
import Data.Version (Version, makeVersion)
#else
import Data.Version (Version(..))
#endif
isIDString :: Text -> Bool
isIDString = maybe False (\(f,os) -> frstIDString f && T.all restIDString os)
. T.uncons
| First character of a non - quoted ' String ' must match this .
frstIDString :: Char -> Bool
frstIDString c = any ($c) [ isAsciiUpper
, isAsciiLower
, (==) '_'
, (\ x -> ord x >= 128)
]
restIDString :: Char -> Bool
restIDString c = frstIDString c || isDigit c
isNumString :: Bool -> Text -> Bool
isNumString _ "" = False
isNumString _ "-" = False
isNumString allowE str = case T.uncons $ T.toLower str of
Just ('-',str') -> go str'
_ -> go str
where
go s = uncurry go' $ T.span isDigit s
go' ds nds
| T.null nds = True
| T.null ds && nds == "." = False
| T.null ds
, Just ('.',nds') <- T.uncons nds
, Just (d,nds'') <- T.uncons nds' = isDigit d && checkEs' nds''
| Just ('.',nds') <- T.uncons nds = checkEs $ T.dropWhile isDigit nds'
| T.null ds = False
| otherwise = checkEs nds
checkEs' s = case T.break ('e' ==) s of
("", _) -> False
(ds,es) -> T.all isDigit ds && checkEs es
checkEs str' = case T.uncons str' of
Nothing -> True
Just ('e',ds) -> allowE && isIntString ds
_ -> False
toDouble : : Text - > Double
toDouble str = case T.uncons $ T.toLower str of
Just ( ' - ' , str ' ) - > toD $ ' - ' ` T.cons ` adj str '
_ - > toD $ adj str
where
adj s = T.cons ' 0 '
$ case T.span ( ' . ' =
( ds , " . " ) | not $ T.null ds - > s ` T.snoc ` ' 0 '
( ds , ds ' ) | Just ( ' ) < - T.uncons ds '
, Just ( ' e',es ' ) < - T.uncons es
- > ds ` T.snoc ` ' . ' ` T.snoc ` ' 0 '
` T.snoc ` ' e ' ` T.snoc ` ' 0 ' ` T.append ` es '
_ - > s
toD = either ( const $ error " Not a Double " ) fst . T.signed T.double
toDouble :: Text -> Double
toDouble str = case T.uncons $ T.toLower str of
Just ('-', str') -> toD $ '-' `T.cons` adj str'
_ -> toD $ adj str
where
adj s = T.cons '0'
$ case T.span ('.' ==) s of
(ds, ".") | not $ T.null ds -> s `T.snoc` '0'
(ds, ds') | Just ('.',es) <- T.uncons ds'
, Just ('e',es') <- T.uncons es
-> ds `T.snoc` '.' `T.snoc` '0'
`T.snoc` 'e' `T.snoc` '0' `T.append` es'
_ -> s
toD = either (const $ error "Not a Double") fst . T.signed T.double
-}
toDouble :: Text -> Double
toDouble str = case T.uncons $ T.toLower str of
Just ('-', str') -> toD $ '-' `T.cons` adj str'
_ -> toD $ adj str
where
adj s = T.cons '0'
$ case T.span ('.' ==) s of
(ds, ".") | not $ T.null ds -> s `T.snoc` '0'
(ds, ds') | Just ('.',es) <- T.uncons ds'
, Just ('e',_) <- T.uncons es
-> ds `T.snoc` '.' `T.snoc` '0' `T.append` es
_ -> s
toD = read . T.unpack
isIntString :: Text -> Bool
isIntString = isJust . stringToInt
| Determine if this represents an integer .
stringToInt :: Text -> Maybe Int
stringToInt str = case T.signed T.decimal str of
Right (n, "") -> Just n
_ -> Nothing
escapeQuotes :: String -> String
escapeQuotes [] = []
escapeQuotes ('"':str) = '\\':'"': escapeQuotes str
escapeQuotes (c:str) = c : escapeQuotes str
descapeQuotes :: String -> String
descapeQuotes [] = []
descapeQuotes ('\\':'"':str) = '"' : descapeQuotes str
descapeQuotes (c:str) = c : descapeQuotes str
isKeyword :: Text -> Bool
isKeyword = (`Set.member` keywords) . T.toLower
keywords :: Set Text
keywords = Set.fromList [ "node"
, "edge"
, "graph"
, "digraph"
, "subgraph"
, "strict"
]
createVersion :: [Int] -> Version
#if MIN_VERSION_base(4,8,0)
createVersion = makeVersion
#else
createVersion bs = Version { versionBranch = bs, versionTags = []}
#endif
uniq :: (Ord a) => [a] -> [a]
uniq = uniqBy id
uniqBy :: (Ord b) => (a -> b) -> [a] -> [a]
uniqBy f = map head . groupSortBy f
groupSortBy :: (Ord b) => (a -> b) -> [a] -> [[a]]
groupSortBy f = groupBy ((==) `on` f) . sortBy (compare `on` f)
groupSortCollectBy :: (Ord b) => (a -> b) -> (a -> c) -> [a] -> [(b,[c])]
groupSortCollectBy f g = map (liftM2 (,) (f . head) (map g)) . groupSortBy f
| Fold over ' 's ; first param is for ' False ' , second for ' True ' .
bool :: a -> a -> Bool -> a
bool f t b = if b
then t
else f
isSingle :: [a] -> Bool
isSingle [_] = True
isSingle _ = False
|
383f3c61fcf6fba545a56665789336d92cad09b9a8bcd4754c46c97ca3d322d3 | adrieng/melitte | Options.ml | let use_unicode = ref true
let type_in_type = ref false
let debug = ref false
let verbose = ref false
| null | https://raw.githubusercontent.com/adrieng/melitte/b1191e2ac50a2cc64650559553aab948945da50d/src/Options.ml | ocaml | let use_unicode = ref true
let type_in_type = ref false
let debug = ref false
let verbose = ref false
| |
8bdf59c5dba7f8781d530b23d5370b61fe690239c15456eb60f22b23bcf3b782 | k-bx/protocol-buffers | ProtoCompile.hs | | This is the Main module for the command line program ' hprotoc '
module Main where
import Control.Monad(unless,forM_,foldM,forM)
import Control.Monad.State(State, execState, modify)
import qualified Data.ByteString.Lazy.Char8 as LC (hGetContents, hPut, pack, unpack)
import Data.Foldable (toList)
import Data.Maybe (fromMaybe)
import qualified Data.Sequence as Seq (fromList,singleton)
import Data.Sequence ((|>))
import Data.Version(showVersion)
import Language.Haskell.Exts.Pretty(prettyPrintStyleMode,Style(..),Mode(..),PPHsMode(..),PPLayout(..))
import System.Console.GetOpt(OptDescr(Option),ArgDescr(NoArg,ReqArg)
,usageInfo,getOpt,ArgOrder(ReturnInOrder))
import System.Directory(getCurrentDirectory,createDirectoryIfMissing)
import System.Environment(getProgName, getArgs)
import System.FilePath(takeDirectory,combine,joinPath)
import qualified System.FilePath.Posix as Canon(takeBaseName)
import System.IO (stdin, stdout)
import Text.ProtocolBuffers.Basic(defaultValue, Utf8(..), utf8)
import Text.ProtocolBuffers.Identifiers(MName,checkDIString,mangle)
import Text.ProtocolBuffers.Reflections(ProtoInfo(..),EnumInfo(..),OneofInfo(..),ServiceInfo(..))
import Text.ProtocolBuffers.WireMessage (messagePut, messageGet)
import qualified Text.DescriptorProtos.FileDescriptorProto as D(FileDescriptorProto)
import qualified Text.DescriptorProtos.FileDescriptorProto as D.FileDescriptorProto(FileDescriptorProto(..))
import qualified Text . . FileDescriptorSet as D(FileDescriptorSet )
import qualified Text.DescriptorProtos.FileDescriptorSet as D.FileDescriptorSet(FileDescriptorSet(..))
import Text.ProtocolBuffers.ProtoCompile.BreakRecursion(makeResult)
import Text.ProtocolBuffers.ProtoCompile.Gen(protoModule,descriptorModules,enumModule,oneofModule,serviceModule)
import Text.ProtocolBuffers.ProtoCompile.MakeReflections(makeProtoInfo,serializeFDP)
import Text.ProtocolBuffers.ProtoCompile.Resolve(loadProtos,loadCodeGenRequest,makeNameMaps,getTLS
,Env,LocalFP(..),CanonFP(..),TopLevel(..)
,NameMap(..)
)
import Text.Google.Protobuf.Compiler.CodeGeneratorRequest
import Text.Google.Protobuf.Compiler.CodeGeneratorResponse hiding (error, file)
import qualified Text.Google.Protobuf.Compiler.CodeGeneratorResponse as CGR(file)
import qualified Text.Google.Protobuf.Compiler.CodeGeneratorResponse.File as CGR.File
The Paths_hprotoc module is produced by cabal
import Paths_hprotoc(version)
data Options =
Options
{ optPrefix :: [MName String]
, optAs :: [(CanonFP,[MName String])]
, optTarget :: LocalFP
, optInclude :: [LocalFP]
, optProto :: [LocalFP]
, optDesc :: Maybe (LocalFP)
, optImports :: Bool
, optVerbose :: Bool
, optUnknownFields :: Bool
, optLazy :: Bool
, optLenses :: Bool
, optJson :: Bool
, optDryRun :: Bool
}
deriving Show
setPrefix,setTarget,setInclude,setProto,setDesc :: String -> Options -> Options
setImports,setVerbose,setUnknown,setLazy,setLenses,setDryRun,setJson :: Options -> Options
setPrefix s o = o { optPrefix = toPrefix s }
setTarget s o = o { optTarget = LocalFP s }
setInclude s o = o { optInclude = LocalFP s : optInclude o }
setProto s o = o { optProto = optProto o ++ [LocalFP s] }
setDesc s o = o { optDesc = Just (LocalFP s) }
setImports o = o { optImports = True }
setVerbose o = o { optVerbose = True }
setUnknown o = o { optUnknownFields = True }
setLazy o = o { optLazy = True }
setLenses o = o { optLenses = True }
setJson o = o { optJson = True }
setDryRun o = o { optDryRun = True }
toPrefix :: String -> [MName String]
toPrefix s = case checkDIString s of
Left msg -> error $ "Bad module name in options:"++show s++"\n"++msg
Right (True,_) -> error $ "Bad module name in options (cannot start with '.'): "++show s
Right (False,ms) -> map mangle ms
-- | 'setAs' puts both the full path and the basename as keys into the association list
setAs :: String -> Options -> Options
setAs s o =
case break ('='==) s of
(filepath,'=':rawPrefix) -> let value = toPrefix rawPrefix
in o { optAs = (CanonFP filepath,value):
(CanonFP (Canon.takeBaseName filepath),value):
optAs o}
_ -> error . unlines $ [ "Malformed -a or --as option "++show s
, " Expected \"FILEPATH=MODULE\""
, " where FILEPATH is the basename or relative path (using '/') of an imported file"
, " where MODULE is a dotted Haskell name to use as a prefix"
, " also MODULE can be empty to have no prefix" ]
data OptionAction = Mutate (Options->Options) | Run (Options->Options) | Switch Flag
data Flag = VersionInfo
optionList :: [OptDescr OptionAction]
optionList =
[ Option ['a'] ["as"] (ReqArg (Mutate . setAs) "FILEPATH=MODULE")
"assign prefix module to imported proto file: --as descriptor.proto=Text"
, Option ['I'] ["proto_path"] (ReqArg (Mutate . setInclude) "DIR")
"directory from which to search for imported proto files (default is pwd); all DIR searched"
, Option ['d'] ["haskell_out"] (ReqArg (Mutate . setTarget) "DIR")
"directory to use are root of generated files (default is pwd); last flag"
, Option ['n'] ["dry_run"] (NoArg (Mutate setDryRun))
"produce no output but still parse and check the proto file(s)"
, Option ['o'] ["descriptor_set_out"] (ReqArg (Mutate . setDesc) "FILE")
"filename to write binary FileDescriptorSet to"
, Option [] ["include_imports"] (NoArg (Mutate setImports))
"when writing descriptor_set_out include all imported files to be self-contained"
, Option ['p'] ["prefix"] (ReqArg (Mutate . setPrefix) "MODULE")
"dotted Haskell MODULE name to use as a prefix (default is none); last flag used"
, Option ['u'] ["unknown_fields"] (NoArg (Mutate setUnknown))
"generated messages and groups all support unknown fields"
, Option ['l'] ["lazy_fields"] (NoArg (Mutate setLazy))
"new default is now messages with strict fields, this reverts to generating lazy fields"
, Option [] ["lenses"] (NoArg (Mutate setLenses))
"generate lenses for accessing fields"
, Option [] ["json"] (NoArg (Mutate setJson))
"generate json instances"
, Option ['v'] ["verbose"] (NoArg (Mutate setVerbose))
"increase amount of printed information"
, Option [] ["version"] (NoArg (Switch VersionInfo))
"print out version information"
]
usageMsg,versionInfo :: String
usageMsg = usageInfo "Usage: protoCompile [OPTION..] path-to-file.proto ..." optionList
versionInfo = unlines $
[ "Welcome to protocol-buffers version "++showVersion version
, "Copyright (c) 2008-2011, Christopher Kuklewicz."
, "Released under BSD3 style license, see LICENSE file for details."
, "Some proto files, such as descriptor.proto and unittest*.proto"
, "are from google's code and are under an Apache 2.0 license."
, ""
, "Most command line arguments are similar to those of 'protoc'."
, ""
, "This program reads a .proto file and generates haskell code files."
, "See for more."
]
processOptions :: [String] -> Either String [OptionAction]
processOptions argv =
case getOpt (ReturnInOrder (Run . setProto)) optionList argv of
(opts,_,[]) -> Right opts
(_,_,errs) -> Left (unlines errs ++ usageMsg)
defaultOptions :: IO Options
defaultOptions = do
pwd <- fmap LocalFP getCurrentDirectory
return $ Options { optPrefix = []
, optAs = []
, optTarget = pwd
, optInclude = [pwd]
, optProto = []
, optDesc = Nothing
, optImports = False
, optVerbose = False
, optUnknownFields = False
, optLazy = False
, optLenses = False
, optJson = False
, optDryRun = False
}
main :: IO ()
main = do
progName <- getProgName
case progName of
"protoc-gen-haskell" -> pluginMain
_ -> standaloneMain
splitOn :: Char -> String -> [String]
splitOn c s =
case break (== c) s of
(s, []) -> [s]
(s', _:rest) -> s' : splitOn c rest
pluginMain :: IO ()
pluginMain = do
defs <- defaultOptions
inputBytes <- LC.hGetContents stdin
let req = either error fst $ messageGet inputBytes
let parametersString = fmap (LC.unpack . utf8) $ parameter req
let allParameters = maybe [] (splitOn ',') $ parametersString
let processOpt acc opt =
case opt of
"json" -> return $ setJson acc
_ ->
case splitOn '=' opt of
["prefix", prefix] -> return $ setPrefix prefix acc
_ -> fail $ "Unrecognized parameter " ++ opt ++ ". Parameters given: " ++ fromMaybe "Nothing" parametersString
opts <- foldM processOpt defs allParameters
let resp = runPlugin opts req
LC.hPut stdout $ messagePut resp
standaloneMain :: IO ()
standaloneMain = do
defs <- defaultOptions
args <- getArgs
case processOptions args of
Left msg -> putStrLn msg
Right todo -> process defs todo
process :: Options -> [OptionAction] -> IO ()
process options [] = if null (optProto options)
then do putStrLn "No proto file specified (or empty proto file)"
putStrLn ""
putStrLn usageMsg
else putStrLn "Processing complete, have a nice day."
process options (Mutate f:rest) = process (f options) rest
process options (Run f:rest) = let options' = f options
in runStandalone options' >> process options' rest
process _options (Switch VersionInfo:_) = putStrLn versionInfo
mkdirFor :: FilePath -> IO ()
mkdirFor p = createDirectoryIfMissing True (takeDirectory p)
style :: Style
style = Style PageMode 132 0.6
myMode :: PPHsMode
-- myMode = PPHsMode 2 2 2 2 4 1 True PPOffsideRule False -- True
myMode = PPHsMode 2 2 2 2 2 4 1 True PPOffsideRule False -- True
dump :: (Monad m) => Output m -> Bool -> Maybe LocalFP -> D.FileDescriptorProto -> [D.FileDescriptorProto] -> m ()
dump _ _ Nothing _ _ = return ()
dump o imports (Just (LocalFP dumpFile)) fdp fdps = do
outputReport o $ "dumping to filename: "++show dumpFile
let s = if imports then Seq.fromList fdps else Seq.singleton fdp
outputWriteFile o dumpFile $ LC.unpack (messagePut $ defaultValue { D.FileDescriptorSet.file = s })
outputReport o $ "finished dumping FileDescriptorSet binary of: "++show (D.FileDescriptorProto.name fdp)
data Output m = Output {
outputReport :: String -> m (),
outputWriteFile :: FilePath -> String -> m ()
}
runStandalone :: Options -> IO ()
runStandalone options = do
(env,fdps) <- loadProtos (optInclude options) (optProto options)
putStrLn "All proto files loaded"
run' standaloneMode options env fdps where
standaloneMode :: Output IO
standaloneMode = Output putStrLn emitFile
emitFile file contents = do
let fullPath = combine (unLocalFP . optTarget $ options) file
putStrLn fullPath
mkdirFor fullPath
unless (optDryRun options) $ writeFile fullPath contents
runPlugin :: Options -> CodeGeneratorRequest -> CodeGeneratorResponse
runPlugin options req = execState (run' pluginOutput options env fdps) defaultValue where
(env,fdps) = loadCodeGenRequest req requestedFiles
pluginOutput :: Output (State CodeGeneratorResponse)
pluginOutput = Output {
outputReport = const $ return (),
outputWriteFile = appendFileRecord
}
appendFileRecord :: FilePath -> String -> State CodeGeneratorResponse ()
appendFileRecord f c = modify $ \resp -> resp {
CGR.file = (CGR.file resp) |> newFile
} where
newFile = defaultValue {
CGR.File.name = Just $ Utf8 $ LC.pack f,
CGR.File.content = Just $ Utf8 $ LC.pack c
}
requestedFiles = map (LocalFP . LC.unpack . utf8) . toList . file_to_generate $ req
This run ' operates for both runStandalone and runPlugin
run' :: (Monad m) => Output m -> Options -> [Env] -> [D.FileDescriptorProto] -> m ()
run' o@(Output print' writeFile') options envs fdps = do
let toGenerate = map (\env -> (either error id . top'FDP . fst . getTLS $ env, env)) envs
case toGenerate of
[(fdp, _)] -> unless (optDryRun options) $ dump o (optImports options) (optDesc options) fdp fdps
_ -> return ()
results <- forM toGenerate $ \(fdp, env) -> do
Compute the nameMap that determine how to translate from proto names to haskell names
-- This is the part that uses the (optional) package name
nameMap <- either error return $ makeNameMaps (optPrefix options) (optAs options) env
let NameMap _ rm = nameMap -- DEBUG
print' "Haskell name mangling done"
let protoInfo = makeProtoInfo (optUnknownFields options,optLazy options,optLenses options, optJson options) nameMap fdp
result = makeResult protoInfo
seq result (print' "Recursive modules resolved")
let produceMSG di = do
unless (optDryRun options) $ do
-- There might be several modules
let fileModules = descriptorModules result di
forM_ fileModules $ \ (relPath,modSyn) -> do
writeFile' relPath (prettyPrintStyleMode style myMode modSyn)
produceENM ei = do
let file = joinPath . enumFilePath $ ei
writeFile' file (prettyPrintStyleMode style myMode (enumModule ei))
produceONO oi = do
let file = joinPath . oneofFilePath $ oi
writeFile' file (prettyPrintStyleMode style myMode (oneofModule result oi))
produceSRV srv = do
let file = joinPath . serviceFilePath $ srv
writeFile' file (prettyPrintStyleMode style myMode (serviceModule result srv))
mapM_ produceMSG (messages protoInfo)
mapM_ produceENM (enums protoInfo)
mapM_ produceONO (oneofs protoInfo)
mapM_ produceSRV (services protoInfo)
return (result, protoInfo, fdp)
case results of
[(result, protoInfo, fdp)] -> do
let file = joinPath . protoFilePath $ protoInfo
writeFile' file (prettyPrintStyleMode style myMode (protoModule result protoInfo (serializeFDP fdp)))
_ -> return ()
| null | https://raw.githubusercontent.com/k-bx/protocol-buffers/313eb8e69c665dadc46cd5ebf12ac6e934f00d0a/hprotoc/Text/ProtocolBuffers/ProtoCompile.hs | haskell | | 'setAs' puts both the full path and the basename as keys into the association list
myMode = PPHsMode 2 2 2 2 4 1 True PPOffsideRule False -- True
True
This is the part that uses the (optional) package name
DEBUG
There might be several modules | | This is the Main module for the command line program ' hprotoc '
module Main where
import Control.Monad(unless,forM_,foldM,forM)
import Control.Monad.State(State, execState, modify)
import qualified Data.ByteString.Lazy.Char8 as LC (hGetContents, hPut, pack, unpack)
import Data.Foldable (toList)
import Data.Maybe (fromMaybe)
import qualified Data.Sequence as Seq (fromList,singleton)
import Data.Sequence ((|>))
import Data.Version(showVersion)
import Language.Haskell.Exts.Pretty(prettyPrintStyleMode,Style(..),Mode(..),PPHsMode(..),PPLayout(..))
import System.Console.GetOpt(OptDescr(Option),ArgDescr(NoArg,ReqArg)
,usageInfo,getOpt,ArgOrder(ReturnInOrder))
import System.Directory(getCurrentDirectory,createDirectoryIfMissing)
import System.Environment(getProgName, getArgs)
import System.FilePath(takeDirectory,combine,joinPath)
import qualified System.FilePath.Posix as Canon(takeBaseName)
import System.IO (stdin, stdout)
import Text.ProtocolBuffers.Basic(defaultValue, Utf8(..), utf8)
import Text.ProtocolBuffers.Identifiers(MName,checkDIString,mangle)
import Text.ProtocolBuffers.Reflections(ProtoInfo(..),EnumInfo(..),OneofInfo(..),ServiceInfo(..))
import Text.ProtocolBuffers.WireMessage (messagePut, messageGet)
import qualified Text.DescriptorProtos.FileDescriptorProto as D(FileDescriptorProto)
import qualified Text.DescriptorProtos.FileDescriptorProto as D.FileDescriptorProto(FileDescriptorProto(..))
import qualified Text . . FileDescriptorSet as D(FileDescriptorSet )
import qualified Text.DescriptorProtos.FileDescriptorSet as D.FileDescriptorSet(FileDescriptorSet(..))
import Text.ProtocolBuffers.ProtoCompile.BreakRecursion(makeResult)
import Text.ProtocolBuffers.ProtoCompile.Gen(protoModule,descriptorModules,enumModule,oneofModule,serviceModule)
import Text.ProtocolBuffers.ProtoCompile.MakeReflections(makeProtoInfo,serializeFDP)
import Text.ProtocolBuffers.ProtoCompile.Resolve(loadProtos,loadCodeGenRequest,makeNameMaps,getTLS
,Env,LocalFP(..),CanonFP(..),TopLevel(..)
,NameMap(..)
)
import Text.Google.Protobuf.Compiler.CodeGeneratorRequest
import Text.Google.Protobuf.Compiler.CodeGeneratorResponse hiding (error, file)
import qualified Text.Google.Protobuf.Compiler.CodeGeneratorResponse as CGR(file)
import qualified Text.Google.Protobuf.Compiler.CodeGeneratorResponse.File as CGR.File
The Paths_hprotoc module is produced by cabal
import Paths_hprotoc(version)
data Options =
Options
{ optPrefix :: [MName String]
, optAs :: [(CanonFP,[MName String])]
, optTarget :: LocalFP
, optInclude :: [LocalFP]
, optProto :: [LocalFP]
, optDesc :: Maybe (LocalFP)
, optImports :: Bool
, optVerbose :: Bool
, optUnknownFields :: Bool
, optLazy :: Bool
, optLenses :: Bool
, optJson :: Bool
, optDryRun :: Bool
}
deriving Show
setPrefix,setTarget,setInclude,setProto,setDesc :: String -> Options -> Options
setImports,setVerbose,setUnknown,setLazy,setLenses,setDryRun,setJson :: Options -> Options
setPrefix s o = o { optPrefix = toPrefix s }
setTarget s o = o { optTarget = LocalFP s }
setInclude s o = o { optInclude = LocalFP s : optInclude o }
setProto s o = o { optProto = optProto o ++ [LocalFP s] }
setDesc s o = o { optDesc = Just (LocalFP s) }
setImports o = o { optImports = True }
setVerbose o = o { optVerbose = True }
setUnknown o = o { optUnknownFields = True }
setLazy o = o { optLazy = True }
setLenses o = o { optLenses = True }
setJson o = o { optJson = True }
setDryRun o = o { optDryRun = True }
toPrefix :: String -> [MName String]
toPrefix s = case checkDIString s of
Left msg -> error $ "Bad module name in options:"++show s++"\n"++msg
Right (True,_) -> error $ "Bad module name in options (cannot start with '.'): "++show s
Right (False,ms) -> map mangle ms
setAs :: String -> Options -> Options
setAs s o =
case break ('='==) s of
(filepath,'=':rawPrefix) -> let value = toPrefix rawPrefix
in o { optAs = (CanonFP filepath,value):
(CanonFP (Canon.takeBaseName filepath),value):
optAs o}
_ -> error . unlines $ [ "Malformed -a or --as option "++show s
, " Expected \"FILEPATH=MODULE\""
, " where FILEPATH is the basename or relative path (using '/') of an imported file"
, " where MODULE is a dotted Haskell name to use as a prefix"
, " also MODULE can be empty to have no prefix" ]
data OptionAction = Mutate (Options->Options) | Run (Options->Options) | Switch Flag
data Flag = VersionInfo
optionList :: [OptDescr OptionAction]
optionList =
[ Option ['a'] ["as"] (ReqArg (Mutate . setAs) "FILEPATH=MODULE")
"assign prefix module to imported proto file: --as descriptor.proto=Text"
, Option ['I'] ["proto_path"] (ReqArg (Mutate . setInclude) "DIR")
"directory from which to search for imported proto files (default is pwd); all DIR searched"
, Option ['d'] ["haskell_out"] (ReqArg (Mutate . setTarget) "DIR")
"directory to use are root of generated files (default is pwd); last flag"
, Option ['n'] ["dry_run"] (NoArg (Mutate setDryRun))
"produce no output but still parse and check the proto file(s)"
, Option ['o'] ["descriptor_set_out"] (ReqArg (Mutate . setDesc) "FILE")
"filename to write binary FileDescriptorSet to"
, Option [] ["include_imports"] (NoArg (Mutate setImports))
"when writing descriptor_set_out include all imported files to be self-contained"
, Option ['p'] ["prefix"] (ReqArg (Mutate . setPrefix) "MODULE")
"dotted Haskell MODULE name to use as a prefix (default is none); last flag used"
, Option ['u'] ["unknown_fields"] (NoArg (Mutate setUnknown))
"generated messages and groups all support unknown fields"
, Option ['l'] ["lazy_fields"] (NoArg (Mutate setLazy))
"new default is now messages with strict fields, this reverts to generating lazy fields"
, Option [] ["lenses"] (NoArg (Mutate setLenses))
"generate lenses for accessing fields"
, Option [] ["json"] (NoArg (Mutate setJson))
"generate json instances"
, Option ['v'] ["verbose"] (NoArg (Mutate setVerbose))
"increase amount of printed information"
, Option [] ["version"] (NoArg (Switch VersionInfo))
"print out version information"
]
usageMsg,versionInfo :: String
usageMsg = usageInfo "Usage: protoCompile [OPTION..] path-to-file.proto ..." optionList
versionInfo = unlines $
[ "Welcome to protocol-buffers version "++showVersion version
, "Copyright (c) 2008-2011, Christopher Kuklewicz."
, "Released under BSD3 style license, see LICENSE file for details."
, "Some proto files, such as descriptor.proto and unittest*.proto"
, "are from google's code and are under an Apache 2.0 license."
, ""
, "Most command line arguments are similar to those of 'protoc'."
, ""
, "This program reads a .proto file and generates haskell code files."
, "See for more."
]
processOptions :: [String] -> Either String [OptionAction]
processOptions argv =
case getOpt (ReturnInOrder (Run . setProto)) optionList argv of
(opts,_,[]) -> Right opts
(_,_,errs) -> Left (unlines errs ++ usageMsg)
defaultOptions :: IO Options
defaultOptions = do
pwd <- fmap LocalFP getCurrentDirectory
return $ Options { optPrefix = []
, optAs = []
, optTarget = pwd
, optInclude = [pwd]
, optProto = []
, optDesc = Nothing
, optImports = False
, optVerbose = False
, optUnknownFields = False
, optLazy = False
, optLenses = False
, optJson = False
, optDryRun = False
}
main :: IO ()
main = do
progName <- getProgName
case progName of
"protoc-gen-haskell" -> pluginMain
_ -> standaloneMain
splitOn :: Char -> String -> [String]
splitOn c s =
case break (== c) s of
(s, []) -> [s]
(s', _:rest) -> s' : splitOn c rest
-- | Run as a @protoc@ plugin: read a serialized CodeGeneratorRequest
-- from stdin, apply any plugin parameters, generate code purely, and
-- write a CodeGeneratorResponse to stdout.
pluginMain :: IO ()
pluginMain = do
  defs <- defaultOptions
  -- Lazy read: the whole request is consumed (by messageGet) before
  -- the response is written, so the handle ordering below is safe.
  inputBytes <- LC.hGetContents stdin
  let req = either error fst $ messageGet inputBytes
  -- protoc passes plugin options as one comma-separated string.
  let parametersString = fmap (LC.unpack . utf8) $ parameter req
  let allParameters = maybe [] (splitOn ',') $ parametersString
  -- Recognized parameters: "json" and "prefix=<module prefix>".
  -- NOTE(review): an explicitly empty parameter string (Just "") would
  -- yield one empty option and hit the 'fail' below -- confirm protoc
  -- never sends that before hardening.
  let processOpt acc opt =
        case opt of
          "json" -> return $ setJson acc
          _ ->
            case splitOn '=' opt of
              ["prefix", prefix] -> return $ setPrefix prefix acc
              _ -> fail $ "Unrecognized parameter " ++ opt ++ ". Parameters given: " ++ fromMaybe "Nothing" parametersString
  opts <- foldM processOpt defs allParameters
  let resp = runPlugin opts req
  LC.hPut stdout $ messagePut resp
-- | Run as a standalone compiler: parse argv into option actions and
-- execute them, or print the parse errors plus usage help.
standaloneMain :: IO ()
standaloneMain = do
  opts <- defaultOptions
  argv <- getArgs
  either putStrLn (process opts) (processOptions argv)
-- | Interpret the parsed option actions left to right.  'Mutate'
-- updates the option record, 'Run' compiles one proto file with the
-- options accumulated so far, and the version switch short-circuits
-- the remaining actions.
process :: Options -> [OptionAction] -> IO ()
process options [] =
  if null (optProto options)
    then mapM_ putStrLn
           [ "No proto file specified (or empty proto file)"
           , ""
           , usageMsg
           ]
    else putStrLn "Processing complete, have a nice day."
process options (Mutate f : rest) = process (f options) rest
process options (Run f : rest) = do
  let options' = f options
  runStandalone options'
  process options' rest
process _options (Switch VersionInfo : _) = putStrLn versionInfo
-- | Ensure the directory that will contain the given file exists,
-- creating missing parent directories as needed.
mkdirFor :: FilePath -> IO ()
mkdirFor = createDirectoryIfMissing True . takeDirectory
-- | Pretty-printer layout for all generated modules: 'PageMode'
-- rendering with 132-character lines and a 0.6 ribbon fraction
-- (the arguments of the pretty-printing 'Style' constructor).
style :: Style
style = Style PageMode 132 0.6
myMode :: PPHsMode
-- | Serialize a FileDescriptorSet when the user asked for a descriptor
-- dump.  With @Nothing@ nothing is written; with imports enabled every
-- loaded descriptor is included, otherwise only the file being
-- compiled.
dump :: (Monad m) => Output m -> Bool -> Maybe LocalFP -> D.FileDescriptorProto -> [D.FileDescriptorProto] -> m ()
dump _ _ Nothing _ _ = return ()
dump o imports (Just (LocalFP dumpFile)) fdp fdps = do
  outputReport o ("dumping to filename: " ++ show dumpFile)
  let descriptors
        | imports   = Seq.fromList fdps
        | otherwise = Seq.singleton fdp
  outputWriteFile o dumpFile
    (LC.unpack (messagePut (defaultValue { D.FileDescriptorSet.file = descriptors })))
  outputReport o
    ("finished dumping FileDescriptorSet binary of: "
       ++ show (D.FileDescriptorProto.name fdp))
-- | The two effects code generation needs, abstracted so the same
-- driver can run in plain IO (standalone mode) or in a State monad
-- accumulating a plugin response (see 'runStandalone' / 'runPlugin').
data Output m = Output {
    -- | Emit a progress/log message.
    outputReport :: String -> m (),
    -- | Write one generated file: relative path, then full contents.
    outputWriteFile :: FilePath -> String -> m ()
  }
-- | Load the proto file (and its imports) named by the options and
-- generate Haskell modules under the target directory, printing each
-- emitted path.  With --dry_run nothing is written to disk.
runStandalone :: Options -> IO ()
runStandalone options = do
  (env, fdps) <- loadProtos (optInclude options) (optProto options)
  putStrLn "All proto files loaded"
  run' consoleOutput options env fdps
  where
    consoleOutput :: Output IO
    consoleOutput = Output { outputReport = putStrLn, outputWriteFile = emit }
    emit relPath contents = do
      let target = combine (unLocalFP (optTarget options)) relPath
      putStrLn target
      mkdirFor target
      unless (optDryRun options) (writeFile target contents)
-- | Pure-state variant of the driver used by the protoc plugin: every
-- generated file is appended to the CodeGeneratorResponse instead of
-- being written to disk, and progress reports are discarded.
runPlugin :: Options -> CodeGeneratorRequest -> CodeGeneratorResponse
runPlugin options req = execState (run' pluginOutput options env fdps) defaultValue
  where
    (env,fdps) = loadCodeGenRequest req requestedFiles
    pluginOutput :: Output (State CodeGeneratorResponse)
    pluginOutput = Output
      { outputReport = const $ return ()  -- no console in plugin mode
      , outputWriteFile = appendFileRecord
      }
    -- Record one generated file (name + contents) in the response.
    appendFileRecord :: FilePath -> String -> State CodeGeneratorResponse ()
    appendFileRecord f c = modify $ \resp -> resp
      { CGR.file = (CGR.file resp) |> newFile
      }
      where
        newFile = defaultValue
          { CGR.File.name = Just $ Utf8 $ LC.pack f
          , CGR.File.content = Just $ Utf8 $ LC.pack c
          }
    -- Only the files protoc explicitly asked for are generated.
    requestedFiles = map (LocalFP . LC.unpack . utf8) . toList . file_to_generate $ req
-- | This run' operates for both runStandalone and runPlugin
-- | Shared code-generation driver for both front ends; all effects go
-- through the supplied 'Output'.
run' :: (Monad m) => Output m -> Options -> [Env] -> [D.FileDescriptorProto] -> m ()
run' o@(Output print' writeFile') options envs fdps = do
  let toGenerate = map (\env -> (either error id . top'FDP . fst . getTLS $ env, env)) envs
  -- A descriptor dump is only attempted when exactly one file is
  -- being generated.
  case toGenerate of
    [(fdp, _)] -> unless (optDryRun options) $ dump o (optImports options) (optDesc options) fdp fdps
    _ -> return ()
  results <- forM toGenerate $ \(fdp, env) -> do
    -- Compute the nameMap that determines how to translate from proto
    -- names to Haskell names.
    nameMap <- either error return $ makeNameMaps (optPrefix options) (optAs options) env
    print' "Haskell name mangling done"
    let protoInfo = makeProtoInfo (optUnknownFields options,optLazy options,optLenses options, optJson options) nameMap fdp
        result = makeResult protoInfo
    -- Force the module-dependency analysis before reporting success.
    seq result (print' "Recursive modules resolved")
    let produceMSG di = do
          unless (optDryRun options) $ do
            let fileModules = descriptorModules result di
            forM_ fileModules $ \ (relPath,modSyn) -> do
              writeFile' relPath (prettyPrintStyleMode style myMode modSyn)
        produceENM ei = do
          let file = joinPath . enumFilePath $ ei
          writeFile' file (prettyPrintStyleMode style myMode (enumModule ei))
        produceONO oi = do
          let file = joinPath . oneofFilePath $ oi
          writeFile' file (prettyPrintStyleMode style myMode (oneofModule result oi))
        produceSRV srv = do
          let file = joinPath . serviceFilePath $ srv
          writeFile' file (prettyPrintStyleMode style myMode (serviceModule result srv))
    -- Emit one module per message, enum, oneof, and service.
    mapM_ produceMSG (messages protoInfo)
    mapM_ produceENM (enums protoInfo)
    mapM_ produceONO (oneofs protoInfo)
    mapM_ produceSRV (services protoInfo)
    return (result, protoInfo, fdp)
  -- The top-level .Proto module is likewise only written for a
  -- single-file run.
  case results of
    [(result, protoInfo, fdp)] -> do
      let file = joinPath . protoFilePath $ protoInfo
      writeFile' file (prettyPrintStyleMode style myMode (protoModule result protoInfo (serializeFDP fdp)))
    _ -> return ()
|
453ab5f58d7d32bc5c34d039df5cc1c6682c07709b0d39c8e2b9119b738bd534 | hackwaly/ocamlearlybird | state_initialized.ml | *
* Copyright ( C ) 2021
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Copyright (C) 2021 Yuxiang Wen
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
open Ground
open Debug_protocol_ex
open Path_glob
(* Launch the debuggee as a child process and forward its stdout/stderr
   to the DAP client as Output events.  Used when the client cannot (or
   chose not to) provide a terminal. *)
let spawn_console ~rpc ?name ?env ?cwd prog args =
  ignore name;
  (* Default the working directory to the program's own directory. *)
  let cwd = Option.value ~default:(Filename.dirname prog) cwd in
  let cmd = ("", prog :: args |> Array.of_list) in
  (* Start from the current process environment... *)
  let curr_env =
    Unix.environment () |> Array.to_seq
    |> Seq.map (String.cut_on_char '=')
    |> Seq.fold_left
         (fun dict (key, value) -> String_dict.add key value dict)
         String_dict.empty
  in
  (* ...then apply the launch overrides: [Some v] sets a variable,
     [None] removes it. *)
  let env =
    List.fold_left
      (fun curr_env (k, v) ->
        match v with
        | None -> String_dict.remove k curr_env
        | Some v -> String_dict.add k v curr_env)
      curr_env
      (env
      |> Option.value ~default:String_opt_dict.empty
      |> String_opt_dict.bindings)
  in
  Lwt_unix.with_chdir ~cwd (fun () ->
      (* Pump one output channel into DAP Output events under the given
         category (stdout or stderr). *)
      let redir_output out_chan category () =
        Lwt_io.loop_read out_chan (fun content ->
            Debug_rpc.send_event rpc
              (module Output_event)
              Output_event.Payload.(
                make ~category:(Some category) ~output:content ()))
      in
      let env =
        env |> String_dict.bindings
        |> List.map (fun (k, v) -> k ^ "=" ^ v)
        |> Array.of_list
      in
      let proc = Lwt_process.open_process_full ~env cmd in
      Lwt.async (redir_output proc#stdout Output_event.Payload.Category.Stdout);
      Lwt.async (redir_output proc#stderr Output_event.Payload.Category.Stderr);
      Lwt.return ());%lwt
  Lwt.return ()
(* Ask the DAP client to run the debuggee in one of its terminals via a
   runInTerminal reverse request.  The request is fired asynchronously
   and its result is discarded -- the debugger attaches through the
   CAML_DEBUG_SOCKET handshake rather than the spawned process handle. *)
let spawn_terminal ~kind ~rpc ?name ?env ?cwd prog args =
  let cwd = match cwd with Some cwd -> cwd | None -> Filename.dirname prog in
  Lwt.async (fun () ->
      let%lwt _ =
        Debug_rpc.exec_command rpc
          (module Run_in_terminal_command)
          Run_in_terminal_command.Arguments.
            { kind = Some kind; title = name; cwd; env; args = prog :: args }
      in
      Lwt.return ());
  Lwt.return ()
(* Dispatch the launch to the requested console kind: an adapter-managed
   child process, or a client terminal (integrated or external) via the
   runInTerminal request. *)
let spawn ~kind ~rpc ?name ?env ?cwd prog args =
  let open Launch_command.Arguments in
  match kind with
  | Console.Internal_console -> spawn_console ~rpc ?env ?cwd prog args
  | Integrated_terminal | External_terminal ->
      let terminal_kind =
        match kind with
        | Integrated_terminal ->
            Run_in_terminal_command.Arguments.Kind.Integrated
        | _ -> Run_in_terminal_command.Arguments.Kind.External
      in
      spawn_terminal ~kind:terminal_kind ~rpc ?name ?env ?cwd prog args
(* Handle the DAP "launch" request: optionally enable adapter debug
   logging, pick a console kind the client actually supports, open the
   listening socket the debuggee connects back to (advertised via the
   CAML_DEBUG_SOCKET environment variable), spawn the program, and
   initialize the debugger against its symbols. *)
let launch ~rpc ~init_args ~capabilities ~launch_args =
  ignore init_args;
  ignore capabilities;
  let open Launch_command.Arguments in
  let open Initialize_command.Arguments in
  (* Optional debug log file for the adapter itself. *)
  if launch_args._debug_log |> Option.is_some then (
    Logs.set_level (Some Debug);
    let file = open_out (launch_args._debug_log |> Option.get) in
    let fmt = Format.formatter_of_out_channel file in
    Logs.set_reporter (Logs_fmt.reporter ~app:fmt ~dst:fmt ()));
  (* Fall back to the internal console when the client cannot service
     runInTerminal requests. *)
  let kind =
    if init_args.supports_run_in_terminal_request |> Option.value ~default:false
    then launch_args.console
    else Console.Internal_console
  in
  (* Listen on an ephemeral loopback port; the debuggee finds it via
     CAML_DEBUG_SOCKET. *)
  let debug_sock = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in
  Lwt_unix.(bind debug_sock Unix.(ADDR_INET (inet_addr_loopback, 0)));%lwt
  Lwt_unix.listen debug_sock 1;
  let env =
    launch_args.env
    |> String_opt_dict.add "CAML_DEBUG_SOCKET"
         (Some (debug_sock |> Lwt_unix.getsockname |> Unix.Sockaddr.to_string))
  in
  spawn ~kind ~rpc ?name:launch_args.name ~env ?cwd:launch_args.cwd
    launch_args.program launch_args.arguments;%lwt
  let%lwt dbg =
    (* Restrict debugging to source paths matching the optional glob;
       an unparsable glob silently disables the filter. *)
    let debug_filter =
      let globber =
        let open Option in
        let* glob = launch_args.only_debug_glob in
        try Glob.parse glob |> return with _ -> None
      in
      match globber with
      | None -> fun _ -> true
      | Some globber -> fun path -> Glob.eval globber path
    in
    Debugger.init
      (Debugger.make_options ~debug_sock ~symbols_file:launch_args.program
         ?yield_steps:launch_args.yield_steps
         ~follow_fork_mode:
           (match launch_args.follow_fork_mode with
           | Fork_parent -> `Fork_parent
           | Fork_child -> `Fork_child)
         ~debug_filter ())
  in
  Lwt.return dbg
(* State machine for the "initialized" phase: wait for exactly one
   launch request (attach is unsupported), after which the
   launch/attach handlers are removed so the client cannot re-enter.
   A disconnect before launch aborts the returned promise with Exit. *)
let run ~init_args ~capabilities rpc =
  let promise, resolver = Lwt.task () in
  let prevent_reenter () =
    Debug_rpc.remove_command_handler rpc (module Launch_command);
    Debug_rpc.remove_command_handler rpc (module Attach_command)
  in
  Debug_rpc.set_command_handler rpc
    (module Launch_command)
    (fun launch_args ->
      prevent_reenter ();
      let%lwt launched = launch ~rpc ~init_args ~capabilities ~launch_args in
      (* Hand the launch result to whoever awaits [promise]. *)
      Lwt.wakeup_later resolver (launch_args, launched);
      Lwt.return_unit);
  Debug_rpc.set_command_handler rpc
    (module Attach_command)
    (fun _ ->
      prevent_reenter ();
      Lwt.fail_with "Unsupported");
  Debug_rpc.set_command_handler rpc
    (module Disconnect_command)
    (fun _ ->
      Debug_rpc.remove_command_handler rpc (module Disconnect_command);
      Lwt.wakeup_later_exn resolver Exit;
      Lwt.return_unit);
  promise
| null | https://raw.githubusercontent.com/hackwaly/ocamlearlybird/165174e3cc749ba416ec7ebfb4b521e5fac744db/src/adapter/state_initialized.ml | ocaml | *
* Copyright ( C ) 2021
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Copyright (C) 2021 Yuxiang Wen
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
open Ground
open Debug_protocol_ex
open Path_glob
let spawn_console ~rpc ?name ?env ?cwd prog args =
ignore name;
let cwd = Option.value ~default:(Filename.dirname prog) cwd in
let cmd = ("", prog :: args |> Array.of_list) in
let curr_env =
Unix.environment () |> Array.to_seq
|> Seq.map (String.cut_on_char '=')
|> Seq.fold_left
(fun dict (key, value) -> String_dict.add key value dict)
String_dict.empty
in
let env =
List.fold_left
(fun curr_env (k, v) ->
match v with
| None -> String_dict.remove k curr_env
| Some v -> String_dict.add k v curr_env)
curr_env
(env
|> Option.value ~default:String_opt_dict.empty
|> String_opt_dict.bindings)
in
Lwt_unix.with_chdir ~cwd (fun () ->
let redir_output out_chan category () =
Lwt_io.loop_read out_chan (fun content ->
Debug_rpc.send_event rpc
(module Output_event)
Output_event.Payload.(
make ~category:(Some category) ~output:content ()))
in
let env =
env |> String_dict.bindings
|> List.map (fun (k, v) -> k ^ "=" ^ v)
|> Array.of_list
in
let proc = Lwt_process.open_process_full ~env cmd in
Lwt.async (redir_output proc#stdout Output_event.Payload.Category.Stdout);
Lwt.async (redir_output proc#stderr Output_event.Payload.Category.Stderr);
Lwt.return ());%lwt
Lwt.return ()
let spawn_terminal ~kind ~rpc ?name ?env ?cwd prog args =
let cwd = match cwd with Some cwd -> cwd | None -> Filename.dirname prog in
Lwt.async (fun () ->
let%lwt _ =
Debug_rpc.exec_command rpc
(module Run_in_terminal_command)
Run_in_terminal_command.Arguments.
{ kind = Some kind; title = name; cwd; env; args = prog :: args }
in
Lwt.return ());
Lwt.return ()
let spawn ~kind ~rpc ?name ?env ?cwd prog args =
let open Launch_command.Arguments in
match kind with
| Console.Internal_console -> spawn_console ~rpc ?env ?cwd prog args
| Integrated_terminal ->
spawn_terminal ~kind:Run_in_terminal_command.Arguments.Kind.Integrated
~rpc ?name ?env ?cwd prog args
| External_terminal ->
spawn_terminal ~kind:Run_in_terminal_command.Arguments.Kind.External ~rpc
?name ?env ?cwd prog args
let launch ~rpc ~init_args ~capabilities ~launch_args =
ignore init_args;
ignore capabilities;
let open Launch_command.Arguments in
let open Initialize_command.Arguments in
if launch_args._debug_log |> Option.is_some then (
Logs.set_level (Some Debug);
let file = open_out (launch_args._debug_log |> Option.get) in
let fmt = Format.formatter_of_out_channel file in
Logs.set_reporter (Logs_fmt.reporter ~app:fmt ~dst:fmt ()));
let kind =
if init_args.supports_run_in_terminal_request |> Option.value ~default:false
then launch_args.console
else Console.Internal_console
in
let debug_sock = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in
Lwt_unix.(bind debug_sock Unix.(ADDR_INET (inet_addr_loopback, 0)));%lwt
Lwt_unix.listen debug_sock 1;
let env =
launch_args.env
|> String_opt_dict.add "CAML_DEBUG_SOCKET"
(Some (debug_sock |> Lwt_unix.getsockname |> Unix.Sockaddr.to_string))
in
spawn ~kind ~rpc ?name:launch_args.name ~env ?cwd:launch_args.cwd
launch_args.program launch_args.arguments;%lwt
let%lwt dbg =
let debug_filter =
let globber =
let open Option in
let* glob = launch_args.only_debug_glob in
try Glob.parse glob |> return with _ -> None
in
match globber with
| None -> fun _ -> true
| Some globber -> fun path -> Glob.eval globber path
in
Debugger.init
(Debugger.make_options ~debug_sock ~symbols_file:launch_args.program
?yield_steps:launch_args.yield_steps
~follow_fork_mode:
(match launch_args.follow_fork_mode with
| Fork_parent -> `Fork_parent
| Fork_child -> `Fork_child)
~debug_filter ())
in
Lwt.return dbg
let run ~init_args ~capabilities rpc =
let promise, resolver = Lwt.task () in
let prevent_reenter () =
Debug_rpc.remove_command_handler rpc (module Launch_command);
Debug_rpc.remove_command_handler rpc (module Attach_command)
in
Debug_rpc.set_command_handler rpc
(module Launch_command)
(fun launch_args ->
prevent_reenter ();
let%lwt launched = launch ~rpc ~init_args ~capabilities ~launch_args in
Lwt.wakeup_later resolver (launch_args, launched);
Lwt.return_unit);
Debug_rpc.set_command_handler rpc
(module Attach_command)
(fun _ ->
prevent_reenter ();
Lwt.fail_with "Unsupported");
Debug_rpc.set_command_handler rpc
(module Disconnect_command)
(fun _ ->
Debug_rpc.remove_command_handler rpc (module Disconnect_command);
Lwt.wakeup_later_exn resolver Exit;
Lwt.return_unit);
promise
| |
84d46a6b1a6a1ed7a62ad3dc9d5f8a231c7734fc9f3131c807a5020945a7760d | malcolmreynolds/GSLL | nonlinear-least-squares.lisp | ;; Nonlinear least squares fitting.
;; 2008-02-09 12:59:16EST nonlinear-least-squares.lisp
;; Time-stamp: <2009-06-06 16:47:47EDT nonlinear-least-squares.lisp>
;; $Id$
(in-package :gsl)
;;; /usr/include/gsl/gsl_multifit_nlin.h
;;;;****************************************************************************
;;;; Function-only solver object
;;;;****************************************************************************
Note that GSL currently provides no derivative - free solvers ,
;;; so this is moot for now.
;;; GSL solver object for fits where only the residual function (no
;;; analytic Jacobian) is supplied.  Per the note above, GSL currently
;;; ships no derivative-free fit solvers, so there are no usable
;;; solver-type values for this object yet.
(defmobject nonlinear-ffit "gsl_multifit_fsolver"
  ((solver-type :pointer)
   ((first dimensions) sizet)   ; number-of-observations
   ((second dimensions) sizet)) ; number-of-parameters
  "nonlinear least squares fit with function only"
  "The number of observations must be greater than or equal to parameters."
  :callbacks
  (callback fnstruct-fit
            (number-of-observations number-of-parameters)
            (function
             :success-failure
             (:input :double :marray dim1) :slug
             (:output :double :marray dim0)))
  :initialize-suffix "set"
  :initialize-args ((callback :pointer) ((mpointer initial-guess) :pointer))
  :singular (function))

;;; CL method dispatching on the solver object; returns GSL's name for
;;; the underlying algorithm.
(defmfun name ((solver nonlinear-ffit))
  "gsl_multifit_fsolver_name"
  (((mpointer solver) :pointer))
  :definition :method
  :c-return :string
  "The name of the solver type.")
;;;;****************************************************************************
;;;; Function and derivative solver object
;;;;****************************************************************************
;;; Solver object for fits with an analytic Jacobian; this is the
;;; object the example below uses (with the Levenberg-Marquardt
;;; methods).  The callback triple is: residual function, Jacobian
;;; (df), and combined function+Jacobian (fdf).
(defmobject nonlinear-fdffit "gsl_multifit_fdfsolver"
  ((solver-type :pointer)
   ((first dimensions) sizet)   ; number-of-observations
   ((second dimensions) sizet)) ; number-of-parameters
  "nonlinear least squares fit with function and derivative"
  "The number of observations must be greater than or
   equal to parameters."
  :callbacks
  (callback fnstruct-fit-fdf
            (number-of-observations number-of-parameters)
            (function :success-failure
                      (:input :double :marray dim1)
                      :slug
                      (:output :double :marray dim0))
            (df :success-failure
                (:input :double :marray dim1)
                :slug
                (:output :double :marray dim0 dim1))
            (fdf :success-failure
                 (:input :double :marray dim1)
                 :slug
                 (:output :double :marray dim0)
                 (:output :double :marray dim0 dim1)))
  :initialize-suffix "set"
  :initialize-args ((callback :pointer) ((mpointer initial-guess) :pointer)))

;;; GSL's name for the algorithm behind a derivative-based solver.
(defmfun name ((solver nonlinear-fdffit))
  "gsl_multifit_fdfsolver_name"
  (((mpointer solver) :pointer))
  :definition :method
  :c-return :string
  "The name of the solver type.")
;;;;****************************************************************************
;;;; Iteration
;;;;****************************************************************************
;;; Single iteration steps and current-solution accessors for both
;;; solver flavors.
(defmfun iterate ((solver nonlinear-ffit))
  "gsl_multifit_fsolver_iterate"
  (((mpointer solver) :pointer))
  :definition :method
  "Perform a single iteration of the solver. The solver maintains a
current estimate of the best-fit parameters at all times. ")

(defmfun iterate ((solver nonlinear-fdffit))
  "gsl_multifit_fdfsolver_iterate"
  (((mpointer solver) :pointer))
  :definition :method
  "Perform a single iteration of the solver. The solver maintains a
current estimate of the best-fit parameters at all times. ")

;;; These return a fresh CL copy of the parameter vector, not a view
;;; into the solver struct (note the (copy crtn) in :return).
(defmfun solution ((solver nonlinear-ffit))
  "gsl_multifit_fsolver_position"
  (((mpointer solver) :pointer))
  :definition :method
  :c-return (crtn :pointer)
  :return ((copy crtn))
  "The current best-fit parameters.")

(defmfun solution ((solver nonlinear-fdffit))
  "gsl_multifit_fdfsolver_position"
  (((mpointer solver) :pointer))
  :definition :method
  :c-return (crtn :pointer)
  :return ((copy crtn))
  "The current best-fit parameters.")
;;; Why doesn't GSL have functions to extract these values?
;;; Accessors GSL itself does not provide: pull slots directly out of
;;; the gsl_multifit_fdfsolver struct via CFFI.
(defmethod function-value ((solver nonlinear-fdffit))
  ;; Fresh CL copy of the residual vector f.
  (copy
   (cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'f)))

(defmethod last-step ((solver nonlinear-fdffit))
  ;; Raw pointer, because we presume we're passing it on to another GSL function.
  (cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'dx))

(defun jacobian (solver)
  ;; Raw pointer, because we presume we're passing it on to another GSL function.
  (cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'jacobian))
;;;;****************************************************************************
;;;; Search stopping
;;;;****************************************************************************
;;; Convergence tests and the residual gradient.
(defmfun fit-test-delta
    (last-step current-position absolute-error relative-error)
  "gsl_multifit_test_delta"
  ((last-step :pointer) (current-position :pointer)
   (absolute-error :double) (relative-error :double))
  :c-return :success-continue
  "Test for the convergence of the sequence by comparing the
last step with the absolute error and relative
error to the current position. The test returns T
if |last-step_i| < absolute-error + relative-error |current-position_i|
for each component i of current-position and returns NIL otherwise.")

(defmfun fit-test-gradient (gradient absolute-error)
  "gsl_multifit_test_gradient"
  ((gradient :pointer) (absolute-error :double))
  :c-return :success-continue
  "Test the residual gradient against the absolute
error bound. Mathematically, the gradient should be
exactly zero at the minimum. The test returns T if the
following condition is achieved: \sum_i |gradient_i| < absolute-error
and returns NIL otherwise. This criterion is suitable
for situations where the precise location of the minimum
is unimportant provided a value can be found where the gradient is small
enough.")

(defmfun fit-gradient (jacobian function-values gradient)
  "gsl_multifit_gradient"
  ((jacobian :pointer) ((mpointer function-values) :pointer) (gradient :pointer))
  "Compute the gradient of \Phi(x) = (1/2) ||F(x)||^2
from the Jacobian matrix and the function values using
the formula g = J^T f.")
;;;;****************************************************************************
;;;; Minimization using derivatives
;;;;****************************************************************************
;;; Solver algorithms, passed as the METHOD argument of
;;; make-nonlinear-fdffit.
(defmpar +levenberg-marquardt+ "gsl_multifit_fdfsolver_lmsder"
  "A robust and efficient version of the Levenberg-Marquardt
algorithm as implemented in the scaled lmder routine in
Minpack, written by Jorge J. More', Burton S. Garbow
and Kenneth E. Hillstrom.
The algorithm uses a generalized trust region to keep each step under
control. In order to be accepted a proposed new position x' must
satisfy the condition |D (x' - x)| < \delta, where D is a
diagonal scaling matrix and \delta is the size of the trust
region. The components of D are computed internally, using the
column norms of the Jacobian to estimate the sensitivity of the residual
to each component of x. This improves the behavior of the
algorithm for badly scaled functions.
On each iteration the algorithm attempts to minimize the linear system
|F + J p| subject to the constraint |D p| < \Delta. The
solution to this constrained linear system is found using the
Levenberg-Marquardt method.
The proposed step is now tested by evaluating the function at the
resulting point, x'. If the step reduces the norm of the
function sufficiently, and follows the predicted behavior of the
function within the trust region, then it is accepted and the size of the
trust region is increased. If the proposed step fails to improve the
solution, or differs significantly from the expected behavior within
the trust region, then the size of the trust region is decreased and
another trial step is computed.
The algorithm also monitors the progress of the solution and
returns an error if the changes in the solution are smaller than
the machine precision. The possible errors signalled are:
'failure-to-reach-tolerance-f the decrease in the function falls
below machine precision,
'failure-to-reach-tolerance-x
the change in the position vector falls below machine precision,
'failure-to-reach-tolerance-g
the norm of the gradient, relative to the norm of the function,
falls below machine precision.
These errors indicate that further iterations would be unlikely to
change the solution from its current value.")

;;; Same algorithm without the internal diagonal scaling (D = I).
(defmpar +levenberg-marquardt-unscaled+ "gsl_multifit_fdfsolver_lmder"
  "The unscaled version of *levenberg-marquardt*. The elements of the
diagonal scaling matrix D are set to 1. This algorithm may be
useful in circumstances where the scaled version of converges too
slowly, or the function is already scaled appropriately.")
;;;;****************************************************************************
;;;; Covariance
;;;;****************************************************************************
;;; Covariance matrix of the best-fit parameters, C = (J^T J)^{-1}.
(defmfun ls-covariance (jacobian relative-error covariance)
  "gsl_multifit_covar"
  ((jacobian :pointer) (relative-error :double) ((mpointer covariance) :pointer))
  :return (covariance)
  "Compute the covariance matrix of the best-fit parameters
using the Jacobian matrix J. The relative error
is used to remove linear-dependent columns when J is
rank deficient. The covariance matrix is given by
C = (J^T J)^{-1}
and is computed by QR decomposition of J with column-pivoting. Any
columns of R which satisfy |R_{kk}| <= relative-error |R_{11}|
are considered linearly-dependent and are excluded from the covariance
matrix (the corresponding rows and columns of the covariance matrix are
set to zero).
If the minimisation uses the weighted least-squares function
f_i = (Y(x, t_i) - y_i) / sigma_i then the covariance
matrix above gives the statistical error on the best-fit parameters
resulting from the gaussian errors sigma_i on
the underlying data y_i. This can be verified from the relation
\delta f = J \delta c and the fact that the fluctuations in f
from the data y_i are normalised by sigma_i and
so satisfy <delta f delta f^T> = I.
For an unweighted least-squares function f_i = (Y(x, t_i) -
y_i) the covariance matrix above should be multiplied by the variance
of the residuals about the best-fit sigma^2 = sum (y_i - Y(x,t_i))^2 / (n-p)
to give the variance-covariance matrix sigma^2 C.
This estimates the statistical error on the
best-fit parameters from the scatter of the underlying data.
For more information about covariance matrices see the GSL documentation
Fitting Overview.")
;;;;****************************************************************************
;;;; Example
;;;;****************************************************************************
;;; The example from Section 37.9 of the GSL manual.
;;; See the GSL source tree, doc/examples/expfit.c for the functions
;;; and doc/examples/nlfit.c for the solver.
;;; Synthetic data set: number of points N, observed values Y, and
;;; per-point standard deviations SIGMA.
(defstruct exponent-fit-data n y sigma)

;;; Dynamically bound to the data set the residual callbacks read.
(defvar *nlls-example-data*)

(defun generate-nlls-data (&optional (number-of-observations 40))
  "Create the data used in the nonlinear least squares fit example."
  ;; y_i = 1 + 5 exp(-i/10) + Gaussian noise (sigma = 0.1), using a
  ;; fixed-seed RNG so runs are reproducible.
  (make-exponent-fit-data
   :n number-of-observations
   :y
   (let ((arr (make-marray 'double-float :dimensions number-of-observations))
         (rng (make-random-number-generator +mt19937+ 0)))
     (dotimes (i number-of-observations arr)
       (setf (maref arr i)
             (+ 1 (* 5 (exp (* -1/10 i)))
                (sample rng 'gaussian :sigma 0.1d0)))))
   :sigma
   (make-marray
    'double-float :dimensions number-of-observations :initial-element 0.1d0)))
(defun exponential-residual (x f)
  "Store in F the weighted differences (model - observation), i.e. the
   negatives of the residuals, of the exponential model
   A*exp(-lambda*i) + b at the parameter vector X."
  (let* ((data *nlls-example-data*)
         (amplitude (maref x 0))
         (decay-rate (maref x 1))
         (offset (maref x 2))
         (y (exponent-fit-data-y data))
         (sigma (exponent-fit-data-sigma data)))
    (dotimes (i (exponent-fit-data-n data))
      ;; model - observation = - residual, scaled by this point's sigma
      (setf (maref f i)
            (/ (- (+ (* amplitude (exp (* (- decay-rate) i))) offset)
                  (maref y i))
               (maref sigma i))))))
(defun exponential-residual-derivative (x jacobian)
  "Store in JACOBIAN the partial derivatives of the negatives of the
   residuals of the exponential model with respect to the parameters
   (A, lambda, b), evaluated at the parameter vector X."
  (let* ((data *nlls-example-data*)
         (amplitude (maref x 0))
         (decay-rate (maref x 1))
         (sigma (exponent-fit-data-sigma data)))
    (dotimes (i (exponent-fit-data-n data))
      (let* ((s (maref sigma i))
             (e-over-s (/ (exp (* (- decay-rate) i)) s)))
        ;; Columns: d/dA, d/dlambda, d/db of (model - y_i)/sigma_i.
        (setf (maref jacobian i 0) e-over-s
              (maref jacobian i 1) (* -1 i amplitude e-over-s)
              (maref jacobian i 2) (/ s))))))
(defun exponential-residual-fdf (x f jacobian)
  "Compute the function and partial derivatives of the negative of the
   residuals with the exponential model
   for the nonlinear least squares example."
  ;; GSL's fdf callback: fill both the residual vector and the
  ;; Jacobian in one call.
  (exponential-residual x f)
  (exponential-residual-derivative x jacobian))
(defun norm-f (fit)
  "Find the norm of the fit function f."
  ;; |f| is the Euclidean norm of the residual vector, i.e. sqrt(chi^2).
  (euclidean-norm (function-value fit)))
(defun nonlinear-least-squares-example
    (&optional (number-of-observations 40)
               (method +levenberg-marquardt+)
               (print-steps t))
  "Run the GSL manual's exponential-fit example: generate noisy data,
then fit A*exp(-lambda*i) + b by iterating METHOD until the step size
converges or 25 iterations elapse.  Returns the list (A lambda b)."
  (let ((*nlls-example-data* (generate-nlls-data number-of-observations)))
    (let* ((init #m(1.0d0 0.0d0 0.0d0)) ; starting guess for (A lambda b)
           (number-of-parameters 3)
           (covariance
            (make-marray 'double-float
                         :dimensions
                         (list number-of-parameters number-of-parameters)))
           (fit (make-nonlinear-fdffit
                 method
                 (list number-of-observations number-of-parameters)
                 '(exponential-residual
                   exponential-residual-derivative exponential-residual-fdf)
                 init nil)))
      ;; fitx reads parameter i of the current solution; err reads its
      ;; standard error from the covariance diagonal.
      (macrolet ((fitx (i) `(maref (solution fit) ,i))
                 (err (i) `(sqrt (maref covariance ,i ,i))))
        (when print-steps
          (format t "iter: ~d x = ~15,8f ~15,8f ~15,8f |f(x)|=~7,6g~&"
                  0 (fitx 0) (fitx 1) (fitx 2)
                  (norm-f fit)))
        ;; Always take at least one step; stop once the 1e-4
        ;; absolute/relative step tolerances are both met.
        (loop for iter from 0 below 25
           until
           (and (plusp iter)
                (fit-test-delta (last-step fit) (mpointer (solution fit)) 1.0d-4 1.0d-4))
           do
           (iterate fit)
           (ls-covariance (jacobian fit) 0.0d0 covariance)
           (when print-steps
             (format t "iter: ~d x = ~15,8f ~15,8f ~15,8f |f(x)|=~7,6g~&"
                     (1+ iter) (fitx 0) (fitx 1) (fitx 2)
                     (norm-f fit)))
           finally
           ;; Scale the parameter errors by chi/sqrt(dof) when the fit
           ;; is poor (c > 1).
           (let* ((chi (norm-f fit))
                  (dof (- number-of-observations number-of-parameters))
                  (c (max 1.0d0 (/ chi (sqrt dof)))))
             (when print-steps
               (format t "chisq/dof = ~g~&" (/ (expt chi 2) dof))
               (format t "A      = ~,5f +/- ~,5f~&" (fitx 0) (* c (err 0)))
               (format t "lambda = ~,5f +/- ~,5f~&" (fitx 1) (* c (err 1)))
               (format t "b      = ~,5f +/- ~,5f~&" (fitx 2) (* c (err 2))))
             (return (list (fitx 0) (fitx 1) (fitx 2)))))))))
;;; Register a regression test that runs the example quietly
;;; (print-steps nil) and records the fitted (A lambda b).
(save-test nonlinear-least-squares
 (nonlinear-least-squares-example 40 +levenberg-marquardt+ nil))
| null | https://raw.githubusercontent.com/malcolmreynolds/GSLL/2f722f12f1d08e1b9550a46e2a22adba8e1e52c4/solve-minimize-fit/nonlinear-least-squares.lisp | lisp | Nonlinear least squares fitting.
/usr/include/gsl/gsl_multifit_nlin.h
****************************************************************************
Function-only solver object
****************************************************************************
so this is moot for now.
number-of-observations
number-of-parameters
****************************************************************************
Function and derivative solver object
****************************************************************************
number-of-observations
number-of-parameters
****************************************************************************
Iteration
****************************************************************************
****************************************************************************
Search stopping
****************************************************************************
****************************************************************************
Minimization using derivatives
****************************************************************************
****************************************************************************
Covariance
****************************************************************************
****************************************************************************
Example
****************************************************************************
and doc/examples/nlfit.c for the solver.
the difference model - observation = - residual | , 2008 - 02 - 09 12:59:16EST nonlinear-least-squares.lisp
Time - stamp : < 2009 - 06 - 06 16:47:47EDT nonlinear-least-squares.lisp >
$ Id$
(in-package :gsl)
Note that GSL currently provides no derivative - free solvers ,
(defmobject nonlinear-ffit "gsl_multifit_fsolver"
((solver-type :pointer)
"nonlinear least squares fit with function only"
"The number of observations must be greater than or equal to parameters."
:callbacks
(callback fnstruct-fit
(number-of-observations number-of-parameters)
(function
:success-failure
(:input :double :marray dim1) :slug
(:output :double :marray dim0)))
:initialize-suffix "set"
:initialize-args ((callback :pointer) ((mpointer initial-guess) :pointer))
:singular (function))
(defmfun name ((solver nonlinear-ffit))
"gsl_multifit_fsolver_name"
(((mpointer solver) :pointer))
:definition :method
:c-return :string
"The name of the solver type.")
(defmobject nonlinear-fdffit "gsl_multifit_fdfsolver"
((solver-type :pointer)
"nonlinear least squares fit with function and derivative"
"The number of observations must be greater than or
equal to parameters."
:callbacks
(callback fnstruct-fit-fdf
(number-of-observations number-of-parameters)
(function :success-failure
(:input :double :marray dim1)
:slug
(:output :double :marray dim0))
(df :success-failure
(:input :double :marray dim1)
:slug
(:output :double :marray dim0 dim1))
(fdf :success-failure
(:input :double :marray dim1)
:slug
(:output :double :marray dim0)
(:output :double :marray dim0 dim1)))
:initialize-suffix "set"
:initialize-args ((callback :pointer) ((mpointer initial-guess) :pointer)))
(defmfun name ((solver nonlinear-fdffit))
"gsl_multifit_fdfsolver_name"
(((mpointer solver) :pointer))
:definition :method
:c-return :string
"The name of the solver type.")
(defmfun iterate ((solver nonlinear-ffit))
"gsl_multifit_fsolver_iterate"
(((mpointer solver) :pointer))
:definition :method
"Perform a single iteration of the solver. The solver maintains a
current estimate of the best-fit parameters at all times. ")
(defmfun iterate ((solver nonlinear-fdffit))
"gsl_multifit_fdfsolver_iterate"
(((mpointer solver) :pointer))
:definition :method
"Perform a single iteration of the solver. The solver maintains a
current estimate of the best-fit parameters at all times. ")
(defmfun solution ((solver nonlinear-ffit))
"gsl_multifit_fsolver_position"
(((mpointer solver) :pointer))
:definition :method
:c-return (crtn :pointer)
:return ((copy crtn))
"The current best-fit parameters.")
(defmfun solution ((solver nonlinear-fdffit))
"gsl_multifit_fdfsolver_position"
(((mpointer solver) :pointer))
:definition :method
:c-return (crtn :pointer)
:return ((copy crtn))
"The current best-fit parameters.")
Why does n't GSL have functions to extract these values ?
(defmethod function-value ((solver nonlinear-fdffit))
(copy
(cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'f)))
(defmethod last-step ((solver nonlinear-fdffit))
Raw pointer , because we presume we 're passing it on to another GSL function .
(cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'dx))
(defun jacobian (solver)
Raw pointer , because we presume we 're passing it on to another GSL function .
(cffi:foreign-slot-value (mpointer solver) 'gsl-fdffit-solver 'jacobian))
(defmfun fit-test-delta
(last-step current-position absolute-error relative-error)
"gsl_multifit_test_delta"
((last-step :pointer) (current-position :pointer)
(absolute-error :double) (relative-error :double))
:c-return :success-continue
"Test for the convergence of the sequence by comparing the
last step with the absolute error and relative
error to the current position. The test returns T
if |last-step_i| < absolute-error + relative-error |current-position_i|
for each component i of current-position and returns NIL otherwise.")
(defmfun fit-test-gradient (gradient absolute-error)
"gsl_multifit_test_gradient"
((gradient :pointer) (absolute-error :double))
:c-return :success-continue
"Test the residual gradient against the absolute
error bound. Mathematically, the gradient should be
exactly zero at the minimum. The test returns T if the
following condition is achieved: \sum_i |gradient_i| < absolute-error
and returns NIL otherwise. This criterion is suitable
for situations where the precise location of the minimum
is unimportant provided a value can be found where the gradient is small
enough.")
(defmfun fit-gradient (jacobian function-values gradient)
"gsl_multifit_gradient"
((jacobian :pointer) ((mpointer function-values) :pointer) (gradient :pointer))
"Compute the gradient of \Phi(x) = (1/2) ||F(x)||^2
from the Jacobian matrix and the function values using
the formula g = J^T f.")
(defmpar +levenberg-marquardt+ "gsl_multifit_fdfsolver_lmsder"
"A robust and efficient version of the Levenberg-Marquardt
algorithm as implemented in the scaled lmder routine in
Minpack, written by Jorge J. More', Burton S. Garbow
and Kenneth E. Hillstrom.
The algorithm uses a generalized trust region to keep each step under
control. In order to be accepted a proposed new position x' must
satisfy the condition |D (x' - x)| < \delta, where D is a
diagonal scaling matrix and \delta is the size of the trust
region. The components of D are computed internally, using the
column norms of the Jacobian to estimate the sensitivity of the residual
to each component of x. This improves the behavior of the
algorithm for badly scaled functions.
On each iteration the algorithm attempts to minimize the linear system
|F + J p| subject to the constraint |D p| < \Delta. The
solution to this constrained linear system is found using the
Levenberg-Marquardt method.
The proposed step is now tested by evaluating the function at the
resulting point, x'. If the step reduces the norm of the
function sufficiently, and follows the predicted behavior of the
function within the trust region, then it is accepted and the size of the
trust region is increased. If the proposed step fails to improve the
solution, or differs significantly from the expected behavior within
the trust region, then the size of the trust region is decreased and
another trial step is computed.
The algorithm also monitors the progress of the solution and
returns an error if the changes in the solution are smaller than
the machine precision. The possible errors signalled are:
'failure-to-reach-tolerance-f the decrease in the function falls
below machine precision,
'failure-to-reach-tolerance-x
the change in the position vector falls below machine precision,
'failure-to-reach-tolerance-g
the norm of the gradient, relative to the norm of the function,
falls below machine precision.
These errors indicate that further iterations would be unlikely to
change the solution from its current value.")
(defmpar +levenberg-marquardt-unscaled+ "gsl_multifit_fdfsolver_lmder"
"The unscaled version of *levenberg-marquardt*. The elements of the
diagonal scaling matrix D are set to 1. This algorithm may be
useful in circumstances where the scaled version of converges too
slowly, or the function is already scaled appropriately.")
(defmfun ls-covariance (jacobian relative-error covariance)
"gsl_multifit_covar"
((jacobian :pointer) (relative-error :double) ((mpointer covariance) :pointer))
:return (covariance)
"Compute the covariance matrix of the best-fit parameters
using the Jacobian matrix J. The relative error
is used to remove linear-dependent columns when J is
rank deficient. The covariance matrix is given by
C = (J^T J)^{-1}
and is computed by QR decomposition of J with column-pivoting. Any
columns of R which satisfy |R_{kk}| <= relative-error |R_{11}|
are considered linearly-dependent and are excluded from the covariance
matrix (the corresponding rows and columns of the covariance matrix are
set to zero).
If the minimisation uses the weighted least-squares function
f_i = (Y(x, t_i) - y_i) / sigma_i then the covariance
matrix above gives the statistical error on the best-fit parameters
resulting from the gaussian errors sigma_i on
the underlying data y_i. This can be verified from the relation
\delta f = J \delta c and the fact that the fluctuations in f
from the data y_i are normalised by sigma_i and
so satisfy <delta f delta f^T> = I.
For an unweighted least-squares function f_i = (Y(x, t_i) -
y_i) the covariance matrix above should be multiplied by the variance
of the residuals about the best-fit sigma^2 = sum (y_i - Y(x,t_i))^2 / (n-p)
to give the variance-covariance matrix sigma^2 C.
This estimates the statistical error on the
best-fit parameters from the scatter of the underlying data.
For more information about covariance matrices see the GSL documentation
Fitting Overview.")
The example from Section 37.9 of the GSL manual .
See the GSL source tree , doc / examples / expfit.c for the functions
(defstruct exponent-fit-data n y sigma)
(defvar *nlls-example-data*)
;; Build the synthetic data set for the example fit:
;;   y_i = 1 + 5 exp(-i/10) + N(0, sigma=0.1),  i = 0 .. n-1,
;; with a constant measurement error sigma_i = 0.1.
;; The RNG is seeded with 0, so the data are reproducible.
(defun generate-nlls-data (&optional (number-of-observations 40))
  "Create the data used in the nonlinear least squares fit example."
  (make-exponent-fit-data
   :n number-of-observations
   :y
   ;; Observations: exponential decay plus gaussian noise.
   (let ((arr (make-marray 'double-float :dimensions number-of-observations))
	 (rng (make-random-number-generator +mt19937+ 0)))
     (dotimes (i number-of-observations arr)
       (setf (maref arr i)
	     (+ 1 (* 5 (exp (* -1/10 i)))
		(sample rng 'gaussian :sigma 0.1d0)))))
   :sigma
   ;; Constant per-point error used to weight the residuals.
   (make-marray
    'double-float :dimensions number-of-observations :initial-element 0.1d0)))
;; Residual callback for the GSL fdf solver.
;; Model: Y(i) = A exp(-lambda i) + b, with parameter vector
;; x = (A, lambda, b).  Writes f_i = (Y(i) - y_i) / sigma_i into f.
;; Reads the data set from the special variable *nlls-example-data*.
(defun exponential-residual (x f)
  "Compute the negative of the residuals with the exponential model
   for the nonlinear least squares example."
  (let ((A (maref x 0))
	(lambda (maref x 1))
	(b (maref x 2)))
    (symbol-macrolet
	((y (exponent-fit-data-y *nlls-example-data*))
	 (sigma (exponent-fit-data-sigma *nlls-example-data*)))
      (dotimes (i (exponent-fit-data-n *nlls-example-data*))
	;; Weighted residual for observation i.
	(setf (maref f i)
	      (/ (- (+ (* A (exp (* (- lambda) i))) b) (maref y i))
		 (maref sigma i)))))))
;; Jacobian callback for the GSL fdf solver.  For the model
;; Y(i) = A exp(-lambda i) + b and f_i = (Y(i) - y_i)/sigma_i, fills
;;   column 0: df_i/dA      =  exp(-lambda i)/sigma_i
;;   column 1: df_i/dlambda = -i A exp(-lambda i)/sigma_i
;;   column 2: df_i/db      =  1/sigma_i
(defun exponential-residual-derivative (x jacobian)
  "Compute the partial derivatives of the negative of the
   residuals with the exponential model
   for the nonlinear least squares example."
  (let ((A (maref x 0))
	(lambda (maref x 1)))
    (symbol-macrolet
	((sigma (exponent-fit-data-sigma *nlls-example-data*)))
      (dotimes (i (exponent-fit-data-n *nlls-example-data*))
	;; Hoist the two quantities shared by all three columns.
	(let ((e (exp (* (- lambda) i)))
	      (s (maref sigma i)))
	  (setf (maref jacobian i 0) (/ e s)
		(maref jacobian i 1) (* -1 i A (/ e s))
		(maref jacobian i 2) (/ s)))))))
;; Combined f+df callback required by the GSL fdf solver interface:
;; fill the residual vector and the Jacobian in a single call by
;; delegating to the two single-purpose callbacks above.
(defun exponential-residual-fdf (x f jacobian)
  "Compute the function and partial derivatives of the negative of the
   residuals with the exponential model
   for the nonlinear least squares example."
  (exponential-residual x f)
  (exponential-residual-derivative x jacobian))
;; ||f(x)|| of the solver's current residual vector, i.e. sqrt(chi^2).
(defun norm-f (fit)
  "Find the norm of the fit function f."
  (euclidean-norm (function-value fit)))
(defun nonlinear-least-squares-example
    (&optional (number-of-observations 40)
     (method +levenberg-marquardt+)
     (print-steps t))
  "Fit A exp(-lambda t) + b to synthetic noisy data with the given GSL
   fdf solver METHOD.  Returns the fitted (A lambda b) as a list;
   optionally prints each iteration and the final error estimates.
   This mirrors the example in Section 37.9 of the GSL manual."
  ;; Bind the data set specially so the residual callbacks can see it.
  (let ((*nlls-example-data* (generate-nlls-data number-of-observations)))
    (let* ((init #m(1.0d0 0.0d0 0.0d0))
	   (number-of-parameters 3)
	   (covariance
	    (make-marray 'double-float
			 :dimensions
			 (list number-of-parameters number-of-parameters)))
	   ;; Solver object wired to the three callbacks defined above.
	   (fit (make-nonlinear-fdffit
		 method
		 (list number-of-observations number-of-parameters)
		 '(exponential-residual
		   exponential-residual-derivative exponential-residual-fdf)
		 init nil)))
      ;; fitx: i-th component of the current best-fit parameters;
      ;; err:  sqrt of the i-th diagonal covariance entry.
      (macrolet ((fitx (i) `(maref (solution fit) ,i))
		 (err (i) `(sqrt (maref covariance ,i ,i))))
	(when print-steps
	  (format t "iter: ~d x = ~15,8f ~15,8f ~15,8f |f(x)|=~7,6g~&"
		  0 (fitx 0) (fitx 1) (fitx 2)
		  (norm-f fit)))
	;; Iterate (at most 25 steps) until the step size passes the
	;; delta convergence test; plusp guards against testing before
	;; the first iteration has produced a step.
	(loop for iter from 0 below 25
	   until
	   (and (plusp iter)
		(fit-test-delta (last-step fit) (mpointer (solution fit)) 1.0d-4 1.0d-4))
	   do
	   (iterate fit)
	   (ls-covariance (jacobian fit) 0.0d0 covariance)
	   (when print-steps
	     (format t "iter: ~d x = ~15,8f ~15,8f ~15,8f |f(x)|=~7,6g~&"
		     (1+ iter) (fitx 0) (fitx 1) (fitx 2)
		     (norm-f fit)))
	   finally
	   ;; Scale parameter errors by max(1, chi/sqrt(dof)) as in the
	   ;; GSL example, then return the fitted parameters.
	   (let* ((chi (norm-f fit))
		  (dof (- number-of-observations number-of-parameters))
		  (c (max 1.0d0 (/ chi (sqrt dof)))))
	     (when print-steps
	       (format t "chisq/dof = ~g~&" (/ (expt chi 2) dof))
	       (format t "A = ~,5f +/- ~,5f~&" (fitx 0) (* c (err 0)))
	       (format t "lambda = ~,5f +/- ~,5f~&" (fitx 1) (* c (err 1)))
	       (format t "b = ~,5f +/- ~,5f~&" (fitx 2) (* c (err 2))))
	     (return (list (fitx 0) (fitx 1) (fitx 2)))))))))
(save-test nonlinear-least-squares
(nonlinear-least-squares-example 40 +levenberg-marquardt+ nil))
|
6d2509a7ec88f8d4e4e56e0f2c57183401720a7fd216ec62788a5b1ae9eeec73 | alanzplus/EOPL | implicit-refs-interpreter.rkt | #lang eopl
(require "./implicit-refs-spec.rkt")
(provide run)
(provide num-val)
(provide bool-val)
(provide proc-val)
(provide expval->num)
(provide expval->bool)
(provide expval->proc)
(provide procedure)
(provide apply-procedure)
(provide newref)
(provide setref!)
; -----------------------------------------------------------------------------
; Expression Value Representation
; -----------------------------------------------------------------------------
(define-datatype expval expval?
(num-val
(num number?))
(bool-val
(bool boolean?))
(proc-val
(proc proc?)))
; ExpVal -> num
(define expval->num
(lambda (val)
(cases expval val
(num-val (num) num)
(else (eopl:error "expected num-val")))))
; ExpVal -> bool
(define expval->bool
(lambda (val)
(cases expval val
(bool-val (bool) bool)
(else (eopl:error "expected bool-val")))))
; ExpVal -> Procedure
(define expval->proc
(lambda (val)
(cases expval val
(proc-val (proc) proc)
(else (eopl:error "expected proc-val")))))
; -----------------------------------------------------------------------------
; Procedure Representation
; -----------------------------------------------------------------------------
(define-datatype proc proc?
(procedure
(var identifier?)
(body expression?)
(env environment?)))
Proc x ExpVal - > ExpVal
(define apply-procedure
(lambda (proc1 val)
(cases proc proc1
(procedure (var body env)
(value-of body (extend-env var (newref val) env))))))
; -----------------------------------------------------------------------------
; Environment
; -----------------------------------------------------------------------------
(define-datatype environment environment?
(empty-env)
(extend-env
(var identifier?)
(val reference?)
(env environment?))
(extend-env-rec
(p-name identifier?)
(p-var identifier?)
(body expression?)
(env environment?)))
Environment x Identifier - > ExpVal
(define apply-env
(lambda (env search-var)
(cases environment env
(empty-env ()
(eopl:error "there is no binding for ~s" search-var))
(extend-env (saved-var saved-val saved-env)
(if (eqv? search-var saved-var)
saved-val
(apply-env saved-env search-var)))
(extend-env-rec (p-name p-var p-body saved-env)
(if (eqv? search-var p-name)
(newref (proc-val (procedure p-var p-body env)))
(apply-env saved-env search-var))))))
; Initilization
(define init-env
(lambda ()
(extend-env
'i (newref (num-val 1))
(extend-env
'v (newref (num-val 5))
(extend-env
'x (newref (num-val 10))
(empty-env))))))
; -----------------------------------------------------------------------------
; Store
; -----------------------------------------------------------------------------
(define the-store 'uninitialized)
( ) - > EmptyList
(define empty-store
(lambda () '()))
; () -> Store
(define get-store
(lambda () the-store))
(define reference?
(lambda (v)
(integer? v)))
ExpVal - > ( Integer )
(define newref
(lambda (val)
(let
((next-ref (length the-store)))
(set! the-store (append the-store (list val)))
next-ref)))
Reference - > ExpVal
(define deref
(lambda (ref)
(list-ref the-store ref)))
; Reference x ExpVal -> Undefined
(define setref!
(lambda (ref val)
(set! the-store
(letrec
((setref-inner
(lambda (store1 ref1)
(cond
((null? store1)
(eopl:error "Cannot set store using reference ~s" ref1))
((zero? ref1)
(cons val (cdr store1)))
(else
(cons
(car store1)
(setref-inner (cdr store1) (- ref1 1))))))))
(setref-inner the-store ref)))))
; Initialization
(define initialize-store!
(lambda ()
(set! the-store (empty-store))))
; -----------------------------------------------------------------------------
; Interpreter
; -----------------------------------------------------------------------------
String - > ExpVal
(define run
(lambda (text)
(value-of-program (scan-parse text))))
Program - > ExpVal
(define value-of-program
(lambda (pgm)
(initialize-store!)
(cases program pgm
(a-program (exp1)
(value-of exp1 (init-env))))))
Expression x Environment - > ExpVal
(define value-of
(lambda (exp env)
(cases expression exp
(assign-exp (var exp1)
(let ((val (value-of exp1 env)))
(begin
(setref! (apply-env env var) val)
val)))
(letrec-exp (p-name p-var p-body letrec-body)
(value-of letrec-body (extend-env-rec p-name p-var p-body env)))
(call-exp (exp1 exp2)
(let ((proc (expval->proc (value-of exp1 env)))
(arg (value-of exp2 env)))
(apply-procedure proc arg)))
(proc-exp (var body) (proc-val (procedure var body env)))
(diff-exp (exp1 exp2)
(let ((arg1 (value-of exp1 env))
(arg2 (value-of exp2 env)))
(num-val
(-
(expval->num arg1)
(expval->num arg2)))))
(const-exp (num) (num-val num))
(var-exp (var)
(deref (apply-env env var)))
(zero?-exp (exp1)
(bool-val (zero? (expval->num (value-of exp1 env)))))
(if-exp (exp1 exp2 exp3)
(if (expval->bool (value-of exp1 env))
(value-of exp2 env)
(value-of exp3 env)))
(let-exp (var exp1 body)
(let ((val1 (value-of exp1 env)))
(value-of body (extend-env var (newref val1) env))))))) | null | https://raw.githubusercontent.com/alanzplus/EOPL/d7b06392d26d93df851d0ca66d9edc681a06693c/EOPL/ch4/implicit-refs-interpreter.rkt | racket | -----------------------------------------------------------------------------
Expression Value Representation
-----------------------------------------------------------------------------
ExpVal -> num
ExpVal -> bool
ExpVal -> Procedure
-----------------------------------------------------------------------------
Procedure Representation
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Environment
-----------------------------------------------------------------------------
Initilization
-----------------------------------------------------------------------------
Store
-----------------------------------------------------------------------------
() -> Store
Reference x ExpVal -> Undefined
Initialization
-----------------------------------------------------------------------------
Interpreter
----------------------------------------------------------------------------- | #lang eopl
(require "./implicit-refs-spec.rkt")
(provide run)
(provide num-val)
(provide bool-val)
(provide proc-val)
(provide expval->num)
(provide expval->bool)
(provide expval->proc)
(provide procedure)
(provide apply-procedure)
(provide newref)
(provide setref!)
(define-datatype expval expval?
(num-val
(num number?))
(bool-val
(bool boolean?))
(proc-val
(proc proc?)))
;; ExpVal -> num
;; Project a num-val to its underlying Scheme number; any other
;; expressed-value variant is a type error.
(define expval->num
  (lambda (v)
    (cases expval v
      (num-val (n) n)
      (else (eopl:error "expected num-val")))))
;; ExpVal -> bool
;; Project a bool-val to its underlying boolean; any other
;; expressed-value variant is a type error.
(define expval->bool
  (lambda (v)
    (cases expval v
      (bool-val (b) b)
      (else (eopl:error "expected bool-val")))))
;; ExpVal -> Proc
;; Project a proc-val to the procedure it carries; any other
;; expressed-value variant is a type error.
(define expval->proc
  (lambda (v)
    (cases expval v
      (proc-val (p) p)
      (else (eopl:error "expected proc-val")))))
(define-datatype proc proc?
(procedure
(var identifier?)
(body expression?)
(env environment?)))
Proc x ExpVal - > ExpVal
;; Proc x ExpVal -> ExpVal
;; Call-by-value with implicit references: the argument value is put
;; in a freshly allocated store cell, and the body is evaluated with
;; the formal parameter bound to that cell's reference.
(define apply-procedure
  (lambda (proc1 val)
    (cases proc proc1
      (procedure (var body env)
        (value-of body (extend-env var (newref val) env))))))
(define-datatype environment environment?
(empty-env)
(extend-env
(var identifier?)
(val reference?)
(env environment?))
(extend-env-rec
(p-name identifier?)
(p-var identifier?)
(body expression?)
(env environment?)))
Environment x Identifier - > ExpVal
;; Environment x Identifier -> Reference
;; Walk the environment chain for search-var.  In this implicit-refs
;; interpreter, bindings denote store references, not values; callers
;; deref (or setref!) the result.
(define apply-env
  (lambda (env search-var)
    (cases environment env
      (empty-env ()
        (eopl:error "there is no binding for ~s" search-var))
      (extend-env (saved-var saved-val saved-env)
        (if (eqv? search-var saved-var)
          saved-val
          (apply-env saved-env search-var)))
      (extend-env-rec (p-name p-var p-body saved-env)
        (if (eqv? search-var p-name)
          ;; NOTE(review): a fresh reference (and closure) is allocated
          ;; on EVERY lookup of a letrec-bound name, so assignments to
          ;; that name do not persist across lookups -- confirm this is
          ;; the intended (textbook) behavior.
          (newref (proc-val (procedure p-var p-body env)))
          (apply-env saved-env search-var))))))
;; () -> Environment
;; Initial environment binding i = 1, v = 5, x = 10, each in a freshly
;; allocated store cell.  The cells are allocated in the same order as
;; before (i's first, then v's, then x's).
(define init-env
  (lambda ()
    (let* ((i-ref (newref (num-val 1)))
           (v-ref (newref (num-val 5)))
           (x-ref (newref (num-val 10))))
      (extend-env
        'i i-ref
        (extend-env
          'v v-ref
          (extend-env
            'x x-ref
            (empty-env)))))))
(define the-store 'uninitialized)
( ) - > EmptyList
(define empty-store
(lambda () '()))
(define get-store
(lambda () the-store))
(define reference?
(lambda (v)
(integer? v)))
ExpVal - > ( Integer )
;; ExpVal -> Reference (Integer)
;; Allocate a new store cell holding val and return its index.
;; NOTE: append copies the whole store, so each allocation is O(n);
;; acceptable for this textbook interpreter.
(define newref
  (lambda (val)
    (let
      ((next-ref (length the-store)))
      (set! the-store (append the-store (list val)))
      next-ref)))
Reference - > ExpVal
(define deref
(lambda (ref)
(list-ref the-store ref)))
;; Reference x ExpVal -> Undefined
;; Replace the contents of store cell ref with val by rebuilding the
;; store list up to that index; errors if ref is out of range.
(define setref!
  (lambda (ref val)
    (set! the-store
      (letrec
        ((setref-inner
           ;; store1: remaining store suffix; ref1: remaining offset.
           (lambda (store1 ref1)
             (cond
               ((null? store1)
                (eopl:error "Cannot set store using reference ~s" ref1))
               ((zero? ref1)
                (cons val (cdr store1)))
               (else
                (cons
                  (car store1)
                  (setref-inner (cdr store1) (- ref1 1))))))))
        (setref-inner the-store ref)))))
(define initialize-store!
(lambda ()
(set! the-store (empty-store))))
String - > ExpVal
(define run
(lambda (text)
(value-of-program (scan-parse text))))
Program - > ExpVal
(define value-of-program
(lambda (pgm)
(initialize-store!)
(cases program pgm
(a-program (exp1)
(value-of exp1 (init-env))))))
Expression x Environment - > ExpVal
;; Expression x Environment -> ExpVal
;; The interpreter proper: dispatch on the expression variant.
;; Variables denote store references; var-exp derefs, assign-exp
;; setref!s, and binding forms (let, call) allocate fresh cells.
(define value-of
  (lambda (exp env)
    (cases expression exp
      ;; set var := exp1; evaluates to the assigned value.
      (assign-exp (var exp1)
        (let ((val (value-of exp1 env)))
          (begin
            (setref! (apply-env env var) val)
            val)))
      ;; letrec: the recursive binding lives in the environment itself.
      (letrec-exp (p-name p-var p-body letrec-body)
        (value-of letrec-body (extend-env-rec p-name p-var p-body env)))
      ;; Application: operator evaluated before operand.
      (call-exp (exp1 exp2)
        (let ((proc (expval->proc (value-of exp1 env)))
              (arg (value-of exp2 env)))
          (apply-procedure proc arg)))
      ;; proc: build a closure over the current environment.
      (proc-exp (var body) (proc-val (procedure var body env)))
      ;; Arithmetic: the language's only operator is subtraction.
      (diff-exp (exp1 exp2)
        (let ((arg1 (value-of exp1 env))
              (arg2 (value-of exp2 env)))
          (num-val
            (-
              (expval->num arg1)
              (expval->num arg2)))))
      (const-exp (num) (num-val num))
      ;; Variable reference: look up the cell, then dereference it.
      (var-exp (var)
        (deref (apply-env env var)))
      (zero?-exp (exp1)
        (bool-val (zero? (expval->num (value-of exp1 env)))))
      (if-exp (exp1 exp2 exp3)
        (if (expval->bool (value-of exp1 env))
          (value-of exp2 env)
          (value-of exp3 env)))
      ;; let: bind var to a fresh cell holding the value of exp1.
      (let-exp (var exp1 body)
        (let ((val1 (value-of exp1 env)))
          (value-of body (extend-env var (newref val1) env))))))) |
b6b30cab18ee77ce127b6ffb2fabc06bd2dee09eb9d81dfed231946e4f84d4f2 | reflex-frp/patch | Map.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveTraversable #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
{-|
Description: A basic 'Patch' on 'Map'
Patches of this type consist only of insertions (including overwrites) and
deletions.
-}
module Data.Patch.Map where
import Data.Patch.Class
import Control.Lens hiding (FunctorWithIndex, FoldableWithIndex, TraversableWithIndex)
#if !MIN_VERSION_lens(5,0,0)
import qualified Control.Lens as L
#endif
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid.DecidablyEmpty
import Data.Semigroup (Semigroup (..), stimesIdempotentMonoid)
import Data.Functor.WithIndex
import Data.Foldable.WithIndex
import Data.Traversable.WithIndex
-- | A set of changes to a 'Map'. Any element may be inserted/updated or
deleted . Insertions are represented as values wrapped in ' Just ' , while
-- deletions are represented as 'Nothing's
newtype PatchMap k v = PatchMap { unPatchMap :: Map k (Maybe v) }
deriving ( Show, Read, Eq, Ord
, Foldable, Traversable
, DecidablyEmpty
)
-- | 'fmap'ping a 'PatchMap' will alter all of the values it will insert.
-- Deletions are unaffected.
deriving instance Functor (PatchMap k)
-- | The empty 'PatchMap' contains no insertions or deletions
deriving instance Ord k => Monoid (PatchMap k v)
| @a < > b@ will apply the changes of @b@ and then apply the changes of
-- If the same key is modified by both patches, the one on the left will take
-- precedence.
instance Ord k => Semigroup (PatchMap k v) where
PatchMap a <> PatchMap b = PatchMap $ a `mappend` b --TODO: Add a semigroup instance for Map
-- PatchMap is idempotent, so stimes n is id for every n
stimes = stimesIdempotentMonoid
-- | Apply the insertions or deletions to a given 'Map'.
instance Ord k => Patch (PatchMap k v) where
type PatchTarget (PatchMap k v) = Map k v
# INLINABLE apply #
apply (PatchMap p) old = Just $! insertions `Map.union` (old `Map.difference` deletions) --TODO: return Nothing sometimes --Note: the strict application here is critical to ensuring that incremental merges don't hold onto all their prerequisite events forever; can we make this more robust?
where insertions = Map.mapMaybeWithKey (const id) p
deletions = Map.mapMaybeWithKey (const nothingToJust) p
nothingToJust = \case
Nothing -> Just ()
Just _ -> Nothing
makeWrapped ''PatchMap
instance FunctorWithIndex k (PatchMap k)
instance FoldableWithIndex k (PatchMap k)
instance TraversableWithIndex k (PatchMap k) where
itraverse = (_Wrapped .> itraversed <. traversed) . Indexed
#if !MIN_VERSION_lens(5,0,0)
instance L.FunctorWithIndex k (PatchMap k) where imap = Data.Functor.WithIndex.imap
instance L.FoldableWithIndex k (PatchMap k) where ifoldMap = Data.Foldable.WithIndex.ifoldMap
instance L.TraversableWithIndex k (PatchMap k) where itraverse = Data.Traversable.WithIndex.itraverse
#endif
-- | Returns all the new elements that will be added to the 'Map'
-- (i.e. the values of the insertion entries, in key order).
patchMapNewElements :: PatchMap k v -> [v]
patchMapNewElements = mapMaybe id . Map.elems . unPatchMap
-- | Returns all the new elements that will be added to the 'Map',
-- keyed as they will appear after the patch is applied.
patchMapNewElementsMap :: PatchMap k v -> Map k v
patchMapNewElementsMap = Map.mapMaybe id . unPatchMap
| null | https://raw.githubusercontent.com/reflex-frp/patch/c556fce899cd0024129da4cdfd13fe5ce0c755af/src/Data/Patch/Map.hs | haskell | # LANGUAGE DeriveTraversable #
|
Description: A basic 'Patch' on 'Map'
Patches of this type consist only of insertions (including overwrites) and
deletions.
| A set of changes to a 'Map'. Any element may be inserted/updated or
deletions are represented as 'Nothing's
| 'fmap'ping a 'PatchMap' will alter all of the values it will insert.
Deletions are unaffected.
| The empty 'PatchMap' contains no insertions or deletions
If the same key is modified by both patches, the one on the left will take
precedence.
TODO: Add a semigroup instance for Map
PatchMap is idempotent, so stimes n is id for every n
| Apply the insertions or deletions to a given 'Map'.
TODO: return Nothing sometimes --Note: the strict application here is critical to ensuring that incremental merges don't hold onto all their prerequisite events forever; can we make this more robust?
| Returns all the new elements that will be added to the 'Map'
| Returns all the new elements that will be added to the 'Map' | # LANGUAGE CPP #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module Data.Patch.Map where
import Data.Patch.Class
import Control.Lens hiding (FunctorWithIndex, FoldableWithIndex, TraversableWithIndex)
#if !MIN_VERSION_lens(5,0,0)
import qualified Control.Lens as L
#endif
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid.DecidablyEmpty
import Data.Semigroup (Semigroup (..), stimesIdempotentMonoid)
import Data.Functor.WithIndex
import Data.Foldable.WithIndex
import Data.Traversable.WithIndex
deleted . Insertions are represented as values wrapped in ' Just ' , while
newtype PatchMap k v = PatchMap { unPatchMap :: Map k (Maybe v) }
deriving ( Show, Read, Eq, Ord
, Foldable, Traversable
, DecidablyEmpty
)
deriving instance Functor (PatchMap k)
deriving instance Ord k => Monoid (PatchMap k v)
| @a < > b@ will apply the changes of @b@ and then apply the changes of
instance Ord k => Semigroup (PatchMap k v) where
stimes = stimesIdempotentMonoid
instance Ord k => Patch (PatchMap k v) where
type PatchTarget (PatchMap k v) = Map k v
# INLINABLE apply #
where insertions = Map.mapMaybeWithKey (const id) p
deletions = Map.mapMaybeWithKey (const nothingToJust) p
nothingToJust = \case
Nothing -> Just ()
Just _ -> Nothing
makeWrapped ''PatchMap
instance FunctorWithIndex k (PatchMap k)
instance FoldableWithIndex k (PatchMap k)
instance TraversableWithIndex k (PatchMap k) where
itraverse = (_Wrapped .> itraversed <. traversed) . Indexed
#if !MIN_VERSION_lens(5,0,0)
instance L.FunctorWithIndex k (PatchMap k) where imap = Data.Functor.WithIndex.imap
instance L.FoldableWithIndex k (PatchMap k) where ifoldMap = Data.Foldable.WithIndex.ifoldMap
instance L.TraversableWithIndex k (PatchMap k) where itraverse = Data.Traversable.WithIndex.itraverse
#endif
patchMapNewElements :: PatchMap k v -> [v]
patchMapNewElements (PatchMap p) = catMaybes $ Map.elems p
patchMapNewElementsMap :: PatchMap k v -> Map k v
patchMapNewElementsMap (PatchMap p) = Map.mapMaybe id p
|
ba355ebdb4331701c8164affa7aacbbc9ffc0ebd6275c26958cd36f0e68ffabd | lowasser/TrieMap | Search.hs | # LANGUAGE CPP , BangPatterns , ViewPatterns , FlexibleInstances , TypeOperators , FlexibleContexts , TypeSynonymInstances #
# LANGUAGE MultiParamTypeClasses , PatternGuards #
#if __GLASGOW_HASKELL__ >= 700
# OPTIONS -fllvm #
#endif
module Data.TrieMap.RadixTrie.Search (insertEdge) where
import Control.Monad.Unpack
import Control.Monad.Option
import Data.TrieMap.RadixTrie.Base
import Data.TrieMap.RadixTrie.Zipper ()
import Data.Vector.Generic (length)
import Prelude hiding (lookup, length)
#define V(f) f (VVector) (k)
#define U(f) f (PVector) (Word)
#define EDGE(args) (!(eView -> Edge args))
instance TrieKey k => Searchable (TrieMap (VVector k)) (VVector k) where
{-# INLINE search #-}
search ks (Radix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . Hole)
match a = unpack (match0 a . Hole)
singleZip ks = Hole (singleLoc ks)
singleton ks a = Radix (Just (singletonEdge ks a))
lookup ks (Radix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (Radix (Just e)) = Radix (Just (insertEdge f ks a e))
insertWith _ ks a (Radix Nothing) = singleton ks a
instance Searchable (TrieMap (PVector Word)) (PVector Word) where
{-# INLINE search #-}
search ks (WRadix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . WHole)
match a = unpack (match0 a . WHole)
singleZip ks = WHole (singleLoc ks)
singleton ks a = WRadix (Just (singletonEdge ks a))
lookup ks (WRadix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (WRadix (Just e)) = WRadix (Just (insertEdge f ks a e))
insertWith _ ks a (WRadix Nothing) = singleton ks a
# SPECIALIZE lookupEdge : :
k = > V ( ) - > ) a - > Option a ,
U ( ) - > U(Edge ) a - > Option a #
TrieKey k => V() -> V(Edge) a -> Option a,
U() -> U(Edge) a -> Option a #-}
lookupEdge :: (Eq k, Label v k) => v k -> Edge v k a -> Option a
lookupEdge ks e = option $ \ no yes -> let
lookupE !ks !EDGE(_ ls !v ts) = if kLen < lLen then no else matchSlice matcher matches ks ls where
!kLen = length ks
!lLen = length ls
matcher k l z
| k == l = z
| otherwise = no
matches _ _
| kLen == lLen = maybe no yes v
| (_, k, ks') <- splitSlice lLen ks
= runOption (lookup k ts) no (lookupE ks')
in lookupE ks e
# SPECIALIZE INLINE searchEdgeC : :
k = > V ( ) - > ) a - > ( V(EdgeLoc ) a : ~ > r ) - > ( a - > V(EdgeLoc ) a : ~ > r ) - > r ,
U ( ) - > U(Edge ) a - > ( U(EdgeLoc ) a : ~ > r ) - > ( a - > U(EdgeLoc ) a : ~ > r ) - > r #
TrieKey k => V() -> V(Edge) a -> (V(EdgeLoc) a :~> r) -> (a -> V(EdgeLoc) a :~> r) -> r,
U() -> U(Edge) a -> (U(EdgeLoc) a :~> r) -> (a -> U(EdgeLoc) a :~> r) -> r #-}
searchEdgeC :: (Eq k, Label v k, Unpackable (EdgeLoc v k a)) =>
v k -> Edge v k a -> (EdgeLoc v k a :~> r) -> (a -> EdgeLoc v k a :~> r) -> r
searchEdgeC ks0 e nomatch match = searchE ks0 e root where
searchE !ks e@EDGE(_ !ls !v ts) path = iMatchSlice matcher matches ks ls where
matcher i k l z =
runOption (unifierM k l (dropEdge (i+1) e)) z
(\ tHole -> nomatch $~ loc (dropSlice (i+1) ks) empty (deep path (takeSlice i ls) Nothing tHole))
matches kLen lLen = case compare kLen lLen of
LT -> let lPre = takeSlice kLen ls; l = ls !$ kLen; e' = dropEdge (kLen + 1) e in
nomatch $~ loc lPre (singleton l e') path
EQ -> maybe nomatch match v $~ loc ls ts path
GT -> let
# INLINE kk #
kk = ks !$ lLen
ks' = dropSlice (lLen + 1) ks
nomatch' tHole = nomatch $~ loc ks' empty (deep path ls v tHole)
match' e' tHole = searchE ks' e' (deep path ls v tHole)
in search kk ts nomatch' match'
# SPECIALIZE insertEdge : :
( k , Sized a ) = > ( a - > a ) - > V ( ) - > a - > V(Edge ) a - > V(Edge ) a ,
Sized a = > ( a - > a ) - > U ( ) - > a - > U(Edge ) a - > U(Edge ) a #
(TrieKey k, Sized a) => (a -> a) -> V() -> a -> V(Edge) a -> V(Edge) a,
Sized a => (a -> a) -> U() -> a -> U(Edge) a -> U(Edge) a #-}
insertEdge :: (Label v k, Sized a) => (a -> a) -> v k -> a -> Edge v k a -> Edge v k a
insertEdge f ks0 a e = insertE ks0 e where
!sza = getSize a
insertE !ks eL@EDGE(szL ls !v ts) = iMatchSlice matcher matches ks ls where
!szV = szL - sizeM ts
matcher !i k l z = runOption (unifyM k eK' l eL') z (edge (takeSlice i ls) Nothing)
where eK' = edge' sza (dropSlice (i+1) ks) (Just a) empty
eL' = dropEdge (i+1) eL
matches kLen lLen = case compare kLen lLen of
LT -> (edge' (sza + szL) ks (Just a) (singleton l eL'))
where l = ls !$ kLen; eL' = dropEdge (kLen+1) eL
EQ -> (edge ls (Just (maybe a f v)) ts)
GT -> edge' sz' ls v ts' where
ks' = dropSlice (lLen + 1) ks
k = ks !$ lLen
ts' = insertWith (insertE ks') k (edge' sza ks' (Just a) empty) ts
sz' = sizeM ts' + szV
| null | https://raw.githubusercontent.com/lowasser/TrieMap/1ab52b8d83469974a629f2aa577a85de3f9e867a/Data/TrieMap/RadixTrie/Search.hs | haskell | # INLINE search #
# INLINE search # | # LANGUAGE CPP , BangPatterns , ViewPatterns , FlexibleInstances , TypeOperators , FlexibleContexts , TypeSynonymInstances #
# LANGUAGE MultiParamTypeClasses , PatternGuards #
#if __GLASGOW_HASKELL__ >= 700
# OPTIONS -fllvm #
#endif
module Data.TrieMap.RadixTrie.Search (insertEdge) where
import Control.Monad.Unpack
import Control.Monad.Option
import Data.TrieMap.RadixTrie.Base
import Data.TrieMap.RadixTrie.Zipper ()
import Data.Vector.Generic (length)
import Prelude hiding (lookup, length)
#define V(f) f (VVector) (k)
#define U(f) f (PVector) (Word)
#define EDGE(args) (!(eView -> Edge args))
instance TrieKey k => Searchable (TrieMap (VVector k)) (VVector k) where
search ks (Radix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . Hole)
match a = unpack (match0 a . Hole)
singleZip ks = Hole (singleLoc ks)
singleton ks a = Radix (Just (singletonEdge ks a))
lookup ks (Radix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (Radix (Just e)) = Radix (Just (insertEdge f ks a e))
insertWith _ ks a (Radix Nothing) = singleton ks a
instance Searchable (TrieMap (PVector Word)) (PVector Word) where
search ks (WRadix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . WHole)
match a = unpack (match0 a . WHole)
singleZip ks = WHole (singleLoc ks)
singleton ks a = WRadix (Just (singletonEdge ks a))
lookup ks (WRadix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (WRadix (Just e)) = WRadix (Just (insertEdge f ks a e))
insertWith _ ks a (WRadix Nothing) = singleton ks a
# SPECIALIZE lookupEdge : :
k = > V ( ) - > ) a - > Option a ,
U ( ) - > U(Edge ) a - > Option a #
TrieKey k => V() -> V(Edge) a -> Option a,
U() -> U(Edge) a -> Option a #-}
lookupEdge :: (Eq k, Label v k) => v k -> Edge v k a -> Option a
lookupEdge ks e = option $ \ no yes -> let
lookupE !ks !EDGE(_ ls !v ts) = if kLen < lLen then no else matchSlice matcher matches ks ls where
!kLen = length ks
!lLen = length ls
matcher k l z
| k == l = z
| otherwise = no
matches _ _
| kLen == lLen = maybe no yes v
| (_, k, ks') <- splitSlice lLen ks
= runOption (lookup k ts) no (lookupE ks')
in lookupE ks e
# SPECIALIZE INLINE searchEdgeC : :
k = > V ( ) - > ) a - > ( V(EdgeLoc ) a : ~ > r ) - > ( a - > V(EdgeLoc ) a : ~ > r ) - > r ,
U ( ) - > U(Edge ) a - > ( U(EdgeLoc ) a : ~ > r ) - > ( a - > U(EdgeLoc ) a : ~ > r ) - > r #
TrieKey k => V() -> V(Edge) a -> (V(EdgeLoc) a :~> r) -> (a -> V(EdgeLoc) a :~> r) -> r,
U() -> U(Edge) a -> (U(EdgeLoc) a :~> r) -> (a -> U(EdgeLoc) a :~> r) -> r #-}
searchEdgeC :: (Eq k, Label v k, Unpackable (EdgeLoc v k a)) =>
v k -> Edge v k a -> (EdgeLoc v k a :~> r) -> (a -> EdgeLoc v k a :~> r) -> r
searchEdgeC ks0 e nomatch match = searchE ks0 e root where
searchE !ks e@EDGE(_ !ls !v ts) path = iMatchSlice matcher matches ks ls where
matcher i k l z =
runOption (unifierM k l (dropEdge (i+1) e)) z
(\ tHole -> nomatch $~ loc (dropSlice (i+1) ks) empty (deep path (takeSlice i ls) Nothing tHole))
matches kLen lLen = case compare kLen lLen of
LT -> let lPre = takeSlice kLen ls; l = ls !$ kLen; e' = dropEdge (kLen + 1) e in
nomatch $~ loc lPre (singleton l e') path
EQ -> maybe nomatch match v $~ loc ls ts path
GT -> let
# INLINE kk #
kk = ks !$ lLen
ks' = dropSlice (lLen + 1) ks
nomatch' tHole = nomatch $~ loc ks' empty (deep path ls v tHole)
match' e' tHole = searchE ks' e' (deep path ls v tHole)
in search kk ts nomatch' match'
# SPECIALIZE insertEdge : :
( k , Sized a ) = > ( a - > a ) - > V ( ) - > a - > V(Edge ) a - > V(Edge ) a ,
Sized a = > ( a - > a ) - > U ( ) - > a - > U(Edge ) a - > U(Edge ) a #
(TrieKey k, Sized a) => (a -> a) -> V() -> a -> V(Edge) a -> V(Edge) a,
Sized a => (a -> a) -> U() -> a -> U(Edge) a -> U(Edge) a #-}
insertEdge :: (Label v k, Sized a) => (a -> a) -> v k -> a -> Edge v k a -> Edge v k a
insertEdge f ks0 a e = insertE ks0 e where
!sza = getSize a
insertE !ks eL@EDGE(szL ls !v ts) = iMatchSlice matcher matches ks ls where
!szV = szL - sizeM ts
matcher !i k l z = runOption (unifyM k eK' l eL') z (edge (takeSlice i ls) Nothing)
where eK' = edge' sza (dropSlice (i+1) ks) (Just a) empty
eL' = dropEdge (i+1) eL
matches kLen lLen = case compare kLen lLen of
LT -> (edge' (sza + szL) ks (Just a) (singleton l eL'))
where l = ls !$ kLen; eL' = dropEdge (kLen+1) eL
EQ -> (edge ls (Just (maybe a f v)) ts)
GT -> edge' sz' ls v ts' where
ks' = dropSlice (lLen + 1) ks
k = ks !$ lLen
ts' = insertWith (insertE ks') k (edge' sza ks' (Just a) empty) ts
sz' = sizeM ts' + szV
|
e41627bce8dcd29615c9d9ec761930e92f61f25c9b74a242eedf618c0c8b6c6d | exercism/haskell | Strain.hs | module Strain (keep, discard) where
discard :: (a -> Bool) -> [a] -> [a]
discard p xs = error "You need to implement this function."
keep :: (a -> Bool) -> [a] -> [a]
keep p xs = error "You need to implement this function."
| null | https://raw.githubusercontent.com/exercism/haskell/2b98084efc7d5ab098975c462f7977ee19c2fd29/exercises/practice/strain/src/Strain.hs | haskell | module Strain (keep, discard) where
discard :: (a -> Bool) -> [a] -> [a]
discard p xs = error "You need to implement this function."
keep :: (a -> Bool) -> [a] -> [a]
keep p xs = error "You need to implement this function."
| |
e54bf04b8fcba7c01a13506ce1189358f0a482813ad038b92eecb3edf66adebb | nikita-volkov/vector-builder | Prelude.hs | module VectorBuilder.Prelude
( module Exports,
strict,
)
where
import Control.Applicative as Exports
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, forM, forM_, mapM, mapM_, msum, sequence, sequence_)
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.IO.Class as Exports
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.IORef as Exports
import Data.Int as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (all, and, any, concat, concatMap, elem, find, foldl, foldl', foldl1, foldr, foldr1, isSubsequenceOf, mapAccumL, mapAccumR, maximum, maximumBy, minimum, minimumBy, notElem, or, product, sortOn, sum, uncons)
import Data.List.NonEmpty as Exports (NonEmpty (..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt)
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.STRef as Exports
import Data.Semigroup as Exports (Semigroup (..))
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, threadWaitRead, threadWaitReadSTM, threadWaitWrite, threadWaitWriteSTM, withMVar)
import GHC.Exts as Exports (IsList (..), groupWith, inline, lazy, sortWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import Numeric as Exports
import System.Environment as Exports
import System.Exit as Exports
import System.IO
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, ReadS, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readP_to_Prec, readPrec_to_P, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (hPrintf, printf)
import Text.Read as Exports (Read (..), readEither, readMaybe)
import Unsafe.Coerce as Exports
import Prelude as Exports hiding (all, and, any, concat, concatMap, elem, fail, foldl, foldl1, foldr, foldr1, id, mapM, mapM_, maximum, minimum, notElem, or, product, sequence, sequence_, sum, (.))
# INLINE strict #
strict :: a -> a
strict a =
seq a a
| null | https://raw.githubusercontent.com/nikita-volkov/vector-builder/83f733476ba99bccbbe1e832b668a3f07b639938/library/VectorBuilder/Prelude.hs | haskell | module VectorBuilder.Prelude
( module Exports,
strict,
)
where
import Control.Applicative as Exports
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, forM, forM_, mapM, mapM_, msum, sequence, sequence_)
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.IO.Class as Exports
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.IORef as Exports
import Data.Int as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (all, and, any, concat, concatMap, elem, find, foldl, foldl', foldl1, foldr, foldr1, isSubsequenceOf, mapAccumL, mapAccumR, maximum, maximumBy, minimum, minimumBy, notElem, or, product, sortOn, sum, uncons)
import Data.List.NonEmpty as Exports (NonEmpty (..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt)
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.STRef as Exports
import Data.Semigroup as Exports (Semigroup (..))
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, threadWaitRead, threadWaitReadSTM, threadWaitWrite, threadWaitWriteSTM, withMVar)
import GHC.Exts as Exports (IsList (..), groupWith, inline, lazy, sortWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import Numeric as Exports
import System.Environment as Exports
import System.Exit as Exports
import System.IO
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, ReadS, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readP_to_Prec, readPrec_to_P, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (hPrintf, printf)
import Text.Read as Exports (Read (..), readEither, readMaybe)
import Unsafe.Coerce as Exports
import Prelude as Exports hiding (all, and, any, concat, concatMap, elem, fail, foldl, foldl1, foldr, foldr1, id, mapM, mapM_, maximum, minimum, notElem, or, product, sequence, sequence_, sum, (.))
# INLINE strict #
strict :: a -> a
strict a =
seq a a
| |
d06f9ec8e53d3a1c2c2d3840b50851a7eb6a628b1b2bf86ce9a0579cf7c06b0f | fare/lisp-interface-library | tree.lisp | ;;;;; Stateful trees - interface
(uiop:define-package :lil/stateful/tree
(:use :closer-common-lisp
:lil/core
:lil/interface/base
:lil/interface/order)
(:use-reexport
:lil/interface/tree
:lil/stateful/map)
(:shadow #:<tree> #:<binary-tree> #:<avl-tree> #:<heighted-binary-tree> #:<number-map>
#:association-pair #:binary-tree-node #:binary-branch
#:heighted-binary-tree-node #:avl-tree-node)
(:export
#:<tree> #:<binary-tree> #:<avl-tree> #:<parametric-avl-tree> #:<heighted-binary-tree>
#:<number-map> #:<nm> #:<string-map>
#:association-pair #:binary-tree-node #:binary-branch
#:heighted-binary-tree-node #:avl-tree-node
#:<post-self-balanced-binary-tree>
#:balance-node #:rotate-node-left #:rotate-node-right #:update-height))
(in-package :lil/stateful/tree)
;;; Trees in general
(define-interface <tree> (lil/interface/tree:<tree> <map>) ()
(:abstract)
(:documentation "abstract interface for stateful trees"))
Vanilla Binary Tree
(define-interface <binary-tree>
(<tree>
lil/interface/tree:<binary-tree>
<foldable-size-from-fold-left>
<map-copy-from-join-empty>
<map-empty-is-empty-object> ;; handles all the empty-object cases so we don't have to.
<map-decons-from-first-key-value-drop>
<map-update-key-from-lookup-insert-drop>
<map-join-from-for-each*-lookup-insert>
<map-join/list-from-join>
<map-map/2-from-for-each*-lookup-insert-drop>
<map>)
()
(:abstract)
(:documentation "Keys in binary trees increase from left to right"))
(defclass binary-branch (lil/interface/tree:binary-branch)
((left :accessor left :initform (make-empty-object))
(right :accessor right :initform (make-empty-object))))
(defclass association-pair (lil/interface/tree:association-pair)
((lil/interface/tree:key :accessor node-key) ;; only write the key when copying a key-value pair.
(lil/interface/tree:value :accessor node-value))) ;; writable value, not writable key.
(defclass binary-tree-node (binary-branch association-pair) ())
;;; Balancing trees
(defgeneric balance-node (<tree> node)
(:documentation "balance a node in a tree"))
;; We really ought to either do *everything* in detached interface-passing style,
;; *or* keep it in subject-oriented code but split it in a different package,
and provide some hooks between the two .
(defgeneric rotate-node-right (node))
(defgeneric rotate-node-left (node))
(define-interface <post-self-balanced-binary-tree> (<binary-tree>) ()
(:abstract))
;;; Trees that maintain a record of their height
(define-interface <heighted-binary-tree> (lil/interface/tree:<heighted-binary-tree> <binary-tree> ) ()
(:abstract))
(defclass heighted-binary-tree-node (lil/interface/tree:heighted-binary-tree-node binary-tree-node)
((lil/interface/tree:height :accessor node-height))) ;; make it writable.
(defgeneric update-height (node))
stateful AVL - tree
(define-interface <avl-tree>
(lil/interface/tree:<avl-tree>
<heighted-binary-tree>
<post-self-balanced-binary-tree>) ()
(:abstract))
(defclass avl-tree-node (lil/interface/tree:avl-tree-node heighted-binary-tree-node) ())
(define-interface <parametric-avl-tree> (<avl-tree>)
((key-interface :type <order> :reader key-interface :initarg :key-interface)
(value-interface :type <type> :reader value-interface :initarg :value-interface))
(:parametric (key-interface &optional (value-interface <any>))
(make-interface :key-interface key-interface :value-interface value-interface)))
;;; Common special cases: when keys are (real) numbers, strings, etc.
(define-interface <number-map> (lil/interface/tree:<number-map> <avl-tree>)
()
(:singleton))
(defparameter <nm> <number-map>)
(defparameter <string-map> (<parametric-avl-tree> <string>))
| null | https://raw.githubusercontent.com/fare/lisp-interface-library/ac2e0063dc65feb805f0c57715d52fda28d4dcd8/stateful/tree.lisp | lisp | Stateful trees - interface
Trees in general
handles all the empty-object cases so we don't have to.
only write the key when copying a key-value pair.
writable value, not writable key.
Balancing trees
We really ought to either do *everything* in detached interface-passing style,
*or* keep it in subject-oriented code but split it in a different package,
Trees that maintain a record of their height
make it writable.
Common special cases: when keys are (real) numbers, strings, etc. |
(uiop:define-package :lil/stateful/tree
(:use :closer-common-lisp
:lil/core
:lil/interface/base
:lil/interface/order)
(:use-reexport
:lil/interface/tree
:lil/stateful/map)
(:shadow #:<tree> #:<binary-tree> #:<avl-tree> #:<heighted-binary-tree> #:<number-map>
#:association-pair #:binary-tree-node #:binary-branch
#:heighted-binary-tree-node #:avl-tree-node)
(:export
#:<tree> #:<binary-tree> #:<avl-tree> #:<parametric-avl-tree> #:<heighted-binary-tree>
#:<number-map> #:<nm> #:<string-map>
#:association-pair #:binary-tree-node #:binary-branch
#:heighted-binary-tree-node #:avl-tree-node
#:<post-self-balanced-binary-tree>
#:balance-node #:rotate-node-left #:rotate-node-right #:update-height))
(in-package :lil/stateful/tree)
(define-interface <tree> (lil/interface/tree:<tree> <map>) ()
(:abstract)
(:documentation "abstract interface for stateful trees"))
Vanilla Binary Tree
(define-interface <binary-tree>
(<tree>
lil/interface/tree:<binary-tree>
<foldable-size-from-fold-left>
<map-copy-from-join-empty>
<map-decons-from-first-key-value-drop>
<map-update-key-from-lookup-insert-drop>
<map-join-from-for-each*-lookup-insert>
<map-join/list-from-join>
<map-map/2-from-for-each*-lookup-insert-drop>
<map>)
()
(:abstract)
(:documentation "Keys in binary trees increase from left to right"))
(defclass binary-branch (lil/interface/tree:binary-branch)
((left :accessor left :initform (make-empty-object))
(right :accessor right :initform (make-empty-object))))
(defclass association-pair (lil/interface/tree:association-pair)
(defclass binary-tree-node (binary-branch association-pair) ())
(defgeneric balance-node (<tree> node)
(:documentation "balance a node in a tree"))
and provide some hooks between the two .
(defgeneric rotate-node-right (node))
(defgeneric rotate-node-left (node))
(define-interface <post-self-balanced-binary-tree> (<binary-tree>) ()
(:abstract))
(define-interface <heighted-binary-tree> (lil/interface/tree:<heighted-binary-tree> <binary-tree> ) ()
(:abstract))
(defclass heighted-binary-tree-node (lil/interface/tree:heighted-binary-tree-node binary-tree-node)
(defgeneric update-height (node))
stateful AVL - tree
(define-interface <avl-tree>
(lil/interface/tree:<avl-tree>
<heighted-binary-tree>
<post-self-balanced-binary-tree>) ()
(:abstract))
(defclass avl-tree-node (lil/interface/tree:avl-tree-node heighted-binary-tree-node) ())
(define-interface <parametric-avl-tree> (<avl-tree>)
((key-interface :type <order> :reader key-interface :initarg :key-interface)
(value-interface :type <type> :reader value-interface :initarg :value-interface))
(:parametric (key-interface &optional (value-interface <any>))
(make-interface :key-interface key-interface :value-interface value-interface)))
(define-interface <number-map> (lil/interface/tree:<number-map> <avl-tree>)
()
(:singleton))
(defparameter <nm> <number-map>)
(defparameter <string-map> (<parametric-avl-tree> <string>))
|
fd664fba47654808cffa925f9ba7f6e6cec533ac88bce11f90f3e78153bd0bb9 | clojure/core.typed | dyn_propagate.clj | (ns clojure.core.typed.test.dyn-propagate
(:require [clojure.core.typed :as t]))
(t/tc-ignore
(def a {:a 1}))
(defn b [local]
(let [i a
l (:a a)
id (identity a)
]
(t/print-env "there")
(inc l)))
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/checker.jvm/test/clojure/core/typed/test/dyn_propagate.clj | clojure | (ns clojure.core.typed.test.dyn-propagate
(:require [clojure.core.typed :as t]))
(t/tc-ignore
(def a {:a 1}))
(defn b [local]
(let [i a
l (:a a)
id (identity a)
]
(t/print-env "there")
(inc l)))
| |
db7c48a262dcd310d899ce2e075b7e9e30aba491f2e909c6d069e30e18a4c876 | dbuenzli/ptime | B0.ml | open B0_kit.V000
open Result.Syntax
(* OCaml library names *)
let compiler_libs_toplevel = B0_ocaml.libname "compiler-libs.toplevel"
let unix = B0_ocaml.libname "unix"
let ptime = B0_ocaml.libname "ptime"
let ptime_top = B0_ocaml.libname "ptime.top"
let ptime_clock = B0_ocaml.libname "ptime.clock"
let ptime_clock_os = B0_ocaml.libname "ptime.clock.os"
(* Libraries *)
let ptime_lib =
let srcs = Fpath.[`File (v "src/ptime.mli"); `File (v "src/ptime.ml")] in
let requires = [] in
B0_ocaml.lib ptime ~doc:"The ptime library" ~srcs ~requires
let ptime_top =
let srcs = Fpath.[`File (v "src/ptime_top.ml")] in
let requires = [compiler_libs_toplevel] in
B0_ocaml.lib ptime_top ~doc:"The ptime.top library" ~srcs ~requires
let ptime_clock =
let srcs = Fpath.[`File (v "src/ptime_clock.mli")] in
let requires = [ptime] in
let doc = "The ptime.clock interface library" in
B0_ocaml.lib ptime_clock ~doc ~srcs ~requires
let ptime_clock_os_lib =
let srcs = Fpath.[`Dir (v "src-clock") ] in
let requires = [ptime] in
let doc = "The ptime.clock library (including JavaScript support)" in
B0_ocaml.lib ptime_clock_os ~doc ~srcs ~requires
(* Tests *)
let in_test f = `File (Fpath.v ("test/" ^ f))
let basics =
let srcs = [in_test "basics.ml"] in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime] in
B0_ocaml.exe "basics" ~doc:"Examples from the API docs" ~srcs ~meta ~requires
let test =
let srcs =
List.map in_test
["testing.mli"; "testing.ml"; "testing_ptime.ml"; "test_rand.ml";
"test_span.ml"; "test_base.ml"; "test_date.ml";
"test_date_time.ml"; "test_rfc3339.ml"; "test.ml" ]
in
let meta = B0_meta.(empty |> tag test) in
let requires = [ ptime ] in
B0_ocaml.exe "test" ~doc:"Test suite" ~srcs ~meta ~requires
let test_unix =
let srcs = [in_test "testing.mli"; in_test "testing.ml";
in_test "test_rand.ml"; in_test "testing_ptime.ml";
in_test "test_unix.ml"]
in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime; unix] in
let doc = "Tests against Unix.gmtime" in
B0_ocaml.exe "test-unix" ~doc ~srcs ~meta ~requires
let min_clock =
let srcs = [in_test "min_clock.ml"] in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime; ptime_clock_os] in
let doc = "Minimal clock example" in
B0_ocaml.exe "min-clock" ~doc ~srcs ~meta ~requires
FIXME b0 this forces the whole build to bytecode which is not
what we want .
let min_clock_jsoo =
let srcs = [ in_test " min_clock.ml " ] in
let meta = B0_meta.(empty | > tag test ) in
let meta = B0_jsoo.meta ~requires:[ptime ; ptime_clock_os ] ~meta ( ) in
let doc = " Minimal clock example " in
B0_jsoo.web " min - clock - jsoo " ~doc ~srcs ~meta
what we want.
let min_clock_jsoo =
let srcs = [in_test "min_clock.ml"] in
let meta = B0_meta.(empty |> tag test) in
let meta = B0_jsoo.meta ~requires:[ptime; ptime_clock_os] ~meta () in
let doc = "Minimal clock example" in
B0_jsoo.web "min-clock-jsoo" ~doc ~srcs ~meta
*)
(* Packs *)
let default =
let meta =
let open B0_meta in
empty
|> tag B0_opam.tag
|> add authors ["The ptime programmers"]
|> add maintainers ["Daniel Bünzli <daniel.buenzl >"]
|> add homepage ""
|> add online_doc "/"
|> add licenses ["ISC"]
|> add repo "git+"
|> add issues ""
|> add description_tags
["time"; "posix"; "system"; "org:erratique"]
|> add B0_opam.Meta.depends
[ "ocaml", {|>= "4.08.0"|};
"ocamlfind", {|build|};
"ocamlbuild", {|build & != "0.9.0"|};
"topkg", {|build & >= "1.0.3"|};
]
|> add B0_opam.Meta.build
{|[["ocaml" "pkg/pkg.ml" "build" "--dev-pkg" "%{dev}%"]]|}
in
B0_pack.v "default" ~doc:"ptime package" ~meta ~locked:true @@
B0_unit.list ()
| null | https://raw.githubusercontent.com/dbuenzli/ptime/a034cdcbd3c54587dba60ecb0527427afe279215/B0.ml | ocaml | OCaml library names
Libraries
Tests
Packs | open B0_kit.V000
open Result.Syntax
let compiler_libs_toplevel = B0_ocaml.libname "compiler-libs.toplevel"
let unix = B0_ocaml.libname "unix"
let ptime = B0_ocaml.libname "ptime"
let ptime_top = B0_ocaml.libname "ptime.top"
let ptime_clock = B0_ocaml.libname "ptime.clock"
let ptime_clock_os = B0_ocaml.libname "ptime.clock.os"
let ptime_lib =
let srcs = Fpath.[`File (v "src/ptime.mli"); `File (v "src/ptime.ml")] in
let requires = [] in
B0_ocaml.lib ptime ~doc:"The ptime library" ~srcs ~requires
let ptime_top =
let srcs = Fpath.[`File (v "src/ptime_top.ml")] in
let requires = [compiler_libs_toplevel] in
B0_ocaml.lib ptime_top ~doc:"The ptime.top library" ~srcs ~requires
let ptime_clock =
let srcs = Fpath.[`File (v "src/ptime_clock.mli")] in
let requires = [ptime] in
let doc = "The ptime.clock interface library" in
B0_ocaml.lib ptime_clock ~doc ~srcs ~requires
let ptime_clock_os_lib =
let srcs = Fpath.[`Dir (v "src-clock") ] in
let requires = [ptime] in
let doc = "The ptime.clock library (including JavaScript support)" in
B0_ocaml.lib ptime_clock_os ~doc ~srcs ~requires
let in_test f = `File (Fpath.v ("test/" ^ f))
let basics =
let srcs = [in_test "basics.ml"] in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime] in
B0_ocaml.exe "basics" ~doc:"Examples from the API docs" ~srcs ~meta ~requires
let test =
let srcs =
List.map in_test
["testing.mli"; "testing.ml"; "testing_ptime.ml"; "test_rand.ml";
"test_span.ml"; "test_base.ml"; "test_date.ml";
"test_date_time.ml"; "test_rfc3339.ml"; "test.ml" ]
in
let meta = B0_meta.(empty |> tag test) in
let requires = [ ptime ] in
B0_ocaml.exe "test" ~doc:"Test suite" ~srcs ~meta ~requires
let test_unix =
let srcs = [in_test "testing.mli"; in_test "testing.ml";
in_test "test_rand.ml"; in_test "testing_ptime.ml";
in_test "test_unix.ml"]
in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime; unix] in
let doc = "Tests against Unix.gmtime" in
B0_ocaml.exe "test-unix" ~doc ~srcs ~meta ~requires
let min_clock =
let srcs = [in_test "min_clock.ml"] in
let meta = B0_meta.(empty |> tag test) in
let requires = [ptime; ptime_clock_os] in
let doc = "Minimal clock example" in
B0_ocaml.exe "min-clock" ~doc ~srcs ~meta ~requires
FIXME b0 this forces the whole build to bytecode which is not
what we want .
let min_clock_jsoo =
let srcs = [ in_test " min_clock.ml " ] in
let meta = B0_meta.(empty | > tag test ) in
let meta = B0_jsoo.meta ~requires:[ptime ; ptime_clock_os ] ~meta ( ) in
let doc = " Minimal clock example " in
B0_jsoo.web " min - clock - jsoo " ~doc ~srcs ~meta
what we want.
let min_clock_jsoo =
let srcs = [in_test "min_clock.ml"] in
let meta = B0_meta.(empty |> tag test) in
let meta = B0_jsoo.meta ~requires:[ptime; ptime_clock_os] ~meta () in
let doc = "Minimal clock example" in
B0_jsoo.web "min-clock-jsoo" ~doc ~srcs ~meta
*)
let default =
let meta =
let open B0_meta in
empty
|> tag B0_opam.tag
|> add authors ["The ptime programmers"]
|> add maintainers ["Daniel Bünzli <daniel.buenzl >"]
|> add homepage ""
|> add online_doc "/"
|> add licenses ["ISC"]
|> add repo "git+"
|> add issues ""
|> add description_tags
["time"; "posix"; "system"; "org:erratique"]
|> add B0_opam.Meta.depends
[ "ocaml", {|>= "4.08.0"|};
"ocamlfind", {|build|};
"ocamlbuild", {|build & != "0.9.0"|};
"topkg", {|build & >= "1.0.3"|};
]
|> add B0_opam.Meta.build
{|[["ocaml" "pkg/pkg.ml" "build" "--dev-pkg" "%{dev}%"]]|}
in
B0_pack.v "default" ~doc:"ptime package" ~meta ~locked:true @@
B0_unit.list ()
|
80be681349c9576573f699b5b2266c4e204c3f816bbd8a0ac2326d241efd8fd7 | 5HT/ant | FontMetric.mli |
open XNum;
open Unicode.Types;
open Dim;
open Graphic;
open Substitute;
open GlyphMetric;
(* font metrics *)
type font_parameter =
{
hyphen_glyph : glyph_desc;
skew_glyph : glyph_desc;
margin_glyph : glyph_desc;
space_glyph : glyph_desc;
foreign_glyph : glyph_desc;
slant : num;
space : num;
space_stretch : num;
space_shrink : num;
x_height : num;
quad : num;
extra_space : num;
num_shift_1 : num;
num_shift_2 : num;
num_shift_3 : num;
denom_shift_1 : num;
denom_shift_2 : num;
super_shift_1 : num;
super_shift_2 : num;
super_shift_3 : num;
sub_shift_1 : num;
sub_shift_2 : num;
super_drop : num;
sub_drop : num;
delim_1 : num;
delim_2 : num;
axis_height : num;
rule_thickness : num;
big_op_spacing_1 : num;
big_op_spacing_2 : num;
big_op_spacing_3 : num;
big_op_spacing_4 : num;
big_op_spacing_5 : num
};
type font_type =
[ PostScript
| OpenTypeCFF
| TrueType
| Other
];
type font_metric =
{
name : string;
ps_name : string;
file_name : string;
font_type : font_type;
first_glyph : int;
last_glyph : int;
design_size : num;
at_size : num;
check_sum : num;
parameter : font_parameter;
get_glyph : uc_char -> glyph_desc;
get_unicode : glyph_desc -> uc_string;
get_composer : !'box 'cmd . get_composer_type 'box 'cmd;
kerning : font_metric -> int -> int -> lig_kern;
draw_simple_glyph : font_metric -> int -> simple_box;
accent_base_point : font_metric -> glyph_metric -> (num * num);
accent_attach_point : font_metric -> glyph_metric -> (num * num);
get_glyph_bitmap : font_metric -> uc_char -> GlyphBitmap.glyph;
get_glyph_name : int -> string;
glyph_metric : array glyph_metric
}
and get_composer_type 'box 'cmd = font_metric -> uc_string -> SymbolSet.t -> glyph_composer font_metric 'box 'cmd
and simple_box =
[ Empty
| SimpleGlyph of int and font_metric
| Rule of num and num
| Image of num and num and string and LoadImage.format
| Group of list (graphic_command num simple_box)
| Command of simple_cmd
]
and simple_cmd =
[= `DVI_Special of string
];
type glyph_spec =
[ GlyphIndex of int
| GlyphChar of uc_char
| GlyphName of string
];
type adjustment_spec =
[ AdjKern of num
| AdjLig of glyph_spec
];
module GlyphSpecTrie : DynamicTrie.S with type elt = glyph_spec;
(* User specified modifications of font parameters. *)
type font_load_params =
{
flp_size : num; (* scale font to this size *)
flp_encoding : array uc_string; (* overrides built in encoding *)
FIX : replace these two by
flp_skew_glyph : glyph_desc; (* specifies the skew glyph *) (* a complete font_parameter *)
flp_letter_spacing : num; (* additional letter spacing *)
flp_extra_pos : GlyphSpecTrie.t adjustment_spec; (* additional kerning pairs and ligatures *)
flp_extra_subst : GlyphSpecTrie.t adjustment_spec; (* additional kerning pairs and ligatures *)
flp_extra_kern : list (glyph_spec * extra_kern_info) (* kerning with border glyphs *)
};
(* pages *)
type page =
{
p_contents : simple_box;
p_number : int;
p_width : num;
p_height : num
};
value default_bitmap_resolution : ref int;
value default_mf_mode : ref string;
value get_glyph : font_metric -> uc_char -> glyph_desc;
value get_unicode : font_metric -> glyph_desc -> uc_string;
value index_to_glyph : font_metric -> int -> glyph_desc;
value glyph_exists : font_metric -> int -> bool;
value glyph_spec_to_index : (uc_char -> int) -> (string -> int) -> glyph_spec -> int;
value simple_ligature_substitution : font_metric -> substitution font_metric 'box 'cmd;
value simple_composer : font_metric
-> substitution font_metric 'box 'cmd
-> glyph_composer font_metric 'box 'cmd;
value two_phase_composer : font_metric
-> substitution font_metric 'box 'cmd
-> substitution font_metric 'box 'cmd
-> glyph_composer font_metric 'box 'cmd;
value add_border_kern : int -> int -> int -> num -> list (int * extra_kern_info)
-> list adjustment_table -> list adjustment_table;
value adjustment_spec_to_table : (uc_char -> int) -> (string -> int)
-> GlyphSpecTrie.t adjustment_spec -> adjustment_table;
value get_glyph_metric : font_metric -> glyph_desc -> glyph_metric;
value next_glyph : font_metric -> glyph_desc -> glyph_desc;
value get_glyph_composer : get_composer_type 'box 'cmd;
value get_lig_kern : font_metric -> glyph_desc -> glyph_desc -> lig_kern;
value draw_simple_glyph : font_metric -> int -> simple_box;
value draw_displaced_simple_glyph : num -> num -> font_metric -> int -> simple_box;
value draw_glyph : font_metric -> glyph_desc -> simple_box;
value get_hyphen_glyph : font_metric -> glyph_desc;
value get_skew_glyph : font_metric -> glyph_desc;
value get_margin_glyph : font_metric -> glyph_desc;
value get_space_glyph : font_metric -> glyph_desc;
value get_foreign_glyph : font_metric -> glyph_desc;
value get_border_glyph : font_metric -> border_glyph -> glyph_desc;
value accent_base_point : font_metric -> glyph_metric -> (num * num);
value accent_attach_point : font_metric -> glyph_metric -> (num * num);
value accent_base_point_x_height : font_metric -> glyph_metric -> (num * num);
value accent_attach_point_top : font_metric -> glyph_metric -> (num * num);
value accent_position : font_metric -> glyph_metric ->
font_metric -> glyph_metric -> (num * num);
value construct_accent : font_metric -> glyph_desc -> font_metric -> glyph_desc -> glyph_metric;
(* Shorthand to access the dimension of a normal and an extended space of the font. *)
value space_glue : font_metric -> dim;
value xspace_glue : font_metric -> dim;
value empty_font : font_metric;
value empty_parameter : font_parameter;
value empty_load_params : font_load_params;
| null | https://raw.githubusercontent.com/5HT/ant/6acf51f4c4ebcc06c52c595776e0293cfa2f1da4/Runtime/FontMetric.mli | ocaml | font metrics
User specified modifications of font parameters.
scale font to this size
overrides built in encoding
specifies the skew glyph
a complete font_parameter
additional letter spacing
additional kerning pairs and ligatures
additional kerning pairs and ligatures
kerning with border glyphs
pages
Shorthand to access the dimension of a normal and an extended space of the font. |
open XNum;
open Unicode.Types;
open Dim;
open Graphic;
open Substitute;
open GlyphMetric;
type font_parameter =
{
hyphen_glyph : glyph_desc;
skew_glyph : glyph_desc;
margin_glyph : glyph_desc;
space_glyph : glyph_desc;
foreign_glyph : glyph_desc;
slant : num;
space : num;
space_stretch : num;
space_shrink : num;
x_height : num;
quad : num;
extra_space : num;
num_shift_1 : num;
num_shift_2 : num;
num_shift_3 : num;
denom_shift_1 : num;
denom_shift_2 : num;
super_shift_1 : num;
super_shift_2 : num;
super_shift_3 : num;
sub_shift_1 : num;
sub_shift_2 : num;
super_drop : num;
sub_drop : num;
delim_1 : num;
delim_2 : num;
axis_height : num;
rule_thickness : num;
big_op_spacing_1 : num;
big_op_spacing_2 : num;
big_op_spacing_3 : num;
big_op_spacing_4 : num;
big_op_spacing_5 : num
};
type font_type =
[ PostScript
| OpenTypeCFF
| TrueType
| Other
];
type font_metric =
{
name : string;
ps_name : string;
file_name : string;
font_type : font_type;
first_glyph : int;
last_glyph : int;
design_size : num;
at_size : num;
check_sum : num;
parameter : font_parameter;
get_glyph : uc_char -> glyph_desc;
get_unicode : glyph_desc -> uc_string;
get_composer : !'box 'cmd . get_composer_type 'box 'cmd;
kerning : font_metric -> int -> int -> lig_kern;
draw_simple_glyph : font_metric -> int -> simple_box;
accent_base_point : font_metric -> glyph_metric -> (num * num);
accent_attach_point : font_metric -> glyph_metric -> (num * num);
get_glyph_bitmap : font_metric -> uc_char -> GlyphBitmap.glyph;
get_glyph_name : int -> string;
glyph_metric : array glyph_metric
}
and get_composer_type 'box 'cmd = font_metric -> uc_string -> SymbolSet.t -> glyph_composer font_metric 'box 'cmd
and simple_box =
[ Empty
| SimpleGlyph of int and font_metric
| Rule of num and num
| Image of num and num and string and LoadImage.format
| Group of list (graphic_command num simple_box)
| Command of simple_cmd
]
and simple_cmd =
[= `DVI_Special of string
];
type glyph_spec =
[ GlyphIndex of int
| GlyphChar of uc_char
| GlyphName of string
];
type adjustment_spec =
[ AdjKern of num
| AdjLig of glyph_spec
];
module GlyphSpecTrie : DynamicTrie.S with type elt = glyph_spec;
type font_load_params =
{
FIX : replace these two by
};
type page =
{
p_contents : simple_box;
p_number : int;
p_width : num;
p_height : num
};
value default_bitmap_resolution : ref int;
value default_mf_mode : ref string;
value get_glyph : font_metric -> uc_char -> glyph_desc;
value get_unicode : font_metric -> glyph_desc -> uc_string;
value index_to_glyph : font_metric -> int -> glyph_desc;
value glyph_exists : font_metric -> int -> bool;
value glyph_spec_to_index : (uc_char -> int) -> (string -> int) -> glyph_spec -> int;
value simple_ligature_substitution : font_metric -> substitution font_metric 'box 'cmd;
value simple_composer : font_metric
-> substitution font_metric 'box 'cmd
-> glyph_composer font_metric 'box 'cmd;
value two_phase_composer : font_metric
-> substitution font_metric 'box 'cmd
-> substitution font_metric 'box 'cmd
-> glyph_composer font_metric 'box 'cmd;
value add_border_kern : int -> int -> int -> num -> list (int * extra_kern_info)
-> list adjustment_table -> list adjustment_table;
value adjustment_spec_to_table : (uc_char -> int) -> (string -> int)
-> GlyphSpecTrie.t adjustment_spec -> adjustment_table;
value get_glyph_metric : font_metric -> glyph_desc -> glyph_metric;
value next_glyph : font_metric -> glyph_desc -> glyph_desc;
value get_glyph_composer : get_composer_type 'box 'cmd;
value get_lig_kern : font_metric -> glyph_desc -> glyph_desc -> lig_kern;
value draw_simple_glyph : font_metric -> int -> simple_box;
value draw_displaced_simple_glyph : num -> num -> font_metric -> int -> simple_box;
value draw_glyph : font_metric -> glyph_desc -> simple_box;
value get_hyphen_glyph : font_metric -> glyph_desc;
value get_skew_glyph : font_metric -> glyph_desc;
value get_margin_glyph : font_metric -> glyph_desc;
value get_space_glyph : font_metric -> glyph_desc;
value get_foreign_glyph : font_metric -> glyph_desc;
value get_border_glyph : font_metric -> border_glyph -> glyph_desc;
value accent_base_point : font_metric -> glyph_metric -> (num * num);
value accent_attach_point : font_metric -> glyph_metric -> (num * num);
value accent_base_point_x_height : font_metric -> glyph_metric -> (num * num);
value accent_attach_point_top : font_metric -> glyph_metric -> (num * num);
value accent_position : font_metric -> glyph_metric ->
font_metric -> glyph_metric -> (num * num);
value construct_accent : font_metric -> glyph_desc -> font_metric -> glyph_desc -> glyph_metric;
value space_glue : font_metric -> dim;
value xspace_glue : font_metric -> dim;
value empty_font : font_metric;
value empty_parameter : font_parameter;
value empty_load_params : font_load_params;
|
22f356a29c44a87959afdc070cb1003a75d2c4960b4adeb5960533a892266fed | tvraman/aster-math | new-environment-methods.lisp | ;;; -*- Mode: LISP -*- ;;;
(in-package :aster)
;;; Separating methods on new-environments into their own file:
(defun caption-p (self)
(typep self 'caption))
(defmethod caption ((figure figure ))
"Extract the caption from a figure if any. "
(find-if #'caption-p (contents figure )))
(defmethod caption ((table table))
"Extract the caption from a table if any. "
(find-if #'caption-p (contents table )))
| null | https://raw.githubusercontent.com/tvraman/aster-math/efbf8536dd781604bf6166ded62795564b1e6ec5/lisp/read-aloud/new-environment-methods.lisp | lisp | -*- Mode: LISP -*- ;;;
Separating methods on new-environments into their own file: |
(in-package :aster)
(defun caption-p (self)
(typep self 'caption))
(defmethod caption ((figure figure ))
"Extract the caption from a figure if any. "
(find-if #'caption-p (contents figure )))
(defmethod caption ((table table))
"Extract the caption from a table if any. "
(find-if #'caption-p (contents table )))
|
19099970137286eec425cafc682b400715e11016ea711629b65f8f819193baaf | cky/guile | decompile-bytecode.scm | ;;; Guile VM code converters
Copyright ( C ) 2001 , 2009 , 2010 , 2013 Free Software Foundation , Inc.
;;;; This library is free software; you can redistribute it and/or
;;;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;;;
;;;; This library is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; Lesser General Public License for more details.
;;;;
You should have received a copy of the GNU Lesser General Public
;;;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
;;; Code:
(define-module (language assembly decompile-bytecode)
#:use-module (system vm instruction)
#:use-module (system base pmatch)
#:use-module (srfi srfi-4)
#:use-module (rnrs bytevectors)
#:use-module (language assembly)
#:use-module ((system vm objcode) #:select (byte-order))
#:export (decompile-bytecode))
(define (decompile-bytecode x env opts)
(let ((i 0) (size (u8vector-length x)))
(define (pop)
(let ((b (cond ((< i size) (u8vector-ref x i))
((= i size) #f)
(else (error "tried to decode too many bytes")))))
(if b (set! i (1+ i)))
b))
(let ((ret (decode-load-program pop)))
(if (= i size)
(values ret env)
(error "bad bytecode: only decoded ~a out of ~a bytes" i size)))))
(define (br-instruction? x)
(memq x '(br br-if br-if-not br-if-eq br-if-not-eq br-if-null br-if-not-null)))
(define (br-nargs-instruction? x)
(memq x '(br-if-nargs-ne br-if-nargs-lt br-if-nargs-gt br-if-nargs-lt/non-kw)))
(define (bytes->s24 a b c)
(let ((x (+ (ash a 16) (ash b 8) c)))
(if (zero? (logand (ash 1 23) x))
x
(- x (ash 1 24)))))
;; FIXME: this is a little-endian disassembly!!!
(define (decode-load-program pop)
(let* ((a (pop)) (b (pop)) (c (pop)) (d (pop))
(e (pop)) (f (pop)) (g (pop)) (h (pop))
(len (+ a (ash b 8) (ash c 16) (ash d 24)))
(metalen (+ e (ash f 8) (ash g 16) (ash h 24)))
(labels '())
(i 0))
(define (ensure-label rel1 rel2 rel3)
(let ((where (+ i (bytes->s24 rel1 rel2 rel3))))
(or (assv-ref labels where)
(begin
(let ((l (gensym ":L")))
(set! labels (acons where l labels))
l)))))
(define (sub-pop) ;; ...records. ha. ha.
(let ((b (cond ((< i len) (pop))
((= i len) #f)
(else (error "tried to decode too many bytes")))))
(if b (set! i (1+ i)))
b))
(let lp ((out '()))
(cond ((> i len)
(error "error decoding program -- read too many bytes" out))
((= i len)
`(load-program ,(map (lambda (x) (cons (cdr x) (car x)))
(reverse labels))
,len
,(if (zero? metalen) #f (decode-load-program pop))
,@(reverse! out)))
(else
(let ((exp (decode-bytecode sub-pop)))
(pmatch exp
((,br ,rel1 ,rel2 ,rel3) (guard (br-instruction? br))
(lp (cons `(,br ,(ensure-label rel1 rel2 rel3)) out)))
((,br ,hi ,lo ,rel1 ,rel2 ,rel3) (guard (br-nargs-instruction? br))
(lp (cons `(,br ,hi ,lo ,(ensure-label rel1 rel2 rel3)) out)))
((bind-optionals/shuffle-or-br ,nreq-hi ,nreq-lo
,nreq-and-nopt-hi ,nreq-and-nopt-lo
,ntotal-hi ,ntotal-lo
,rel1 ,rel2 ,rel3)
(lp (cons `(bind-optionals/shuffle-or-br
,nreq-hi ,nreq-lo
,nreq-and-nopt-hi ,nreq-and-nopt-lo
,ntotal-hi ,ntotal-lo
,(ensure-label rel1 rel2 rel3))
out)))
((mv-call ,n ,rel1 ,rel2 ,rel3)
(lp (cons `(mv-call ,n ,(ensure-label rel1 rel2 rel3)) out)))
((prompt ,n0 ,rel1 ,rel2 ,rel3)
(lp (cons `(prompt ,n0 ,(ensure-label rel1 rel2 rel3)) out)))
(else
(lp (cons exp out))))))))))
(define (decode-bytecode pop)
(and=> (pop)
(lambda (opcode)
(let ((inst (opcode->instruction opcode)))
(cond
((eq? inst 'load-program)
(decode-load-program pop))
((< (instruction-length inst) 0)
;; the negative length indicates a variable length
;; instruction
(let* ((make-sequence
(if (or (memq inst '(load-array load-wide-string)))
make-bytevector
make-string))
(sequence-set!
(if (or (memq inst '(load-array load-wide-string)))
bytevector-u8-set!
(lambda (str pos value)
(string-set! str pos (integer->char value)))))
(len (let* ((a (pop)) (b (pop)) (c (pop)))
(+ (ash a 16) (ash b 8) c)))
(seq (make-sequence len)))
(let lp ((i 0))
(if (= i len)
`(,inst ,(if (eq? inst 'load-wide-string)
(utf32->string seq (native-endianness))
seq))
(begin
(sequence-set! seq i (pop))
(lp (1+ i)))))))
(else
;; fixed length
(let lp ((n (instruction-length inst)) (out (list inst)))
(if (zero? n)
(reverse! out)
(lp (1- n) (cons (pop) out))))))))))
| null | https://raw.githubusercontent.com/cky/guile/89ce9fb31b00f1f243fe6f2450db50372cc0b86d/module/language/assembly/decompile-bytecode.scm | scheme | Guile VM code converters
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Code:
FIXME: this is a little-endian disassembly!!!
...records. ha. ha.
the negative length indicates a variable length
instruction
fixed length |
Copyright ( C ) 2001 , 2009 , 2010 , 2013 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
(define-module (language assembly decompile-bytecode)
#:use-module (system vm instruction)
#:use-module (system base pmatch)
#:use-module (srfi srfi-4)
#:use-module (rnrs bytevectors)
#:use-module (language assembly)
#:use-module ((system vm objcode) #:select (byte-order))
#:export (decompile-bytecode))
(define (decompile-bytecode x env opts)
(let ((i 0) (size (u8vector-length x)))
(define (pop)
(let ((b (cond ((< i size) (u8vector-ref x i))
((= i size) #f)
(else (error "tried to decode too many bytes")))))
(if b (set! i (1+ i)))
b))
(let ((ret (decode-load-program pop)))
(if (= i size)
(values ret env)
(error "bad bytecode: only decoded ~a out of ~a bytes" i size)))))
(define (br-instruction? x)
(memq x '(br br-if br-if-not br-if-eq br-if-not-eq br-if-null br-if-not-null)))
(define (br-nargs-instruction? x)
(memq x '(br-if-nargs-ne br-if-nargs-lt br-if-nargs-gt br-if-nargs-lt/non-kw)))
(define (bytes->s24 a b c)
(let ((x (+ (ash a 16) (ash b 8) c)))
(if (zero? (logand (ash 1 23) x))
x
(- x (ash 1 24)))))
(define (decode-load-program pop)
(let* ((a (pop)) (b (pop)) (c (pop)) (d (pop))
(e (pop)) (f (pop)) (g (pop)) (h (pop))
(len (+ a (ash b 8) (ash c 16) (ash d 24)))
(metalen (+ e (ash f 8) (ash g 16) (ash h 24)))
(labels '())
(i 0))
(define (ensure-label rel1 rel2 rel3)
(let ((where (+ i (bytes->s24 rel1 rel2 rel3))))
(or (assv-ref labels where)
(begin
(let ((l (gensym ":L")))
(set! labels (acons where l labels))
l)))))
(let ((b (cond ((< i len) (pop))
((= i len) #f)
(else (error "tried to decode too many bytes")))))
(if b (set! i (1+ i)))
b))
(let lp ((out '()))
(cond ((> i len)
(error "error decoding program -- read too many bytes" out))
((= i len)
`(load-program ,(map (lambda (x) (cons (cdr x) (car x)))
(reverse labels))
,len
,(if (zero? metalen) #f (decode-load-program pop))
,@(reverse! out)))
(else
(let ((exp (decode-bytecode sub-pop)))
(pmatch exp
((,br ,rel1 ,rel2 ,rel3) (guard (br-instruction? br))
(lp (cons `(,br ,(ensure-label rel1 rel2 rel3)) out)))
((,br ,hi ,lo ,rel1 ,rel2 ,rel3) (guard (br-nargs-instruction? br))
(lp (cons `(,br ,hi ,lo ,(ensure-label rel1 rel2 rel3)) out)))
((bind-optionals/shuffle-or-br ,nreq-hi ,nreq-lo
,nreq-and-nopt-hi ,nreq-and-nopt-lo
,ntotal-hi ,ntotal-lo
,rel1 ,rel2 ,rel3)
(lp (cons `(bind-optionals/shuffle-or-br
,nreq-hi ,nreq-lo
,nreq-and-nopt-hi ,nreq-and-nopt-lo
,ntotal-hi ,ntotal-lo
,(ensure-label rel1 rel2 rel3))
out)))
((mv-call ,n ,rel1 ,rel2 ,rel3)
(lp (cons `(mv-call ,n ,(ensure-label rel1 rel2 rel3)) out)))
((prompt ,n0 ,rel1 ,rel2 ,rel3)
(lp (cons `(prompt ,n0 ,(ensure-label rel1 rel2 rel3)) out)))
(else
(lp (cons exp out))))))))))
(define (decode-bytecode pop)
(and=> (pop)
(lambda (opcode)
(let ((inst (opcode->instruction opcode)))
(cond
((eq? inst 'load-program)
(decode-load-program pop))
((< (instruction-length inst) 0)
(let* ((make-sequence
(if (or (memq inst '(load-array load-wide-string)))
make-bytevector
make-string))
(sequence-set!
(if (or (memq inst '(load-array load-wide-string)))
bytevector-u8-set!
(lambda (str pos value)
(string-set! str pos (integer->char value)))))
(len (let* ((a (pop)) (b (pop)) (c (pop)))
(+ (ash a 16) (ash b 8) c)))
(seq (make-sequence len)))
(let lp ((i 0))
(if (= i len)
`(,inst ,(if (eq? inst 'load-wide-string)
(utf32->string seq (native-endianness))
seq))
(begin
(sequence-set! seq i (pop))
(lp (1+ i)))))))
(else
(let lp ((n (instruction-length inst)) (out (list inst)))
(if (zero? n)
(reverse! out)
(lp (1- n) (cons (pop) out))))))))))
|
7bf5552c5d5e8d5706d882b1191914feeb74ee2a38097b20905382020a82e74b | mpickering/apply-refact | Default8.hs | yes = if f a then True else b | null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Default8.hs | haskell | yes = if f a then True else b | |
8c49f35a8de6f7f34f386bbccad3fd4553823db68202dd65f5b02d0be0acfcc6 | acieroid/scala-am | philosophers6.scm | ;; Dining philosophers problem
(letrec ((n 6) ; number of philosophers
(turns 5) ; number of turns to run
(forks (vector (new-lock) (new-lock) (new-lock) (new-lock) (new-lock) (new-lock)))
(pickup (lambda (left right)
(acquire (vector-ref forks (min left right)))
(acquire (vector-ref forks (max left right)))))
(putdown (lambda (left right)
(release (vector-ref forks (min left right)))
(release (vector-ref forks (max left right)))))
(philosopher (lambda (i)
(letrec ((left i)
(right (modulo (- i 1) n))
(process (lambda (turn)
(if (> turn turns)
#t
(begin
(pickup left right)
(display i) (newline)
(putdown left right)
(process (+ turn 1)))))))
(process 0))))
(t1 (fork (philosopher 0)))
(t2 (fork (philosopher 1)))
(t3 (fork (philosopher 2)))
(t4 (fork (philosopher 3)))
(t5 (fork (philosopher 4)))
(t6 (fork (philosopher 5))))
(join t1)
(join t2)
(join t3)
(join t4)
(join t5)
(join t6))
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/concurrentScheme/threads/variations/philosophers6.scm | scheme | Dining philosophers problem
number of philosophers
number of turns to run | (forks (vector (new-lock) (new-lock) (new-lock) (new-lock) (new-lock) (new-lock)))
(pickup (lambda (left right)
(acquire (vector-ref forks (min left right)))
(acquire (vector-ref forks (max left right)))))
(putdown (lambda (left right)
(release (vector-ref forks (min left right)))
(release (vector-ref forks (max left right)))))
(philosopher (lambda (i)
(letrec ((left i)
(right (modulo (- i 1) n))
(process (lambda (turn)
(if (> turn turns)
#t
(begin
(pickup left right)
(display i) (newline)
(putdown left right)
(process (+ turn 1)))))))
(process 0))))
(t1 (fork (philosopher 0)))
(t2 (fork (philosopher 1)))
(t3 (fork (philosopher 2)))
(t4 (fork (philosopher 3)))
(t5 (fork (philosopher 4)))
(t6 (fork (philosopher 5))))
(join t1)
(join t2)
(join t3)
(join t4)
(join t5)
(join t6))
|
1299e0ebeb4acf7ae5c0d5cd8fc89e7f916ed5955411477816971a97ba773e9a | typeable/octopod | Large.hs | module Frontend.UIKit.Button.Large
( largeButton,
LargeButtonType (..),
LargeButtonConfig (..),
LargeButtonStyle (..),
LargeButtonPriority (..),
BaseButtonTag (..),
)
where
import Control.Lens
import Data.Default
import Data.Generics.Labels ()
import Data.Text (Text)
import Frontend.Classes
import Frontend.UIKit.Button.Common
import GHC.Generics (Generic)
import Reflex.Dom
data LargeButtonConfig t = LargeButtonConfig
{ buttonText :: Text
, buttonEnabled :: Dynamic t Bool
, buttonType :: Dynamic t (Maybe LargeButtonType)
, buttonPriority :: LargeButtonPriority
, buttonStyle :: LargeButtonStyle
, buttonBaseTag :: BaseButtonTag t
}
deriving stock (Generic)
data LargeButtonPriority = PrimaryLargeButton | SecondaryLargeButton
buttonPriorityClasses :: LargeButtonPriority -> Classes
buttonPriorityClasses PrimaryLargeButton = mempty
buttonPriorityClasses SecondaryLargeButton = "button--secondary"
data LargeButtonStyle
= RegularLargeButtonStyle
| PopupActionLargeButtonStyle
| DialogActionLargeButtonStyle
| PageActionLargeButtonStyle
buttonStyleClasses :: LargeButtonStyle -> Classes
buttonStyleClasses RegularLargeButtonStyle = mempty
buttonStyleClasses PopupActionLargeButtonStyle = "popup__action"
buttonStyleClasses DialogActionLargeButtonStyle = "dialog__action"
buttonStyleClasses PageActionLargeButtonStyle = "page__action"
instance Reflex t => Default (LargeButtonConfig t) where
def =
LargeButtonConfig
{ buttonText = ""
, buttonEnabled = pure True
, buttonType = pure Nothing
, buttonStyle = RegularLargeButtonStyle
, buttonPriority = PrimaryLargeButton
, buttonBaseTag = ButtonTag
}
data LargeButtonType
= AddLargeButtonType
| ArchiveLargeButtonType
| RestoreLargeButtonType
| EditLargeButtonType
| LogsLargeButtonType
| SaveLargeButtonType
| LoadingLargeButtonType
buttonTypeClasses :: LargeButtonType -> Classes
buttonTypeClasses AddLargeButtonType = "button--add"
buttonTypeClasses ArchiveLargeButtonType = "button--archive"
buttonTypeClasses RestoreLargeButtonType = "button--restore"
buttonTypeClasses EditLargeButtonType = "button--edit"
buttonTypeClasses LogsLargeButtonType = "button--logs"
buttonTypeClasses SaveLargeButtonType = "button--save"
buttonTypeClasses LoadingLargeButtonType = "button--save-loading"
largeButton ::
(DomBuilder t m, PostBuild t m) =>
LargeButtonConfig t ->
m (Event t (Either () ()))
largeButton cfg =
buttonEl
CommonButtonConfig
{ constantClasses = do
bType <- cfg ^. #buttonType
pure $
"button"
<> maybe mempty buttonTypeClasses bType
<> buttonStyleClasses (cfg ^. #buttonStyle)
<> buttonPriorityClasses (cfg ^. #buttonPriority)
, enabledClasses = mempty
, disabledClasses = "button--disabled"
, buttonEnabled = cfg ^. #buttonEnabled
, buttonText = textBuilder $ cfg ^. #buttonText
, buttonBaseTag = cfg ^. #buttonBaseTag
}
| null | https://raw.githubusercontent.com/typeable/octopod/857a4ee74b12cc061a45ca1e033f230cbbccce99/octopod-frontend/src/Frontend/UIKit/Button/Large.hs | haskell | module Frontend.UIKit.Button.Large
( largeButton,
LargeButtonType (..),
LargeButtonConfig (..),
LargeButtonStyle (..),
LargeButtonPriority (..),
BaseButtonTag (..),
)
where
import Control.Lens
import Data.Default
import Data.Generics.Labels ()
import Data.Text (Text)
import Frontend.Classes
import Frontend.UIKit.Button.Common
import GHC.Generics (Generic)
import Reflex.Dom
data LargeButtonConfig t = LargeButtonConfig
{ buttonText :: Text
, buttonEnabled :: Dynamic t Bool
, buttonType :: Dynamic t (Maybe LargeButtonType)
, buttonPriority :: LargeButtonPriority
, buttonStyle :: LargeButtonStyle
, buttonBaseTag :: BaseButtonTag t
}
deriving stock (Generic)
data LargeButtonPriority = PrimaryLargeButton | SecondaryLargeButton
buttonPriorityClasses :: LargeButtonPriority -> Classes
buttonPriorityClasses PrimaryLargeButton = mempty
buttonPriorityClasses SecondaryLargeButton = "button--secondary"
data LargeButtonStyle
= RegularLargeButtonStyle
| PopupActionLargeButtonStyle
| DialogActionLargeButtonStyle
| PageActionLargeButtonStyle
buttonStyleClasses :: LargeButtonStyle -> Classes
buttonStyleClasses RegularLargeButtonStyle = mempty
buttonStyleClasses PopupActionLargeButtonStyle = "popup__action"
buttonStyleClasses DialogActionLargeButtonStyle = "dialog__action"
buttonStyleClasses PageActionLargeButtonStyle = "page__action"
instance Reflex t => Default (LargeButtonConfig t) where
def =
LargeButtonConfig
{ buttonText = ""
, buttonEnabled = pure True
, buttonType = pure Nothing
, buttonStyle = RegularLargeButtonStyle
, buttonPriority = PrimaryLargeButton
, buttonBaseTag = ButtonTag
}
data LargeButtonType
= AddLargeButtonType
| ArchiveLargeButtonType
| RestoreLargeButtonType
| EditLargeButtonType
| LogsLargeButtonType
| SaveLargeButtonType
| LoadingLargeButtonType
buttonTypeClasses :: LargeButtonType -> Classes
buttonTypeClasses AddLargeButtonType = "button--add"
buttonTypeClasses ArchiveLargeButtonType = "button--archive"
buttonTypeClasses RestoreLargeButtonType = "button--restore"
buttonTypeClasses EditLargeButtonType = "button--edit"
buttonTypeClasses LogsLargeButtonType = "button--logs"
buttonTypeClasses SaveLargeButtonType = "button--save"
buttonTypeClasses LoadingLargeButtonType = "button--save-loading"
largeButton ::
(DomBuilder t m, PostBuild t m) =>
LargeButtonConfig t ->
m (Event t (Either () ()))
largeButton cfg =
buttonEl
CommonButtonConfig
{ constantClasses = do
bType <- cfg ^. #buttonType
pure $
"button"
<> maybe mempty buttonTypeClasses bType
<> buttonStyleClasses (cfg ^. #buttonStyle)
<> buttonPriorityClasses (cfg ^. #buttonPriority)
, enabledClasses = mempty
, disabledClasses = "button--disabled"
, buttonEnabled = cfg ^. #buttonEnabled
, buttonText = textBuilder $ cfg ^. #buttonText
, buttonBaseTag = cfg ^. #buttonBaseTag
}
| |
1aed4afdb95e9f55b1fec365314670a09d58d5ae4863a23835a3d02ab78393c1 | mzp/coq-ruby | ind_tables.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
i $ I d : ind_tables.ml 10739 2008 - 04 - 01 14:45:20Z herbelin $ i
open Names
open Mod_subst
let eq_scheme_map = ref Indmap.empty
let cache_scheme (_,(ind,const)) =
eq_scheme_map := Indmap.add ind const (!eq_scheme_map)
let export_scheme obj =
Some obj
let _ = Summary.declare_summary "eqscheme"
{ Summary.freeze_function = (fun () -> !eq_scheme_map);
Summary.unfreeze_function = (fun fs -> eq_scheme_map := fs);
Summary.init_function = (fun () -> eq_scheme_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_eq_scheme ind =
Indmap.find ind !eq_scheme_map
let check_eq_scheme ind =
Indmap.mem ind !eq_scheme_map
let bl_map = ref Indmap.empty
let lb_map = ref Indmap.empty
let dec_map = ref Indmap.empty
let cache_bl (_,(ind,const)) =
bl_map := Indmap.add ind const (!bl_map)
let cache_lb (_,(ind,const)) =
lb_map := Indmap.add ind const (!lb_map)
let cache_dec (_,(ind,const)) =
dec_map := Indmap.add ind const (!dec_map)
let export_bool_leib obj =
Some obj
let export_leib_bool obj =
Some obj
let export_dec_proof obj =
Some obj
let _ = Summary.declare_summary "bl_proof"
{ Summary.freeze_function = (fun () -> !bl_map);
Summary.unfreeze_function = (fun fs -> bl_map := fs);
Summary.init_function = (fun () -> bl_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_bl_proof ind =
Indmap.find ind !bl_map
let check_bl_proof ind =
Indmap.mem ind !bl_map
let _ = Summary.declare_summary "lb_proof"
{ Summary.freeze_function = (fun () -> !lb_map);
Summary.unfreeze_function = (fun fs -> lb_map := fs);
Summary.init_function = (fun () -> lb_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_lb_proof ind =
Indmap.find ind !lb_map
let check_lb_proof ind =
Indmap.mem ind !lb_map
let _ = Summary.declare_summary "eq_dec_proof"
{ Summary.freeze_function = (fun () -> !dec_map);
Summary.unfreeze_function = (fun fs -> dec_map := fs);
Summary.init_function = (fun () -> dec_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_eq_dec_proof ind =
Indmap.find ind !dec_map
let check_dec_proof ind =
Indmap.mem ind !dec_map
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/toplevel/ind_tables.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : ind_tables.ml 10739 2008 - 04 - 01 14:45:20Z herbelin $ i
open Names
open Mod_subst
let eq_scheme_map = ref Indmap.empty
let cache_scheme (_,(ind,const)) =
eq_scheme_map := Indmap.add ind const (!eq_scheme_map)
let export_scheme obj =
Some obj
let _ = Summary.declare_summary "eqscheme"
{ Summary.freeze_function = (fun () -> !eq_scheme_map);
Summary.unfreeze_function = (fun fs -> eq_scheme_map := fs);
Summary.init_function = (fun () -> eq_scheme_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_eq_scheme ind =
Indmap.find ind !eq_scheme_map
let check_eq_scheme ind =
Indmap.mem ind !eq_scheme_map
let bl_map = ref Indmap.empty
let lb_map = ref Indmap.empty
let dec_map = ref Indmap.empty
let cache_bl (_,(ind,const)) =
bl_map := Indmap.add ind const (!bl_map)
let cache_lb (_,(ind,const)) =
lb_map := Indmap.add ind const (!lb_map)
let cache_dec (_,(ind,const)) =
dec_map := Indmap.add ind const (!dec_map)
let export_bool_leib obj =
Some obj
let export_leib_bool obj =
Some obj
let export_dec_proof obj =
Some obj
let _ = Summary.declare_summary "bl_proof"
{ Summary.freeze_function = (fun () -> !bl_map);
Summary.unfreeze_function = (fun fs -> bl_map := fs);
Summary.init_function = (fun () -> bl_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_bl_proof ind =
Indmap.find ind !bl_map
let check_bl_proof ind =
Indmap.mem ind !bl_map
let _ = Summary.declare_summary "lb_proof"
{ Summary.freeze_function = (fun () -> !lb_map);
Summary.unfreeze_function = (fun fs -> lb_map := fs);
Summary.init_function = (fun () -> lb_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_lb_proof ind =
Indmap.find ind !lb_map
let check_lb_proof ind =
Indmap.mem ind !lb_map
let _ = Summary.declare_summary "eq_dec_proof"
{ Summary.freeze_function = (fun () -> !dec_map);
Summary.unfreeze_function = (fun fs -> dec_map := fs);
Summary.init_function = (fun () -> dec_map := Indmap.empty);
Summary.survive_module = false;
Summary.survive_section = true}
let find_eq_dec_proof ind =
Indmap.find ind !dec_map
let check_dec_proof ind =
Indmap.mem ind !dec_map
|
62c8ade9352832e2024587da5cb294ccbcba90470314fd6b70ff1335fa0be274 | holyjak/fulcro-billing-app | billing_data.clj | (ns billing-app.components.billing-data
(:require
[billing-app.model.cache :as dev]
[billing-app.components.fake-domain-client :as fake-domain-client]
[next.jdbc :as jdbc]
[next.jdbc.result-set :as jdbc.rs]))
(defn ^:static accept-all [_] true)
(defn invoice-batches-raw-categorized
"Raw batches based on invoices, i.e. without phone leasing"
[ds invoice-id]
(let [feature-mappings {}
batches (fake-domain-client/find-subscr-batches ds invoice-id accept-all)]
;; NOTE: We cannot cache this to disk because it contains `meta` that we need
but EDN does not preserve them
(with-meta
(map (fn [batch] (map #(assoc % ::included?
(-> % :kd/charge-type #{:ignored} not))
batch))
batches)
(meta batches))))
(defn leasing-charges
"Get leasing charges from our DB as sid -> charge-like data, to merge with standard charges"
[ds invoice-id {:keys [billing-period]}]
{:pre [invoice-id billing-period]}
(into {}
(map (fn [{:keys [EMPLOYEE_ID, USAGE_INC_VAT]}]
;; See what billing-app.model.billing.invoice.charge/charge-view needs
[EMPLOYEE_ID
{:kd/usage-inc-vat USAGE_INC_VAT,
;:kd/charge-type nil
:period (zipmap [:cbm/startDate :cbm/endDate] billing-period)
:debug {:name "Phone Leasing"}
::included? true}]))
(jdbc/plan
ds
["select EMPLOYEE_ID, USAGE_INC_VAT
from INVOICE_USAGE
where CATEGORY='LEASING_COST' and INVOICE_ID=?"
invoice-id]
{:builder-fn jdbc.rs/as-unqualified-maps})))
(defn add-leasing-charge
"Add a leasing charge to the list of a subscription charges, if available"
[leasing-charges [{:kd/keys [sid]} :as subscr-charges]]
(if-let [leasing-charge (get leasing-charges sid)]
(conj subscr-charges leasing-charge)
subscr-charges))
(defn billing-period+invoice-batches-with-leasing-cacheable
{:ttl-ms (* 24 60 60 1000)}
[ds invoice-id]
(let [batches+meta (invoice-batches-raw-categorized ds invoice-id)
{:keys [billing-period]} (meta batches+meta)
leasing-charges (leasing-charges ds invoice-id {:billing-period billing-period})]
;; NOTE: Metadata cannot be cached so we need to return everything interesting as raw data
[billing-period (map (partial add-leasing-charge leasing-charges) batches+meta)]))
(defn employee-batches [ds invoice-id employee-id]
(let [[_billing-period batches] (dev/caching #'billing-period+invoice-batches-with-leasing-cacheable ds invoice-id)]
(filter #(-> % first :kd/sid #{employee-id}) batches)))
(defn find-invoice-parts-too-long
[ds invoice-id]
(->> (dev/caching #'fake-domain-client/find-invoice-parts-too-long ds invoice-id)
(map (fn [{:keys [billing/partNumber kd/invoicing-period account/accid]}]
{:invoice-part/synt-id (str accid ":" partNumber)
:invoice-part/number partNumber
:invoice-part/period invoicing-period
:invoice-part/accid accid}))))
(defn simulate-invoice-processing
[ds orgnr]
(fake-domain-client/simulate-invoice-processing
{:skip-apply? true, :tmp-invoice-id? true}
orgnr))
(comment
(dev/evict-all-for #'find-invoice-parts-too-long)
(dev/evict-all-for #'billing-period+invoice-batches-with-leasing-cacheable)
(dev/evict-all-for #'employee-batches)
(employee-batches (development/get-jdbc-datasource) "inv201" "e10")
(->> (jdbc/execute! (development/get-jdbc-datasource)
["select employee_id from INVOICE_EMPLOYEE"])
(map :INVOICE_EMPLOYEE/EMPLOYEE_ID)
(sort)
(dedupe))
(->> (invoice-batches-raw-categorized (development/get-jdbc-datasource) "inv201")
(filter #(-> % first :kd/sid #{"e10"}))))
| null | https://raw.githubusercontent.com/holyjak/fulcro-billing-app/568bf28552989e1e611773b2d946c9990e1edc3d/src/shared/billing_app/components/billing_data.clj | clojure | NOTE: We cannot cache this to disk because it contains `meta` that we need
See what billing-app.model.billing.invoice.charge/charge-view needs
:kd/charge-type nil
NOTE: Metadata cannot be cached so we need to return everything interesting as raw data | (ns billing-app.components.billing-data
(:require
[billing-app.model.cache :as dev]
[billing-app.components.fake-domain-client :as fake-domain-client]
[next.jdbc :as jdbc]
[next.jdbc.result-set :as jdbc.rs]))
(defn ^:static accept-all [_] true)
(defn invoice-batches-raw-categorized
"Raw batches based on invoices, i.e. without phone leasing"
[ds invoice-id]
(let [feature-mappings {}
batches (fake-domain-client/find-subscr-batches ds invoice-id accept-all)]
but EDN does not preserve them
(with-meta
(map (fn [batch] (map #(assoc % ::included?
(-> % :kd/charge-type #{:ignored} not))
batch))
batches)
(meta batches))))
(defn leasing-charges
"Get leasing charges from our DB as sid -> charge-like data, to merge with standard charges"
[ds invoice-id {:keys [billing-period]}]
{:pre [invoice-id billing-period]}
(into {}
(map (fn [{:keys [EMPLOYEE_ID, USAGE_INC_VAT]}]
[EMPLOYEE_ID
{:kd/usage-inc-vat USAGE_INC_VAT,
:period (zipmap [:cbm/startDate :cbm/endDate] billing-period)
:debug {:name "Phone Leasing"}
::included? true}]))
(jdbc/plan
ds
["select EMPLOYEE_ID, USAGE_INC_VAT
from INVOICE_USAGE
where CATEGORY='LEASING_COST' and INVOICE_ID=?"
invoice-id]
{:builder-fn jdbc.rs/as-unqualified-maps})))
(defn add-leasing-charge
"Add a leasing charge to the list of a subscription charges, if available"
[leasing-charges [{:kd/keys [sid]} :as subscr-charges]]
(if-let [leasing-charge (get leasing-charges sid)]
(conj subscr-charges leasing-charge)
subscr-charges))
(defn billing-period+invoice-batches-with-leasing-cacheable
{:ttl-ms (* 24 60 60 1000)}
[ds invoice-id]
(let [batches+meta (invoice-batches-raw-categorized ds invoice-id)
{:keys [billing-period]} (meta batches+meta)
leasing-charges (leasing-charges ds invoice-id {:billing-period billing-period})]
[billing-period (map (partial add-leasing-charge leasing-charges) batches+meta)]))
(defn employee-batches [ds invoice-id employee-id]
(let [[_billing-period batches] (dev/caching #'billing-period+invoice-batches-with-leasing-cacheable ds invoice-id)]
(filter #(-> % first :kd/sid #{employee-id}) batches)))
(defn find-invoice-parts-too-long
[ds invoice-id]
(->> (dev/caching #'fake-domain-client/find-invoice-parts-too-long ds invoice-id)
(map (fn [{:keys [billing/partNumber kd/invoicing-period account/accid]}]
{:invoice-part/synt-id (str accid ":" partNumber)
:invoice-part/number partNumber
:invoice-part/period invoicing-period
:invoice-part/accid accid}))))
(defn simulate-invoice-processing
[ds orgnr]
(fake-domain-client/simulate-invoice-processing
{:skip-apply? true, :tmp-invoice-id? true}
orgnr))
(comment
(dev/evict-all-for #'find-invoice-parts-too-long)
(dev/evict-all-for #'billing-period+invoice-batches-with-leasing-cacheable)
(dev/evict-all-for #'employee-batches)
(employee-batches (development/get-jdbc-datasource) "inv201" "e10")
(->> (jdbc/execute! (development/get-jdbc-datasource)
["select employee_id from INVOICE_EMPLOYEE"])
(map :INVOICE_EMPLOYEE/EMPLOYEE_ID)
(sort)
(dedupe))
(->> (invoice-batches-raw-categorized (development/get-jdbc-datasource) "inv201")
(filter #(-> % first :kd/sid #{"e10"}))))
|
206523dd4a924ac24100f598f0f808cb986ab682891985117c43b802ec59fa2d | np/ling | Prelude.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE LambdaCase #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeFamilies #
module Ling.Prelude (module Ling.Prelude, module X) where
import Control.Applicative as X
import Control.Lens as X hiding (Empty)
import Control.Lens.Extras as X (is)
import Control.Monad as X
import Control.Monad.Except as X
import Control.Monad.Reader as X
import Data.Bifoldable as X
import Data.Bifunctor as X
import Data.Bitraversable as X
import Data.ByteString.Lazy.Lens as BL
import Data.Digest.Pure.SHA (sha256, showDigest)
import Data.Foldable as X
import Data.Function as X
import Data.Functor as X
import Data.List as X (elemIndex, partition, sort,
transpose)
import Data.List.Lens as X
import Data.Map as X (Map, keys, keysSet,
unionWithKey)
import qualified Data.Map as Map
import Data.Maybe as X
import Data.Monoid as X hiding (Dual)
import Data.Set as X (Set)
import Data.Set (intersection, member, notMember,
union, insert)
import qualified Data.Set as Set
import Data.Set.Lens as X
import Data.Traversable as X
import Data.Tuple as X
import Debug.Trace as X
import Language.Haskell.TH (litP, stringE, stringL)
import Language.Haskell.TH.Quote
import Ling.Abs
import Numeric.Lens as X
import Text.Show.Pretty as X (ppShow)
type Endom a = a -> a
type EndoPrism a = Prism' a a
type EndoM m a = a -> m a
type Op2 a = a -> a -> a
type Rel a = a -> a -> Bool
type Msg = String
type Verbosity = Bool
newtype Prll a = Prll { _unPrll :: [a] }
deriving (Eq, Ord, Show, Read)
makePrisms ''Prll
makeLenses ''Prll
ø :: Monoid a => a
ø = mempty
instance Semigroup (Prll a) where
Prll ps <> Prll qs = Prll (ps <> qs)
instance Monoid (Prll a) where
mempty = Prll ø
instance Each (Prll a) (Prll b) a b where
each = _Prll . each
newtype Order a = Order { _unOrder :: [a] }
deriving (Eq, Ord, Show, Read)
makePrisms ''Order
makeLenses ''Order
instance Semigroup (Order a) where
Order x <> Order y = Order (x <> y)
instance Monoid (Order a) where
mempty = Order []
instance Each (Order a) (Order b) a b where
each = _Order . each
anonName :: Name
anonName = Name "_"
data Arg a = Arg { _argName :: Name, _argBody :: a }
deriving (Eq, Ord, Show, Read)
makePrisms ''Arg
makeLenses ''Arg
instance Functor Arg where fmap = over argBody
instance Each (Arg a) (Arg b) a b where
each = argBody
instance t ~ Arg a' => Rewrapped (Arg a) t
instance Wrapped (Arg a) where
type Unwrapped (Arg a) = (Name, a)
_Wrapped' = _Arg
anonArg :: a -> Arg a
anonArg = Arg anonName
data Abs a b = Abs { _argAbs :: Arg a, _bodyAbs :: b }
makePrisms ''Abs
makeLenses ''Abs
instance Functor (Abs a) where fmap = over bodyAbs
instance Bifunctor Abs where
bimap f g (Abs arg x) = Abs (f <$> arg) (g x)
instance Bifoldable Abs where
bifoldMap = bifoldMapDefault
instance Bitraversable Abs where
bitraverse f g (Abs arg x) = Abs <$> argBody f arg <*> g x
instance t ~ Abs a' b' => Rewrapped (Abs a b) t
instance Wrapped (Abs a b) where
type Unwrapped (Abs a b) = (Arg a, b)
_Wrapped' = _Abs
TODO : Rename into something like ' Telescoped ' instead
data Telescope a b = Telescope { _argsTele :: [Arg a], _bodyTele :: b }
makePrisms ''Telescope
makeLenses ''Telescope
instance Functor (Telescope a) where
fmap f (Telescope args x) = Telescope args (f x)
instance Bifunctor Telescope where
bimap f g (Telescope args x) = Telescope (fmap f <$> args) (g x)
instance Bifoldable Telescope where
bifoldMap = bifoldMapDefault
instance Bitraversable Telescope where
bitraverse f g (Telescope args x) = Telescope <$> (traverse . argBody) f args <*> g x
data Ann a b = Ann { _annotation :: a, _annotated :: b }
deriving (Eq, Ord, Read, Show)
makePrisms ''Ann
makeLenses ''Ann
instance Bifunctor Ann where
bimap f g (Ann a b) = Ann (f a) (g b)
instance Bifoldable Ann where
bifoldMap = bifoldMapDefault
instance Bitraversable Ann where
bitraverse f g (Ann a b) = Ann <$> f a <*> g b
instance t ~ Ann a' b' => Rewrapped (Ann a b) t
instance Wrapped (Ann a b) where
type Unwrapped (Ann a b) = (a, b)
_Wrapped' = _Ann
type Channel = Name
_Name :: Iso' Name String
_Name = iso (\(Name x) -> x) Name
unName :: Iso' String Name
unName = from _Name
_OpName :: Iso' OpName String
_OpName = iso (\(OpName x) -> x) OpName
unOpName :: Iso' String OpName
unOpName = from _OpName
indented :: Int -> Fold String String
indented n = lined . re (prefixed (replicate n ' '))
isInternalName :: Name -> Bool
isInternalName (Name s) = '#' `elem` s
internalNameFor :: Show a => a -> EndoPrism Name
internalNameFor = suffixedName . hash256 . show
prefixedName :: String -> EndoPrism Name
prefixedName s = _Name . prefixed (s++"#") . unName
suffixedName :: String -> EndoPrism Name
suffixedName s = _Name . suffixed ('#':s) . unName
suffixedChan :: String -> EndoPrism Channel
suffixedChan = suffixedName
prefixedChan :: String -> EndoPrism Channel
prefixedChan = prefixedName
-- infixed = match "_[^_]*_"
infixed :: Prism' Name OpName
infixed = _Name . prism' con pat . unOpName
where
con x = '_' : x ++ "_"
pat ('_':xs@(_:_:_))
| let s = init xs, last xs == '_' && '_' `notElem` s = Just s
pat _ = Nothing
flat3 :: Iso (a,(b,c)) (d,(e,f)) (a,b,c) (d,e,f)
flat3 = iso (\(x,(y,z))->(x,y,z)) (\(x,y,z)->(x,(y,z)))
traceShowMsg :: Show a => String -> Endom a
traceShowMsg msg x = trace (msg ++ " " ++ show x) x
debugTraceWhen :: Bool -> Msg -> Endom a
debugTraceWhen b s =
if b
then trace (unlines . map ("[DEBUG] " ++) . lines $ s)
else id
type UsedNames = Set Name
avoidUsed :: Name -> Name -> UsedNames -> (Name, UsedNames)
avoidUsed suggestion basename used = go allNames where
allPrefixes = ["x", "y", "z"] ++ ["x" ++ show (i :: Int) | i <- [0..]]
allNames = (if suggestion == anonName then id else (suggestion :)) $
[ prefixedName p # basename | p <- allPrefixes ]
go names | x `member` used = go (tail names)
| otherwise = (x, insert x used)
where x = head names
l2s :: Ord a => [a] -> Set a
l2s = Set.fromList
s2l :: Ord a => Set a -> [a]
s2l = Set.toList
l2m :: Ord k => [(k, a)] -> Map k a
l2m = Map.fromList
m2l :: Ord k => Map k a -> [(k, a)]
m2l = Map.toList
countMap :: (a -> Bool) -> Map k a -> Int
countMap p = Map.size . Map.filter p
infixr 3 ||>
(||>) :: Monad m => Bool -> m Bool -> m Bool
True ||> _ = return True
False ||> my = my
infixr 3 <||>
(<||>) :: Monad m => Op2 (m Bool)
mx <||> my = do
x <- mx
if x
then return True
else my
infixr 3 &&>
(&&>) :: Monad m => Bool -> m Bool -> m Bool
True &&> my = my
False &&> _ = return False
infixr 3 <&&>
(<&&>) :: Monad m => Op2 (m Bool)
mx <&&> my = do
x <- mx
if x
then my
else return False
infixr 4 ?|
-- Reverse infix form of "fromMaybe"
(?|) :: Maybe a -> Endom a
(?|) = flip fromMaybe
(.\\) :: At m => Setter s t m m -> Index m -> s -> t
l .\\ k = l . at k .~ Nothing
theUniqBy :: Rel a -> [a] -> Maybe a
theUniqBy eq (x:xs)
| all (eq x) xs = Just x
theUniqBy _ _ = Nothing
theUniq :: Eq a => [a] -> Maybe a
theUniq = theUniqBy (==)
-- Given a list of sets, return the set of elements which are redundant, namely appear more than
-- once. `redudant` can be used to check the disjointness of sets. Indeed if the result is empty all
-- the sets are disjoint, a non-empty result can be used to report errors.
redundant :: Ord a => [Set a] -> Set a
redundant = snd . foldr f ø
where
f xs (acc, res) =
(acc `union` xs, (acc `intersection` xs) `union` res)
subList :: Eq a => Rel [a]
subList [] _ = True
subList (_:_) [] = False
subList (x:xs) (y:ys)
| x == y = xs `subList` ys
| otherwise = (x:xs) `subList` ys
-- TODO: What is the best threshold between repeatdly deleting elements from a map and filtering the
-- whole map?
deleteList :: Ord k => [k] -> Endom (Map k a)
deleteList = \case
[] -> id
[k] -> Map.delete k
ks -> let sks = l2s ks in
Map.filterWithKey (\k _ -> k `notMember` sks)
rmDups :: Eq a => [a] -> [a]
rmDups (x1:x2:xs) | x1 == x2 = rmDups (x1 : xs)
| otherwise = x1 : rmDups (x2 : xs)
rmDups xs = xs
substPred :: (a -> Bool, s) -> Endom (a -> s)
substPred (p, t) var v
| p v = t
| otherwise = var v
substMember :: Ord a => (Set a, s) -> Endom (a -> s)
substMember (xs, t) = substPred ((`member` xs), t)
subst1 :: Eq a => (a, s) -> Endom (a -> s)
subst1 (x, y) = substPred ((==) x, y)
hasKey :: At m => Index m -> Getter m Bool
hasKey k = to $ has (at k . _Just)
hasNoKey :: At m => Index m -> Getter m Bool
hasNoKey k = to $ has (at k . _Nothing)
The two setters must not overlap .
If they do we can break the composition law :
Given l , f , such that : f.g.f.g = /= f.f.g.g
ll = mergeSetters l l
( ll % ~ f ) . ( ll % ~ g )
= =
l % ~ f.f.g.g
= /=
l % ~ f.g.f.g
= = ll % ~ ( f.g )
The two setters must not overlap.
If they do we can break the composition law:
Given l, f, g such that: f.g.f.g =/= f.f.g.g
ll = mergeSetters l l
(ll %~ f) . (ll %~ g)
==
l %~ f.f.g.g
=/=
l %~ f.g.f.g
== ll %~ (f.g)
-}
mergeSetters :: ASetter s t a b -> ASetter t u a b -> Setter s u a b
mergeSetters l0 l1 = sets $ \f -> over l1 f . over l0 f
composeMapOf :: Getting (Endo r) s a -> (a -> Endom r) -> s -> Endom r
composeMapOf l = flip . foldrOf l
composeOf :: Getting (Endo r) s (Endom r) -> s -> Endom r
composeOf l = appEndo . foldOf (l . to Endo)
quasiQuoter :: String -> QuasiQuoter
quasiQuoter qqName =
QuasiQuoter (err "expressions") (err "patterns") (err "types") (err "declarations")
where
err :: MonadFail m => String -> a -> m b
err kind _ = fail $ qqName ++ ": not available in " ++ kind
list :: Traversal [a] [b] a b
list = traverse
mnot :: a -> Endom (Maybe a)
mnot a Nothing = Just a
mnot _ Just{} = Nothing
q :: QuasiQuoter
q = (quasiQuoter "q") { quoteExp = stringE, quotePat = litP . stringL }
qFile :: QuasiQuoter
qFile = quoteFile q
lookupEnv :: Ord key => Lens' key String -> Lens' env (Map key val)
-> env -> key -> val
lookupEnv keyString vals env k = env ^. vals . at k ?| err
where
err = error $ "lookupEnv " ++ k ^. keyString . to show ++
" in " ++ show (env ^.. vals . to keys . each . keyString)
hash256 :: String -> String
hash256 = showDigest . sha256 . view BL.packedChars
data FinEndom a = FinEndom { _finMap :: !(Map a a), _finDflt :: !a }
deriving (Ord, Show, Read)
mkFinEndom :: (Enum a, Ord a) => Endom a -> FinEndom a
mkFinEndom f = FinEndom (l2m (filter ((/=d).snd) g)) d
where
TODO pick the most recurring element
(_,d):g = [(x, f x) | x <- [toEnum 0..]]
evalFinEndom :: Ord a => FinEndom a -> Endom a
evalFinEndom (FinEndom m d) a = m ^. at a ?| d
instance (Enum a, Ord a) => Semigroup (FinEndom a) where
f <> g = mkFinEndom (evalFinEndom f . evalFinEndom g)
instance (Enum a, Ord a) => Monoid (FinEndom a) where
mempty = mkFinEndom id
constEndom :: Ord a => a -> FinEndom a
constEndom = FinEndom ø
ifEndom :: Ord a => a -> a -> FinEndom a -> FinEndom a
ifEndom c t (FinEndom m d)
| t == d = FinEndom (m & sans c) d
| otherwise = FinEndom (m & at c .~ Just t) d
finEndomMap :: (Enum a, Ord a) => FinEndom a -> Map a a
finEndomMap (FinEndom m d) = foldr (\a -> at a %~ f) m [toEnum 0..]
where f Nothing = Just d
f x = x
instance (Enum a, Ord a) => Eq (FinEndom a) where
f == g = all (\x -> evalFinEndom f x == evalFinEndom g x) [toEnum 0..]
makePrisms ''FinEndom
makeLenses ''FinEndom
| null | https://raw.githubusercontent.com/np/ling/5a49fb5fdaef04b56e26c3ff1cd613e2800b4c23/Ling/Prelude.hs | haskell | # LANGUAGE LambdaCase #
# LANGUAGE Rank2Types #
# LANGUAGE TemplateHaskell #
infixed = match "_[^_]*_"
Reverse infix form of "fromMaybe"
Given a list of sets, return the set of elements which are redundant, namely appear more than
once. `redudant` can be used to check the disjointness of sets. Indeed if the result is empty all
the sets are disjoint, a non-empty result can be used to report errors.
TODO: What is the best threshold between repeatdly deleting elements from a map and filtering the
whole map? | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module Ling.Prelude (module Ling.Prelude, module X) where
import Control.Applicative as X
import Control.Lens as X hiding (Empty)
import Control.Lens.Extras as X (is)
import Control.Monad as X
import Control.Monad.Except as X
import Control.Monad.Reader as X
import Data.Bifoldable as X
import Data.Bifunctor as X
import Data.Bitraversable as X
import Data.ByteString.Lazy.Lens as BL
import Data.Digest.Pure.SHA (sha256, showDigest)
import Data.Foldable as X
import Data.Function as X
import Data.Functor as X
import Data.List as X (elemIndex, partition, sort,
transpose)
import Data.List.Lens as X
import Data.Map as X (Map, keys, keysSet,
unionWithKey)
import qualified Data.Map as Map
import Data.Maybe as X
import Data.Monoid as X hiding (Dual)
import Data.Set as X (Set)
import Data.Set (intersection, member, notMember,
union, insert)
import qualified Data.Set as Set
import Data.Set.Lens as X
import Data.Traversable as X
import Data.Tuple as X
import Debug.Trace as X
import Language.Haskell.TH (litP, stringE, stringL)
import Language.Haskell.TH.Quote
import Ling.Abs
import Numeric.Lens as X
import Text.Show.Pretty as X (ppShow)
type Endom a = a -> a
type EndoPrism a = Prism' a a
type EndoM m a = a -> m a
type Op2 a = a -> a -> a
type Rel a = a -> a -> Bool
type Msg = String
type Verbosity = Bool
newtype Prll a = Prll { _unPrll :: [a] }
deriving (Eq, Ord, Show, Read)
makePrisms ''Prll
makeLenses ''Prll
ø :: Monoid a => a
ø = mempty
instance Semigroup (Prll a) where
Prll ps <> Prll qs = Prll (ps <> qs)
instance Monoid (Prll a) where
mempty = Prll ø
instance Each (Prll a) (Prll b) a b where
each = _Prll . each
newtype Order a = Order { _unOrder :: [a] }
deriving (Eq, Ord, Show, Read)
makePrisms ''Order
makeLenses ''Order
instance Semigroup (Order a) where
Order x <> Order y = Order (x <> y)
instance Monoid (Order a) where
mempty = Order []
instance Each (Order a) (Order b) a b where
each = _Order . each
anonName :: Name
anonName = Name "_"
data Arg a = Arg { _argName :: Name, _argBody :: a }
deriving (Eq, Ord, Show, Read)
makePrisms ''Arg
makeLenses ''Arg
instance Functor Arg where fmap = over argBody
instance Each (Arg a) (Arg b) a b where
each = argBody
instance t ~ Arg a' => Rewrapped (Arg a) t
instance Wrapped (Arg a) where
type Unwrapped (Arg a) = (Name, a)
_Wrapped' = _Arg
anonArg :: a -> Arg a
anonArg = Arg anonName
data Abs a b = Abs { _argAbs :: Arg a, _bodyAbs :: b }
makePrisms ''Abs
makeLenses ''Abs
instance Functor (Abs a) where fmap = over bodyAbs
instance Bifunctor Abs where
bimap f g (Abs arg x) = Abs (f <$> arg) (g x)
instance Bifoldable Abs where
bifoldMap = bifoldMapDefault
instance Bitraversable Abs where
bitraverse f g (Abs arg x) = Abs <$> argBody f arg <*> g x
instance t ~ Abs a' b' => Rewrapped (Abs a b) t
instance Wrapped (Abs a b) where
type Unwrapped (Abs a b) = (Arg a, b)
_Wrapped' = _Abs
TODO : Rename into something like ' Telescoped ' instead
data Telescope a b = Telescope { _argsTele :: [Arg a], _bodyTele :: b }
makePrisms ''Telescope
makeLenses ''Telescope
instance Functor (Telescope a) where
fmap f (Telescope args x) = Telescope args (f x)
instance Bifunctor Telescope where
bimap f g (Telescope args x) = Telescope (fmap f <$> args) (g x)
instance Bifoldable Telescope where
bifoldMap = bifoldMapDefault
instance Bitraversable Telescope where
bitraverse f g (Telescope args x) = Telescope <$> (traverse . argBody) f args <*> g x
data Ann a b = Ann { _annotation :: a, _annotated :: b }
deriving (Eq, Ord, Read, Show)
makePrisms ''Ann
makeLenses ''Ann
instance Bifunctor Ann where
bimap f g (Ann a b) = Ann (f a) (g b)
instance Bifoldable Ann where
bifoldMap = bifoldMapDefault
instance Bitraversable Ann where
bitraverse f g (Ann a b) = Ann <$> f a <*> g b
instance t ~ Ann a' b' => Rewrapped (Ann a b) t
instance Wrapped (Ann a b) where
type Unwrapped (Ann a b) = (a, b)
_Wrapped' = _Ann
type Channel = Name
_Name :: Iso' Name String
_Name = iso (\(Name x) -> x) Name
unName :: Iso' String Name
unName = from _Name
_OpName :: Iso' OpName String
_OpName = iso (\(OpName x) -> x) OpName
unOpName :: Iso' String OpName
unOpName = from _OpName
indented :: Int -> Fold String String
indented n = lined . re (prefixed (replicate n ' '))
isInternalName :: Name -> Bool
isInternalName (Name s) = '#' `elem` s
internalNameFor :: Show a => a -> EndoPrism Name
internalNameFor = suffixedName . hash256 . show
prefixedName :: String -> EndoPrism Name
prefixedName s = _Name . prefixed (s++"#") . unName
suffixedName :: String -> EndoPrism Name
suffixedName s = _Name . suffixed ('#':s) . unName
suffixedChan :: String -> EndoPrism Channel
suffixedChan = suffixedName
prefixedChan :: String -> EndoPrism Channel
prefixedChan = prefixedName
infixed :: Prism' Name OpName
infixed = _Name . prism' con pat . unOpName
where
con x = '_' : x ++ "_"
pat ('_':xs@(_:_:_))
| let s = init xs, last xs == '_' && '_' `notElem` s = Just s
pat _ = Nothing
flat3 :: Iso (a,(b,c)) (d,(e,f)) (a,b,c) (d,e,f)
flat3 = iso (\(x,(y,z))->(x,y,z)) (\(x,y,z)->(x,(y,z)))
traceShowMsg :: Show a => String -> Endom a
traceShowMsg msg x = trace (msg ++ " " ++ show x) x
debugTraceWhen :: Bool -> Msg -> Endom a
debugTraceWhen b s =
if b
then trace (unlines . map ("[DEBUG] " ++) . lines $ s)
else id
type UsedNames = Set Name
avoidUsed :: Name -> Name -> UsedNames -> (Name, UsedNames)
avoidUsed suggestion basename used = go allNames where
allPrefixes = ["x", "y", "z"] ++ ["x" ++ show (i :: Int) | i <- [0..]]
allNames = (if suggestion == anonName then id else (suggestion :)) $
[ prefixedName p # basename | p <- allPrefixes ]
go names | x `member` used = go (tail names)
| otherwise = (x, insert x used)
where x = head names
l2s :: Ord a => [a] -> Set a
l2s = Set.fromList
s2l :: Ord a => Set a -> [a]
s2l = Set.toList
l2m :: Ord k => [(k, a)] -> Map k a
l2m = Map.fromList
m2l :: Ord k => Map k a -> [(k, a)]
m2l = Map.toList
countMap :: (a -> Bool) -> Map k a -> Int
countMap p = Map.size . Map.filter p
infixr 3 ||>
(||>) :: Monad m => Bool -> m Bool -> m Bool
True ||> _ = return True
False ||> my = my
infixr 3 <||>
(<||>) :: Monad m => Op2 (m Bool)
mx <||> my = do
x <- mx
if x
then return True
else my
infixr 3 &&>
(&&>) :: Monad m => Bool -> m Bool -> m Bool
True &&> my = my
False &&> _ = return False
infixr 3 <&&>
(<&&>) :: Monad m => Op2 (m Bool)
mx <&&> my = do
x <- mx
if x
then my
else return False
infixr 4 ?|
(?|) :: Maybe a -> Endom a
(?|) = flip fromMaybe
(.\\) :: At m => Setter s t m m -> Index m -> s -> t
l .\\ k = l . at k .~ Nothing
theUniqBy :: Rel a -> [a] -> Maybe a
theUniqBy eq (x:xs)
| all (eq x) xs = Just x
theUniqBy _ _ = Nothing
theUniq :: Eq a => [a] -> Maybe a
theUniq = theUniqBy (==)
redundant :: Ord a => [Set a] -> Set a
redundant = snd . foldr f ø
where
f xs (acc, res) =
(acc `union` xs, (acc `intersection` xs) `union` res)
subList :: Eq a => Rel [a]
subList [] _ = True
subList (_:_) [] = False
subList (x:xs) (y:ys)
| x == y = xs `subList` ys
| otherwise = (x:xs) `subList` ys
deleteList :: Ord k => [k] -> Endom (Map k a)
deleteList = \case
[] -> id
[k] -> Map.delete k
ks -> let sks = l2s ks in
Map.filterWithKey (\k _ -> k `notMember` sks)
rmDups :: Eq a => [a] -> [a]
rmDups (x1:x2:xs) | x1 == x2 = rmDups (x1 : xs)
| otherwise = x1 : rmDups (x2 : xs)
rmDups xs = xs
substPred :: (a -> Bool, s) -> Endom (a -> s)
substPred (p, t) var v
| p v = t
| otherwise = var v
substMember :: Ord a => (Set a, s) -> Endom (a -> s)
substMember (xs, t) = substPred ((`member` xs), t)
subst1 :: Eq a => (a, s) -> Endom (a -> s)
subst1 (x, y) = substPred ((==) x, y)
hasKey :: At m => Index m -> Getter m Bool
hasKey k = to $ has (at k . _Just)
hasNoKey :: At m => Index m -> Getter m Bool
hasNoKey k = to $ has (at k . _Nothing)
The two setters must not overlap .
If they do we can break the composition law :
Given l , f , such that : f.g.f.g = /= f.f.g.g
ll = mergeSetters l l
( ll % ~ f ) . ( ll % ~ g )
= =
l % ~ f.f.g.g
= /=
l % ~ f.g.f.g
= = ll % ~ ( f.g )
The two setters must not overlap.
If they do we can break the composition law:
Given l, f, g such that: f.g.f.g =/= f.f.g.g
ll = mergeSetters l l
(ll %~ f) . (ll %~ g)
==
l %~ f.f.g.g
=/=
l %~ f.g.f.g
== ll %~ (f.g)
-}
mergeSetters :: ASetter s t a b -> ASetter t u a b -> Setter s u a b
mergeSetters l0 l1 = sets $ \f -> over l1 f . over l0 f
composeMapOf :: Getting (Endo r) s a -> (a -> Endom r) -> s -> Endom r
composeMapOf l = flip . foldrOf l
composeOf :: Getting (Endo r) s (Endom r) -> s -> Endom r
composeOf l = appEndo . foldOf (l . to Endo)
quasiQuoter :: String -> QuasiQuoter
quasiQuoter qqName =
QuasiQuoter (err "expressions") (err "patterns") (err "types") (err "declarations")
where
err :: MonadFail m => String -> a -> m b
err kind _ = fail $ qqName ++ ": not available in " ++ kind
list :: Traversal [a] [b] a b
list = traverse
mnot :: a -> Endom (Maybe a)
mnot a Nothing = Just a
mnot _ Just{} = Nothing
q :: QuasiQuoter
q = (quasiQuoter "q") { quoteExp = stringE, quotePat = litP . stringL }
qFile :: QuasiQuoter
qFile = quoteFile q
lookupEnv :: Ord key => Lens' key String -> Lens' env (Map key val)
-> env -> key -> val
lookupEnv keyString vals env k = env ^. vals . at k ?| err
where
err = error $ "lookupEnv " ++ k ^. keyString . to show ++
" in " ++ show (env ^.. vals . to keys . each . keyString)
hash256 :: String -> String
hash256 = showDigest . sha256 . view BL.packedChars
data FinEndom a = FinEndom { _finMap :: !(Map a a), _finDflt :: !a }
deriving (Ord, Show, Read)
mkFinEndom :: (Enum a, Ord a) => Endom a -> FinEndom a
mkFinEndom f = FinEndom (l2m (filter ((/=d).snd) g)) d
where
TODO pick the most recurring element
(_,d):g = [(x, f x) | x <- [toEnum 0..]]
evalFinEndom :: Ord a => FinEndom a -> Endom a
evalFinEndom (FinEndom m d) a = m ^. at a ?| d
instance (Enum a, Ord a) => Semigroup (FinEndom a) where
f <> g = mkFinEndom (evalFinEndom f . evalFinEndom g)
instance (Enum a, Ord a) => Monoid (FinEndom a) where
mempty = mkFinEndom id
constEndom :: Ord a => a -> FinEndom a
constEndom = FinEndom ø
ifEndom :: Ord a => a -> a -> FinEndom a -> FinEndom a
ifEndom c t (FinEndom m d)
| t == d = FinEndom (m & sans c) d
| otherwise = FinEndom (m & at c .~ Just t) d
finEndomMap :: (Enum a, Ord a) => FinEndom a -> Map a a
finEndomMap (FinEndom m d) = foldr (\a -> at a %~ f) m [toEnum 0..]
where f Nothing = Just d
f x = x
instance (Enum a, Ord a) => Eq (FinEndom a) where
f == g = all (\x -> evalFinEndom f x == evalFinEndom g x) [toEnum 0..]
makePrisms ''FinEndom
makeLenses ''FinEndom
|
f823617c8e549b04ba44a3cd4ea398935a657abeb43415c2b13123f0d0026e14 | zoomhub/zoomhub | ContentState.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module ZoomHub.Types.ContentState
( ContentState (..),
fromString,
isCompleted,
)
where
import Data.Maybe (fromJust)
import Data.String (IsString)
import qualified Data.String as String
import Data.Text (Text)
import qualified Data.Text as T
import Squeal.PostgreSQL
( FromValue (..),
Literal (..),
PG,
PGType (PGtext),
ToParam (..),
)
data ContentState
= Initialized
| Active
| CompletedSuccess
| CompletedFailure
deriving (Eq, Show)
fromString :: String -> Maybe ContentState
fromString "initialized" = Just Initialized
fromString "active" = Just Active
fromString "completed:success" = Just CompletedSuccess
fromString "completed:failure" = Just CompletedFailure
fromString _ = Nothing
toText :: ContentState -> Text
toText Initialized = "initialized"
toText Active = "active"
toText CompletedSuccess = "completed:success"
toText CompletedFailure = "completed:failure"
isCompleted :: ContentState -> Bool
isCompleted state = case state of
Initialized -> False
Active -> False
CompletedSuccess -> True
CompletedFailure -> True
-- Squeal / PostgreSQL
instance FromValue 'PGtext ContentState where
-- TODO: What if database value is not a valid?
fromValue = fromJust . fromString <$> fromValue @'PGtext
type instance PG ContentState = 'PGtext
instance ToParam ContentState 'PGtext where
toParam = toParam . toText
toExpression :: IsString a => ContentState -> a
toExpression = String.fromString . T.unpack . toText
instance Literal ContentState where
literal = toExpression
| null | https://raw.githubusercontent.com/zoomhub/zoomhub/65aa2ab63902d6425785caf7655f60afe5df37a6/src/ZoomHub/Types/ContentState.hs | haskell | # LANGUAGE OverloadedStrings #
Squeal / PostgreSQL
TODO: What if database value is not a valid? | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module ZoomHub.Types.ContentState
( ContentState (..),
fromString,
isCompleted,
)
where
import Data.Maybe (fromJust)
import Data.String (IsString)
import qualified Data.String as String
import Data.Text (Text)
import qualified Data.Text as T
import Squeal.PostgreSQL
( FromValue (..),
Literal (..),
PG,
PGType (PGtext),
ToParam (..),
)
data ContentState
= Initialized
| Active
| CompletedSuccess
| CompletedFailure
deriving (Eq, Show)
fromString :: String -> Maybe ContentState
fromString "initialized" = Just Initialized
fromString "active" = Just Active
fromString "completed:success" = Just CompletedSuccess
fromString "completed:failure" = Just CompletedFailure
fromString _ = Nothing
toText :: ContentState -> Text
toText Initialized = "initialized"
toText Active = "active"
toText CompletedSuccess = "completed:success"
toText CompletedFailure = "completed:failure"
isCompleted :: ContentState -> Bool
isCompleted state = case state of
Initialized -> False
Active -> False
CompletedSuccess -> True
CompletedFailure -> True
instance FromValue 'PGtext ContentState where
fromValue = fromJust . fromString <$> fromValue @'PGtext
type instance PG ContentState = 'PGtext
instance ToParam ContentState 'PGtext where
toParam = toParam . toText
toExpression :: IsString a => ContentState -> a
toExpression = String.fromString . T.unpack . toText
instance Literal ContentState where
literal = toExpression
|
a2c670b4cf1234f5f7b0ad8b71800d29b88cce154f9af7b39c2986eb1180698b | clojure-interop/java-jdk | StreamReaderDelegate.clj | (ns javax.xml.stream.util.StreamReaderDelegate
"This is the base class for deriving an XMLStreamReader filter
This class is designed to sit between an XMLStreamReader and an
application's XMLStreamReader. By default each method
does nothing but call the corresponding method on the
parent interface."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.stream.util StreamReaderDelegate]))
(defn ->stream-reader-delegate
"Constructor.
Construct an filter with the specified parent.
reader - the parent - `javax.xml.stream.XMLStreamReader`"
(^StreamReaderDelegate [^javax.xml.stream.XMLStreamReader reader]
(new StreamReaderDelegate reader))
(^StreamReaderDelegate []
(new StreamReaderDelegate )))
(defn white-space?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to all whitespace, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isWhiteSpace))))
(defn get-location
"Description copied from interface: XMLStreamReader
returns: `javax.xml.stream.Location`"
(^javax.xml.stream.Location [^StreamReaderDelegate this]
(-> this (.getLocation))))
(defn set-parent
"Set the parent of this instance.
reader - the new parent - `javax.xml.stream.XMLStreamReader`"
([^StreamReaderDelegate this ^javax.xml.stream.XMLStreamReader reader]
(-> this (.setParent reader))))
(defn get-attribute-name
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the QName of the attribute - `javax.xml.namespace.QName`"
(^javax.xml.namespace.QName [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeName index))))
(defn get-namespace-uri
"Description copied from interface: XMLStreamReader
prefix - The prefix to lookup, may not be null - `java.lang.String`
returns: the uri bound to the given prefix or null if it is not bound - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^java.lang.String prefix]
(-> this (.getNamespaceURI prefix)))
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getNamespaceURI))))
(defn next
"Description copied from interface: XMLStreamReader
returns: the integer code corresponding to the current parse event - `int`
throws: javax.xml.stream.XMLStreamException - if there is an error processing the underlying XML source"
(^Integer [^StreamReaderDelegate this]
(-> this (.next))))
(defn get-event-type
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getEventType))))
(defn get-encoding
"Description copied from interface: XMLStreamReader
returns: the encoding of this instance or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getEncoding))))
(defn get-pi-data
"Description copied from interface: XMLStreamReader
returns: the data or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPIData))))
(defn characters?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to character data, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isCharacters))))
(defn start-element?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to a start tag, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isStartElement))))
(defn has-name?
"Description copied from interface: XMLStreamReader
returns: `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasName))))
(defn get-text-start
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getTextStart))))
(defn has-text?
"Description copied from interface: XMLStreamReader
returns: `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasText))))
(defn get-text
"Description copied from interface: XMLStreamReader
returns: the current text or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getText))))
(defn get-element-text
"Description copied from interface: XMLStreamReader
returns: `java.lang.String`
throws: javax.xml.stream.XMLStreamException - if the current event is not a START_ELEMENT or if a non text element is encountered"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getElementText))))
(defn require
"Description copied from interface: XMLStreamReader
type - the event type - `int`
namespace-uri - the uri of the event, may be null - `java.lang.String`
local-name - the localName of the event, may be null - `java.lang.String`
throws: javax.xml.stream.XMLStreamException - if the required values are not matched."
([^StreamReaderDelegate this ^Integer type ^java.lang.String namespace-uri ^java.lang.String local-name]
(-> this (.require type namespace-uri local-name))))
(defn attribute-specified?
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: true if this is a default attribute - `boolean`"
(^Boolean [^StreamReaderDelegate this ^Integer index]
(-> this (.isAttributeSpecified index))))
(defn get-namespace-count
"Description copied from interface: XMLStreamReader
returns: returns the number of namespace declarations on this specific element - `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getNamespaceCount))))
(defn get-attribute-count
"Description copied from interface: XMLStreamReader
returns: returns the number of attributes - `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getAttributeCount))))
(defn get-text-length
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getTextLength))))
(defn get-character-encoding-scheme
"Description copied from interface: XMLStreamReader
returns: the encoding declared in the document or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getCharacterEncodingScheme))))
(defn get-name
"Description copied from interface: XMLStreamReader
returns: the QName for the current START_ELEMENT or END_ELEMENT event - `javax.xml.namespace.QName`"
(^javax.xml.namespace.QName [^StreamReaderDelegate this]
(-> this (.getName))))
(defn get-namespace-context
"Description copied from interface: XMLStreamReader
returns: return a namespace context - `javax.xml.namespace.NamespaceContext`"
(^javax.xml.namespace.NamespaceContext [^StreamReaderDelegate this]
(-> this (.getNamespaceContext))))
(defn get-text-characters
"Description copied from interface: XMLStreamReader
source-start - the index of the first character in the source array to copy - `int`
target - the destination array - `char[]`
target-start - the start offset in the target array - `int`
length - the number of characters to copy - `int`
returns: the number of characters actually copied - `int`
throws: javax.xml.stream.XMLStreamException - if the underlying XML source is not well-formed"
(^Integer [^StreamReaderDelegate this ^Integer source-start target ^Integer target-start ^Integer length]
(-> this (.getTextCharacters source-start target target-start length)))
([^StreamReaderDelegate this]
(-> this (.getTextCharacters))))
(defn get-version
"Description copied from interface: XMLStreamReader
returns: the XML version or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getVersion))))
(defn get-parent
"Get the parent of this instance.
returns: the parent or null if none is set - `javax.xml.stream.XMLStreamReader`"
(^javax.xml.stream.XMLStreamReader [^StreamReaderDelegate this]
(-> this (.getParent))))
(defn has-next?
"Description copied from interface: XMLStreamReader
returns: true if there are more events, false otherwise - `boolean`
throws: javax.xml.stream.XMLStreamException - if there is a fatal error detecting the next state"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasNext))))
(defn get-attribute-value
"Description copied from interface: XMLStreamReader
namespace-uri - the namespace of the attribute - `java.lang.String`
local-name - the local name of the attribute, cannot be null - `java.lang.String`
returns: returns the value of the attribute , returns null if not found - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^java.lang.String namespace-uri ^java.lang.String local-name]
(-> this (.getAttributeValue namespace-uri local-name)))
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeValue index))))
(defn close
"Description copied from interface: XMLStreamReader
throws: javax.xml.stream.XMLStreamException - if there are errors freeing associated resources"
([^StreamReaderDelegate this]
(-> this (.close))))
(defn get-property
"Description copied from interface: XMLStreamReader
name - The name of the property, may not be null - `java.lang.String`
returns: The value of the property - `java.lang.Object`"
(^java.lang.Object [^StreamReaderDelegate this ^java.lang.String name]
(-> this (.getProperty name))))
(defn get-namespace-prefix
"Description copied from interface: XMLStreamReader
index - the position of the namespace declaration - `int`
returns: returns the namespace prefix - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getNamespacePrefix index))))
(defn next-tag
"Description copied from interface: XMLStreamReader
returns: the event type of the element read (START_ELEMENT or END_ELEMENT) - `int`
throws: javax.xml.stream.XMLStreamException - if the current event is not white space, PROCESSING_INSTRUCTION, START_ELEMENT or END_ELEMENT"
(^Integer [^StreamReaderDelegate this]
(-> this (.nextTag))))
(defn get-attribute-local-name
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the localName of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeLocalName index))))
(defn standalone?
"Description copied from interface: XMLStreamReader
returns: true if this is standalone, or false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isStandalone))))
(defn get-prefix
"Description copied from interface: XMLStreamReader
returns: the prefix or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPrefix))))
(defn get-pi-target
"Description copied from interface: XMLStreamReader
returns: the target or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPITarget))))
(defn get-attribute-prefix
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the prefix of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributePrefix index))))
(defn get-local-name
"Description copied from interface: XMLStreamReader
returns: the localName - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getLocalName))))
(defn end-element?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to an end tag, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isEndElement))))
(defn get-attribute-type
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the XML type of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeType index))))
(defn standalone-set
"Description copied from interface: XMLStreamReader
returns: true if standalone was set in the document, or false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.standaloneSet))))
(defn get-attribute-namespace
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the namespace URI (can be null) - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeNamespace index))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.xml/src/javax/xml/stream/util/StreamReaderDelegate.clj | clojure | (ns javax.xml.stream.util.StreamReaderDelegate
"This is the base class for deriving an XMLStreamReader filter
This class is designed to sit between an XMLStreamReader and an
application's XMLStreamReader. By default each method
does nothing but call the corresponding method on the
parent interface."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.stream.util StreamReaderDelegate]))
(defn ->stream-reader-delegate
"Constructor.
Construct an filter with the specified parent.
reader - the parent - `javax.xml.stream.XMLStreamReader`"
(^StreamReaderDelegate [^javax.xml.stream.XMLStreamReader reader]
(new StreamReaderDelegate reader))
(^StreamReaderDelegate []
(new StreamReaderDelegate )))
(defn white-space?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to all whitespace, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isWhiteSpace))))
(defn get-location
"Description copied from interface: XMLStreamReader
returns: `javax.xml.stream.Location`"
(^javax.xml.stream.Location [^StreamReaderDelegate this]
(-> this (.getLocation))))
(defn set-parent
"Set the parent of this instance.
reader - the new parent - `javax.xml.stream.XMLStreamReader`"
([^StreamReaderDelegate this ^javax.xml.stream.XMLStreamReader reader]
(-> this (.setParent reader))))
(defn get-attribute-name
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the QName of the attribute - `javax.xml.namespace.QName`"
(^javax.xml.namespace.QName [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeName index))))
(defn get-namespace-uri
"Description copied from interface: XMLStreamReader
prefix - The prefix to lookup, may not be null - `java.lang.String`
returns: the uri bound to the given prefix or null if it is not bound - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^java.lang.String prefix]
(-> this (.getNamespaceURI prefix)))
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getNamespaceURI))))
(defn next
"Description copied from interface: XMLStreamReader
returns: the integer code corresponding to the current parse event - `int`
throws: javax.xml.stream.XMLStreamException - if there is an error processing the underlying XML source"
(^Integer [^StreamReaderDelegate this]
(-> this (.next))))
(defn get-event-type
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getEventType))))
(defn get-encoding
"Description copied from interface: XMLStreamReader
returns: the encoding of this instance or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getEncoding))))
(defn get-pi-data
"Description copied from interface: XMLStreamReader
returns: the data or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPIData))))
(defn characters?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to character data, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isCharacters))))
(defn start-element?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to a start tag, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isStartElement))))
(defn has-name?
"Description copied from interface: XMLStreamReader
returns: `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasName))))
(defn get-text-start
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getTextStart))))
(defn has-text?
"Description copied from interface: XMLStreamReader
returns: `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasText))))
(defn get-text
"Description copied from interface: XMLStreamReader
returns: the current text or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getText))))
(defn get-element-text
"Description copied from interface: XMLStreamReader
returns: `java.lang.String`
throws: javax.xml.stream.XMLStreamException - if the current event is not a START_ELEMENT or if a non text element is encountered"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getElementText))))
(defn require
"Description copied from interface: XMLStreamReader
type - the event type - `int`
namespace-uri - the uri of the event, may be null - `java.lang.String`
local-name - the localName of the event, may be null - `java.lang.String`
throws: javax.xml.stream.XMLStreamException - if the required values are not matched."
([^StreamReaderDelegate this ^Integer type ^java.lang.String namespace-uri ^java.lang.String local-name]
(-> this (.require type namespace-uri local-name))))
(defn attribute-specified?
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: true if this is a default attribute - `boolean`"
(^Boolean [^StreamReaderDelegate this ^Integer index]
(-> this (.isAttributeSpecified index))))
(defn get-namespace-count
"Description copied from interface: XMLStreamReader
returns: returns the number of namespace declarations on this specific element - `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getNamespaceCount))))
(defn get-attribute-count
"Description copied from interface: XMLStreamReader
returns: returns the number of attributes - `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getAttributeCount))))
(defn get-text-length
"Description copied from interface: XMLStreamReader
returns: `int`"
(^Integer [^StreamReaderDelegate this]
(-> this (.getTextLength))))
(defn get-character-encoding-scheme
"Description copied from interface: XMLStreamReader
returns: the encoding declared in the document or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getCharacterEncodingScheme))))
(defn get-name
"Description copied from interface: XMLStreamReader
returns: the QName for the current START_ELEMENT or END_ELEMENT event - `javax.xml.namespace.QName`"
(^javax.xml.namespace.QName [^StreamReaderDelegate this]
(-> this (.getName))))
(defn get-namespace-context
"Description copied from interface: XMLStreamReader
returns: return a namespace context - `javax.xml.namespace.NamespaceContext`"
(^javax.xml.namespace.NamespaceContext [^StreamReaderDelegate this]
(-> this (.getNamespaceContext))))
(defn get-text-characters
"Description copied from interface: XMLStreamReader
source-start - the index of the first character in the source array to copy - `int`
target - the destination array - `char[]`
target-start - the start offset in the target array - `int`
length - the number of characters to copy - `int`
returns: the number of characters actually copied - `int`
throws: javax.xml.stream.XMLStreamException - if the underlying XML source is not well-formed"
(^Integer [^StreamReaderDelegate this ^Integer source-start target ^Integer target-start ^Integer length]
(-> this (.getTextCharacters source-start target target-start length)))
([^StreamReaderDelegate this]
(-> this (.getTextCharacters))))
(defn get-version
"Description copied from interface: XMLStreamReader
returns: the XML version or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getVersion))))
(defn get-parent
"Get the parent of this instance.
returns: the parent or null if none is set - `javax.xml.stream.XMLStreamReader`"
(^javax.xml.stream.XMLStreamReader [^StreamReaderDelegate this]
(-> this (.getParent))))
(defn has-next?
"Description copied from interface: XMLStreamReader
returns: true if there are more events, false otherwise - `boolean`
throws: javax.xml.stream.XMLStreamException - if there is a fatal error detecting the next state"
(^Boolean [^StreamReaderDelegate this]
(-> this (.hasNext))))
(defn get-attribute-value
"Description copied from interface: XMLStreamReader
namespace-uri - the namespace of the attribute - `java.lang.String`
local-name - the local name of the attribute, cannot be null - `java.lang.String`
returns: returns the value of the attribute , returns null if not found - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^java.lang.String namespace-uri ^java.lang.String local-name]
(-> this (.getAttributeValue namespace-uri local-name)))
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeValue index))))
(defn close
"Description copied from interface: XMLStreamReader
throws: javax.xml.stream.XMLStreamException - if there are errors freeing associated resources"
([^StreamReaderDelegate this]
(-> this (.close))))
(defn get-property
"Description copied from interface: XMLStreamReader
name - The name of the property, may not be null - `java.lang.String`
returns: The value of the property - `java.lang.Object`"
(^java.lang.Object [^StreamReaderDelegate this ^java.lang.String name]
(-> this (.getProperty name))))
(defn get-namespace-prefix
"Description copied from interface: XMLStreamReader
index - the position of the namespace declaration - `int`
returns: returns the namespace prefix - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getNamespacePrefix index))))
(defn next-tag
"Description copied from interface: XMLStreamReader
returns: the event type of the element read (START_ELEMENT or END_ELEMENT) - `int`
throws: javax.xml.stream.XMLStreamException - if the current event is not white space, PROCESSING_INSTRUCTION, START_ELEMENT or END_ELEMENT"
(^Integer [^StreamReaderDelegate this]
(-> this (.nextTag))))
(defn get-attribute-local-name
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the localName of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeLocalName index))))
(defn standalone?
"Description copied from interface: XMLStreamReader
returns: true if this is standalone, or false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isStandalone))))
(defn get-prefix
"Description copied from interface: XMLStreamReader
returns: the prefix or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPrefix))))
(defn get-pi-target
"Description copied from interface: XMLStreamReader
returns: the target or null - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getPITarget))))
(defn get-attribute-prefix
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the prefix of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributePrefix index))))
(defn get-local-name
"Description copied from interface: XMLStreamReader
returns: the localName - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this]
(-> this (.getLocalName))))
(defn end-element?
"Description copied from interface: XMLStreamReader
returns: true if the cursor points to an end tag, false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.isEndElement))))
(defn get-attribute-type
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the XML type of the attribute - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeType index))))
(defn standalone-set
"Description copied from interface: XMLStreamReader
returns: true if standalone was set in the document, or false otherwise - `boolean`"
(^Boolean [^StreamReaderDelegate this]
(-> this (.standaloneSet))))
(defn get-attribute-namespace
"Description copied from interface: XMLStreamReader
index - the position of the attribute - `int`
returns: the namespace URI (can be null) - `java.lang.String`"
(^java.lang.String [^StreamReaderDelegate this ^Integer index]
(-> this (.getAttributeNamespace index))))
| |
2c8c81b32dc8eab0166646a5b0ffee6156c6a9ecb2785729adc257b3dfd9446d | EwenG/muance | utils_test.cljs | (ns muance.utils-test
(:require [goog.dom :as gdom]
[muance.core :as m]
[muance.print :as mprint]
[muance.dom :as dom]))
(defonce v-state (atom nil))
(defn update-v-state [vtree]
(reset! v-state (mprint/format-vtree vtree)))
(defn new-root []
(when-let [root (.getElementById js/document "root")]
(gdom/removeNode root))
(let [root (doto (js/document.createElement "div")
(aset "id" "root"))]
(.appendChild js/document.body root)
root))
(defn root []
(.getElementById js/document "root"))
(defn new-vtree
([vtree]
(new-vtree vtree nil))
([vtree vtree-params]
(when vtree
(m/remove vtree))
(dom/vtree (assoc vtree-params :post-render-hook update-v-state))))
| null | https://raw.githubusercontent.com/EwenG/muance/11315603060aa57f8614e8f5fcecefa0cd0db17a/test/muance/utils_test.cljs | clojure | (ns muance.utils-test
(:require [goog.dom :as gdom]
[muance.core :as m]
[muance.print :as mprint]
[muance.dom :as dom]))
(defonce v-state (atom nil))
(defn update-v-state [vtree]
(reset! v-state (mprint/format-vtree vtree)))
(defn new-root []
(when-let [root (.getElementById js/document "root")]
(gdom/removeNode root))
(let [root (doto (js/document.createElement "div")
(aset "id" "root"))]
(.appendChild js/document.body root)
root))
(defn root []
(.getElementById js/document "root"))
(defn new-vtree
([vtree]
(new-vtree vtree nil))
([vtree vtree-params]
(when vtree
(m/remove vtree))
(dom/vtree (assoc vtree-params :post-render-hook update-v-state))))
| |
aa1ef5687b5f28aecf07e4adcfb753558f4a4f2c4e9c0afac98b29c5c8e04017 | fgalassi/cs61a-sp11 | h7.4.scm | (load "obj")
(define-class (miss-manners obj)
(method (please meth arg)
(ask obj meth arg)))
| null | https://raw.githubusercontent.com/fgalassi/cs61a-sp11/66df3b54b03ee27f368c716ae314fd7ed85c4dba/homework/h7.4.scm | scheme | (load "obj")
(define-class (miss-manners obj)
(method (please meth arg)
(ask obj meth arg)))
| |
3e59771a14b4ede5bc7aa06ff93d7a1e1136e7b7deda6e765fe004781afcacf7 | jonascarpay/apecs | Util.hs | For Data . Semigroup compatibility
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE Strict #-}
# LANGUAGE TypeFamilies #
module Apecs.Util (
-- * Utility
runGC, global,
*
EntityCounter(..), nextEntity, newEntity, newEntity_,
) where
import Control.Applicative (liftA2)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Data.Monoid
import Data.Semigroup
import System.Mem (performMajorGC)
import Apecs.Core
import Apecs.Stores
import Apecs.System
-- | Convenience entity, for use in places where the entity value does not matter, i.e. a global store.
global :: Entity
global = Entity (-1)
| Component used by newEntity to track the number of issued entities .
-- Automatically added to any world created with @makeWorld@
newtype EntityCounter = EntityCounter {getCounter :: Sum Int} deriving (Semigroup, Monoid, Eq, Show)
instance Component EntityCounter where
type Storage EntityCounter = ReadOnly (Global EntityCounter)
| Bumps the and yields its value
{-# INLINE nextEntity #-}
nextEntity :: (MonadIO m, Get w m EntityCounter) => SystemT w m Entity
nextEntity = do EntityCounter n <- get global
setReadOnly global (EntityCounter $ n+1)
return (Entity . getSum $ n)
-- | Writes the given components to a new entity, and yields that entity.
-- The return value is often ignored.
# INLINE newEntity #
newEntity :: (MonadIO m, Set w m c, Get w m EntityCounter)
=> c -> SystemT w m Entity
newEntity c = do ety <- nextEntity
set ety c
return ety
-- | Writes the given components to a new entity without yelding the result.
-- Used mostly for convenience.
# INLINE newEntity _ #
newEntity_ :: (MonadIO m, Set world m component, Get world m EntityCounter)
=> component -> SystemT world m ()
newEntity_ component = do
entity <- nextEntity
set entity component
-- | Explicitly invoke the garbage collector
runGC :: System w ()
runGC = lift performMajorGC
| null | https://raw.githubusercontent.com/jonascarpay/apecs/19c9dc15b31bfd4173230a327dec98e2aa959936/apecs/src/Apecs/Util.hs | haskell | # LANGUAGE Strict #
* Utility
| Convenience entity, for use in places where the entity value does not matter, i.e. a global store.
Automatically added to any world created with @makeWorld@
# INLINE nextEntity #
| Writes the given components to a new entity, and yields that entity.
The return value is often ignored.
| Writes the given components to a new entity without yelding the result.
Used mostly for convenience.
| Explicitly invoke the garbage collector | For Data . Semigroup compatibility
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Apecs.Util (
runGC, global,
*
EntityCounter(..), nextEntity, newEntity, newEntity_,
) where
import Control.Applicative (liftA2)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Data.Monoid
import Data.Semigroup
import System.Mem (performMajorGC)
import Apecs.Core
import Apecs.Stores
import Apecs.System
global :: Entity
global = Entity (-1)
| Component used by newEntity to track the number of issued entities .
newtype EntityCounter = EntityCounter {getCounter :: Sum Int} deriving (Semigroup, Monoid, Eq, Show)
instance Component EntityCounter where
type Storage EntityCounter = ReadOnly (Global EntityCounter)
| Bumps the and yields its value
nextEntity :: (MonadIO m, Get w m EntityCounter) => SystemT w m Entity
nextEntity = do EntityCounter n <- get global
setReadOnly global (EntityCounter $ n+1)
return (Entity . getSum $ n)
# INLINE newEntity #
newEntity :: (MonadIO m, Set w m c, Get w m EntityCounter)
=> c -> SystemT w m Entity
newEntity c = do ety <- nextEntity
set ety c
return ety
# INLINE newEntity _ #
newEntity_ :: (MonadIO m, Set world m component, Get world m EntityCounter)
=> component -> SystemT world m ()
newEntity_ component = do
entity <- nextEntity
set entity component
runGC :: System w ()
runGC = lift performMajorGC
|
4acf2c8ec6056b009cd180b3ace02153c50982df482d49535bb755fc1276d1fa | cnuernber/dtype-next | nio_buffer.clj | (ns tech.v3.datatype.nio-buffer
(:require [tech.v3.datatype.array-buffer :as array-buffer]
[tech.v3.datatype.native-buffer :as native-buffer]
[tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.casting :as casting]
[tech.v3.datatype.base :as dtype-base]
[tech.v3.datatype.errors :as errors]
[tech.v3.resource :as resource]
[clojure.tools.logging :as log])
(:import [java.nio Buffer ByteBuffer ShortBuffer IntBuffer LongBuffer
FloatBuffer DoubleBuffer ByteOrder]
[tech.v3.datatype UnsafeUtil]
[tech.v3.datatype.native_buffer NativeBuffer]
[tech.v3.datatype.array_buffer ArrayBuffer]))
(defn datatype->nio-buf-type
[datatype]
(case (casting/host-flatten datatype)
:int8 java.nio.ByteBuffer
:int16 java.nio.ShortBuffer
:int32 java.nio.IntBuffer
:int64 java.nio.LongBuffer
:float32 java.nio.FloatBuffer
:float64 java.nio.DoubleBuffer))
(defn as-byte-buffer ^ByteBuffer [item] item)
(defn as-short-buffer ^ShortBuffer [item] item)
(defn as-int-buffer ^IntBuffer [item] item)
(defn as-long-buffer ^LongBuffer [item] item)
(defn as-float-buffer ^FloatBuffer [item] item)
(defn as-double-buffer ^DoubleBuffer [item] item)
(defmacro datatype->nio-buf
[datatype item]
(case (casting/host-flatten datatype)
:int8 `(as-byte-buffer ~item)
:int16 `(as-short-buffer ~item)
:int32 `(as-int-buffer ~item)
:int64 `(as-long-buffer ~item)
:float32 `(as-float-buffer ~item)
:float64 `(as-double-buffer ~item)))
(def nio-datatypes #{:int8 :int16 :int32 :int64 :float32 :float64})
(defn buffer-address
^long [^Buffer buf]
(.getLong (native-buffer/unsafe) ^Object buf UnsafeUtil/addressFieldOffset))
(defn buf->buffer
[^Buffer buf]
(let [cbuf (if (.isDirect buf)
(dtype-proto/->native-buffer buf)
(dtype-proto/->array-buffer buf))]
(dtype-proto/->buffer cbuf)))
(defn extend-nio-types!
[]
(doseq [dtype nio-datatypes]
(let [buf-type (datatype->nio-buf-type dtype)]
(extend buf-type
dtype-proto/PElemwiseDatatype
{:elemwise-datatype (constantly dtype)}
dtype-proto/PElemwiseReaderCast
{:elemwise-reader-cast buf->buffer}
dtype-proto/PECount
{:ecount #(.remaining ^Buffer %)}
dtype-proto/PToArrayBuffer
{:convertible-to-array-buffer? #(not (.isDirect ^Buffer %))
:->array-buffer (fn [^Buffer buf]
(let [offset (.position buf)
length (.remaining buf)]
(when-not (.isDirect buf)
(-> (array-buffer/array-buffer (.array buf))
(dtype-proto/sub-buffer offset length)))))}
dtype-proto/PToNativeBuffer
{:convertible-to-native-buffer? #(.isDirect ^Buffer %)
:->native-buffer
(fn [^Buffer buf]
(native-buffer/wrap-address
(buffer-address buf)
(* (dtype-proto/ecount buf)
(casting/numeric-byte-width (dtype-proto/elemwise-datatype buf)))
(dtype-proto/elemwise-datatype buf)
(dtype-proto/endianness buf)
buf))}
dtype-proto/PToBuffer
{:convertible-to-buffer? (constantly true)
:->buffer buf->buffer}
dtype-proto/PToReader
{:convertible-to-reader? (constantly true)
:->reader buf->buffer}
dtype-proto/PToWriter
{:convertible-to-writer? (constantly true)
:->writer buf->buffer}))))
(extend-nio-types!)
(def buffer-constructor*
(delay (if UnsafeUtil/directBufferConstructor
(fn [nbuf ^long address ^long nbytes _options]
(let [retval
(UnsafeUtil/constructByteBufferFromAddress
address nbytes)]
(resource/chain-resources retval nbuf)))
(do
(try
(requiring-resolve 'tech.v3.datatype.ffi.nio-buf-mmodel-jdk19/direct-buffer-constructor)
(catch Exception e
(log/info "Unable to find direct buffer constructor -
falling back to jdk16 memory model.")
(try
(requiring-resolve 'tech.v3.datatype.ffi.nio-buf-mmodel/direct-buffer-constructor)
(catch Exception e
(throw (RuntimeException. "Unable to load direct buffer constructor. If you are using JDK-17, set your runtime :jvm-opts as follows:
:jvm-opts [\"--add-modules\" \"jdk.incubator.foreign,jdk.incubator.vector\"
\"--enable-native-access=ALL-UNNAMED\"]}"
e))))))))))
(defn native-buf->nio-buf
(^java.nio.Buffer [^NativeBuffer buffer options]
(let [dtype (dtype-proto/elemwise-datatype buffer)
byte-width (casting/numeric-byte-width dtype)
n-bytes (* (.n-elems buffer) byte-width)
^ByteBuffer byte-buf (@buffer-constructor* buffer (.address buffer) n-bytes
options)]
(.order byte-buf
(case (.endianness buffer)
:little-endian ByteOrder/LITTLE_ENDIAN
:big-endian ByteOrder/BIG_ENDIAN))
(let [retval (case (casting/host-flatten dtype)
:int8 byte-buf
:int16 (.asShortBuffer byte-buf)
:int32 (.asIntBuffer byte-buf)
:int64 (.asLongBuffer byte-buf)
:float32 (.asFloatBuffer byte-buf)
:float64 (.asDoubleBuffer byte-buf))]
(if (:resource-type options :auto)
(resource/chain-resources retval buffer)
retval))))
(^java.nio.Buffer [buffer]
(native-buf->nio-buf buffer nil)))
(defn as-nio-buffer
"Convert to a nio buffer returning nil if not possible."
(^Buffer [item options]
(when-let [cbuf (dtype-base/as-concrete-buffer item)]
(when (nio-datatypes (casting/host-flatten (dtype-base/elemwise-datatype cbuf)))
(if (instance? NativeBuffer cbuf)
(native-buf->nio-buf cbuf options)
(let [^ArrayBuffer ary-buf cbuf
pos (.offset ary-buf)
limit (+ pos (.n-elems ary-buf))]
(case (casting/host-flatten (dtype-base/elemwise-datatype cbuf))
:int8 (doto (ByteBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int16 (doto (ShortBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int32 (doto (IntBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int64 (doto (LongBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:float32 (doto (FloatBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:float64 (doto (DoubleBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))))))))
(^Buffer [item] (as-nio-buffer item nil)))
(defn ->nio-buffer
"Convert to nio buffer throwing exception if not possible."
(^Buffer [item options]
(if-let [retval (as-nio-buffer item options)]
retval
(errors/throwf "Failed to convert item to nio buffer: %s" item)))
(^Buffer [item]
(->nio-buffer item nil)))
| null | https://raw.githubusercontent.com/cnuernber/dtype-next/71477752fc376fad4b678fa9607ef54a40b2b1ff/src/tech/v3/datatype/nio_buffer.clj | clojure | (ns tech.v3.datatype.nio-buffer
(:require [tech.v3.datatype.array-buffer :as array-buffer]
[tech.v3.datatype.native-buffer :as native-buffer]
[tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.casting :as casting]
[tech.v3.datatype.base :as dtype-base]
[tech.v3.datatype.errors :as errors]
[tech.v3.resource :as resource]
[clojure.tools.logging :as log])
(:import [java.nio Buffer ByteBuffer ShortBuffer IntBuffer LongBuffer
FloatBuffer DoubleBuffer ByteOrder]
[tech.v3.datatype UnsafeUtil]
[tech.v3.datatype.native_buffer NativeBuffer]
[tech.v3.datatype.array_buffer ArrayBuffer]))
(defn datatype->nio-buf-type
[datatype]
(case (casting/host-flatten datatype)
:int8 java.nio.ByteBuffer
:int16 java.nio.ShortBuffer
:int32 java.nio.IntBuffer
:int64 java.nio.LongBuffer
:float32 java.nio.FloatBuffer
:float64 java.nio.DoubleBuffer))
(defn as-byte-buffer ^ByteBuffer [item] item)
(defn as-short-buffer ^ShortBuffer [item] item)
(defn as-int-buffer ^IntBuffer [item] item)
(defn as-long-buffer ^LongBuffer [item] item)
(defn as-float-buffer ^FloatBuffer [item] item)
(defn as-double-buffer ^DoubleBuffer [item] item)
(defmacro datatype->nio-buf
[datatype item]
(case (casting/host-flatten datatype)
:int8 `(as-byte-buffer ~item)
:int16 `(as-short-buffer ~item)
:int32 `(as-int-buffer ~item)
:int64 `(as-long-buffer ~item)
:float32 `(as-float-buffer ~item)
:float64 `(as-double-buffer ~item)))
(def nio-datatypes #{:int8 :int16 :int32 :int64 :float32 :float64})
(defn buffer-address
^long [^Buffer buf]
(.getLong (native-buffer/unsafe) ^Object buf UnsafeUtil/addressFieldOffset))
(defn buf->buffer
[^Buffer buf]
(let [cbuf (if (.isDirect buf)
(dtype-proto/->native-buffer buf)
(dtype-proto/->array-buffer buf))]
(dtype-proto/->buffer cbuf)))
(defn extend-nio-types!
[]
(doseq [dtype nio-datatypes]
(let [buf-type (datatype->nio-buf-type dtype)]
(extend buf-type
dtype-proto/PElemwiseDatatype
{:elemwise-datatype (constantly dtype)}
dtype-proto/PElemwiseReaderCast
{:elemwise-reader-cast buf->buffer}
dtype-proto/PECount
{:ecount #(.remaining ^Buffer %)}
dtype-proto/PToArrayBuffer
{:convertible-to-array-buffer? #(not (.isDirect ^Buffer %))
:->array-buffer (fn [^Buffer buf]
(let [offset (.position buf)
length (.remaining buf)]
(when-not (.isDirect buf)
(-> (array-buffer/array-buffer (.array buf))
(dtype-proto/sub-buffer offset length)))))}
dtype-proto/PToNativeBuffer
{:convertible-to-native-buffer? #(.isDirect ^Buffer %)
:->native-buffer
(fn [^Buffer buf]
(native-buffer/wrap-address
(buffer-address buf)
(* (dtype-proto/ecount buf)
(casting/numeric-byte-width (dtype-proto/elemwise-datatype buf)))
(dtype-proto/elemwise-datatype buf)
(dtype-proto/endianness buf)
buf))}
dtype-proto/PToBuffer
{:convertible-to-buffer? (constantly true)
:->buffer buf->buffer}
dtype-proto/PToReader
{:convertible-to-reader? (constantly true)
:->reader buf->buffer}
dtype-proto/PToWriter
{:convertible-to-writer? (constantly true)
:->writer buf->buffer}))))
(extend-nio-types!)
(def buffer-constructor*
(delay (if UnsafeUtil/directBufferConstructor
(fn [nbuf ^long address ^long nbytes _options]
(let [retval
(UnsafeUtil/constructByteBufferFromAddress
address nbytes)]
(resource/chain-resources retval nbuf)))
(do
(try
(requiring-resolve 'tech.v3.datatype.ffi.nio-buf-mmodel-jdk19/direct-buffer-constructor)
(catch Exception e
(log/info "Unable to find direct buffer constructor -
falling back to jdk16 memory model.")
(try
(requiring-resolve 'tech.v3.datatype.ffi.nio-buf-mmodel/direct-buffer-constructor)
(catch Exception e
(throw (RuntimeException. "Unable to load direct buffer constructor. If you are using JDK-17, set your runtime :jvm-opts as follows:
:jvm-opts [\"--add-modules\" \"jdk.incubator.foreign,jdk.incubator.vector\"
\"--enable-native-access=ALL-UNNAMED\"]}"
e))))))))))
(defn native-buf->nio-buf
(^java.nio.Buffer [^NativeBuffer buffer options]
(let [dtype (dtype-proto/elemwise-datatype buffer)
byte-width (casting/numeric-byte-width dtype)
n-bytes (* (.n-elems buffer) byte-width)
^ByteBuffer byte-buf (@buffer-constructor* buffer (.address buffer) n-bytes
options)]
(.order byte-buf
(case (.endianness buffer)
:little-endian ByteOrder/LITTLE_ENDIAN
:big-endian ByteOrder/BIG_ENDIAN))
(let [retval (case (casting/host-flatten dtype)
:int8 byte-buf
:int16 (.asShortBuffer byte-buf)
:int32 (.asIntBuffer byte-buf)
:int64 (.asLongBuffer byte-buf)
:float32 (.asFloatBuffer byte-buf)
:float64 (.asDoubleBuffer byte-buf))]
(if (:resource-type options :auto)
(resource/chain-resources retval buffer)
retval))))
(^java.nio.Buffer [buffer]
(native-buf->nio-buf buffer nil)))
(defn as-nio-buffer
"Convert to a nio buffer returning nil if not possible."
(^Buffer [item options]
(when-let [cbuf (dtype-base/as-concrete-buffer item)]
(when (nio-datatypes (casting/host-flatten (dtype-base/elemwise-datatype cbuf)))
(if (instance? NativeBuffer cbuf)
(native-buf->nio-buf cbuf options)
(let [^ArrayBuffer ary-buf cbuf
pos (.offset ary-buf)
limit (+ pos (.n-elems ary-buf))]
(case (casting/host-flatten (dtype-base/elemwise-datatype cbuf))
:int8 (doto (ByteBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int16 (doto (ShortBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int32 (doto (IntBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:int64 (doto (LongBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:float32 (doto (FloatBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))
:float64 (doto (DoubleBuffer/wrap (.ary-data ary-buf))
(.position pos)
(.limit limit))))))))
(^Buffer [item] (as-nio-buffer item nil)))
(defn ->nio-buffer
"Convert to nio buffer throwing exception if not possible."
(^Buffer [item options]
(if-let [retval (as-nio-buffer item options)]
retval
(errors/throwf "Failed to convert item to nio buffer: %s" item)))
(^Buffer [item]
(->nio-buffer item nil)))
| |
c6ef5b9317f92c4b24a1f2dab400358f8b5e59e96474e3e6b19aa77d31b2c5c9 | pallix/tikkba | writefile.clj | (ns tikkba.examples.writefile
(:use [analemma svg charts xml]
[tikkba swing dom]
tikkba.utils.xml)
(:require [tikkba.utils.dom :as dom]))
(defn create-svg
"Creates a SVG representation with the Analemma functions"
[]
(svg
(-> (rect 20 30 100 400 :id "rect0")
(style :fill "white" :stroke "blue" :stroke-width 10))
(-> (rect 50 250 50 80 :id "rect1")
(style :fill "white" :stroke "red" :stroke-width 10))))
(defn -main
[]
Converts the SVG representation to a XML Document
;; and writes it to a file
(let [doc (svg-doc (create-svg))]
(dom/spit-xml "/tmp/rectangle.svg" doc
:indent "yes"
:encoding "UTF8")))
| null | https://raw.githubusercontent.com/pallix/tikkba/86fda7f97c3b1ff835f02c2b1c0337f3e134fd2c/src/tikkba/examples/writefile.clj | clojure | and writes it to a file | (ns tikkba.examples.writefile
(:use [analemma svg charts xml]
[tikkba swing dom]
tikkba.utils.xml)
(:require [tikkba.utils.dom :as dom]))
(defn create-svg
"Creates a SVG representation with the Analemma functions"
[]
(svg
(-> (rect 20 30 100 400 :id "rect0")
(style :fill "white" :stroke "blue" :stroke-width 10))
(-> (rect 50 250 50 80 :id "rect1")
(style :fill "white" :stroke "red" :stroke-width 10))))
(defn -main
[]
Converts the SVG representation to a XML Document
(let [doc (svg-doc (create-svg))]
(dom/spit-xml "/tmp/rectangle.svg" doc
:indent "yes"
:encoding "UTF8")))
|
8bf6e84b2533d017c074663c6a94ce0ad944db913473246e62b69969218d08bc | facebook/duckling | HE_XX.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory .
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
@generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.HE_XX (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("\1502\1512\1509",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -1.110882381259924, unseen = -3.367295829986474,
likelihoods = HashMap.fromList [("", 0.0)], n = 27},
koData =
ClassData{prior = -0.3993860620317821, unseen = -4.04305126783455,
likelihoods = HashMap.fromList [("", 0.0)], n = 55}}),
("\1489 <date>",
Classifier{okData =
ClassData{prior = -0.12783337150988489,
unseen = -4.189654742026425,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -3.0757749812275272),
("week", -3.0757749812275272),
("<time> <part-of-day>", -3.481240089335692),
("mm/dd", -3.481240089335692),
("intersect", -3.0757749812275272), ("day", -2.228477120840324),
("this <cycle>", -3.481240089335692),
("time-of-day (latent)", -3.0757749812275272),
("<time-of-day> am|pm", -3.481240089335692),
("named-day", -3.0757749812275272),
("current <day-of-week>", -3.481240089335692),
("last <time>", -3.481240089335692),
("\1488\1493\1511\1496\1493\1489\1512", -3.481240089335692),
("hour", -1.7764919970972666), ("month", -2.5649493574615367),
("\1508\1489\1512\1493\1488\1512", -3.481240089335692),
("last <cycle>", -3.481240089335692),
("<named-month> <day-of-month> (non ordinal)",
-3.481240089335692),
("week-end", -3.0757749812275272),
("this <time>", -3.0757749812275272)],
n = 22},
koData =
ClassData{prior = -2.120263536200091, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("intersect", -2.6026896854443837),
("day", -2.1972245773362196),
("<day-of-month> (ordinal)", -2.6026896854443837),
("month", -2.6026896854443837),
("this <time>", -2.6026896854443837)],
n = 3}}),
("lunch",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (20..90)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.10536051565782628,
unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("dayhour", -1.4213856809311607),
("\1489 <date>morning", -2.6741486494265287),
("yesterdayevening|night", -2.6741486494265287),
("hourhour", -1.9810014688665833),
("time-of-day (latent)morning", -2.6741486494265287),
("named-daymorning", -2.6741486494265287),
("todayevening|night", -2.6741486494265287),
("tomorrowlunch", -2.268683541318364),
("at <time-of-day>morning", -2.6741486494265287),
("tomorrowevening|night", -2.6741486494265287)],
n = 9},
koData =
ClassData{prior = -2.3025850929940455, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("dayhour", -1.8718021769015913),
("<day-of-month> (ordinal)morning", -1.8718021769015913)],
n = 1}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0986122886681098),
("<time-of-day> am|pm", -1.5040773967762742),
("hour", -0.8109302162163288)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.916290731874155),
("hour", -0.916290731874155)],
n = 1}}),
("<day-of-month> (non ordinal) of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.0910424533583156,
likelihoods =
HashMap.fromList
[("integer (numeric)\1502\1512\1509", -1.9459101490553135),
("integer (numeric)\1508\1489\1512\1493\1488\1512",
-1.4350845252893227),
("integer (numeric)\1488\1508\1512\1497\1500",
-2.3513752571634776),
("integer 3\1502\1512\1509", -2.3513752571634776),
("month", -0.8472978603872037)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the ides of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 4",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("\1489 <named-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 9",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("integer 21..99 (with and)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods =
HashMap.fromList [("integer (20..90)integer 4", 0.0)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect",
Classifier{okData =
ClassData{prior = -0.37320424588994294,
unseen = -4.859812404361672,
likelihoods =
HashMap.fromList
[("hourday", -2.6548056865833978),
("daymonth", -3.242592351485517),
("monthyear", -3.7534179752515073),
("the <day-of-month> (ordinal)in <named-month>",
-3.7534179752515073),
("\1488\1493\1511\1496\1493\1489\1512year",
-4.1588830833596715),
("intersect<time-of-day> am|pm", -4.1588830833596715),
("<time> <part-of-day>\1489 <named-day>", -3.4657359027997265),
("intersect by \",\"<time-of-day> am|pm", -3.242592351485517),
("last <day-of-week> of <time>year", -4.1588830833596715),
("dayday", -2.772588722239781),
("the <day-of-month> (ordinal)\1489 <date>",
-3.7534179752515073),
("dayyear", -2.367123614131617),
("<day-of-month>(ordinal) <named-month>year",
-3.7534179752515073),
("\1489 <date>\1489 <named-day>", -4.1588830833596715),
("absorption of , after named day<named-month> <day-of-month> (non ordinal)",
-4.1588830833596715),
("absorption of , after named day<day-of-month> (ordinal) of <named-month>",
-4.1588830833596715),
("<day-of-month> (ordinal) of <named-month>year",
-3.4657359027997265),
("named-daynext <cycle>", -4.1588830833596715),
("dayminute", -2.772588722239781),
("named-day<day-of-month> (ordinal) of <named-month>",
-4.1588830833596715),
("named-day\1489 <date>", -4.1588830833596715),
("absorption of , after named dayintersect",
-4.1588830833596715),
("named-day<day-of-month> (non ordinal) of <named-month>",
-4.1588830833596715),
("absorption of , after named dayintersect by \",\"",
-3.7534179752515073),
("year<time-of-day> am|pm", -4.1588830833596715),
("\1489 <date>\1489 <date>", -4.1588830833596715),
("<day-of-month> (non ordinal) of <named-month>year",
-3.4657359027997265),
("absorption of , after named day<day-of-month> (non ordinal) of <named-month>",
-4.1588830833596715),
("dayweek", -3.7534179752515073),
("\1502\1512\1509year", -4.1588830833596715),
("<time> <part-of-day>\1489 <date>", -3.4657359027997265),
("named-daythe <day-of-month> (ordinal)", -4.1588830833596715),
("<day-of-month> (non ordinal) <named-month>year",
-3.7534179752515073),
("yearminute", -4.1588830833596715)],
n = 42},
koData =
ClassData{prior = -1.1664348850068706, unseen = -4.418840607796598,
likelihoods =
HashMap.fromList
[("in <named-month>year", -3.713572066704308),
("dayhour", -3.3081069585961433),
("daymonth", -3.0204248861443626),
("monthyear", -2.6149597780361984),
("intersecthh:mm", -3.713572066704308),
("dayday", -3.713572066704308),
("intersect by \",\"hh:mm", -2.797281334830153),
("\1488\1508\1512\1497\1500year", -3.713572066704308),
("named-dayin <named-month>", -3.713572066704308),
("dayminute", -2.327277705584417),
("named-day\1489 <date>", -3.713572066704308),
("absorption of , after named dayintersect",
-3.713572066704308),
("yearhh:mm", -3.713572066704308),
("absorption of , after named dayintersect by \",\"",
-3.713572066704308),
("\1502\1512\1509year", -3.3081069585961433),
("\1489 <date>year", -3.713572066704308),
("named-daythe <day-of-month> (ordinal)", -3.713572066704308),
("absorption of , after named day\1497\1493\1500\1497",
-3.713572066704308),
("tomorrownoon", -3.3081069585961433),
("yearminute", -3.713572066704308)],
n = 19}}),
("year (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("week", -1.540445040947149),
("month (grain)", -1.9459101490553135),
("year (grain)", -1.9459101490553135),
("week (grain)", -1.540445040947149),
("year", -1.9459101490553135), ("month", -1.9459101490553135)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("evening|night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 3",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm:ss",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal) of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("ordinal 3\1502\1512\1509", -2.4849066497880004),
("ordinal (digits)\1502\1512\1509", -2.0794415416798357),
("ordinal (digits)\1488\1508\1512\1497\1500",
-2.4849066497880004),
("ordinal 3\1488\1493\1511\1496\1493\1489\1512",
-2.4849066497880004),
("month", -0.8754687373538999),
("ordinal (digits)\1508\1489\1512\1493\1488\1512",
-1.5686159179138452)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> and <integer>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)integer 4", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("quarter to|till|before <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> <integer>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)integer 10", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("ordinal 19",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\1488\1493\1490\1493\1505\1496",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\1488\1508\1512\1497\1500",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer 11..19",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("integer 9integer 10", 0.0)],
n = 1}}),
("integer 7",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("ordinal 3\1489 <date>", -1.7346010553881064),
("ordinal (digits)in <named-month>", -2.1400661634962708),
("month", -0.8873031950009028),
("ordinal 3in <named-month>", -1.7346010553881064),
("ordinal (digits)\1489 <date>", -2.1400661634962708)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("\1497\1493\1500\1497",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("numbers prefix with -, negative or minus",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 6}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 2",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("half an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("ordinal 3", -1.252762968495368),
("ordinal 19", -1.252762968495368),
("ordinal (digits)", -1.252762968495368)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("ordinal 9", -0.916290731874155)],
n = 1}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("this <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("week", -0.6931471805599453),
("week (grain)", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 7",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("time-of-day (latent)",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.4700036292457356),
("integer 9", -1.3862943611198906)],
n = 5},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.3677247801253174),
("integer (20..90)", -1.8718021769015913),
("integer 9", -1.8718021769015913)],
n = 10}}),
("year",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 8},
koData =
ClassData{prior = -1.0986122886681098, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4}}),
("integer 9",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("last <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("daymonth", -0.8109302162163288),
("named-dayintersect", -1.5040773967762742),
("named-day\1502\1512\1509", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("integer 21..99 (with and)hour (grain)", -2.3025850929940455),
("day", -1.8971199848858813),
("integer 7day (grain)", -1.8971199848858813),
("hour", -1.8971199848858813),
("integer (numeric)minute (grain)", -1.8971199848858813),
("minute", -1.8971199848858813),
("integer (numeric)hour (grain)", -2.3025850929940455)],
n = 6},
koData =
ClassData{prior = -1.9459101490553135,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("hour", -1.6094379124341003),
("integer 4hour (grain)", -1.6094379124341003)],
n = 1}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.3629054936893685,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("\1489 <date>", -2.9444389791664407),
("at <time-of-day>", -2.9444389791664407),
("time-of-day (latent)", -2.2512917986064953),
("hh:mm", -1.1526795099383855), ("hour", -1.845826690498331),
("minute", -1.1526795099383855)],
n = 16},
koData =
ClassData{prior = -1.1895840668738362, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.916290731874155),
("hour", -0.916290731874155)],
n = 7}}),
("integer 10",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("intersect by \",\"",
Classifier{okData =
ClassData{prior = -0.3794896217049037,
unseen = -3.7612001156935624,
likelihoods =
HashMap.fromList
[("intersect by \",\"year", -3.044522437723423),
("dayday", -2.128231705849268),
("named-dayintersect by \",\"", -2.639057329615259),
("dayyear", -2.128231705849268),
("<named-month> <day-of-month> (non ordinal)intersect",
-3.044522437723423),
("named-day<named-month> <day-of-month> (non ordinal)",
-3.044522437723423),
("intersect by \",\"intersect", -3.044522437723423),
("named-dayintersect", -3.044522437723423),
("dayminute", -1.9459101490553135),
("named-day<day-of-month> (ordinal) of <named-month>",
-3.044522437723423),
("intersectyear", -3.044522437723423),
("named-day<day-of-month> (non ordinal) of <named-month>",
-3.044522437723423),
("intersectintersect", -3.044522437723423),
("<named-month> <day-of-month> (non ordinal)year",
-2.639057329615259)],
n = 13},
koData =
ClassData{prior = -1.1526795099383855, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("daymonth", -2.639057329615259),
("named-dayintersect by \",\"", -2.639057329615259),
("named-day\1497\1493\1500\1497", -2.639057329615259),
("<named-month> <day-of-month> (non ordinal)intersect",
-2.639057329615259),
("intersect by \",\"intersect", -2.639057329615259),
("named-dayintersect", -2.639057329615259),
("dayminute", -1.540445040947149),
("intersectintersect", -2.639057329615259)],
n = 6}}),
("hh:mm",
Classifier{okData =
ClassData{prior = -7.410797215372185e-2,
unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -2.639057329615259, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("named-day",
Classifier{okData =
ClassData{prior = -3.7740327982847086e-2,
unseen = -3.332204510175204,
likelihoods = HashMap.fromList [("", 0.0)], n = 26},
koData =
ClassData{prior = -3.295836866004329, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("ordinal 1",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("current <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.9459101490553135),
("\1489 <named-day>", -1.9459101490553135),
("day", -0.8472978603872037),
("named-day", -1.252762968495368)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("integer 4",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-day> <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("named-dayordinal (digits)", -0.916290731874155),
("day", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("absorption of , after named dayordinal (digits)",
-0.916290731874155),
("day", -0.916290731874155)],
n = 1}}),
("<duration> ago",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("<integer> <unit-of-duration>", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 6",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("integer 3",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <time>",
Classifier{okData =
ClassData{prior = -0.9808292530117262,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.791759469228055),
("day", -1.791759469228055), ("named-day", -1.791759469228055),
("hour", -1.3862943611198906),
("week-end", -1.791759469228055)],
n = 3},
koData =
ClassData{prior = -0.4700036292457356, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day", -0.9808292530117262),
("named-day", -1.6739764335716716),
("<day-of-month> (ordinal)", -1.3862943611198906)],
n = 5}}),
("<day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("ordinal 4", -1.7047480922384253),
("ordinal 3", -1.7047480922384253),
("ordinal 2", -1.0116009116784799),
("ordinal 1", -1.2992829841302609)],
n = 7}}),
("\1488\1493\1511\1496\1493\1489\1512",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("noon",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\1508\1489\1512\1493\1488\1512",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -1.3862943611198906,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -1.0986122886681098),
("month", -1.0986122886681098)],
n = 1},
koData =
ClassData{prior = -0.2876820724517809,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("day", -0.916290731874155), ("named-day", -0.916290731874155)],
n = 3}}),
("last <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("week", -1.0986122886681098),
("month (grain)", -1.791759469228055),
("week (grain)", -1.0986122886681098),
("month", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 20..90",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("hhmm (military) am|pm",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("day", -2.3025850929940455),
("half an hour", -2.3025850929940455),
("<integer> <unit-of-duration>", -1.2039728043259361),
("quarter of an hour", -2.3025850929940455),
("hour", -1.8971199848858813), ("minute", -1.3862943611198906)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("in <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -1.3862943611198906),
("\1488\1493\1511\1496\1493\1489\1512", -1.791759469228055),
("month", -0.8754687373538999),
("\1508\1489\1512\1493\1488\1512", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("half",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("ordinal 5",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<named-month> <day-of-month> (non ordinal)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("in <named-month>integer (numeric)", -2.3978952727983707),
("\1502\1512\1509integer (numeric)", -2.3978952727983707),
("\1489 <date>integer (numeric)", -2.3978952727983707),
("\1508\1489\1512\1493\1488\1512integer (numeric)",
-2.3978952727983707),
("\1488\1493\1490\1493\1505\1496integer (numeric)",
-2.3978952727983707),
("\1488\1508\1512\1497\1500integer (numeric)",
-2.3978952727983707),
("\1497\1493\1500\1497integer (numeric)", -2.3978952727983707),
("month", -1.0116009116784799)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (non ordinal) <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("integer (numeric)in <named-month>", -1.8718021769015913),
("integer 3\1489 <date>", -1.8718021769015913),
("integer (numeric)\1489 <date>", -1.8718021769015913),
("month", -0.9555114450274363),
("integer 3in <named-month>", -1.8718021769015913)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("<day-of-month>(ordinal) <named-month> year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("ordinal 3\1489 <date>", -1.252762968495368),
("month", -0.8472978603872037),
("ordinal 3in <named-month>", -1.252762968495368)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this evening",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = -0.45198512374305727,
unseen = -3.0910424533583156,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.9459101490553135),
("\1489 <named-day>", -2.3513752571634776),
("day", -1.252762968495368), ("named-day", -1.6582280766035324),
("hour", -1.9459101490553135),
("week-end", -2.3513752571634776)],
n = 7},
koData =
ClassData{prior = -1.0116009116784799, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("\1489 <date>", -2.0149030205422647),
("day", -1.0986122886681098),
("<day-of-month> (ordinal)", -1.3217558399823195)],
n = 4}})] | null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Ranking/Classifiers/HE_XX.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
---------------------------------------------------------------
Auto-generated by regenClassifiers
DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
---------------------------------------------------------------
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
of patent rights can be found in the PATENTS file in the same directory .
@generated
module Duckling.Ranking.Classifiers.HE_XX (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("\1502\1512\1509",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -1.110882381259924, unseen = -3.367295829986474,
likelihoods = HashMap.fromList [("", 0.0)], n = 27},
koData =
ClassData{prior = -0.3993860620317821, unseen = -4.04305126783455,
likelihoods = HashMap.fromList [("", 0.0)], n = 55}}),
("\1489 <date>",
Classifier{okData =
ClassData{prior = -0.12783337150988489,
unseen = -4.189654742026425,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -3.0757749812275272),
("week", -3.0757749812275272),
("<time> <part-of-day>", -3.481240089335692),
("mm/dd", -3.481240089335692),
("intersect", -3.0757749812275272), ("day", -2.228477120840324),
("this <cycle>", -3.481240089335692),
("time-of-day (latent)", -3.0757749812275272),
("<time-of-day> am|pm", -3.481240089335692),
("named-day", -3.0757749812275272),
("current <day-of-week>", -3.481240089335692),
("last <time>", -3.481240089335692),
("\1488\1493\1511\1496\1493\1489\1512", -3.481240089335692),
("hour", -1.7764919970972666), ("month", -2.5649493574615367),
("\1508\1489\1512\1493\1488\1512", -3.481240089335692),
("last <cycle>", -3.481240089335692),
("<named-month> <day-of-month> (non ordinal)",
-3.481240089335692),
("week-end", -3.0757749812275272),
("this <time>", -3.0757749812275272)],
n = 22},
koData =
ClassData{prior = -2.120263536200091, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("intersect", -2.6026896854443837),
("day", -2.1972245773362196),
("<day-of-month> (ordinal)", -2.6026896854443837),
("month", -2.6026896854443837),
("this <time>", -2.6026896854443837)],
n = 3}}),
("lunch",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (20..90)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.10536051565782628,
unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("dayhour", -1.4213856809311607),
("\1489 <date>morning", -2.6741486494265287),
("yesterdayevening|night", -2.6741486494265287),
("hourhour", -1.9810014688665833),
("time-of-day (latent)morning", -2.6741486494265287),
("named-daymorning", -2.6741486494265287),
("todayevening|night", -2.6741486494265287),
("tomorrowlunch", -2.268683541318364),
("at <time-of-day>morning", -2.6741486494265287),
("tomorrowevening|night", -2.6741486494265287)],
n = 9},
koData =
ClassData{prior = -2.3025850929940455, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("dayhour", -1.8718021769015913),
("<day-of-month> (ordinal)morning", -1.8718021769015913)],
n = 1}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0986122886681098),
("<time-of-day> am|pm", -1.5040773967762742),
("hour", -0.8109302162163288)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.916290731874155),
("hour", -0.916290731874155)],
n = 1}}),
("<day-of-month> (non ordinal) of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.0910424533583156,
likelihoods =
HashMap.fromList
[("integer (numeric)\1502\1512\1509", -1.9459101490553135),
("integer (numeric)\1508\1489\1512\1493\1488\1512",
-1.4350845252893227),
("integer (numeric)\1488\1508\1512\1497\1500",
-2.3513752571634776),
("integer 3\1502\1512\1509", -2.3513752571634776),
("month", -0.8472978603872037)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the ides of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 4",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("\1489 <named-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 9",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("integer 21..99 (with and)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods =
HashMap.fromList [("integer (20..90)integer 4", 0.0)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect",
Classifier{okData =
ClassData{prior = -0.37320424588994294,
unseen = -4.859812404361672,
likelihoods =
HashMap.fromList
[("hourday", -2.6548056865833978),
("daymonth", -3.242592351485517),
("monthyear", -3.7534179752515073),
("the <day-of-month> (ordinal)in <named-month>",
-3.7534179752515073),
("\1488\1493\1511\1496\1493\1489\1512year",
-4.1588830833596715),
("intersect<time-of-day> am|pm", -4.1588830833596715),
("<time> <part-of-day>\1489 <named-day>", -3.4657359027997265),
("intersect by \",\"<time-of-day> am|pm", -3.242592351485517),
("last <day-of-week> of <time>year", -4.1588830833596715),
("dayday", -2.772588722239781),
("the <day-of-month> (ordinal)\1489 <date>",
-3.7534179752515073),
("dayyear", -2.367123614131617),
("<day-of-month>(ordinal) <named-month>year",
-3.7534179752515073),
("\1489 <date>\1489 <named-day>", -4.1588830833596715),
("absorption of , after named day<named-month> <day-of-month> (non ordinal)",
-4.1588830833596715),
("absorption of , after named day<day-of-month> (ordinal) of <named-month>",
-4.1588830833596715),
("<day-of-month> (ordinal) of <named-month>year",
-3.4657359027997265),
("named-daynext <cycle>", -4.1588830833596715),
("dayminute", -2.772588722239781),
("named-day<day-of-month> (ordinal) of <named-month>",
-4.1588830833596715),
("named-day\1489 <date>", -4.1588830833596715),
("absorption of , after named dayintersect",
-4.1588830833596715),
("named-day<day-of-month> (non ordinal) of <named-month>",
-4.1588830833596715),
("absorption of , after named dayintersect by \",\"",
-3.7534179752515073),
("year<time-of-day> am|pm", -4.1588830833596715),
("\1489 <date>\1489 <date>", -4.1588830833596715),
("<day-of-month> (non ordinal) of <named-month>year",
-3.4657359027997265),
("absorption of , after named day<day-of-month> (non ordinal) of <named-month>",
-4.1588830833596715),
("dayweek", -3.7534179752515073),
("\1502\1512\1509year", -4.1588830833596715),
("<time> <part-of-day>\1489 <date>", -3.4657359027997265),
("named-daythe <day-of-month> (ordinal)", -4.1588830833596715),
("<day-of-month> (non ordinal) <named-month>year",
-3.7534179752515073),
("yearminute", -4.1588830833596715)],
n = 42},
koData =
ClassData{prior = -1.1664348850068706, unseen = -4.418840607796598,
likelihoods =
HashMap.fromList
[("in <named-month>year", -3.713572066704308),
("dayhour", -3.3081069585961433),
("daymonth", -3.0204248861443626),
("monthyear", -2.6149597780361984),
("intersecthh:mm", -3.713572066704308),
("dayday", -3.713572066704308),
("intersect by \",\"hh:mm", -2.797281334830153),
("\1488\1508\1512\1497\1500year", -3.713572066704308),
("named-dayin <named-month>", -3.713572066704308),
("dayminute", -2.327277705584417),
("named-day\1489 <date>", -3.713572066704308),
("absorption of , after named dayintersect",
-3.713572066704308),
("yearhh:mm", -3.713572066704308),
("absorption of , after named dayintersect by \",\"",
-3.713572066704308),
("\1502\1512\1509year", -3.3081069585961433),
("\1489 <date>year", -3.713572066704308),
("named-daythe <day-of-month> (ordinal)", -3.713572066704308),
("absorption of , after named day\1497\1493\1500\1497",
-3.713572066704308),
("tomorrownoon", -3.3081069585961433),
("yearminute", -3.713572066704308)],
n = 19}}),
("year (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("week", -1.540445040947149),
("month (grain)", -1.9459101490553135),
("year (grain)", -1.9459101490553135),
("week (grain)", -1.540445040947149),
("year", -1.9459101490553135), ("month", -1.9459101490553135)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("evening|night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 3",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm:ss",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal) of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("ordinal 3\1502\1512\1509", -2.4849066497880004),
("ordinal (digits)\1502\1512\1509", -2.0794415416798357),
("ordinal (digits)\1488\1508\1512\1497\1500",
-2.4849066497880004),
("ordinal 3\1488\1493\1511\1496\1493\1489\1512",
-2.4849066497880004),
("month", -0.8754687373538999),
("ordinal (digits)\1508\1489\1512\1493\1488\1512",
-1.5686159179138452)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> and <integer>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)integer 4", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("quarter to|till|before <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> <integer>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)integer 10", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("ordinal 19",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\1488\1493\1490\1493\1505\1496",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\1488\1508\1512\1497\1500",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer 11..19",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("integer 9integer 10", 0.0)],
n = 1}}),
("integer 7",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("ordinal 3\1489 <date>", -1.7346010553881064),
("ordinal (digits)in <named-month>", -2.1400661634962708),
("month", -0.8873031950009028),
("ordinal 3in <named-month>", -1.7346010553881064),
("ordinal (digits)\1489 <date>", -2.1400661634962708)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("\1497\1493\1500\1497",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("numbers prefix with -, negative or minus",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 6}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 2",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("named-day", -0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("half an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("ordinal 3", -1.252762968495368),
("ordinal 19", -1.252762968495368),
("ordinal (digits)", -1.252762968495368)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("ordinal 9", -0.916290731874155)],
n = 1}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("this <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("week", -0.6931471805599453),
("week (grain)", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 7",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("time-of-day (latent)",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.4700036292457356),
("integer 9", -1.3862943611198906)],
n = 5},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.3677247801253174),
("integer (20..90)", -1.8718021769015913),
("integer 9", -1.8718021769015913)],
n = 10}}),
("year",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 8},
koData =
ClassData{prior = -1.0986122886681098, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4}}),
("integer 9",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("last <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("daymonth", -0.8109302162163288),
("named-dayintersect", -1.5040773967762742),
("named-day\1502\1512\1509", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("integer 21..99 (with and)hour (grain)", -2.3025850929940455),
("day", -1.8971199848858813),
("integer 7day (grain)", -1.8971199848858813),
("hour", -1.8971199848858813),
("integer (numeric)minute (grain)", -1.8971199848858813),
("minute", -1.8971199848858813),
("integer (numeric)hour (grain)", -2.3025850929940455)],
n = 6},
koData =
ClassData{prior = -1.9459101490553135,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("hour", -1.6094379124341003),
("integer 4hour (grain)", -1.6094379124341003)],
n = 1}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.3629054936893685,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("\1489 <date>", -2.9444389791664407),
("at <time-of-day>", -2.9444389791664407),
("time-of-day (latent)", -2.2512917986064953),
("hh:mm", -1.1526795099383855), ("hour", -1.845826690498331),
("minute", -1.1526795099383855)],
n = 16},
koData =
ClassData{prior = -1.1895840668738362, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.916290731874155),
("hour", -0.916290731874155)],
n = 7}}),
("integer 10",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("intersect by \",\"",
Classifier{okData =
ClassData{prior = -0.3794896217049037,
unseen = -3.7612001156935624,
likelihoods =
HashMap.fromList
[("intersect by \",\"year", -3.044522437723423),
("dayday", -2.128231705849268),
("named-dayintersect by \",\"", -2.639057329615259),
("dayyear", -2.128231705849268),
("<named-month> <day-of-month> (non ordinal)intersect",
-3.044522437723423),
("named-day<named-month> <day-of-month> (non ordinal)",
-3.044522437723423),
("intersect by \",\"intersect", -3.044522437723423),
("named-dayintersect", -3.044522437723423),
("dayminute", -1.9459101490553135),
("named-day<day-of-month> (ordinal) of <named-month>",
-3.044522437723423),
("intersectyear", -3.044522437723423),
("named-day<day-of-month> (non ordinal) of <named-month>",
-3.044522437723423),
("intersectintersect", -3.044522437723423),
("<named-month> <day-of-month> (non ordinal)year",
-2.639057329615259)],
n = 13},
koData =
ClassData{prior = -1.1526795099383855, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("daymonth", -2.639057329615259),
("named-dayintersect by \",\"", -2.639057329615259),
("named-day\1497\1493\1500\1497", -2.639057329615259),
("<named-month> <day-of-month> (non ordinal)intersect",
-2.639057329615259),
("intersect by \",\"intersect", -2.639057329615259),
("named-dayintersect", -2.639057329615259),
("dayminute", -1.540445040947149),
("intersectintersect", -2.639057329615259)],
n = 6}}),
("hh:mm",
Classifier{okData =
ClassData{prior = -7.410797215372185e-2,
unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -2.639057329615259, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("named-day",
Classifier{okData =
ClassData{prior = -3.7740327982847086e-2,
unseen = -3.332204510175204,
likelihoods = HashMap.fromList [("", 0.0)], n = 26},
koData =
ClassData{prior = -3.295836866004329, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("ordinal 1",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("current <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.9459101490553135),
("\1489 <named-day>", -1.9459101490553135),
("day", -0.8472978603872037),
("named-day", -1.252762968495368)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("integer 4",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-day> <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("named-dayordinal (digits)", -0.916290731874155),
("day", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("absorption of , after named dayordinal (digits)",
-0.916290731874155),
("day", -0.916290731874155)],
n = 1}}),
("<duration> ago",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("<integer> <unit-of-duration>", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 6",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("integer 3",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <time>",
Classifier{okData =
ClassData{prior = -0.9808292530117262,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.791759469228055),
("day", -1.791759469228055), ("named-day", -1.791759469228055),
("hour", -1.3862943611198906),
("week-end", -1.791759469228055)],
n = 3},
koData =
ClassData{prior = -0.4700036292457356, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day", -0.9808292530117262),
("named-day", -1.6739764335716716),
("<day-of-month> (ordinal)", -1.3862943611198906)],
n = 5}}),
("<day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("ordinal 4", -1.7047480922384253),
("ordinal 3", -1.7047480922384253),
("ordinal 2", -1.0116009116784799),
("ordinal 1", -1.2992829841302609)],
n = 7}}),
("\1488\1493\1511\1496\1493\1489\1512",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("noon",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\1508\1489\1512\1493\1488\1512",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -1.3862943611198906,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -1.0986122886681098),
("month", -1.0986122886681098)],
n = 1},
koData =
ClassData{prior = -0.2876820724517809,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("day", -0.916290731874155), ("named-day", -0.916290731874155)],
n = 3}}),
("last <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("week", -1.0986122886681098),
("month (grain)", -1.791759469228055),
("week (grain)", -1.0986122886681098),
("month", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal 20..90",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("hhmm (military) am|pm",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("day", -2.3025850929940455),
("half an hour", -2.3025850929940455),
("<integer> <unit-of-duration>", -1.2039728043259361),
("quarter of an hour", -2.3025850929940455),
("hour", -1.8971199848858813), ("minute", -1.3862943611198906)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("in <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("\1502\1512\1509", -1.3862943611198906),
("\1488\1493\1511\1496\1493\1489\1512", -1.791759469228055),
("month", -0.8754687373538999),
("\1508\1489\1512\1493\1488\1512", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("half",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("ordinal 5",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<named-month> <day-of-month> (non ordinal)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("in <named-month>integer (numeric)", -2.3978952727983707),
("\1502\1512\1509integer (numeric)", -2.3978952727983707),
("\1489 <date>integer (numeric)", -2.3978952727983707),
("\1508\1489\1512\1493\1488\1512integer (numeric)",
-2.3978952727983707),
("\1488\1493\1490\1493\1505\1496integer (numeric)",
-2.3978952727983707),
("\1488\1508\1512\1497\1500integer (numeric)",
-2.3978952727983707),
("\1497\1493\1500\1497integer (numeric)", -2.3978952727983707),
("month", -1.0116009116784799)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (non ordinal) <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("integer (numeric)in <named-month>", -1.8718021769015913),
("integer 3\1489 <date>", -1.8718021769015913),
("integer (numeric)\1489 <date>", -1.8718021769015913),
("month", -0.9555114450274363),
("integer 3in <named-month>", -1.8718021769015913)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("<day-of-month>(ordinal) <named-month> year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("ordinal 3\1489 <date>", -1.252762968495368),
("month", -0.8472978603872037),
("ordinal 3in <named-month>", -1.252762968495368)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this evening",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = -0.45198512374305727,
unseen = -3.0910424533583156,
likelihoods =
HashMap.fromList
[("\1489 <date>", -1.9459101490553135),
("\1489 <named-day>", -2.3513752571634776),
("day", -1.252762968495368), ("named-day", -1.6582280766035324),
("hour", -1.9459101490553135),
("week-end", -2.3513752571634776)],
n = 7},
koData =
ClassData{prior = -1.0116009116784799, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("\1489 <date>", -2.0149030205422647),
("day", -1.0986122886681098),
("<day-of-month> (ordinal)", -1.3217558399823195)],
n = 4}})] |
5f43d4955e34fb02990f180d333ae213186557f443552a6d0e31e1bd8d1870ba | yoriyuki/Camomile | stringPrep.ml | Copyright ( C ) 2010
(* This library is free software; you can redistribute it and/or *)
(* modify it under the terms of the GNU Lesser General Public License *)
as published by the Free Software Foundation ; either version 2 of
the License , or ( at your option ) any later version .
As a special exception to the GNU Library General Public License , you
(* may link, statically or dynamically, a "work that uses this library" *)
(* with a publicly distributed version of this library to produce an *)
(* executable file containing portions of this library, and distribute *)
(* that executable file under terms of your choice, without any of the *)
additional requirements listed in clause 6 of the GNU Library General
(* Public License. By "a publicly distributed version of this library", *)
we mean either the unmodified Library as distributed by the authors ,
(* or a modified version of this library that is distributed under the *)
conditions defined in clause 3 of the GNU Library General Public
(* License. This exception does not however invalidate any other reasons *)
why the executable file might be covered by the GNU Library General
(* Public License . *)
(* This library is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *)
(* Lesser General Public License for more details. *)
You should have received a copy of the GNU Lesser General Public
(* License along with this library; if not, write to the Free Software *)
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307
USA
(* You can contact the author by sending email to *)
(* *)
(** Interface of the stringprep (RFC 3454) implementation. *)
module type Type =
sig
  (* Abstract Unicode text type supplied by the functor argument. *)
  type text

  (** Raised by [stringprep] when the input contains a prohibited
      character (or one newer than the profile's Unicode version);
      carries the offending character. *)
  exception Prohibited of UChar.t

  (** Raised by [stringprep] when the input violates the bidirectional
      constraints of RFC 3454, section 6. *)
  exception Bad_bidi

  (* Supported stringprep profiles.  NOTE(review): the variant
     constructors were reconstructed from [to_internal_profile] in the
     implementation; the per-constructor RFC annotations come from the
     surviving doc comments. *)
  type profile =
    [ `Nameprep (** RFC 3491 *)
    | `Nodeprep (** RFC 3920, Appendix A *)
    | `Resourceprep (** RFC 3920, Appendix B *)
    | `Saslprep (** RFC 4013 *)
    | `Trace (** for SASL Anonymous, RFC 4505, Section 3 *)
    | `Iscsi (** RFC 3722 *)
    | `Mib (** RFC 4011 *) ]

  (** [stringprep profile text] applies the given stringprep profile:
      character mapping, normalisation, prohibition check, and
      (profile-dependent) bidi check.
      @raise Prohibited if a prohibited character is found.
      @raise Bad_bidi if the bidi constraint is violated. *)
  val stringprep : profile -> text -> text
end
(** Functor building a stringprep implementation for a given camomile
    configuration and Unicode string representation. *)
module Make (Config : ConfigInt.Type) (Text : UnicodeString.Type) :
  Type with type text = Text.t
=
struct
  module UNF = UNF.Make (Config) (Text)
  module UCharInfo = UCharInfo.Make (Config)
  module StringPrep_data' = StringPrep_data.Make (Config)

  open StringPrep_data'

  type text = Text.t

  exception Prohibited of UChar.t
  exception Bad_bidi

  (* Unicode normal form applied after the mapping step.  [`No] means
     no normalisation (used by the trace profile). *)
  type normalisation =
    [ `C
    | `KC
    | `D
    | `KD
    | `No ]

  type profile =
    [ `Nameprep (** RFC 3491 *)
    | `Nodeprep (** RFC 3920, Appendix A *)
    | `Resourceprep (** RFC 3920, Appendix B *)
    | `Saslprep (** RFC 4013 *)
    | `Trace (** for SASL Anonymous, RFC 4505, Section 3 *)
    | `Iscsi (** RFC 3722 *)
    | `Mib (** RFC 4011 *) ]

  (* Concrete realisation of a profile: character mapping, normal form,
     prohibition predicate, whether the bidi check applies, the Unicode
     repertoire version, and the R/AL and L bidi-category predicates. *)
  type internal_profile =
    { map : UChar.t -> UChar.t list;
      normalize : normalisation;
      prohibited : UChar.t -> bool;
      check_bidi : bool;
      unicode_version : UCharInfo.version_type;
      bidi_ral : UChar.t -> bool;
      bidi_l : UChar.t -> bool; }

  (* Turn a mapping table into a character -> replacement-list function. *)
  let make_map map =
    let f x =
      let m = StringPrep_data.MappingMap.get map x in
      StringPrep_data.mapping_to_list x m
    in
    f

  (* Turn a boolean character table into a membership predicate. *)
  let make_set set =
    let f x = UCharTbl.Bool.get set x in
    f

  let nodeprep () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (nodeprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let resourceprep () =
    { map = make_map (map_b1 ());
      normalize = `KC;
      prohibited = make_set (resourceprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let nameprep () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (nameprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let saslprep () =
    { map = make_map (saslprep_map ());
      normalize = `KC;
      prohibited = make_set (saslprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  (* Trace performs no mapping and no normalisation. *)
  let trace () =
    { map = (fun x -> [x]);
      normalize = `No;
      prohibited = make_set (trace_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let iscsi () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (iscsi_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  (** rfc 4011 *)
  let mib () =
    { map = make_map (map_b1 ());
      normalize = `KC;
      prohibited = make_set (mib_prohibited ());
      check_bidi = false;
      unicode_version = `v3_2;
      bidi_ral = (fun _ -> false);
      bidi_l = (fun _ -> false) }

  (* Profiles are built lazily (thunks) so that the underlying tables
     are only loaded when a profile is actually used. *)
  let to_internal_profile : profile -> unit -> internal_profile = function
    | `Nameprep -> nameprep
    | `Nodeprep -> nodeprep
    | `Resourceprep -> resourceprep
    | `Saslprep -> saslprep
    | `Trace -> trace
    | `Iscsi -> iscsi
    | `Mib -> mib

  (* RFC 3454, section 6: if the first character is RandALCat, then the
     last character must be RandALCat and no LCat character may occur;
     otherwise no RandALCat character may occur at all.  The empty text
     is trivially correct. *)
  let is_correct_bidi profile text =
    let is_rand_al_cat index = profile.bidi_ral (Text.look text index) in
    let is_lcat index = profile.bidi_l (Text.look text index) in
    let rec check_rand_al_cat index =
      let next = Text.next text index in
      if Text.out_of_range text next
      then is_rand_al_cat index
      else
        if is_lcat index
        then false
        else check_rand_al_cat next
    in
    let rec check_not_rand_al_cat index =
      if is_rand_al_cat index
      then false
      else
        let next = Text.next text index in
        if Text.out_of_range text next
        then true
        else check_not_rand_al_cat next
    in
    let first = Text.first text in
    if Text.out_of_range text first
    then (* empty text *) true
    else
      if is_rand_al_cat first
      then check_rand_al_cat first
      else check_not_rand_al_cat first

  (* Select the normalisation function for a normal form. *)
  let normalisation : normalisation -> text -> text = function
    | `C -> UNF.nfc
    | `KC -> UNF.nfkc
    | `D -> UNF.nfd
    | `KD -> UNF.nfkd
    | `No -> (fun x -> x)

  (* Apply [profile] to [text]: map each character, normalise, reject
     prohibited characters (including any newer than the profile's
     Unicode version), then enforce the bidi constraint if required. *)
  let stringprep profile text =
    let profile = to_internal_profile profile () in
    let buffer = Text.Buf.create 10 in
    let add_char = Text.Buf.add_char buffer in
    let map c = List.iter add_char (profile.map c) in
    Text.iter map text;
    let text = Text.Buf.contents buffer in
    Text.Buf.clear buffer;
    let text = normalisation profile.normalize text in
    let rec check_prohibited index =
      if Text.out_of_range text index
      then ()
      else begin
        let char = Text.look text index in
        let prohibited =
          (* Characters unassigned as of the profile's Unicode version
             are treated as prohibited. *)
          (not (UCharInfo.older (UCharInfo.age char) profile.unicode_version))
          || (profile.prohibited char)
        in
        if prohibited
        then raise (Prohibited (Text.look text index))
        else check_prohibited (Text.next text index)
      end
    in
    check_prohibited (Text.first text);
    if profile.check_bidi
    then begin
      if is_correct_bidi profile text
      then text
      else raise Bad_bidi
    end
    else text
end
| null | https://raw.githubusercontent.com/yoriyuki/Camomile/d7d8843c88fae774f513610f8e09a613778e64b3/Camomile/public/stringPrep.ml | ocaml | This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
may link, statically or dynamically, a "work that uses this library"
with a publicly distributed version of this library to produce an
executable file containing portions of this library, and distribute
that executable file under terms of your choice, without any of the
Public License. By "a publicly distributed version of this library",
or a modified version of this library that is distributed under the
License. This exception does not however invalidate any other reasons
Public License .
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
You can contact the author by sending email to
* RFC 3722
* RFC 3722
empty text | Copyright ( C ) 2010
as published by the Free Software Foundation ; either version 2 of
the License , or ( at your option ) any later version .
As a special exception to the GNU Library General Public License , you
additional requirements listed in clause 6 of the GNU Library General
we mean either the unmodified Library as distributed by the authors ,
conditions defined in clause 3 of the GNU Library General Public
why the executable file might be covered by the GNU Library General
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307
USA
(** Interface of the stringprep (RFC 3454) implementation. *)
module type Type =
sig
  (* Abstract Unicode text type supplied by the functor argument. *)
  type text

  (** Raised when the input contains a prohibited character; carries the
      offending character. *)
  exception Prohibited of UChar.t

  (** Raised when the input violates the bidirectional constraints of
      RFC 3454, section 6. *)
  exception Bad_bidi

  (* Supported stringprep profiles.  NOTE(review): the variant
     constructors were reconstructed from [to_internal_profile] in the
     implementation; the RFC annotations follow the surviving comment
     fragments. *)
  type profile =
    [ `Nameprep (** RFC 3491 *)
    | `Nodeprep (** RFC 3920, Appendix A *)
    | `Resourceprep (** RFC 3920, Appendix B *)
    | `Saslprep (** RFC 4013 *)
    | `Trace (** for SASL Anonymous, RFC 4505, Section 3 *)
    | `Iscsi (** RFC 3722 *)
    | `Mib (** RFC 4011 *) ]

  (** [stringprep profile text] applies the given stringprep profile.
      @raise Prohibited if a prohibited character is found.
      @raise Bad_bidi if the bidi constraint is violated. *)
  val stringprep : profile -> text -> text
end
(* Functorial implementation of RFC 3454 stringprep over an arbitrary
   unicode string representation.  Restores comment delimiters and the
   [then true] branch of [is_correct_bidi] (empty text is trivially
   bidi-correct) that were lost during extraction. *)
module Make (Config : ConfigInt.Type) (Text : UnicodeString.Type) :
  Type with type text = Text.t
=
struct
  module UNF = UNF.Make (Config) (Text)
  module UCharInfo = UCharInfo.Make (Config)
  module StringPrep_data' = StringPrep_data.Make (Config)
  open StringPrep_data'

  type text = Text.t

  exception Prohibited of UChar.t
  exception Bad_bidi

  (* Unicode normalisation form applied after the mapping step. *)
  type normalisation =
    [ `C
    | `KC
    | `D
    | `KD
    | `No ]

  type profile =
    [ `Nameprep        (* RFC 3491 *)
    | `Nodeprep        (* RFC 3920, Appendix A *)
    | `Resourceprep    (* RFC 3920, Appendix B *)
    | `Saslprep        (* RFC 4013 *)
    | `Trace           (* for SASL Anonymous, RFC 4505, Section 3 *)
    | `Iscsi           (* RFC 3722 *)
    | `Mib ]           (* RFC 4011 *)

  (* Concrete behaviour of a profile: character mapping, normalisation
     form, prohibition predicate and bidi handling. *)
  type internal_profile =
    { map : UChar.t -> UChar.t list;
      normalize : normalisation;
      prohibited : UChar.t -> bool;
      check_bidi : bool;
      unicode_version : UCharInfo.version_type;
      bidi_ral : UChar.t -> bool;    (* RandALCat characters (table D.1) *)
      bidi_l : UChar.t -> bool; }    (* LCat characters (table D.2) *)

  (* Build a char -> char-list mapping function from a mapping table. *)
  let make_map map =
    let f x =
      let m = StringPrep_data.MappingMap.get map x in
      StringPrep_data.mapping_to_list x m
    in
    f

  (* Build a membership predicate from a boolean character table. *)
  let make_set set =
    let f x = UCharTbl.Bool.get set x in
    f

  let nodeprep () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (nodeprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let resourceprep () =
    { map = make_map (map_b1 ());
      normalize = `KC;
      prohibited = make_set (resourceprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let nameprep () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (nameprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let saslprep () =
    { map = make_map (saslprep_map ());
      normalize = `KC;
      prohibited = make_set (saslprep_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let trace () =
    { map = (fun x -> [x]);    (* trace maps nothing, RFC 4505 *)
      normalize = `No;
      prohibited = make_set (trace_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  let iscsi () =
    { map = make_map (map_b1b2 ());
      normalize = `KC;
      prohibited = make_set (iscsi_prohibited ());
      check_bidi = true;
      unicode_version = `v3_2;
      bidi_ral = make_set (d1 ());
      bidi_l = make_set (d2 ()) }

  (* RFC 4011: no bidi checking. *)
  let mib () =
    { map = make_map (map_b1 ());
      normalize = `KC;
      prohibited = make_set (mib_prohibited ());
      check_bidi = false;
      unicode_version = `v3_2;
      bidi_ral = (fun _ -> false);
      bidi_l = (fun _ -> false) }

  let to_internal_profile : profile -> unit -> internal_profile = function
    | `Nameprep -> nameprep
    | `Nodeprep -> nodeprep
    | `Resourceprep -> resourceprep
    | `Saslprep -> saslprep
    | `Trace -> trace
    | `Iscsi -> iscsi
    | `Mib -> mib

  (* RFC 3454 section 6: if the text contains any RandALCat character,
     the first and last characters must be RandALCat and no LCat
     character may appear; empty text is trivially correct. *)
  let is_correct_bidi profile text =
    let is_rand_al_cat index = profile.bidi_ral (Text.look text index) in
    let is_lcat index = profile.bidi_l (Text.look text index) in
    let rec check_rand_al_cat index =
      let next = Text.next text index in
      if Text.out_of_range text next
      then is_rand_al_cat index    (* last char must also be RandALCat *)
      else
        if is_lcat index
        then false
        else check_rand_al_cat next
    in
    let rec check_not_rand_al_cat index =
      if is_rand_al_cat index
      then false
      else
        let next = Text.next text index in
        if Text.out_of_range text next
        then true
        else check_not_rand_al_cat next
    in
    let first = Text.first text in
    if Text.out_of_range text first
    then true    (* empty text *)
    else
      if is_rand_al_cat first
      then check_rand_al_cat first
      else check_not_rand_al_cat first

  let normalisation : normalisation -> text -> text = function
    | `C -> UNF.nfc
    | `KC -> UNF.nfkc
    | `D -> UNF.nfd
    | `KD -> UNF.nfkd
    | `No -> (fun x -> x)

  (* Map, normalise, reject prohibited characters (including characters
     unassigned in the profile's unicode version), then check bidi. *)
  let stringprep profile text =
    let profile = to_internal_profile profile () in
    let buffer = Text.Buf.create 10 in
    let add_char = Text.Buf.add_char buffer in
    let map c = List.iter add_char (profile.map c) in
    Text.iter map text;
    let text = Text.Buf.contents buffer in
    Text.Buf.clear buffer;
    let text = normalisation profile.normalize text in
    let rec check_prohibited index =
      if Text.out_of_range text index
      then ()
      else begin
        let char = (Text.look text index) in
        let prohibited =
          (not (UCharInfo.older (UCharInfo.age char) profile.unicode_version))
          || (profile.prohibited char)
        in
        if prohibited
        then raise (Prohibited (Text.look text index))
        else check_prohibited (Text.next text index)
      end
    in
    check_prohibited (Text.first text);
    if profile.check_bidi
    then begin
      if is_correct_bidi profile text
      then text
      else raise Bad_bidi
    end
    else text
end
|
49f087a1d18545c5eec8f56249ee6282aee8911abdab313e4f981db1504d6ded | karlhof26/gimp-scheme | hexmap.scm | ; hexmap.scm
by < >
based on hex_grid by
;
Version 1.0 ( 2015 )
Uodated for GIMP-2.10.22 by ( Jan 2021 )
;
; Description
;
; Build hex map with nice options
;
;
; Issues
;
you must first select the pencil tool beforce calling this script ? ! ? ? ! ? ! ?
;
;
; License:
;
Copyright ( c ) < 2015 > < >
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
; THE SOFTWARE.
;
;===============================================================================
; Return #t when the point (x y) already appears in l, a list of
; two-element vectors #(x y); otherwise #f.
(define (search l x y)
    (cond
        ((null? l) #f)
        ((and (= (vector-ref (car l) 0) x)
              (= (vector-ref (car l) 1) y))
            #t)
        (else (search (cdr l) x y))
    )
)
; Truncate x and y to three decimal places and prepend the point #(x y)
; to l, unless an equal point is already present (duplicate suppression).
(define (maybe_add l x y)
    (let* (
            (tx (/ (trunc (* x 1000)) 1000))
            (ty (/ (trunc (* y 1000)) 1000))
          )
        (if (search l tx ty)
            l
            (cons (vector tx ty) l)
        )
    )
)
; Paint one airbrush dab at each point of edges (a list of #(x y)
; vectors) onto mask, recursing down the list; logs when the list ends.
(define (crawl_hex_edges edges mask pressure)
    (gimp-message "crawl x")
    (if (null? edges)
        (begin
            (gimp-message "edges are null")
        )
        (begin
            (gimp-airbrush mask pressure 2 (car edges))
            (crawl_hex_edges (cdr edges) mask pressure)
        )
    )
)
; Prepare a soft black brush sized relative to the hex side length and
; paint over every hex corner point on the grid layer mask, so the mask
; hides the corners.
(define (erase_hex_edges edges mask size)
    (gimp-message "erase hex edges")
    (gimp-context-set-brush-size (* 1.4 size))
    (gimp-context-set-foreground "black")
    (gimp-context-set-opacity 80)
    (gimp-context-set-brush "2. Hardness 025")
    (gimp-message "ready to crawl")
    (crawl_hex_edges edges mask 80)
)
; Convert border_edges, a flat coordinate list (x1 y1 x2 y2 ...), into a
; closed stroke on border_path.  Each anchor is emitted three times
; (control-in, anchor, control-out) as gimp-vectors-stroke-new-from-points
; expects for bezier strokes.
; Fix: the original indexed the list with list-ref on every access,
; which is O(n^2) for a linked list; walk the list with a cursor instead.
(define (build_border_path border_edges border_path)
    (let* (
            (s (length border_edges))
            (n (* s 3))                  ; three copies of every coordinate
            (v (make-vector n 0))
            (rest border_edges)          ; cursor into the coordinate list
            (y 0)                        ; write position in v
          )
        (while (not (null? rest))
            (let (
                    (px (car rest))
                    (py (cadr rest))
                 )
                (vector-set! v y px)
                (vector-set! v (+ y 1) py)
                (vector-set! v (+ y 2) px)
                (vector-set! v (+ y 3) py)
                (vector-set! v (+ y 4) px)
                (vector-set! v (+ y 5) py)
            )
            (set! rest (cddr rest))
            (set! y (+ y 6))
        )
        (gimp-vectors-stroke-new-from-points border_path 0 n v TRUE)
    )
)
; Accumulate the map-outline segments contributed by hex (x,y) into
; border_edges, for a HORIZONTAL-orientation grid.  vx/vy hold the 8
; corner coordinates of the current hex; xBorder/yBorder are the last
; hex column/row; bRight tells whether a partial hex closes the right
; edge.  Segments along the top/right are appended, those along the
; bottom/left are prepended, so the final list walks the outline in
; order.  Returns the updated border_edges list.
; Fix: restored the stripped ';' marker on the debug-message comment,
; which otherwise makes the file unparseable.
(define (build_h_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom)
    ;(gimp-message "line 106")
    (if (and (>= x 0) (>= y 0) (<= x xBorder) (<= y yBorder))
        ; top ?
        (if (= y 0)
            (if (or (equal? bRight #t) (< x xBorder))
                (set! border_edges (append border_edges
                    (list
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        (vector-ref vx 3)
                        (vector-ref vy 3)
                        (vector-ref vx 2)
                        (vector-ref vy 2)
                        (vector-ref vx 1)
                        (vector-ref vy 1)
                    ))
                )
                (set! border_edges (append border_edges
                    (list
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        (vector-ref vx 5)
                        (vector-ref vy 5)
                    ))
                )
            )
            ; else bottom ?
            (if (= y yBorder)
                (if (= x 0)
                    (set! border_edges (list*
                        (vector-ref vx 7)
                        (vector-ref vy 7)
                        (vector-ref vx 6)
                        (vector-ref vy 6)
                        (vector-ref vx 5)
                        (vector-ref vy 5)
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        (vector-ref vx 3)
                        (vector-ref vy 3)
                        border_edges)
                    )
                    (if (< x xBorder)
                        (set! border_edges (list*
                            (vector-ref vx 7)
                            (vector-ref vy 7)
                            (vector-ref vx 6)
                            (vector-ref vy 6)
                            (vector-ref vx 5)
                            (vector-ref vy 5)
                            (vector-ref vx 4)
                            (vector-ref vy 4)
                            border_edges)
                        )
                        (if (equal? bRight #t)
                            (set! border_edges (list*
                                (vector-ref vx 2)
                                (vector-ref vy 2)
                                (vector-ref vx 1)
                                (vector-ref vy 1)
                                (vector-ref vx 6)
                                (vector-ref vy 6)
                                (vector-ref vx 5)
                                (vector-ref vy 5)
                                (vector-ref vx 4)
                                (vector-ref vy 4)
                                border_edges)
                            )
                            (set! border_edges (list*
                                (vector-ref vx 4)
                                (vector-ref vy 4)
                                border_edges)
                            )
                        )
                    )
                )
                ; else left ?
                (if (= x 0)
                    (set! border_edges (list*
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        (vector-ref vx 3)
                        (vector-ref vy 3)
                        border_edges)
                    )
                    ; else right ?
                    (if (= x xBorder)
                        (if (equal? bRight #t)
                            (set! border_edges (append border_edges
                                (list
                                    (vector-ref vx 2)
                                    (vector-ref vy 2)
                                    (vector-ref vx 1)
                                    (vector-ref vy 1)
                                ))
                            )
                            (set! border_edges (append border_edges
                                (list
                                    (vector-ref vx 4)
                                    (vector-ref vy 4)
                                    (vector-ref vx 5)
                                    (vector-ref vy 5)
                                ))
                            )
                        )
                    )
                )
            )
        )
    )
    border_edges
)
; Accumulate the map-outline segments contributed by hex (x,y) into
; border_edges, for a VERTICAL-orientation grid.  vx/vy hold the 8
; corner coordinates of the current hex; xBorder/yBorder are the last
; hex column/row; bBottom tells whether a partial hex closes the bottom
; edge.  Top/right segments are appended, bottom/left segments are
; prepended, so the final list walks the outline in order.  Returns the
; updated border_edges list.
(define (build_v_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom)
    (if (and (>= x 0) (>= y 0) (<= x xBorder) (<= y yBorder))
        ; top ?
        (if (= y 0)
            ; top left
            (if (= x 0)
                (set! border_edges (append border_edges
                    (list
                        (vector-ref vx 1)
                        (vector-ref vy 1)
                        (vector-ref vx 2)
                        (vector-ref vy 2)
                        (vector-ref vx 3)
                        (vector-ref vy 3)
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        (vector-ref vx 5)
                        (vector-ref vy 5)
                    ))
                )
                ; top right, or top interior
                (if (= x xBorder)
                    (set! border_edges (append border_edges
                        (list
                            (vector-ref vx 4)
                            (vector-ref vy 4)
                            (vector-ref vx 5)
                            (vector-ref vy 5)
                            (vector-ref vx 6)
                            (vector-ref vy 6)
                            (vector-ref vx 7)
                            (vector-ref vy 7)
                        ))
                    )
                    (set! border_edges (append border_edges
                        (list
                            (vector-ref vx 4)
                            (vector-ref vy 4)
                            (vector-ref vx 5)
                            (vector-ref vy 5)
                        ))
                    )
                )
            )
            ; else bottom ?
            (if (= y yBorder)
                ; bBottom: a partial hex row closes the bottom edge
                (if (equal? bBottom #t)
                    (if (= x 0)
                        (begin
                            (set! border_edges (list*
                                (vector-ref vx 6)
                                (vector-ref vy 6)
                                (vector-ref vx 1)
                                (vector-ref vy 1)
                                (vector-ref vx 2)
                                (vector-ref vy 2)
                                (vector-ref vx 3)
                                (vector-ref vy 3)
                                (vector-ref vx 4)
                                (vector-ref vy 4)
                                border_edges)
                            )
                        )
                        (begin
                            (if (< x xBorder)
                                (set! border_edges (list*
                                    (vector-ref vx 6)
                                    (vector-ref vy 6)
                                    (vector-ref vx 7)
                                    (vector-ref vy 7)
                                    border_edges)
                                )
                                ; else
                                (set! border_edges (list*
                                    (vector-ref vx 4)
                                    (vector-ref vy 4)
                                    (vector-ref vx 5)
                                    (vector-ref vy 5)
                                    (vector-ref vx 6)
                                    (vector-ref vy 6)
                                    (vector-ref vx 7)
                                    (vector-ref vy 7)
                                    border_edges)
                                )
                            )
                        )
                    )
                    ; flat bottom: only the lower corners are needed
                    (if (< x xBorder)
                        (set! border_edges (list*
                            (vector-ref vx 5)
                            (vector-ref vy 5)
                            (vector-ref vx 4)
                            (vector-ref vy 4)
                            border_edges)
                        )
                        (set! border_edges (list*
                            (vector-ref vx 4)
                            (vector-ref vy 4)
                            border_edges)
                        )
                    )
                )
                ; else left ?
                (if (= x 0)
                    (set! border_edges (list*
                        (vector-ref vx 1)
                        (vector-ref vy 1)
                        (vector-ref vx 2)
                        (vector-ref vy 2)
                        (vector-ref vx 3)
                        (vector-ref vy 3)
                        (vector-ref vx 4)
                        (vector-ref vy 4)
                        border_edges)
                    )
                    ; else right ?
                    (if (= x xBorder)
                        (set! border_edges (append border_edges
                            (list
                                (vector-ref vx 4)
                                (vector-ref vy 4)
                                (vector-ref vx 5)
                                (vector-ref vy 5)
                                (vector-ref vx 6)
                                (vector-ref vy 6)
                                (vector-ref vx 7)
                                (vector-ref vy 7)
                            ))
                        )
                    )
                )
            )
        )
    )
    border_edges
)
; Generate every hex of the grid as a bezier stroke on grid_path,
; collect the map outline into border_path, and return the list of
; unique hex corner points (#(x y) vectors) for later masking.
; orientation 0 = horizontal (flat-top) hexes, otherwise vertical.
; Fix: restored the stripped ';' markers on the three debug-message
; comment lines, which otherwise make the file unparseable.
(define (build_grid grid_path border_path width height sideLength orientation xOff yOff)
    (let*
        (
            (w (- width xOff))
            (h (- height yOff))
            ; start one hex early when an offset leaves a partial column/row
            (x (if (> xOff 0) -1 0))
            (y (if (> yOff 0) -1 0))
            (vx (make-vector 8 0))       ; corner x coords of current hex
            (vy (make-vector 8 0))       ; corner y coords of current hex
            (hex_edges '())
            (border_edges '())
            (hX 0)                       ; corner x offsets for the orientation
            (hY 0)                       ; corner y offsets for the orientation
            (xAdd 0)
            (yAdd 0)
            (xLast 0)
            (yLast 0)
            (xBorder 0)
            (yBorder 0)
            (bRight #t)
            (bBottom #t)
        )
        (if (= orientation 0)
            ; horizontal
            (begin
                ;(gimp-message "line 381")
                (set! xLast (trunc (/ w (* sideLength 3.0))))
                (set! yLast (trunc (/ h (* sideLength 1.73205))))
                (set! xBorder (trunc (/ (- w xOff) (* sideLength 3.0))))
                (set! yBorder (- yLast 1))
                (set! bRight (if (> (- w xOff (* (* xBorder sideLength) 3.0)) sideLength) #t #f))
                ; (set! bBottom #t)
                (set! hX (vector (* sideLength 3.0) (* sideLength 2.0) (* sideLength 1.5) (* sideLength 0.5) 0 (* sideLength 0.5) (* sideLength 1.5) (* sideLength 2.0)))
                (set! hY (vector (* (* sideLength 1.73205) 0.5) (* (* sideLength 1.73205) 0.5) 0 0 (* sideLength 1.73205 0.5) (* sideLength 1.73205) (* sideLength 1.73205) (* sideLength 1.73205 0.5)))
            )
            (begin
                ;(gimp-message "line 391")
                (set! xLast (trunc (/ w (* sideLength 1.73205))))
                (set! yLast (trunc (/ h (* sideLength 3.0))))
                (set! xBorder (- xLast 1))
                (set! yBorder (trunc (/ (- h yOff) (* sideLength 3.0))))
                ; (set! bRight #t)
                (set! bBottom (if (> (- h yOff (* yBorder sideLength 3.0)) sideLength) #t #f))
                (set! hX (vector (* sideLength 1.73205 0.5) (* sideLength 1.73205 0.5) 0 0 (* sideLength 1.73205 0.5) (* sideLength 1.73205) (* sideLength 1.73205) (* sideLength 1.73205 0.5)))
                (set! hY (vector (* sideLength 3.0) (* sideLength 2.0) (* sideLength 1.5) (* sideLength 0.5) 0 (* sideLength 0.5) (* sideLength 1.5) (* sideLength 2.0)))
            )
        )
        ;(gimp-message "line 402")
        (gimp-progress-init "defining grid" -1)
        (while (<= y yLast)
            (gimp-progress-update (/ y yLast))
            (while (<= x xLast)
                (if (= orientation 0)
                    ; horizontal
                    (begin
                        (set! xAdd (+ (* (* x sideLength) 3.0) xOff))
                        (set! yAdd (+ (* y sideLength 1.73205) yOff))
                    )
                    (begin
                        (set! xAdd (+ (* x sideLength 1.73205) xOff))
                        (set! yAdd (+ (* y sideLength 3.0) yOff))
                    )
                )
                ; translate the 8 template corners to this hex's position
                (vector-set! vx 0 (+ (vector-ref hX 0) xAdd))
                (vector-set! vx 1 (+ (vector-ref hX 1) xAdd))
                (vector-set! vx 2 (+ (vector-ref hX 2) xAdd))
                (vector-set! vx 3 (+ (vector-ref hX 3) xAdd))
                (vector-set! vx 4 (+ (vector-ref hX 4) xAdd))
                (vector-set! vx 5 (+ (vector-ref hX 5) xAdd))
                (vector-set! vx 6 (+ (vector-ref hX 6) xAdd))
                (vector-set! vx 7 (+ (vector-ref hX 7) xAdd))
                (vector-set! vy 0 (+ (vector-ref hY 0) yAdd))
                (vector-set! vy 1 (+ (vector-ref hY 1) yAdd))
                (vector-set! vy 2 (+ (vector-ref hY 2) yAdd))
                (vector-set! vy 3 (+ (vector-ref hY 3) yAdd))
                (vector-set! vy 4 (+ (vector-ref hY 4) yAdd))
                (vector-set! vy 5 (+ (vector-ref hY 5) yAdd))
                (vector-set! vy 6 (+ (vector-ref hY 6) yAdd))
                (vector-set! vy 7 (+ (vector-ref hY 7) yAdd))
                ; hex path (each anchor tripled for bezier control points)
                (gimp-vectors-stroke-new-from-points grid_path 0 (* 8 2 3)
                    (vector
                        (vector-ref vx 0) (vector-ref vy 0)
                        (vector-ref vx 0) (vector-ref vy 0)
                        (vector-ref vx 0) (vector-ref vy 0)
                        (vector-ref vx 1) (vector-ref vy 1)
                        (vector-ref vx 1) (vector-ref vy 1)
                        (vector-ref vx 1) (vector-ref vy 1)
                        (vector-ref vx 2) (vector-ref vy 2)
                        (vector-ref vx 2) (vector-ref vy 2)
                        (vector-ref vx 2) (vector-ref vy 2)
                        (vector-ref vx 3) (vector-ref vy 3)
                        (vector-ref vx 3) (vector-ref vy 3)
                        (vector-ref vx 3) (vector-ref vy 3)
                        (vector-ref vx 4) (vector-ref vy 4)
                        (vector-ref vx 4) (vector-ref vy 4)
                        (vector-ref vx 4) (vector-ref vy 4)
                        (vector-ref vx 5) (vector-ref vy 5)
                        (vector-ref vx 5) (vector-ref vy 5)
                        (vector-ref vx 5) (vector-ref vy 5)
                        (vector-ref vx 6) (vector-ref vy 6)
                        (vector-ref vx 6) (vector-ref vy 6)
                        (vector-ref vx 6) (vector-ref vy 6)
                        (vector-ref vx 7) (vector-ref vy 7)
                        (vector-ref vx 7) (vector-ref vy 7)
                        (vector-ref vx 7) (vector-ref vy 7)
                    ) FALSE
                )
                ; border
                (if (= orientation 0) ; horizontal
                    (set! border_edges (build_h_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom))
                    (set! border_edges (build_v_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom))
                )
                ; hex edges (deduplicated corner points)
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 0) (vector-ref vy 0)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 1) (vector-ref vy 1)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 2) (vector-ref vy 2)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 3) (vector-ref vy 3)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 4) (vector-ref vy 4)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 5) (vector-ref vy 5)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 6) (vector-ref vy 6)))
                (set! hex_edges (maybe_add hex_edges (vector-ref vx 7) (vector-ref vy 7)))
                ; next loop values
                (set! x (+ x 1))
            )
            (set! y (+ y 1))
            (set! x (if (> xOff 0) -1 0))
        )
        (display border_edges)
        (build_border_path border_edges border_path)
        ;(gimp-message "returning")
        ;(gimp-display-new img)
        hex_edges
    )
)
; Entry point: create a new image holding the hex grid layer, the map
; border layer, and the two paths; optionally mask out grid points or
; segments via a layer mask on the grid layer.
; Fix: restored the stripped ';' markers on the commented-out brush
; set-up lines, the "START" marker and the maintainer notes, which
; otherwise make the file unparseable.
(define (script-fu-hex-map-advanced orientation elm len xN yN xOff yOff erase gStroke gColour bStroke bColour bOpacity)
    (let* (
            (img 0)
            (gridLayer 0)
            (borderLayer 0)
            (mask 0)
            (width 0)
            (height 0)
            (grid_path 0)
            (hex_edges 0)
            (border_path 0)
            (border_edges '())
            ; elm selects which hex measurement 'len' describes
            (sideLength (cond ((equal? elm 0) len) ((equal? elm 1) (/ len 2.0)) ((equal? elm 2) (/ len 1.73205))))
            ;(brushTemp (car (gimp-brush-new "HexMapBrush")))
          )
        ;(gimp-message "started OK")
        (if (= orientation 0)
            ; horizontal
            (begin
                (set! height (+ (* 2 yOff) (* 1.73205 yN sideLength)))
                (set! width (+ (* 2 xOff) (* (+ 0.5 (* 1.5 xN)) sideLength)))
            )
            (begin
                (set! width (+ (* 2 xOff) (* 1.73205 xN sideLength)))
                (set! height (+ (* 2 yOff) (* (+ 0.5 (* 1.5 yN)) sideLength)))
            )
        )
        ; START
        (gimp-context-push)
        (set! img (car (gimp-image-new width height RGB)))
        (gimp-image-undo-group-start img)
        ; set brush
        ;(gimp-brush-set-shape brushTemp BRUSH-GENERATED-CIRCLE)
        ;(gimp-brush-set-angle brushTemp 0)
        ;(gimp-brush-set-aspect-ratio brushTemp 1)
        ;(gimp-brush-set-hardness brushTemp 1)
        ;(gimp-brush-set-spacing brushTemp 0)
        ;(gimp-brush-set-spikes brushTemp 2) ; was 1
        ;(gimp-brushes-refresh)
        ; was HexMapBrush
        (gimp-context-set-opacity 100)
        (gimp-context-set-brush-size 2.0)
        (gimp-context-set-dynamics "Dynamics Off")
        (gimp-context-set-paint-mode LAYER-MODE-NORMAL)
        ; paths
        (set! grid_path (car (gimp-vectors-new img "Hex Grid")))
        (gimp-image-add-vectors img grid_path -1)
        (set! border_path (car (gimp-vectors-new img "Map Border")))
        (gimp-image-add-vectors img border_path -1)
        ;(gimp-displays-flush)
        ;(gimp-display-new img)
        (set! hex_edges (build_grid grid_path border_path width height sideLength orientation xOff yOff))
        ;(gimp-display-new img)
        ; grid layer
        (set! gridLayer (car (gimp-layer-new img width height RGBA-IMAGE "Grid" 100 LAYER-MODE-NORMAL)))
        (gimp-image-insert-layer img gridLayer 0 -1)
        (gimp-context-set-brush-size gStroke)
        (gimp-context-set-foreground gColour)
        (if (> gStroke 0)
            (begin
                (gimp-edit-stroke-vectors gridLayer grid_path)
                ;(gimp-path-to-selection img "Map Border" 2 0 0 0 0)
                ;(gimp-selection-invert img)
                ;(gimp-edit-clear gridLayer)
            )
        )
        ;(gimp-display-new img)
        ; border layer
        (set! borderLayer (car (gimp-layer-new img width height RGBA-IMAGE "Border" 100 LAYER-MODE-NORMAL)))
        (gimp-image-insert-layer img borderLayer 0 -1)
        ; transparent border
        (gimp-context-set-foreground '(0 0 0))
        (gimp-edit-bucket-fill borderLayer 0 0 bOpacity 20 0 0 0)
        ; border stroke
        (if (> bStroke 0)
            (begin
                (gimp-context-set-brush-size bStroke)
                (gimp-context-set-foreground bColour)
                (gimp-edit-stroke-vectors borderLayer border_path)
            )
        )
        ; clip the grid layer to the map border
        (gimp-path-to-selection img "Map Border" 2 0 0 0 0)
        (gimp-selection-invert img)
        (gimp-edit-clear gridLayer)
        ;(gimp-display-new img)
        (gimp-selection-none img)
        ;(gimp-brush-delete brushTemp) ; REMOVED BY karlhof26 - no need for this as I see.
        ; grid mask
        (if (> erase 0)
            (begin
                (set! mask (car (gimp-layer-create-mask gridLayer ADD-MASK-WHITE)))
                (gimp-layer-add-mask gridLayer mask)
                (erase_hex_edges hex_edges mask sideLength)
                (if (= erase 2)
                    (gimp-drawable-invert mask)
                )
                (gimp-message "Layer mask in use - disbale mask to see hexgrid")
            )
        )
        (gimp-display-new img)
        (gimp-image-clean-all img)
        ; END
        (gimp-image-undo-group-end img)
        (gimp-displays-flush)
        (gimp-context-pop)
        (gc) ; garbage collect
    )
)
; (define (script-fu-hex_mapp)
;   (script-fu-hex_map 0 0 100 5 5 140 50 2 "black" 6 "red")
; )
; Register the script and its 13 dialog parameters (one per formal of
; script-fu-hex-map-advanced, in order).
; Fix: the SF-COLOR entry for the bColour parameter was garbled down to
; its bare value "'(69 70 11)", breaking the registration arity;
; restored the type tag and label.
(script-fu-register "script-fu-hex-map-advanced"
    "Hex Map..."
    "Draws a hex grid on a layer of the image. Erase, if used, creates a mask that must be disabled to see the hexgrid.\nfile:hexmap.scm"
    "Jérémy Zurcher"
    "Copyright 2015, Jérémy Zurcher"
    "Nov 2015"
    ""
    SF-OPTION "Hex Orientation" '("Horizontal" "Vertical")
    SF-OPTION "Element to Specify" '("Side" "Point to Point" "Side to Side")
    SF-ADJUSTMENT "Length of Element" '(100 2 400 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Horizontal Hex (#)" '(18 2 500 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Vertical Hex (#)" '(10 2 500 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Horizontal Offset (px)" '(50 0 399 0.5 10 1 SF-SPINNER)
    SF-ADJUSTMENT "Vertical Offset (px)" '(50 0 399 0.5 10 1 SF-SPINNER)
    SF-OPTION "Erase (Mask out areas)" '("None" "Points" "Segments")
    SF-ADJUSTMENT "Line Width (px)" '(2 1 20 1 10 0 SF-SPINNER)
    SF-COLOR "Line Colour" '(244 244 244)
    SF-ADJUSTMENT "Border Width (px)" '(6 0 20 1 10 0 SF-SPINNER)
    SF-COLOR "Border Colour" '(69 70 11)
    SF-ADJUSTMENT "Border Opacity (px)" '(40 0 100 1 10 0 SF-SPINNER)
)

(script-fu-menu-register "script-fu-hex-map-advanced" "<Toolbox>/Script-Fu/Render/Pattern")
; end of script | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/f5515e4f1f7d41872bf97761f2d5242eb8047b9a/hexmap.scm | scheme | hexmap.scm
Description
Build hex map with nice options
Issues
License:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===============================================================================
Note recursion here
top ?
else bottom ?
else left ?
else right ?
top ?
top left
else bottom ?
else
else left ?
else right ?
horizontal
(set! bBottom #t)
(set! bRight #t)
horizontal
hex path
border
horizontal
hex edges
next loop values
(gimp-message "returning")
(gimp-display-new img)
(brushTemp (car (gimp-brush-new "HexMapBrush")))
(gimp-message "started OK")
horizontal
set brush
(gimp-brush-set-shape brushTemp BRUSH-GENERATED-CIRCLE)
(gimp-brush-set-angle brushTemp 0)
(gimp-brush-set-spacing brushTemp 0)
was 1
(gimp-brushes-refresh)
paths
(gimp-displays-flush)
(gimp-display-new img)
(gimp-display-new img)
grid layer
(gimp-selection-invert img)
(gimp-display-new img)
border layer
transparent border
border stroke
(gimp-display-new img)
(gimp-brush-delete brushTemp)
; grid mask
END
garbage collect
(define (script-fu-hex_mapp)
)
end of script | by < >
based on hex_grid by
Version 1.0 ( 2015 )
Uodated for GIMP-2.10.22 by ( Jan 2021 )
you must first select the pencil tool beforce calling this script ? ! ? ? ! ? ! ?
Copyright ( c ) < 2015 > < >
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
(define (search l x y)
(if (null? l)
#f
(if (and (= (vector-ref (car l) 0) x)
(= (vector-ref (car l) 1) y))
#t
(search (cdr l) x y)
)
)
)
(define (maybe_add l x y)
(set! x (/ (trunc (* x 1000)) 1000))
(set! y (/ (trunc (* y 1000)) 1000))
(if (search l x y)
l
(list* (vector x y) l)
)
)
(define (crawl_hex_edges edges mask pressure)
(gimp-message "crawl x")
(if (not (null? edges))
(begin
(gimp-airbrush mask pressure 2 (car edges))
(crawl_hex_edges (cdr edges) mask pressure)
)
(begin
(gimp-message "edges are null")
)
)
)
(define (erase_hex_edges edges mask size)
(let* (
(dummy 0)
)
(gimp-message "erase hex edges")
(gimp-context-set-brush-size (* 1.4 size))
(gimp-context-set-foreground "black")
(gimp-context-set-opacity 80)
(gimp-context-set-brush "2. Hardness 025")
(gimp-message "ready to crawl")
(crawl_hex_edges edges mask 80)
)
)
(define (build_border_path border_edges border_path)
(let* (
(i 0)
(y 0)
(s (length border_edges))
(n (* s 3))
(v (make-vector n 0))
)
(while (< i s)
(vector-set! v y (list-ref border_edges i))
(vector-set! v (+ y 1) (list-ref border_edges (+ i 1)))
(vector-set! v (+ y 2) (list-ref border_edges i))
(vector-set! v (+ y 3) (list-ref border_edges (+ i 1)))
(vector-set! v (+ y 4) (list-ref border_edges i))
(vector-set! v (+ y 5) (list-ref border_edges (+ i 1)))
(set! i (+ i 2))
(set! y (+ y 6))
)
(gimp-vectors-stroke-new-from-points border_path 0 n v TRUE)
)
)
(define (build_h_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom)
( gimp - message " line 106 " )
(if (and (>= x 0) (>= y 0) (<= x xBorder) (<= y yBorder))
(if (= y 0)
(if (or (equal? bRight #t) (< x xBorder))
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 3)
(vector-ref vy 3)
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 1)
(vector-ref vy 1)
))
)
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
))
)
)
(if (= y yBorder)
(if (= x 0)
(set! border_edges (list*
(vector-ref vx 7)
(vector-ref vy 7)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 3)
(vector-ref vy 3)
border_edges)
)
(if (< x xBorder)
(set! border_edges (list*
(vector-ref vx 7)
(vector-ref vy 7)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
(if (equal? bRight #t)
(set! border_edges (list*
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 1)
(vector-ref vy 1)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
(set! border_edges (list*
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
)
)
)
(if (= x 0)
(set! border_edges (list*
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 3)
(vector-ref vy 3)
border_edges)
)
(if (= x xBorder)
(if (equal? bRight #t)
(set! border_edges (append border_edges
(list
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 1)
(vector-ref vy 1)
))
)
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
))
)
)
)
)
)
)
)
border_edges
)
(define (build_v_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom)
(if (and (>= x 0) (>= y 0) (<= x xBorder) (<= y yBorder))
(if (= y 0)
(if (= x 0)
(set! border_edges (append border_edges
(list
(vector-ref vx 1)
(vector-ref vy 1)
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 3)
(vector-ref vy 3)
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
))
)
(if (= x xBorder)
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 7)
(vector-ref vy 7)
))
)
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
))
)
)
)
(if (= y yBorder)
(if (equal? bBottom #t)
(if (= x 0)
(begin
(set! border_edges (list*
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 1)
(vector-ref vy 1)
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 3)
(vector-ref vy 3)
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
)
(begin
(if (< x xBorder)
(set! border_edges (list*
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 7)
(vector-ref vy 7)
border_edges)
)
(set! border_edges (list*
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 7)
(vector-ref vy 7)
border_edges)
)
)
)
)
(if (< x xBorder)
(set! border_edges (list*
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
(set! border_edges (list*
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
)
)
(if (= x 0)
(set! border_edges (list*
(vector-ref vx 1)
(vector-ref vy 1)
(vector-ref vx 2)
(vector-ref vy 2)
(vector-ref vx 3)
(vector-ref vy 3)
(vector-ref vx 4)
(vector-ref vy 4)
border_edges)
)
(if (= x xBorder)
(set! border_edges (append border_edges
(list
(vector-ref vx 4)
(vector-ref vy 4)
(vector-ref vx 5)
(vector-ref vy 5)
(vector-ref vx 6)
(vector-ref vy 6)
(vector-ref vx 7)
(vector-ref vy 7)
))
)
)
)
)
)
)
border_edges
)
(define (build_grid grid_path border_path width height sideLength orientation xOff yOff)
(let*
(
(w (- width xOff))
(h (- height yOff))
(x (if (> xOff 0) -1 0))
(y (if (> yOff 0) -1 0))
(vx (make-vector 8 0))
(vy (make-vector 8 0))
(hex_edges '())
(border_edges '())
(hX 0)
(hY 0)
(xAdd 0)
(yAdd 0)
(xLast 0)
(yLast 0)
(xBorder 0)
(yBorder 0)
(bRight #t)
(bBottom #t)
)
(if (= orientation 0)
(begin
( gimp - message " line 381 " )
(set! xLast (trunc (/ w (* sideLength 3.0))))
(set! yLast (trunc (/ h (* sideLength 1.73205))))
(set! xBorder (trunc (/ (- w xOff) (* sideLength 3.0))))
(set! yBorder (- yLast 1))
(set! bRight (if (> (- w xOff (* (* xBorder sideLength) 3.0)) sideLength) #t #f))
(set! hX (vector (* sideLength 3.0) (* sideLength 2.0) (* sideLength 1.5) (* sideLength 0.5) 0 (* sideLength 0.5) (* sideLength 1.5) (* sideLength 2.0)))
(set! hY (vector (* (* sideLength 1.73205) 0.5) (* (* sideLength 1.73205) 0.5) 0 0 (* sideLength 1.73205 0.5) (* sideLength 1.73205) (* sideLength 1.73205) (* sideLength 1.73205 0.5)))
)
(begin
( gimp - message " line 391 " )
(set! xLast (trunc (/ w (* sideLength 1.73205))))
(set! yLast (trunc (/ h (* sideLength 3.0))))
(set! xBorder (- xLast 1))
(set! yBorder (trunc (/ (- h yOff) (* sideLength 3.0))))
(set! bBottom (if (> (- h yOff (* yBorder sideLength 3.0)) sideLength) #t #f))
(set! hX (vector (* sideLength 1.73205 0.5) (* sideLength 1.73205 0.5) 0 0 (* sideLength 1.73205 0.5) (* sideLength 1.73205) (* sideLength 1.73205) (* sideLength 1.73205 0.5)))
(set! hY (vector (* sideLength 3.0) (* sideLength 2.0) (* sideLength 1.5) (* sideLength 0.5) 0 (* sideLength 0.5) (* sideLength 1.5) (* sideLength 2.0)))
)
)
( gimp - message " line 402 " )
(gimp-progress-init "defining grid" -1)
(while (<= y yLast)
(gimp-progress-update (/ y yLast))
(while (<= x xLast)
(if (= orientation 0)
(begin
(set! xAdd (+ (* (* x sideLength) 3.0) xOff))
(set! yAdd (+ (* y sideLength 1.73205) yOff))
)
(begin
(set! xAdd (+ (* x sideLength 1.73205) xOff))
(set! yAdd (+ (* y sideLength 3.0) yOff))
)
)
(vector-set! vx 0 (+ (vector-ref hX 0) xAdd))
(vector-set! vx 1 (+ (vector-ref hX 1) xAdd))
(vector-set! vx 2 (+ (vector-ref hX 2) xAdd))
(vector-set! vx 3 (+ (vector-ref hX 3) xAdd))
(vector-set! vx 4 (+ (vector-ref hX 4) xAdd))
(vector-set! vx 5 (+ (vector-ref hX 5) xAdd))
(vector-set! vx 6 (+ (vector-ref hX 6) xAdd))
(vector-set! vx 7 (+ (vector-ref hX 7) xAdd))
(vector-set! vy 0 (+ (vector-ref hY 0) yAdd))
(vector-set! vy 1 (+ (vector-ref hY 1) yAdd))
(vector-set! vy 2 (+ (vector-ref hY 2) yAdd))
(vector-set! vy 3 (+ (vector-ref hY 3) yAdd))
(vector-set! vy 4 (+ (vector-ref hY 4) yAdd))
(vector-set! vy 5 (+ (vector-ref hY 5) yAdd))
(vector-set! vy 6 (+ (vector-ref hY 6) yAdd))
(vector-set! vy 7 (+ (vector-ref hY 7) yAdd))
(gimp-vectors-stroke-new-from-points grid_path 0 (* 8 2 3)
(vector
(vector-ref vx 0) (vector-ref vy 0)
(vector-ref vx 0) (vector-ref vy 0)
(vector-ref vx 0) (vector-ref vy 0)
(vector-ref vx 1) (vector-ref vy 1)
(vector-ref vx 1) (vector-ref vy 1)
(vector-ref vx 1) (vector-ref vy 1)
(vector-ref vx 2) (vector-ref vy 2)
(vector-ref vx 2) (vector-ref vy 2)
(vector-ref vx 2) (vector-ref vy 2)
(vector-ref vx 3) (vector-ref vy 3)
(vector-ref vx 3) (vector-ref vy 3)
(vector-ref vx 3) (vector-ref vy 3)
(vector-ref vx 4) (vector-ref vy 4)
(vector-ref vx 4) (vector-ref vy 4)
(vector-ref vx 4) (vector-ref vy 4)
(vector-ref vx 5) (vector-ref vy 5)
(vector-ref vx 5) (vector-ref vy 5)
(vector-ref vx 5) (vector-ref vy 5)
(vector-ref vx 6) (vector-ref vy 6)
(vector-ref vx 6) (vector-ref vy 6)
(vector-ref vx 6) (vector-ref vy 6)
(vector-ref vx 7) (vector-ref vy 7)
(vector-ref vx 7) (vector-ref vy 7)
(vector-ref vx 7) (vector-ref vy 7)
) FALSE
)
(set! border_edges (build_h_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom))
(set! border_edges (build_v_border_edges border_edges vx vy x y xBorder yBorder bRight bBottom))
)
(set! hex_edges (maybe_add hex_edges (vector-ref vx 0) (vector-ref vy 0)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 1) (vector-ref vy 1)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 2) (vector-ref vy 2)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 3) (vector-ref vy 3)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 4) (vector-ref vy 4)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 5) (vector-ref vy 5)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 6) (vector-ref vy 6)))
(set! hex_edges (maybe_add hex_edges (vector-ref vx 7) (vector-ref vy 7)))
(set! x (+ x 1))
)
(set! y (+ y 1))
(set! x (if (> xOff 0) -1 0))
)
(display border_edges)
(build_border_path border_edges border_path)
hex_edges
)
)
; Build a hex-map image: a hex-grid layer, a border layer, vector paths for
; the grid and the map border, and (optionally) a layer mask that erases
; points or segments of the grid.
;   orientation: 0 = horizontal hexes, 1 = vertical hexes
;   elm/len:     which measurement `len` specifies (side, point-to-point,
;                side-to-side) and its length in pixels
;   xN/yN:       hex counts; xOff/yOff: pixel offsets from the canvas edge
;   erase:       0 = none, 1 = points, 2 = segments (applied via a layer mask)
;   gStroke/gColour: grid stroke width/colour; bStroke/bColour/bOpacity: border
; Fix: several comment lines in this body had lost their leading ";" during
; extraction (START, gimp-brush-* notes, path-to-selection notes, REMOVED
; note); they are restored as comments so the define is syntactically valid.
(define (script-fu-hex-map-advanced orientation elm len xN yN xOff yOff erase gStroke gColour bStroke bColour bOpacity)
    (let* (
        (img 0)
        (gridLayer 0)
        (borderLayer 0)
        (mask 0)
        (width 0)
        (height 0)
        (grid_path 0)
        (hex_edges 0)
        (border_path 0)
        (border_edges '())
        ; Normalise the requested measurement to the hexagon side length.
        (sideLength (cond ((equal? elm 0) len) ((equal? elm 1) (/ len 2.0)) ((equal? elm 2) (/ len 1.73205))))
        )
        ; Canvas size depends on orientation (1.73205 ~ sqrt 3).
        (if (= orientation 0)
            (begin
                (set! height (+ (* 2 yOff) (* 1.73205 yN sideLength)))
                (set! width (+ (* 2 xOff) (* (+ 0.5 (* 1.5 xN)) sideLength)))
            )
            (begin
                (set! width (+ (* 2 xOff) (* 1.73205 xN sideLength)))
                (set! height (+ (* 2 yOff) (* (+ 0.5 (* 1.5 yN)) sideLength)))
            )
        )
        ; START
        (gimp-context-push)
        (set! img (car (gimp-image-new width height RGB)))
        (gimp-image-undo-group-start img)
        ; (gimp-brush-set-aspect-ratio brushTemp 1)
        ; (gimp-brush-set-hardness brushTemp 1)
        ; was HexMapBrush
        (gimp-context-set-opacity 100)
        (gimp-context-set-brush-size 2.0)
        (gimp-context-set-dynamics "Dynamics Off")
        (gimp-context-set-paint-mode LAYER-MODE-NORMAL)
        (set! grid_path (car (gimp-vectors-new img "Hex Grid")))
        (gimp-image-add-vectors img grid_path -1)
        (set! border_path (car (gimp-vectors-new img "Map Border")))
        (gimp-image-add-vectors img border_path -1)
        ; build_grid fills both paths and returns the hex edge list used by
        ; the optional erase step below.
        (set! hex_edges (build_grid grid_path border_path width height sideLength orientation xOff yOff))
        (set! gridLayer (car (gimp-layer-new img width height RGBA-IMAGE "Grid" 100 LAYER-MODE-NORMAL)))
        (gimp-image-insert-layer img gridLayer 0 -1)
        (gimp-context-set-brush-size gStroke)
        (gimp-context-set-foreground gColour)
        (if (> gStroke 0)
            (begin
                (gimp-edit-stroke-vectors gridLayer grid_path)
                ; (gimp-path-to-selection img "Map Border" 2 0 0 0 0)
                ; (gimp-edit-clear gridLayer)
            )
        )
        (set! borderLayer (car (gimp-layer-new img width height RGBA-IMAGE "Border" 100 LAYER-MODE-NORMAL)))
        (gimp-image-insert-layer img borderLayer 0 -1)
        (gimp-context-set-foreground '(0 0 0))
        (gimp-edit-bucket-fill borderLayer 0 0 bOpacity 20 0 0 0)
        (if (> bStroke 0)
            (begin
                (gimp-context-set-brush-size bStroke)
                (gimp-context-set-foreground bColour)
                (gimp-edit-stroke-vectors borderLayer border_path)
            )
        )
        ; Clear everything on the grid layer outside the map border.
        (gimp-path-to-selection img "Map Border" 2 0 0 0 0)
        (gimp-selection-invert img)
        (gimp-edit-clear gridLayer)
        (gimp-selection-none img)
        ; REMOVED BY karlhof26 - no need for this as I see.
        (if (> erase 0)
            (begin
                (set! mask (car (gimp-layer-create-mask gridLayer ADD-MASK-WHITE)))
                (gimp-layer-add-mask gridLayer mask)
                (erase_hex_edges hex_edges mask sideLength)
                (if (= erase 2)
                    (gimp-drawable-invert mask)
                )
                (gimp-message "Layer mask in use - disable mask to see hexgrid")
            )
        )
        (gimp-display-new img)
        (gimp-image-clean-all img)
        (gimp-image-undo-group-end img)
        (gimp-displays-flush)
        (gimp-context-pop)
    )
)
; (script-fu-hex_map 0 0 100 5 5 140 50 2 "black" 6 "red")
; Register the script with GIMP: one SF-* widget per script parameter, in
; the same order as script-fu-hex-map-advanced's argument list
; (orientation elm len xN yN xOff yOff erase gStroke gColour bStroke bColour bOpacity).
(script-fu-register "script-fu-hex-map-advanced"
    "Hex Map..."
    "Draws a hex grid on a layer of the image. Erase, if used, creates a mask that must be disabled to see the hexgrid.\nfile:hexmap.scm"
    "Jérémy Zurcher"
    "Copyright 2015, Jérémy Zurcher"
    "Nov 2015"
    ""
    SF-OPTION "Hex Orientation" '("Horizontal" "Vertical")
    SF-OPTION "Element to Specify" '("Side" "Point to Point" "Side to Side")
    SF-ADJUSTMENT "Length of Element" '(100 2 400 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Horizontal Hex (#)" '(18 2 500 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Vertical Hex (#)" '(10 2 500 1 10 0 SF-SPINNER)
    SF-ADJUSTMENT "Horizontal Offset (px)" '(50 0 399 0.5 10 1 SF-SPINNER)
    SF-ADJUSTMENT "Vertical Offset (px)" '(50 0 399 0.5 10 1 SF-SPINNER)
    SF-OPTION "Erase (Mask out areas)" '("None" "Points" "Segments")
    SF-ADJUSTMENT "Line Width (px)" '(2 1 20 1 10 0 SF-SPINNER)
    SF-COLOR "Line Colour" '(244 244 244)
    SF-ADJUSTMENT "Border Width (px)" '(6 0 20 1 10 0 SF-SPINNER)
    ; Restored widget: the bColour parameter needs its own entry; this line
    ; had lost its SF-COLOR keyword and label during extraction.
    SF-COLOR "Border Colour" '(69 70 11)
    SF-ADJUSTMENT "Border Opacity (px)" '(40 0 100 1 10 0 SF-SPINNER)
)
(script-fu-menu-register "script-fu-hex-map-advanced" "<Toolbox>/Script-Fu/Render/Pattern")
|
eea62f351a98083de46ba94ad9cc7c435df424b364477bc843c58a12df57bb9c | VitorCBSB/HetrisV2 | Constants.hs | module Constants
( introScreenTime,
fieldHeight,
visibleFieldHeight,
fieldWidth,
cellSize,
softDropSpeed,
timeToLock,
moveRotationLimit,
timeToRestart,
clearingLinesTime,
linesLevelUp,
gamePosition,
windowSize,
)
where
-- | Seconds the intro screen is shown before the game starts.
introScreenTime :: Double
introScreenTime = 4

-- | Total playfield height in cells.
-- NOTE(review): presumably includes rows hidden above the visible area
-- (30 here vs. 20 visible below) — confirm against the renderer.
fieldHeight :: Int
fieldHeight = 30

-- | Playfield rows actually rendered on screen.
visibleFieldHeight :: Int
visibleFieldHeight = 20

-- | Playfield width in cells.
fieldWidth :: Int
fieldWidth = 10

-- | Size of one cell in pixels.
cellSize :: Int
cellSize = 32

-- In cells per second
softDropSpeed :: Double
softDropSpeed = 40

-- Time (in seconds) needed to wait until a piece is locked in place.
timeToLock :: Double
timeToLock = 0.5

-- Time (in seconds) whereafter you can press 'Enter' to restart the game.
timeToRestart :: Double
timeToRestart = 2.25

-- In seconds.
clearingLinesTime :: Double
clearingLinesTime = 0.5

-- Line clears needed to level up
linesLevelUp :: Int
linesLevelUp = 10

-- Move / rotation limit.
-- How many moves or rotations we can perform before
-- a piece locks into place instantly.
moveRotationLimit :: Int
moveRotationLimit = 15

-- | Top-left pixel position of the playfield inside the window.
gamePosition :: (Int, Int)
gamePosition = (200, 25)

-- | Window dimensions in pixels (width, height).
windowSize :: (Int, Int)
windowSize = (1024, 768)
| null | https://raw.githubusercontent.com/VitorCBSB/HetrisV2/2544e8ce56a4d0daba9126642118f0504215f742/app/Constants.hs | haskell | Line clears needed to level up
Move / rotation limit.
How many moves or rotations we can perform before
a piece locks into place instantly. | module Constants
( introScreenTime,
fieldHeight,
visibleFieldHeight,
fieldWidth,
cellSize,
softDropSpeed,
timeToLock,
moveRotationLimit,
timeToRestart,
clearingLinesTime,
linesLevelUp,
gamePosition,
windowSize,
)
where
introScreenTime :: Double
introScreenTime = 4
fieldHeight :: Int
fieldHeight = 30
visibleFieldHeight :: Int
visibleFieldHeight = 20
fieldWidth :: Int
fieldWidth = 10
cellSize :: Int
cellSize = 32
In cells per second
softDropSpeed :: Double
softDropSpeed = 40
Time ( in seconds ) needed to wait until a piece is locked in place .
timeToLock :: Double
timeToLock = 0.5
Time ( in seconds ) whereafter you can press ' Enter ' to restart the game .
timeToRestart :: Double
timeToRestart = 2.25
In seconds .
clearingLinesTime :: Double
clearingLinesTime = 0.5
linesLevelUp :: Int
linesLevelUp = 10
moveRotationLimit :: Int
moveRotationLimit = 15
gamePosition :: (Int, Int)
gamePosition = (200, 25)
windowSize :: (Int, Int)
windowSize = (1024, 768)
|
db1493cb2c33984575c4d7fd449cb739e92e5d3a2d99b22d6e1d5c00fe54ff93 | FundingCircle/jackdaw | fixtures_test.clj | (ns jackdaw.test.fixtures-test
(:require
[clojure.test :refer [deftest is]]
[jackdaw.test.fixtures :refer [list-topics reset-application-fixture topic-fixture with-fixtures]])
(:import
(org.apache.kafka.clients.admin AdminClient)))
(set! *warn-on-reflection* false)
;; Minimal topic description used throughout these tests: a single-partition,
;; unreplicated topic named "foo" with no extra topic-level config.
(def topic-foo
  {:topic-name "foo"
   :partition-count 1
   :replication-factor 1
   :config {}})
;; Broker connection settings; assumes a Kafka broker listening on
;; localhost:9092 (these are integration tests).
(def kafka-config
  {"bootstrap.servers" "localhost:9092"})
;; A ready-made topic fixture that ensures the "foo" topic exists before a
;; test body runs (see jackdaw.test.fixtures/topic-fixture).
(def test-topics
  (let [topics {"foo" topic-foo}]
    (topic-fixture kafka-config topics)))
;; True when the topic map `t`'s :topic-name appears among the topic names
;; reported by the cluster `client` is connected to.
(defn- topic-exists?
  [client t]
  (let [names-on-cluster (-> (list-topics client)
                             (.names)
                             (deref)
                             (set))]
    (contains? names-on-cluster (:topic-name t))))
;; End-to-end check: running under the topic fixture must leave topic "foo"
;; visible to a fresh AdminClient.
(deftest test-topic-fixture
  (with-fixtures [(topic-fixture kafka-config {"foo" topic-foo})]
    (with-open [client (AdminClient/create kafka-config)]
      (is (topic-exists? client topic-foo)))))
;; Harness for exercising reset-application-fixture: runs the fixture around
;; a trivial passing test, records the arguments the stubbed reset-fn was
;; invoked with, and passes them (plus any failure's ex-data) to
;; `assertion-fn` for inspection.
(defn test-resetter
  {:style/indent 1}
  [{:keys [app-config reset-params reset-fn]} assertion-fn]
  (let [reset-args (atom [])
        error-data (atom {})
        test-fn (fn []
                  (is true "fake test function"))
        fix-fn (reset-application-fixture app-config reset-params
                                          (partial reset-fn reset-args))]
    ;; invoke the reset-application fixture with the sample test-fn
    (try
      (fix-fn test-fn)
      (catch Exception e
        ;; a failed reset surfaces as ex-info; keep its data for assertions
        (reset! error-data (ex-data e))))
    (assertion-fn {:resetter (first @reset-args)
                   :reset-args (second @reset-args)
                   :error-data @error-data})))
;; Happy path: a reset-fn returning 0 (success) should have been handed a
;; StreamsResetter and the CLI args derived from the app config plus the
;; extra reset-params, with no error data recorded.
(deftest test-reset-application-fixture
  (test-resetter {:app-config {"application.id" "yolo"
                               "bootstrap.servers" "kafka.test:9092"}
                  :reset-params ["--foo" "foo"
                                 "--bar" "bar"]
                  :reset-fn (fn [reset-args rt args]
                              (reset! reset-args [rt args])
                              0)}
    (fn [{:keys [resetter reset-args error-data]}]
      (is (instance? kafka.tools.StreamsResetter resetter))
      (is (= ["--application-id" "yolo"
              "--bootstrap-servers" "kafka.test:9092"
              "--foo" "foo"
              "--bar" "bar"]
             reset-args))
      (is (empty? error-data)))))
;; Failure path: a reset-fn returning 1 should surface the non-zero status
;; plus whatever the resetter wrote to stderr/stdout in the fixture's ex-data.
(deftest test-reset-application-fixture-failure
  (test-resetter {:app-config {"application.id" "yolo"
                               "bootstrap.servers" "kafka.test:9092"}
                  :reset-params ["--foo" "foo"
                                 "--bar" "bar"]
                  :reset-fn (fn [reset-args rt args]
                              (reset! reset-args [rt args])
                              (.write *err* "helpful error message\n")
                              (.write *out* "essential application info\n")
                              1)}
    (fn [{:keys [resetter error-data]}]
      (is (instance? kafka.tools.StreamsResetter resetter))
      (is (= 1 (:status error-data)))
      (is (= "helpful error message\n" (:err error-data)))
      (is (= "essential application info\n" (:out error-data))))))
| null | https://raw.githubusercontent.com/FundingCircle/jackdaw/e0c66d386277282219e070cfbd0fe2ffa3c9dca5/test/jackdaw/test/fixtures_test.clj | clojure | invoke the reset-application fixture with the sample test-fn | (ns jackdaw.test.fixtures-test
(:require
[clojure.test :refer [deftest is]]
[jackdaw.test.fixtures :refer [list-topics reset-application-fixture topic-fixture with-fixtures]])
(:import
(org.apache.kafka.clients.admin AdminClient)))
(set! *warn-on-reflection* false)
(def topic-foo
{:topic-name "foo"
:partition-count 1
:replication-factor 1
:config {}})
(def kafka-config
{"bootstrap.servers" "localhost:9092"})
(def test-topics
(let [topics {"foo" topic-foo}]
(topic-fixture kafka-config topics)))
(defn- topic-exists?
[client t]
(contains? (-> (list-topics client)
(.names)
(deref)
(set))
(:topic-name t)))
(deftest test-topic-fixture
(with-fixtures [(topic-fixture kafka-config {"foo" topic-foo})]
(with-open [client (AdminClient/create kafka-config)]
(is (topic-exists? client topic-foo)))))
(defn test-resetter
{:style/indent 1}
[{:keys [app-config reset-params reset-fn]} assertion-fn]
(let [reset-args (atom [])
error-data (atom {})
test-fn (fn []
(is true "fake test function"))
fix-fn (reset-application-fixture app-config reset-params
(partial reset-fn reset-args))]
(try
(fix-fn test-fn)
(catch Exception e
(reset! error-data (ex-data e))))
(assertion-fn {:resetter (first @reset-args)
:reset-args (second @reset-args)
:error-data @error-data})))
(deftest test-reset-application-fixture
(test-resetter {:app-config {"application.id" "yolo"
"bootstrap.servers" "kafka.test:9092"}
:reset-params ["--foo" "foo"
"--bar" "bar"]
:reset-fn (fn [reset-args rt args]
(reset! reset-args [rt args])
0)}
(fn [{:keys [resetter reset-args error-data]}]
(is (instance? kafka.tools.StreamsResetter resetter))
(is (= ["--application-id" "yolo"
"--bootstrap-servers" "kafka.test:9092"
"--foo" "foo"
"--bar" "bar"]
reset-args))
(is (empty? error-data)))))
(deftest test-reset-application-fixture-failure
(test-resetter {:app-config {"application.id" "yolo"
"bootstrap.servers" "kafka.test:9092"}
:reset-params ["--foo" "foo"
"--bar" "bar"]
:reset-fn (fn [reset-args rt args]
(reset! reset-args [rt args])
(.write *err* "helpful error message\n")
(.write *out* "essential application info\n")
1)}
(fn [{:keys [resetter error-data]}]
(is (instance? kafka.tools.StreamsResetter resetter))
(is (= 1 (:status error-data)))
(is (= "helpful error message\n" (:err error-data)))
(is (= "essential application info\n" (:out error-data))))))
|
3d14e47c7e4ec9111c8ed1ae10ee6310601be92c68801fbd2ea471fe4cbf448f | effectfully/tiny-lang | Main.hs | module Main where
import qualified Field.Axioms as Field (test_fields)
import qualified Field.Raw.Textual as Raw (gen_test_parsing)
import qualified Field.Renaming as Field (test_free_variables, test_renaming)
import qualified Field.Textual as Field (gen_test_roundtrip, test_textual)
import qualified Field.Typed.Textual as Field (gen_test_typechecking)
import Test.Tasty
-- | Gather every test tree in the suite (pure groups lifted with 'pure',
-- generated ones run in IO) under a single \"all\" group.
test_all :: IO TestTree
test_all = do
  trees <- sequence
    [ pure Field.test_free_variables
    , pure Field.test_renaming
    , pure Field.test_fields
    , pure Field.test_textual
    , Field.gen_test_roundtrip
    , Raw.gen_test_parsing
    , Field.gen_test_typechecking
    ]
  pure (testGroup "all" trees)
-- | Entry point: build the full suite and hand it to tasty's default runner.
main :: IO ()
main = defaultMain =<< test_all
| null | https://raw.githubusercontent.com/effectfully/tiny-lang/3dbb730d3595a76f087a33da3b353783ff2a27e9/test/Main.hs | haskell | module Main where
import qualified Field.Axioms as Field (test_fields)
import qualified Field.Raw.Textual as Raw (gen_test_parsing)
import qualified Field.Renaming as Field (test_free_variables, test_renaming)
import qualified Field.Textual as Field (gen_test_roundtrip, test_textual)
import qualified Field.Typed.Textual as Field (gen_test_typechecking)
import Test.Tasty
test_all :: IO TestTree
test_all =
testGroup "all" <$> sequence
[ pure Field.test_free_variables
, pure Field.test_renaming
, pure Field.test_fields
, pure Field.test_textual
, Field.gen_test_roundtrip
, Raw.gen_test_parsing
, Field.gen_test_typechecking
]
main :: IO ()
main = defaultMain =<< test_all
| |
eee57d0b2edc0cab76ff5119abba60981156b18820675ffef52022c4dd3a3c7e | stackbuilders/atomic-write | BinarySpec.hs | module System.AtomicWrite.Writer.ByteString.BinarySpec (spec) where
import Test.Hspec (Spec, describe,
it, shouldBe)
import System.AtomicWrite.Writer.ByteString.Binary (atomicWriteFile, atomicWriteFileWithMode)
import System.FilePath.Posix (joinPath)
import System.IO.Temp (withSystemTempDirectory)
import System.PosixCompat.Files (fileMode,
getFileStatus,
setFileCreationMask,
setFileMode)
import Data.ByteString.Char8 (pack)
-- | Specs for the strict-ByteString atomic writers: plain writes,
-- permission preservation for pre-existing files, and umask/mode handling
-- for newly created files. Each case runs inside its own temp directory.
-- Fix: several comments in this block had lost their leading "--" during
-- extraction (the 100600-perms, "Since we move", and tempfile-permissions
-- notes); they are restored as comments so the definition parses.
spec :: Spec
spec = do
  describe "atomicWriteFile" $ do
    it "writes the contents to a file" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let path = joinPath [ tmpDir, "writeTest.tmp" ]
        atomicWriteFile path $ pack "just testing"
        contents <- readFile path
        contents `shouldBe` "just testing"
    it "preserves the permissions of original file, regardless of umask" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let filePath = joinPath [tmpDir, "testFile"]
        writeFile filePath "initial contents"
        setFileMode filePath 0o100644
        newStat <- getFileStatus filePath
        fileMode newStat `shouldBe` 0o100644
        -- New files are created with 100600 perms.
        _ <- setFileCreationMask 0o100066
        -- Create a new file once different mask is set and make sure that mask
        -- is applied.
        writeFile (joinPath [tmpDir, "sanityCheck"]) "with sanity check mask"
        sanityCheckStat <- getFileStatus $ joinPath [tmpDir, "sanityCheck"]
        fileMode sanityCheckStat `shouldBe` 0o100600
        -- Since we move, this makes the new file assume the filemask of 0600
        atomicWriteFile filePath $ pack "new contents"
        resultStat <- getFileStatus filePath
        -- reset mask to not break subsequent specs
        _ <- setFileCreationMask 0o100022
        -- Fails when using atomic mv command unless apply perms on initial file
        fileMode resultStat `shouldBe` 0o100644
    it "creates a new file with permissions based on active umask" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let
          filePath = joinPath [tmpDir, "testFile"]
          sampleFilePath = joinPath [tmpDir, "sampleFile"]
        -- Set somewhat distinctive defaults for test
        _ <- setFileCreationMask 0o100171
        -- We don't know what the default file permissions are, so create a
        -- file to sample them.
        writeFile sampleFilePath "I'm being written to sample permissions"
        newStat <- getFileStatus sampleFilePath
        fileMode newStat `shouldBe` 0o100606
        atomicWriteFile filePath $ pack "new contents"
        resultStat <- getFileStatus filePath
        -- reset mask to not break subsequent specs
        _ <- setFileCreationMask 0o100022
        -- The default tempfile permissions are 0600, so this fails unless we
        -- make sure that the default umask is relied on for creation of the
        -- tempfile.
        fileMode resultStat `shouldBe` 0o100606
  describe "atomicWriteFileWithMode" $ do
    it "writes contents to a file" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let path = joinPath [ tmpDir, "writeTest.tmp" ]
        atomicWriteFileWithMode 0o100777 path $ pack "just testing"
        contents <- readFile path
        contents `shouldBe` "just testing"
    it "changes the permissions of a previously created file, regardless of umask" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let filePath = joinPath [tmpDir, "testFile"]
        writeFile filePath "initial contents"
        setFileMode filePath 0o100644
        newStat <- getFileStatus filePath
        fileMode newStat `shouldBe` 0o100644
        -- New files are created with 100600 perms.
        _ <- setFileCreationMask 0o100066
        -- Create a new file once different mask is set and make sure that mask
        -- is applied.
        writeFile (joinPath [tmpDir, "sanityCheck"]) "with sanity check mask"
        sanityCheckStat <- getFileStatus $ joinPath [tmpDir, "sanityCheck"]
        fileMode sanityCheckStat `shouldBe` 0o100600
        -- Since we move, this makes the new file assume the filemask of 0600
        atomicWriteFileWithMode 0o100655 filePath $ pack "new contents"
        resultStat <- getFileStatus filePath
        -- reset mask to not break subsequent specs
        _ <- setFileCreationMask 0o100022
        -- Fails when using atomic mv command unless apply perms on initial file
        fileMode resultStat `shouldBe` 0o100655
    it "creates a new file with specified permissions" $
      withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
        let
          filePath = joinPath [tmpDir, "testFile"]
        atomicWriteFileWithMode 0o100606 filePath $ pack "new contents"
        resultStat <- getFileStatus filePath
        fileMode resultStat `shouldBe` 0o100606
| null | https://raw.githubusercontent.com/stackbuilders/atomic-write/50976b9d249134d05191c4e430343594fdb034fb/spec/System/AtomicWrite/Writer/ByteString/BinarySpec.hs | haskell | Create a new file once different mask is set and make sure that mask
is applied.
reset mask to not break subsequent specs
Fails when using atomic mv command unless apply perms on initial file
Set somewhat distinctive defaults for test
We don't know what the default file permissions are, so create a
file to sample them.
reset mask to not break subsequent specs
make sure that the default umask is relied on for creation of the
tempfile.
Create a new file once different mask is set and make sure that mask
is applied.
reset mask to not break subsequent specs
Fails when using atomic mv command unless apply perms on initial file | module System.AtomicWrite.Writer.ByteString.BinarySpec (spec) where
import Test.Hspec (Spec, describe,
it, shouldBe)
import System.AtomicWrite.Writer.ByteString.Binary (atomicWriteFile, atomicWriteFileWithMode)
import System.FilePath.Posix (joinPath)
import System.IO.Temp (withSystemTempDirectory)
import System.PosixCompat.Files (fileMode,
getFileStatus,
setFileCreationMask,
setFileMode)
import Data.ByteString.Char8 (pack)
spec :: Spec
spec = do
describe "atomicWriteFile" $ do
it "writes the contents to a file" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let path = joinPath [ tmpDir, "writeTest.tmp" ]
atomicWriteFile path $ pack "just testing"
contents <- readFile path
contents `shouldBe` "just testing"
it "preserves the permissions of original file, regardless of umask" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let filePath = joinPath [tmpDir, "testFile"]
writeFile filePath "initial contents"
setFileMode filePath 0o100644
newStat <- getFileStatus filePath
fileMode newStat `shouldBe` 0o100644
New files are created with 100600 perms .
_ <- setFileCreationMask 0o100066
writeFile (joinPath [tmpDir, "sanityCheck"]) "with sanity check mask"
sanityCheckStat <- getFileStatus $ joinPath [tmpDir, "sanityCheck"]
fileMode sanityCheckStat `shouldBe` 0o100600
Since we move , this makes the new file assume the filemask of 0600
atomicWriteFile filePath $ pack "new contents"
resultStat <- getFileStatus filePath
_ <- setFileCreationMask 0o100022
fileMode resultStat `shouldBe` 0o100644
it "creates a new file with permissions based on active umask" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let
filePath = joinPath [tmpDir, "testFile"]
sampleFilePath = joinPath [tmpDir, "sampleFile"]
_ <- setFileCreationMask 0o100171
writeFile sampleFilePath "I'm being written to sample permissions"
newStat <- getFileStatus sampleFilePath
fileMode newStat `shouldBe` 0o100606
atomicWriteFile filePath $ pack "new contents"
resultStat <- getFileStatus filePath
_ <- setFileCreationMask 0o100022
The default tempfile permissions are 0600 , so this fails unless we
fileMode resultStat `shouldBe` 0o100606
describe "atomicWriteFileWithMode" $ do
it "writes contents to a file" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let path = joinPath [ tmpDir, "writeTest.tmp" ]
atomicWriteFileWithMode 0o100777 path $ pack "just testing"
contents <- readFile path
contents `shouldBe` "just testing"
it "changes the permissions of a previously created file, regardless of umask" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let filePath = joinPath [tmpDir, "testFile"]
writeFile filePath "initial contents"
setFileMode filePath 0o100644
newStat <- getFileStatus filePath
fileMode newStat `shouldBe` 0o100644
New files are created with 100600 perms .
_ <- setFileCreationMask 0o100066
writeFile (joinPath [tmpDir, "sanityCheck"]) "with sanity check mask"
sanityCheckStat <- getFileStatus $ joinPath [tmpDir, "sanityCheck"]
fileMode sanityCheckStat `shouldBe` 0o100600
Since we move , this makes the new file assume the filemask of 0600
atomicWriteFileWithMode 0o100655 filePath $ pack "new contents"
resultStat <- getFileStatus filePath
_ <- setFileCreationMask 0o100022
fileMode resultStat `shouldBe` 0o100655
it "creates a new file with specified permissions" $
withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
let
filePath = joinPath [tmpDir, "testFile"]
atomicWriteFileWithMode 0o100606 filePath $ pack "new contents"
resultStat <- getFileStatus filePath
fileMode resultStat `shouldBe` 0o100606
|
ee8386ecb15d8f8e9e69d7e7feadc043828d44ca3c550d2c7770ff52bd754029 | mojombo/egitd | server.erl | -module(server).
-export([start_link/0, init/1]).
%% Spawn the server process linked to the caller; proc_lib entry point,
%% acknowledged from init/1 via proc_lib:init_ack/2.
start_link() ->
  proc_lib:start_link(?MODULE, init, [self()]).
%% Process init: create the shared `db` ETS table, load config and logging,
%% open the listen socket (with retries), ack the parent, then accept-loop.
init(Parent) ->
  ets:new(db, [set, named_table]),
  read_conf(),
  init_log(),
  log:write("start", ["ok"]),
  LSock = try_listen(10),
  proc_lib:init_ack(Parent, {ok, self()}),
  loop(LSock).
%% Load the conf file named by the application env key `conf`.
%% Crashes (badmatch) if the env var is unset — a deliberate fail-fast.
read_conf() ->
  {ok, Conf} = application:get_env(conf),
  error_logger:info_msg("Using conf file ~p~n", [Conf]),
  conf:read_conf(Conf).
%% Initialise logging from the optional `log` application env; logging is
%% simply skipped when no log path is configured.
init_log() ->
  init_log(application:get_env(log)).

init_log({ok, Log}) ->
  log:init_log(Log);
init_log(undefined) ->
  ok.
%% Bind the git port (9418), retrying every 5s up to the given count.
%% NOTE(review): when all retries are exhausted this returns the result of
%% info_msg/1 rather than a socket, so init/1 will crash later on accept —
%% apparently the intended fail behaviour.
try_listen(0) ->
  error_logger:info_msg("Could not listen on port 9418~n");
try_listen(Times) ->
  Res = gen_tcp:listen(9418, [list, {packet, 0}, {active, false}]),
  case Res of
    {ok, LSock} ->
      error_logger:info_msg("Listening on port 9418~n"),
      LSock;
    {error, Reason} ->
      error_logger:info_msg("Could not listen on port 9418: ~p~n", [Reason]),
      timer:sleep(5000),
      try_listen(Times - 1)
  end.
%% Accept loop: one spawned (unlinked) handler process per connection.
loop(LSock) ->
  {ok, Sock} = gen_tcp:accept(LSock),
  spawn(fun() -> handle_method(Sock) end),
  loop(LSock).
%% Read the initial git protocol request line, extract the target host and
%% service name, and dispatch; a closed socket is simply cleaned up.
handle_method(Sock) ->
  % get the requested host and method
  case gen_tcp:recv(Sock, 0) of
    {ok, Header} ->
      % io:format("header = ~p~n", [Header]),
      {ok, Host} = extract_host(Header),
      Method = extract_method_name(Header),
      % dispatch
      handle_method_dispatch(Method, Sock, Host, Header);
    {error, closed} ->
      ok = gen_tcp:close(Sock)
  end.
%% Route the two supported git services to their handler modules; anything
%% unrecognised gets an error message and the socket is closed.
handle_method_dispatch({ok, "upload-pack"}, Sock, Host, Header) ->
  upload_pack:handle(Sock, Host, Header);
handle_method_dispatch({ok, "receive-pack"}, Sock, Host, Header) ->
  receive_pack:handle(Sock, Host, Header);
handle_method_dispatch(invalid, Sock, _Host, _Header) ->
  gen_tcp:send(Sock, "Invalid method declaration. Upgrade to the latest git.\n"),
  ok = gen_tcp:close(Sock).
%% Pull the service name (e.g. "upload-pack") out of the request line,
%% which looks like "<4-byte len>git-upload-pack <path>...". Uses the
%% legacy `regexp` module (removed in modern OTP; would need `re` there).
extract_method_name(Header) ->
  case regexp:match(Header, "....git[ -][a-z\-]+ ") of
    {match, Start, Length} ->
      {ok, string:substr(Header, Start + 8, Length - 9)};
    _Else ->
      invalid
  end.
%% Pull the value of the NUL-delimited "host=" parameter from the request
%% line (lower-cased first); falls back to the literal "invalid" so the
%% caller's {ok, Host} match never fails.
extract_host(Header) ->
  case regexp:match(string:to_lower(Header), "\000host=[^\000]+\000") of
    {match, Start, Length} ->
      {ok, string:substr(Header, Start + 6, Length - 7)};
    _Else ->
      {ok, "invalid"}
  end.
io:format("header = ~p~n", [Header]),
dispatch | -module(server).
-export([start_link/0, init/1]).
start_link() ->
proc_lib:start_link(?MODULE, init, [self()]).
init(Parent) ->
ets:new(db, [set, named_table]),
read_conf(),
init_log(),
log:write("start", ["ok"]),
LSock = try_listen(10),
proc_lib:init_ack(Parent, {ok, self()}),
loop(LSock).
read_conf() ->
{ok, Conf} = application:get_env(conf),
error_logger:info_msg("Using conf file ~p~n", [Conf]),
conf:read_conf(Conf).
init_log() ->
init_log(application:get_env(log)).
init_log({ok, Log}) ->
log:init_log(Log);
init_log(undefined) ->
ok.
try_listen(0) ->
error_logger:info_msg("Could not listen on port 9418~n");
try_listen(Times) ->
Res = gen_tcp:listen(9418, [list, {packet, 0}, {active, false}]),
case Res of
{ok, LSock} ->
error_logger:info_msg("Listening on port 9418~n"),
LSock;
{error, Reason} ->
error_logger:info_msg("Could not listen on port 9418: ~p~n", [Reason]),
timer:sleep(5000),
try_listen(Times - 1)
end.
loop(LSock) ->
{ok, Sock} = gen_tcp:accept(LSock),
spawn(fun() -> handle_method(Sock) end),
loop(LSock).
handle_method(Sock) ->
case gen_tcp:recv(Sock, 0) of
{ok, Header} ->
{ok, Host} = extract_host(Header),
Method = extract_method_name(Header),
handle_method_dispatch(Method, Sock, Host, Header);
{error, closed} ->
ok = gen_tcp:close(Sock)
end.
handle_method_dispatch({ok, "upload-pack"}, Sock, Host, Header) ->
upload_pack:handle(Sock, Host, Header);
handle_method_dispatch({ok, "receive-pack"}, Sock, Host, Header) ->
receive_pack:handle(Sock, Host, Header);
handle_method_dispatch(invalid, Sock, _Host, _Header) ->
gen_tcp:send(Sock, "Invalid method declaration. Upgrade to the latest git.\n"),
ok = gen_tcp:close(Sock).
extract_method_name(Header) ->
case regexp:match(Header, "....git[ -][a-z\-]+ ") of
{match, Start, Length} ->
{ok, string:substr(Header, Start + 8, Length - 9)};
_Else ->
invalid
end.
extract_host(Header) ->
case regexp:match(string:to_lower(Header), "\000host=[^\000]+\000") of
{match, Start, Length} ->
{ok, string:substr(Header, Start + 6, Length - 7)};
_Else ->
{ok, "invalid"}
end. |
e2291d3f8e07b7c448817bc3853d484764f8454774ed5f8c52d4aed85161ebf1 | Mathnerd314/stroscot | Laziness.hs | import Data.List.Ordered (minus, union, unionAll)
-- Generators/streams:
-- | Inclusive ascending enumeration from n up to m (empty when n > m).
fromThenTo n m
  | n > m     = []
  | otherwise = n : fromThenTo (n + 1) m
-- | Infinite ascending stream starting at n.
fromThen n = n:(fromThen (n+1))
-- Sieve of Eratosthenes
-- | Infinite list of primes via an incremental sieve; `minus`/`unionAll`
-- come from Data.List.Ordered in the data-ordlist package (see imports).
primes = 2 : 3 : minus [5,7..] (unionAll [[p*p, p*p+2*p..] | p <- tail primes])
-- composition, like Unix pipes.
-- Laziness means only the first ten primes are ever computed here.
composition = take 10 primes
{-
Hamming numbers example (quoted from "A theory of nondeterminism, parallelism,
and concurrency" by M. Broy, "Theoretical Computer Science" vol 45, pp1-61,
example on page 9.
A program is required which generates the infinite stream of all numbers
greater than 1 of the form 2^i*3^j*4^k in ascending order.
To me, this is nicer than other solutions (eg Dijkstra. "A discipline of
programming" P129) I have seen or could construct myself.
-}
-- | Scale every element of an (assumed infinite) stream by n.
streammult n s = (n * head s) : streammult n (tail s)
-- | Merge two ascending streams into one ascending stream.
-- Duplicates are kept; both inputs are assumed to be infinite.
merge xs ys =
  let x = head xs
      y = head ys
  in if x <= y
       then x : merge (tail xs) ys
       else y : merge xs (tail ys)
-- Hamming-style streams: s3 enumerates the products of 2, 3 and 5 in
-- ascending order. Fix: s2 and s3 used `&` for cons — leftover notation
-- from the quoted paper — while s1 (and merge/streammult above) already
-- use Haskell's `:`; normalised to `:` so all three definitions parse.
s1 = streammult 5 (1 : s1)
s2 = merge (streammult 3 (1 : s2)) s1
s3 = merge (streammult 2 (1 : s3)) s2
{-
Oh!! BTW, lazy lists also allow the programmer to code things that s/he would
need 'static' variables for in 'C' -- like a random number generator -- since
there's no place for the 'seed' in a purely applicative order program. A lazy
program could define randoms() as follows to supply an infinite list of random
numbers...
-}
-- Infinite stream of pseudo-random values: each element is the hash of the
-- previous one, so no mutable "static" seed is needed.
-- NOTE(review): start_seed and some_seed_hasher are not defined in this
-- file — this is illustrative pseudo-code.
my_randoms = randoms start_seed -- start_seed is constant
randoms n = nh : randoms nh
  where
    nh = some_seed_hasher n
{-
Where clauses
This Miranda definition expresses a well-known algorithm for multiplying
without using multiplication (except multiplication and division by 2, which
are easy in binary).
mult x n = x*n
The "where" part is a convenient notation, saving some typing and
clarifying the structure (by making explicit the fact that the same value y is
calculated for both the 2nd and 3rd lines). But if y is evaluated eagerly, then
this function as written diverges. Therefore for an eager implementation extra
information is needed to say which cases y is calculated for.
My point was that this kind of solution involves a minute bit of extra
scheduling responsibility to say that the where/let clause should only be
evaluated in the last two cases, whereas the lazy solution can leave that to
the compiler and run-time system.
-}
-- Russian-peasant multiplication: the `where`-bound y is only demanded in
-- the last two guards, which is exactly the laziness point made above.
-- NOTE(review): this is Miranda-style pseudo-code — `Nat` is not defined
-- here, and `mod`/`div` would need backticks to be infix in Haskell.
mult :: Int->Nat->Int
mult x n
  | n == 0 = 0
  | n > 0 && n mod 2 == 0 = y
  | otherwise = y+x
  where y = 2*(mult x (n div 2))
{-
Circular programs.
Richard Bird wrote a paper about this (Acta Informatica 21, 239-250 (1984)).
Example: Suppose you have a list of natural numbers and you want to
subtract the least element of the list from all elements. In a lazy
language, you can do it all in one list traversal.
The first argument of "norm" is like an inherited attribute,
it has no value yet when "norm" is called from "normalize".
But when "norm" comes back, it returns (synthesized attribute)
the changed list and the minimum of the list
and the local declaration in "normalize" puts the two attributes together.
This only works, because the subtractions (x-m) in the result of
"norm" are delayed, their result is not needed for "norm" to proceed.
-}
-- | Subtract the least element of a list from every element in a single
-- traversal by feeding the (lazily computed) minimum back into the pass
-- that produces the subtracted results — Bird's circular-program trick.
normalize [] = []
normalize ls = shifted
  where (theMin, shifted) = norm theMin ls

-- | One pass: threads the eventual minimum m "down" while returning the
-- running minimum and the list of (x - m) thunks "up".
norm m [x]    = (x, [x - m])
norm m (x:xs) = (min x restMin, (x - m) : shifted)
  where (restMin, shifted) = norm m xs
-- Strict-style left fold (accumulate) vs. lazy-friendly right fold (reduce),
-- and two list appends built on them; the commentary below contrasts their
-- behaviour on partial/infinite input.
-- NOTE(review): Miranda/ML notation — `::` is cons here, not a Haskell
-- type annotation; illustrative pseudo-code, not compilable Haskell.
accumulate f a [] = a
accumulate f a (b::x) = accumulate f (f a b) x
reduce f a [] = a
reduce f a (b::x) = f b (reduce f a x)
x appendR y = reduce cons y x
x appendA y = revonto y (rev x)
  where
    rev x = revonto [] x
    revonto y x = accumulate consonto y x
    consonto y a = a :: y
accumulate is fine in a non - lazy language , but for a lazy language
it has the serious flaw that it returns no result until it has seen its
entire argument . On the other hand ,
reduce is capable of yielding a result without first seeing the entire argument .
That means that appendR 's behavior will be quite different in a lazy language from
appendA. x appendA omega = omega for all x ( where omega is the
` undefined ' or ` nonterminating ' value ) .
accumulate is fine in a non-lazy language, but for a lazy language
it has the serious flaw that it returns no result until it has seen its
entire argument. On the other hand,
reduce is capable of yielding a result without first seeing the entire argument.
That means that appendR's behavior will be quite different in a lazy language from
appendA. x appendA omega = omega for all x (where omega is the
`undefined' or `nonterminating' value).
-}
myand False x = False
myand x False = False
myand True True = True
test = myand undef False == False
This test fails in Haskell .
As a programmer , I think of the equations I write down as equations
and not as nested case - expressions , and I would like to be able to
manipulate them as I usually manipulate equations .
In other words : I would like the semantics to keep the promises
the syntax makes .
This test fails in Haskell.
As a programmer, I think of the equations I write down as equations
and not as nested case-expressions, and I would like to be able to
manipulate them as I usually manipulate equations.
In other words: I would like the semantics to keep the promises
the syntax makes.
-}
| null | https://raw.githubusercontent.com/Mathnerd314/stroscot/c5999489dffe00454bcdafb9a7e5edb9d4c673ec/tests/Laziness.hs | haskell | Generators/streams:
composition, like Unix pipes.
Oh!! BTW, lazy lists also allows the programmer to code things that s/he would
need 'static' variables for in 'C' -- like a random number generator -- since
there's no place for the 'seed' in a purely applicative order program. A lazy
program could define randoms() as follows to supply an infinite list of random
numbers...
start_seed is constant | import Data.List.Ordered (minus, union, unionAll)
fromThenTo n m
| n > m = []
| otherwise = n:(fromThenTo (n+1) m)
fromThen n = n:(fromThen (n+1))
Sieve of Eratosthenes
primes = 2 : 3 : minus [5,7..] (unionAll [[p*p, p*p+2*p..] | p <- tail primes])
composition = take 10 primes
Hamming numbers example ( quoted from " A theory of nondeterminism , parallelism ,
and concurrency " by , " Theoretical Computer Science " vol 45 , pp1 - 61 ,
example on page 9 .
A program is required which generates the infinite stream of all numbers
greater than 1 of the form 2^i*3^j*4^k in ascending order .
To me , this is nicer than other solutions ( eg Dijkstra . " A discipline of
programming " P129 ) I have seen or could construct myself .
Hamming numbers example (quoted from "A theory of nondeterminism, parallelism,
and concurrency" by M. Broy, "Theoretical Computer Science" vol 45, pp1-61,
example on page 9.
A program is required which generates the infinite stream of all numbers
greater than 1 of the form 2^i*3^j*4^k in ascending order.
To me, this is nicer than other solutions (eg Dijkstra. "A discipline of
programming" P129) I have seen or could construct myself.
-}
streammult n s = (n * head s) : streammult n (tail s)
merge s1 s2 =
if head s1 <= head s2
then head s1 : merge (tail s1) s2
else head s2 : merge s1 (tail s2)
s1 = streammult 5 (1 : s1)
s2 = merge (streammult 3 (1 & s2)) s1
s3 = merge (streammult 2 (1 & s3)) s2
randoms n = nh : randoms nh
where
nh = some_seed_hasher n
Where clauses
This definition expresses a well - known algorithm for multiplying
without using multiplication ( except multiplication and division by 2 , which
are easy in binary ) .
mult x n = x*n
The " where " part is a convenient notation , saving some typing and
clarifying the structure ( by making explicit the fact that the same value y is
calculated for both the 2nd and 3rd lines ) . But if y is evaluated eagerly , then
this function as written diverges . Therefore for an eager implementation extra
information is needed to say which cases y is calculated for .
My point was that this kind of solution involves a minute bit of extra
scheduling responsibility to say that the where / let clause should only be
evaluated in the last two cases , whereas the lazy solution can leave that to
the compiler and run - time system .
Where clauses
This Miranda definition expresses a well-known algorithm for multiplying
without using multiplication (except multiplication and division by 2, which
are easy in binary).
mult x n = x*n
The "where" part is a convenient notation, saving some typing and
clarifying the structure (by making explicit the fact that the same value y is
calculated for both the 2nd and 3rd lines). But if y is evaluated eagerly, then
this function as written diverges. Therefore for an eager implementation extra
information is needed to say which cases y is calculated for.
My point was that this kind of solution involves a minute bit of extra
scheduling responsibility to say that the where/let clause should only be
evaluated in the last two cases, whereas the lazy solution can leave that to
the compiler and run-time system.
-}
mult :: Int->Nat->Int
mult x n
| n == 0 = 0
| n > 0 && n mod 2 == 0 = y
| otherwise = y+x
where y = 2*(mult x (n div 2))
Circular programs .
wrote a paper about this ( Acta Informatica 21 , 239 - 250 ( 1984 ) ) .
Example : Suppose you have a list of natural numbers and you want to
subtract the least element of the list from all elements . In a lazy
language , you can do it all in one list traversal .
The first argument of " norm " is like an inherited attribute ,
it has no value yet when " norm " is called from " normalize " .
But when " norm " comes back , it returns ( synthesized attribute )
the changed list and the minimum of the list
and the local declaration in " normalize " puts the two attributes together .
This only works , because the subtractions ( x - m ) in the result of
" norm " are delayed , their result is not needed for " norm " to proceed .
Circular programs.
Richard Bird wrote a paper about this (Acta Informatica 21, 239-250 (1984)).
Example: Suppose you have a list of natural numbers and you want to
subtract the least element of the list from all elements. In a lazy
language, you can do it all in one list traversal.
The first argument of "norm" is like an inherited attribute,
it has no value yet when "norm" is called from "normalize".
But when "norm" comes back, it returns (synthesized attribute)
the changed list and the minimum of the list
and the local declaration in "normalize" puts the two attributes together.
This only works, because the subtractions (x-m) in the result of
"norm" are delayed, their result is not needed for "norm" to proceed.
-}
normalize [] = []
normalize ls = res
where (m,res) = norm m ls
norm m [x] = (x,[x-m])
norm m (x:xs) = (min x m',x-m : res)
where (m',res) = norm m xs
accumulate f a [] = a
accumulate f a (b::x) = accumulate f (f a b) x
reduce f a [] = a
reduce f a (b::x) = f b (reduce f a x)
x appendR y = reduce cons y x
x appendA y = revonto y (rev x)
where
rev x = revonto [] x
revonto y x = accumulate consonto y x
consonto y a = a :: y
accumulate is fine in a non - lazy language , but for a lazy language
it has the serious flaw that it returns no result until it has seen its
entire argument . On the other hand ,
reduce is capable of yielding a result without first seeing the entire argument .
That means that appendR 's behavior will be quite different in a lazy language from
appendA. x appendA omega = omega for all x ( where omega is the
` undefined ' or ` nonterminating ' value ) .
accumulate is fine in a non-lazy language, but for a lazy language
it has the serious flaw that it returns no result until it has seen its
entire argument. On the other hand,
reduce is capable of yielding a result without first seeing the entire argument.
That means that appendR's behavior will be quite different in a lazy language from
appendA. x appendA omega = omega for all x (where omega is the
`undefined' or `nonterminating' value).
-}
myand False x = False
myand x False = False
myand True True = True
test = myand undef False == False
This test fails in Haskell .
As a programmer , I think of the equations I write down as equations
and not as nested case - expressions , and I would like to be able to
manipulate them as I usually manipulate equations .
In other words : I would like the semantics to keep the promises
the syntax makes .
This test fails in Haskell.
As a programmer, I think of the equations I write down as equations
and not as nested case-expressions, and I would like to be able to
manipulate them as I usually manipulate equations.
In other words: I would like the semantics to keep the promises
the syntax makes.
-}
|
5fe2d395f8993a26b82c7e5c86ff1c25939a08c1b56f5c397ae90c0926f277c5 | roryk/bcbio.rnaseq | project.clj | (defproject bcbio.rnaseq "1.2.0"
:description "Quality control and differential expression of bcbio RNA-seq runs"
:dependencies [[org.clojure/clojure "1.7.0"]
[de.ubercode.clostache/clostache "1.4.0"]
[version-clj "0.1.2"]
[incanter "1.5.5"]
[clj-stacktrace "0.2.5"]
[incanter/incanter-io "1.5.5"]
[me.raynes/fs "1.4.4"]
[clj-yaml "0.4.0"]
[org.clojure/math.numeric-tower "0.0.4"]
[org.clojure/data.json "0.2.2"]
[clj-http "0.7.5"]
[net.mikera/vectorz-clj "0.22.0"]
[org.clojure/math.combinatorics "0.0.4"]
[org.clojure/tools.cli "0.3.5"]
[bcbio.run "0.0.1"]
[clj-http "0.7.8"]]
:resource-paths ["resources"]
:jvm-opts ["-Xmx2g"]
:aot [bcbio.rnaseq.core]
:main bcbio.rnaseq.core
:keep-non-project-classes true
:test-selectors {:default (complement :slow)
:slow :slow
:all (constantly true)})
| null | https://raw.githubusercontent.com/roryk/bcbio.rnaseq/66c629eb737c9a0096082d6683657bf9d89eb271/project.clj | clojure | (defproject bcbio.rnaseq "1.2.0"
:description "Quality control and differential expression of bcbio RNA-seq runs"
:dependencies [[org.clojure/clojure "1.7.0"]
[de.ubercode.clostache/clostache "1.4.0"]
[version-clj "0.1.2"]
[incanter "1.5.5"]
[clj-stacktrace "0.2.5"]
[incanter/incanter-io "1.5.5"]
[me.raynes/fs "1.4.4"]
[clj-yaml "0.4.0"]
[org.clojure/math.numeric-tower "0.0.4"]
[org.clojure/data.json "0.2.2"]
[clj-http "0.7.5"]
[net.mikera/vectorz-clj "0.22.0"]
[org.clojure/math.combinatorics "0.0.4"]
[org.clojure/tools.cli "0.3.5"]
[bcbio.run "0.0.1"]
[clj-http "0.7.8"]]
:resource-paths ["resources"]
:jvm-opts ["-Xmx2g"]
:aot [bcbio.rnaseq.core]
:main bcbio.rnaseq.core
:keep-non-project-classes true
:test-selectors {:default (complement :slow)
:slow :slow
:all (constantly true)})
| |
27af93e2fb730e689721ae40aa7e79db9916bba3aa7aab726ee50b3d0974236f | kelsey-sorrels/robinson | materials.clj | ;; Functions for generating random items.
(ns robinson.materials
(:require [taoensso.timbre :as log]
[taoensso.timbre :as log]
[datascript.core :as d]))
(def material-schema {
::isa {:db/valueType :db.type/ref
:db/cardinality :db.cardinality/many}
::properties {:db/cardinality :db.cardinality/many}})
(def item-schema {
:item/id {:db/unique :db.unique/identity}
:item/materials {:db/cardinality :db.cardinality/many
:db/type :db.type/ref}
:item/components {:db/cardinality :db.cardinality/many
:db/type :db.type/ref}
:item/weapon-types {:db.cardinality :db.cardinality/many}})
(def material-heirarchy [
{:db/ident :solid ::properties #{:wieldable}}
{:db/ident :liquid ::properties #{:containable}}
{:db/ident :container ::isa #{:solid}}
{:db/ident :mineral ::isa #{:solid}}
{:db/ident :plant-based ::isa #{:solid} ::properties #{:flammable}}
{:db/ident :animal-based ::isa #{:solid}}
{:db/ident :wood ::isa #{:plant-based}}
{:db/ident :water ::isa #{:liquid}}
{:db/ident :lava ::isa #{:liquid} ::properties #{:hot}}
{:db/ident :rock ::isa #{:solid}}
{:db/ident :obsidian ::isa #{:solid} ::properties #{:brittle}}
{:db/ident :flint ::isa #{:mineral}}
{:db/ident :metal ::isa #{:solid}}
{:db/ident :cloth ::isa #{:plant-based} ::properties #{:flexible}}
{:db/ident :paper-based ::isa #{:plant-based} ::properties #{:flexible}}
{:db/ident :glass ::isa #{:solid} ::properties #{:brittle :reflective}}
{:db/ident :metal ::properties #{:mallable}}
{:db/ident :fruit ::isa #{:plant-based} ::properties #{:edible}}
{:db/ident :bone ::isa #{:animal-based
:solid}}
{:db/ident :leather ::isa #{:animal-based} ::properties #{:flexible}}
])
(def db (-> (d/empty-db (merge material-schema item-schema))
(d/db-with material-heirarchy)))
(def all-properties
'[[(all-properties ?material ?properties)
[?material ::properties ?properties]]
[(all-properties ?material ?properties)
[?material ::isa ?parents]
(all-properties ?parents ?properties)]])
(defn -main [& args]
(let [test-item {:item/id :obsidian-knife :item/materials #{:obsidian :wood}}
q '[:find ?properties #_?material-ident
:in $ %
:where
[_ :item/materials ?material]
[?material :db/ident ?material-ident]
[all-properties ?material ?properties]]]
;; execute query: item
(log/info (->> (d/q q
(d/db-with db [test-item])
all-properties)
(map first)
set))))
(-main)
| null | https://raw.githubusercontent.com/kelsey-sorrels/robinson/337fd2646882708331257d1f3db78a3074ccc67a/src/robinson/materials.clj | clojure | Functions for generating random items.
execute query: item | (ns robinson.materials
(:require [taoensso.timbre :as log]
[taoensso.timbre :as log]
[datascript.core :as d]))
(def material-schema {
::isa {:db/valueType :db.type/ref
:db/cardinality :db.cardinality/many}
::properties {:db/cardinality :db.cardinality/many}})
(def item-schema {
:item/id {:db/unique :db.unique/identity}
:item/materials {:db/cardinality :db.cardinality/many
:db/type :db.type/ref}
:item/components {:db/cardinality :db.cardinality/many
:db/type :db.type/ref}
:item/weapon-types {:db.cardinality :db.cardinality/many}})
(def material-heirarchy [
{:db/ident :solid ::properties #{:wieldable}}
{:db/ident :liquid ::properties #{:containable}}
{:db/ident :container ::isa #{:solid}}
{:db/ident :mineral ::isa #{:solid}}
{:db/ident :plant-based ::isa #{:solid} ::properties #{:flammable}}
{:db/ident :animal-based ::isa #{:solid}}
{:db/ident :wood ::isa #{:plant-based}}
{:db/ident :water ::isa #{:liquid}}
{:db/ident :lava ::isa #{:liquid} ::properties #{:hot}}
{:db/ident :rock ::isa #{:solid}}
{:db/ident :obsidian ::isa #{:solid} ::properties #{:brittle}}
{:db/ident :flint ::isa #{:mineral}}
{:db/ident :metal ::isa #{:solid}}
{:db/ident :cloth ::isa #{:plant-based} ::properties #{:flexible}}
{:db/ident :paper-based ::isa #{:plant-based} ::properties #{:flexible}}
{:db/ident :glass ::isa #{:solid} ::properties #{:brittle :reflective}}
{:db/ident :metal ::properties #{:mallable}}
{:db/ident :fruit ::isa #{:plant-based} ::properties #{:edible}}
{:db/ident :bone ::isa #{:animal-based
:solid}}
{:db/ident :leather ::isa #{:animal-based} ::properties #{:flexible}}
])
(def db (-> (d/empty-db (merge material-schema item-schema))
(d/db-with material-heirarchy)))
(def all-properties
'[[(all-properties ?material ?properties)
[?material ::properties ?properties]]
[(all-properties ?material ?properties)
[?material ::isa ?parents]
(all-properties ?parents ?properties)]])
(defn -main [& args]
(let [test-item {:item/id :obsidian-knife :item/materials #{:obsidian :wood}}
q '[:find ?properties #_?material-ident
:in $ %
:where
[_ :item/materials ?material]
[?material :db/ident ?material-ident]
[all-properties ?material ?properties]]]
(log/info (->> (d/q q
(d/db-with db [test-item])
all-properties)
(map first)
set))))
(-main)
|
312760e62a8f09a1ee6d175e7598957be261e1c001fdbc6af2cc53d863873e37 | bburdette/chordster | PlaySong.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE OverloadedStrings #-}
module PlaySong where
import Import
import Control.Concurrent
import Sound.OSC.FD
import Control.Monad
import SongControl
import Data.Ratio
import Data.List
import Data.Int
import Data.Maybe
import Data.IORef
import qualified Data.Text as T
import Control.Concurrent.STM.TChan
import Control.Concurrent.STM
import GHC.Generics
data PlaySongChord = PlaySongChord
{ songChord :: SongChord
, chordRoot :: ChordRoot
, name :: Text
, notes :: [Rational]
}
deriving Show
data TextSong = TextSong
{ song : : , chords : : [ PlaySongChord ]
}
deriving Show
loadTextSong : : SongId - > Handler ( Maybe TextSong )
= do
get chords < - runDB $ selectList [ SongChordSong = = . ] [ Asc SongChordSeqnum ]
pscs < - makePscs ( map entityVal chords )
case mbsong of
Nothing - > return Nothing
Just sng - > return $ Just $ TextSong sng ( pscs )
tsToWebSong : : TextSong - > WebSong
tsToWebSong ts = WebSong {
wsName = songName ( song ts )
, wsChords = ( \psc - > name ) < $ > ( chords ts )
}
data TextSong = TextSong
{ song :: Song
, chords :: [PlaySongChord]
}
deriving Show
loadTextSong :: SongId -> Handler (Maybe TextSong)
loadTextSong sid = do
mbsong <- runDB $ get sid
chords <- runDB $ selectList [SongChordSong ==. sid] [Asc SongChordSeqnum]
pscs <- makePscs (map entityVal chords)
case mbsong of
Nothing -> return Nothing
Just sng -> return $ Just $ TextSong sng (catMaybes pscs)
tsToWebSong :: TextSong -> WebSong
tsToWebSong ts = WebSong {
wsName = songName (song ts)
, wsChords = (\psc -> name psc) <$> (chords ts)
}
-}
data WebSong = WebSong
{ wsId :: Int64
, wsName :: Text
, wsChords :: [WebChord]
, wsTempo :: Int
}
deriving (Show, Generic)
instance ToJSON WebSong
data WebChord = WebChord
{ wcName :: Text
, wcDuration :: Int
}
deriving (Show, Generic)
instance ToJSON WebChord
data WsIndex = WsIndex { wiIndex :: Int }
deriving (Show, Generic)
instance ToJSON WsIndex
data WsStop = WsStop
{ wssId :: Int64
}
deriving Generic
instance ToJSON WsStop
toWebSong :: Int64 -> Song -> [PlaySongChord] -> WebSong
toWebSong sid song chords = WebSong {
wsId = sid
, wsName = songName song
, wsChords =
(\psc -> WebChord
{ wcName = (chordRootName (chordRoot psc) <> name psc)
, wcDuration = songChordDuration (songChord psc) } )
<$> chords
, wsTempo = songTempo song
}
makePscs :: [SongChord] -> Handler [Maybe PlaySongChord]
makePscs scs = do
mapM scPsc scs
scPsc :: SongChord -> Handler (Maybe PlaySongChord)
scPsc sc = do
mbchroot <- runDB $ get (songChordChordroot sc)
mbntst <- runDB $ get (songChordNoteset sc)
notes <- runDB $ selectList [NoteNoteset ==. songChordNoteset sc] []
case (mbchroot, mbntst) of
(Nothing,_) -> return Nothing
(_,Nothing) -> return Nothing
(Just chr, Just ntst) ->
return $ Just PlaySongChord {
songChord = sc,
chordRoot = chr,
name = (noteSetName ntst),
notes = map (\(Entity nid note) -> fromIntegral (noteNumer note) % fromIntegral (noteDenom note)) notes
}
tempoToBeattime :: Int -> Int
tempoToBeattime tempo =
-- tempo is Beats per minute.
one minute in microseconds is ...
let minute = 1000000 * 60 in
div minute tempo
setupLights :: [UDP] -> IO ()
setupLights lightcons = do
send light flash
_ <- mapM (\conn -> do
setArrayColor conn 1 0
setArrayColor conn 2 16711680)
lightcons
return ()
setArrayColor :: UDP -> Int -> Int -> IO ()
setArrayColor conn arrayindex color = do
let tst = (Message "updatearray" [(d_put arrayindex)])
sendOSC conn tst
_ <- mapM (\elt -> do
sendOSC conn (Message "setpixel" [(d_put elt), (d_put color)]))
[0..23::Int]
return ()
playSong :: SongId -> IORef (Maybe SongId) -> TChan Text -> Song -> [PlaySongChord] -> [(String,Int)] -> [(String,Int)] -> IO ()
playSong sid iorsid textchan song chords chorddests lightdests = do
chordcons <- mapM (\(ip,port) -> openUDP ip port) chorddests
lightcons <- mapM (\(ip,port) -> openUDP ip port) lightdests
setupLights lightcons
let websong = toWebSong 0 song chords
wsjs = toJSON websong
(liftIO . atomically) $ writeTChan textchan (toJsonText wsjs)
-- set current song id.
writeIORef iorsid (Just sid)
forever (playit textchan chordcons lightcons ((tempoToBeattime . songTempo) song) chords 0)
playSongSequence :: IORef (Maybe SongId) -> TChan Text -> [(Song, SongId, Int, [PlaySongChord])] -> [(String,Int)] -> [(String,Int)] -> IO ()
playSongSequence iorsid textchan songchords chorddests lightdests = do
chordcons <- mapM (\(ip,port) -> openUDP ip port) chorddests
lightcons <- mapM (\(ip,port) -> openUDP ip port) lightdests
setupLights lightcons
forever (mapM (\(song, sid, reps, chords) -> do
let websong = toWebSong 0 song chords
wsjs = toJSON websong
(liftIO . atomically) $ writeTChan textchan (toJsonText wsjs)
-- set current song id.
writeIORef iorsid (Just sid)
replicateM_ reps
(playit textchan chordcons lightcons ((tempoToBeattime . songTempo) song) chords 0))
songchords)
chordnotes :: Int -> [Rational] -> [Int]
chordnotes _ [] = []
chordnotes den rats =
let
notes = map (\rat -> (fromIntegral (numerator rat)) * (quot den (fromIntegral (denominator rat)))) rats
in
(den : notes)
playit :: TChan Text -> [UDP] -> [UDP] -> Int -> [PlaySongChord] -> Int -> IO ()
playit textchan ccons lcons beattime [] count = return ()
playit textchan ccons lcons beattime (psc:pscs) count =
-- on chord change, set the root and the scale.
let rootmsg = Message "root" (map d_put [(chordRootNumer (chordRoot psc)),
(chordRootDenom (chordRoot psc))])
chordmsg = Message "scale" $ map d_put $ chordnotes 12 $ notes psc
chordtext = ( chordRootName ( ) ) < > ( name )
wsi = WsIndex { wiIndex = count }
wsijs = toJSON wsi
in do
send root and scale msgs to all destinations .
_ <- mapM (\conn -> do
sendOSC conn rootmsg
sendOSC conn chordmsg)
ccons
(liftIO . atomically) $ writeTChan textchan (toJsonText wsijs)
-- delay N times for N beats, flashing the lights each time.
let flashmsg1 = Message "fadeto" (map d_put [1::Int,20])
flashmsg2 = Message "fadeto" (map d_put [0::Int,20])
_ <- mapM (\_ -> do
send light flash
_ <- mapM (\conn -> do
sendOSC conn flashmsg1
sendOSC conn flashmsg2)
lcons
-- delay for a beat.
threadDelay beattime)
(take (songChordDuration (songChord psc)) [0..])
playit textchan ccons lcons beattime pscs (count + 1)
getSongInfo :: SongId -> Handler (Maybe (Song, [PlaySongChord]))
getSongInfo sid = do
app <- getYesod
mbsong <- runDB $ get sid
chords <- runDB $ selectList [SongChordSong ==. sid] [Asc SongChordSeqnum]
songchords <- makePscs (map entityVal chords)
case mbsong of
(Just song) -> return (Just (song, catMaybes songchords))
Nothing -> return Nothing
getDests :: Handler ([(String,Int)], [(String,Int)])
getDests = do
chorddests <- runDB $ selectList [OSCDestType ==. T.pack "chords"] []
lightdests <- runDB $ selectList [OSCDestType ==. T.pack "lights"] []
let chordips = map (\(Entity _ dest) ->
(T.unpack $ oSCDestIp dest, oSCDestPort dest))
chorddests
lightips = map (\(Entity _ dest) ->
(T.unpack $ oSCDestIp dest, oSCDestPort dest))
lightdests
return (chordips, lightips)
| null | https://raw.githubusercontent.com/bburdette/chordster/70d235f1ca379e5ecd4a8f39dc1e734e2d50978b/PlaySong.hs | haskell | # LANGUAGE OverloadedStrings #
tempo is Beats per minute.
set current song id.
set current song id.
on chord change, set the root and the scale.
delay N times for N beats, flashing the lights each time.
delay for a beat. | # LANGUAGE DeriveGeneric #
module PlaySong where
import Import
import Control.Concurrent
import Sound.OSC.FD
import Control.Monad
import SongControl
import Data.Ratio
import Data.List
import Data.Int
import Data.Maybe
import Data.IORef
import qualified Data.Text as T
import Control.Concurrent.STM.TChan
import Control.Concurrent.STM
import GHC.Generics
data PlaySongChord = PlaySongChord
{ songChord :: SongChord
, chordRoot :: ChordRoot
, name :: Text
, notes :: [Rational]
}
deriving Show
data TextSong = TextSong
{ song : : , chords : : [ PlaySongChord ]
}
deriving Show
loadTextSong : : SongId - > Handler ( Maybe TextSong )
= do
get chords < - runDB $ selectList [ SongChordSong = = . ] [ Asc SongChordSeqnum ]
pscs < - makePscs ( map entityVal chords )
case mbsong of
Nothing - > return Nothing
Just sng - > return $ Just $ TextSong sng ( pscs )
tsToWebSong : : TextSong - > WebSong
tsToWebSong ts = WebSong {
wsName = songName ( song ts )
, wsChords = ( \psc - > name ) < $ > ( chords ts )
}
data TextSong = TextSong
{ song :: Song
, chords :: [PlaySongChord]
}
deriving Show
loadTextSong :: SongId -> Handler (Maybe TextSong)
loadTextSong sid = do
mbsong <- runDB $ get sid
chords <- runDB $ selectList [SongChordSong ==. sid] [Asc SongChordSeqnum]
pscs <- makePscs (map entityVal chords)
case mbsong of
Nothing -> return Nothing
Just sng -> return $ Just $ TextSong sng (catMaybes pscs)
tsToWebSong :: TextSong -> WebSong
tsToWebSong ts = WebSong {
wsName = songName (song ts)
, wsChords = (\psc -> name psc) <$> (chords ts)
}
-}
data WebSong = WebSong
{ wsId :: Int64
, wsName :: Text
, wsChords :: [WebChord]
, wsTempo :: Int
}
deriving (Show, Generic)
instance ToJSON WebSong
data WebChord = WebChord
{ wcName :: Text
, wcDuration :: Int
}
deriving (Show, Generic)
instance ToJSON WebChord
data WsIndex = WsIndex { wiIndex :: Int }
deriving (Show, Generic)
instance ToJSON WsIndex
data WsStop = WsStop
{ wssId :: Int64
}
deriving Generic
instance ToJSON WsStop
toWebSong :: Int64 -> Song -> [PlaySongChord] -> WebSong
toWebSong sid song chords = WebSong {
wsId = sid
, wsName = songName song
, wsChords =
(\psc -> WebChord
{ wcName = (chordRootName (chordRoot psc) <> name psc)
, wcDuration = songChordDuration (songChord psc) } )
<$> chords
, wsTempo = songTempo song
}
makePscs :: [SongChord] -> Handler [Maybe PlaySongChord]
makePscs scs = do
mapM scPsc scs
scPsc :: SongChord -> Handler (Maybe PlaySongChord)
scPsc sc = do
mbchroot <- runDB $ get (songChordChordroot sc)
mbntst <- runDB $ get (songChordNoteset sc)
notes <- runDB $ selectList [NoteNoteset ==. songChordNoteset sc] []
case (mbchroot, mbntst) of
(Nothing,_) -> return Nothing
(_,Nothing) -> return Nothing
(Just chr, Just ntst) ->
return $ Just PlaySongChord {
songChord = sc,
chordRoot = chr,
name = (noteSetName ntst),
notes = map (\(Entity nid note) -> fromIntegral (noteNumer note) % fromIntegral (noteDenom note)) notes
}
tempoToBeattime :: Int -> Int
tempoToBeattime tempo =
one minute in microseconds is ...
let minute = 1000000 * 60 in
div minute tempo
setupLights :: [UDP] -> IO ()
setupLights lightcons = do
send light flash
_ <- mapM (\conn -> do
setArrayColor conn 1 0
setArrayColor conn 2 16711680)
lightcons
return ()
setArrayColor :: UDP -> Int -> Int -> IO ()
setArrayColor conn arrayindex color = do
let tst = (Message "updatearray" [(d_put arrayindex)])
sendOSC conn tst
_ <- mapM (\elt -> do
sendOSC conn (Message "setpixel" [(d_put elt), (d_put color)]))
[0..23::Int]
return ()
playSong :: SongId -> IORef (Maybe SongId) -> TChan Text -> Song -> [PlaySongChord] -> [(String,Int)] -> [(String,Int)] -> IO ()
playSong sid iorsid textchan song chords chorddests lightdests = do
chordcons <- mapM (\(ip,port) -> openUDP ip port) chorddests
lightcons <- mapM (\(ip,port) -> openUDP ip port) lightdests
setupLights lightcons
let websong = toWebSong 0 song chords
wsjs = toJSON websong
(liftIO . atomically) $ writeTChan textchan (toJsonText wsjs)
writeIORef iorsid (Just sid)
forever (playit textchan chordcons lightcons ((tempoToBeattime . songTempo) song) chords 0)
playSongSequence :: IORef (Maybe SongId) -> TChan Text -> [(Song, SongId, Int, [PlaySongChord])] -> [(String,Int)] -> [(String,Int)] -> IO ()
playSongSequence iorsid textchan songchords chorddests lightdests = do
chordcons <- mapM (\(ip,port) -> openUDP ip port) chorddests
lightcons <- mapM (\(ip,port) -> openUDP ip port) lightdests
setupLights lightcons
forever (mapM (\(song, sid, reps, chords) -> do
let websong = toWebSong 0 song chords
wsjs = toJSON websong
(liftIO . atomically) $ writeTChan textchan (toJsonText wsjs)
writeIORef iorsid (Just sid)
replicateM_ reps
(playit textchan chordcons lightcons ((tempoToBeattime . songTempo) song) chords 0))
songchords)
chordnotes :: Int -> [Rational] -> [Int]
chordnotes _ [] = []
chordnotes den rats =
let
notes = map (\rat -> (fromIntegral (numerator rat)) * (quot den (fromIntegral (denominator rat)))) rats
in
(den : notes)
playit :: TChan Text -> [UDP] -> [UDP] -> Int -> [PlaySongChord] -> Int -> IO ()
playit textchan ccons lcons beattime [] count = return ()
playit textchan ccons lcons beattime (psc:pscs) count =
let rootmsg = Message "root" (map d_put [(chordRootNumer (chordRoot psc)),
(chordRootDenom (chordRoot psc))])
chordmsg = Message "scale" $ map d_put $ chordnotes 12 $ notes psc
chordtext = ( chordRootName ( ) ) < > ( name )
wsi = WsIndex { wiIndex = count }
wsijs = toJSON wsi
in do
send root and scale msgs to all destinations .
_ <- mapM (\conn -> do
sendOSC conn rootmsg
sendOSC conn chordmsg)
ccons
(liftIO . atomically) $ writeTChan textchan (toJsonText wsijs)
let flashmsg1 = Message "fadeto" (map d_put [1::Int,20])
flashmsg2 = Message "fadeto" (map d_put [0::Int,20])
_ <- mapM (\_ -> do
send light flash
_ <- mapM (\conn -> do
sendOSC conn flashmsg1
sendOSC conn flashmsg2)
lcons
threadDelay beattime)
(take (songChordDuration (songChord psc)) [0..])
playit textchan ccons lcons beattime pscs (count + 1)
getSongInfo :: SongId -> Handler (Maybe (Song, [PlaySongChord]))
getSongInfo sid = do
app <- getYesod
mbsong <- runDB $ get sid
chords <- runDB $ selectList [SongChordSong ==. sid] [Asc SongChordSeqnum]
songchords <- makePscs (map entityVal chords)
case mbsong of
(Just song) -> return (Just (song, catMaybes songchords))
Nothing -> return Nothing
getDests :: Handler ([(String,Int)], [(String,Int)])
getDests = do
chorddests <- runDB $ selectList [OSCDestType ==. T.pack "chords"] []
lightdests <- runDB $ selectList [OSCDestType ==. T.pack "lights"] []
let chordips = map (\(Entity _ dest) ->
(T.unpack $ oSCDestIp dest, oSCDestPort dest))
chorddests
lightips = map (\(Entity _ dest) ->
(T.unpack $ oSCDestIp dest, oSCDestPort dest))
lightdests
return (chordips, lightips)
|
c7697724ee3e360b38db76d416bf08f71d8029e5a7adb1b4fb1d49856aaf8742 | 0install/0install | common.ml | Copyright ( C ) 2013 , the README file for details , or visit .
* See the README file for details, or visit .
*)
(** Common types and functions. This module is intended to be opened. *)
type filepath = string
(** Raise this to exit the program. Allows finally blocks to run. *)
exception System_exit of int
module Platform =
struct
type t = {
* OS , e.g. " Linux "
* OS version , e.g. " 3.10.3 - 1 - ARCH "
* CPU type , e.g. " x86_64 "
}
end
(** Define an interface for interacting with the system, so we can replace it
in unit-tests. *)
class type filesystem =
object
method with_open_in : open_flag list -> (in_channel -> 'a) -> filepath -> 'a
method with_open_out : open_flag list -> mode:Unix.file_perm -> (out_channel -> 'a) -> filepath -> 'a
method atomic_write : open_flag list -> mode:Unix.file_perm -> (out_channel -> 'a) -> filepath -> 'a
method mkdir : filepath -> Unix.file_perm -> unit
(** Returns [false] for a broken symlink. *)
method file_exists : filepath -> bool
method lstat : filepath -> Unix.stats option
method stat : filepath -> Unix.stats option
method unlink : filepath -> unit
method rmdir : filepath -> unit
method getcwd : filepath
method chdir : filepath -> unit
method hardlink : filepath -> filepath -> unit
method rename : filepath -> filepath -> unit
method readdir : filepath -> (string array, exn) result
method chmod : filepath -> Unix.file_perm -> unit
method set_mtime : filepath -> float -> unit
method symlink : target:filepath -> newlink:filepath -> unit
method readlink : filepath -> filepath option
end
class type processes =
object
method exec : 'a. ?search_path:bool -> ?env:string array -> string list -> 'a
method spawn_detach : ?search_path:bool -> ?env:string array -> string list -> unit
method create_process : ?env:string array -> string list -> Unix.file_descr -> Unix.file_descr -> Unix.file_descr -> int
* [ reap_child ? kill_first : signal child_pid ] calls [ waitpid ] to collect the child .
@raise Safe_exn . T if it did n't exit with a status of 0 ( success ) .
@raise Safe_exn.T if it didn't exit with a status of 0 (success). *)
method reap_child : ?kill_first:int -> int -> unit
(** Low-level interface, in case you need to process the exit status yourself. *)
method waitpid_non_intr : int -> (int * Unix.process_status)
end
class type environment =
object
method getenv : Env.name -> string option
method environment : string array
end
class type windows_api =
object
method get_appdata : string
method get_local_appdata : string
method get_common_appdata : string
Reads from HKEY_LOCAL_MACHINE
Reads from HKEY_LOCAL_MACHINE
end
class type system =
object
inherit filesystem
inherit processes
inherit environment
method argv : string array
method time : float
method isatty : Unix.file_descr -> bool
(** True if we're on Unix and running as root; we must take care to avoid creating files in the wrong
* place when running under sudo. *)
method running_as_root : bool
method platform : Platform.t
method windows_api : windows_api option
(** In dry-run mode, returns the underlying system. *)
method bypass_dryrun : system
end
let on_windows = Filename.dir_sep <> "/"
* The string used to separate paths ( " : " on Unix , " ; " on Windows ) .
let path_sep = if on_windows then ";" else ":"
* Join a relative path onto a base .
@raise Safe_exn . T if the second path is not relative .
@raise Safe_exn.T if the second path is not relative. *)
let (+/) a b =
if b = "" then
a
else if Filename.is_relative b then
Filename.concat a b
else
Safe_exn.failf "Attempt to append absolute path: %s + %s" a b
let log_debug = Logging.log_debug
let log_info = Logging.log_info
let log_warning = Logging.log_warning
(** [with_errors_logged note f] is [f ()], except that if it raises any exception, the
exception is logged at warning level with the message provided by [note]. The exception
is not re-raised. *)
let with_errors_logged note f =
Lwt.catch f
(fun ex ->
note (log_warning ~ex);
Lwt.return ()
)
(** [default d opt] unwraps option [opt], returning [d] if it was [None]. *)
let default d = function
| None -> d
| Some x -> x
(** A null coalescing operator. *)
let (|?) maybe default =
match maybe with
| Some v -> v
| None -> Lazy.force default
let if_some fn = function
| None -> ()
| Some x -> fn x
let pipe_some fn = function
| None -> None
| Some x -> fn x
let map_some fn = function
| None -> None
| Some x -> Some (fn x)
let (>>=) = Lwt.(>>=)
let (>|=) = Lwt.(>|=)
let return = Lwt.return
| null | https://raw.githubusercontent.com/0install/0install/22eebdbe51a9f46cda29eed3e9e02e37e36b2d18/src/support/common.ml | ocaml | * Common types and functions. This module is intended to be opened.
* Raise this to exit the program. Allows finally blocks to run.
* Define an interface for interacting with the system, so we can replace it
in unit-tests.
* Returns [false] for a broken symlink.
* Low-level interface, in case you need to process the exit status yourself.
* True if we're on Unix and running as root; we must take care to avoid creating files in the wrong
* place when running under sudo.
* In dry-run mode, returns the underlying system.
* [with_errors_logged note f] is [f ()], except that if it raises any exception, the
exception is logged at warning level with the message provided by [note]. The exception
is not re-raised.
* [default d opt] unwraps option [opt], returning [d] if it was [None].
* A null coalescing operator. | Copyright ( C ) 2013 , the README file for details , or visit .
* See the README file for details, or visit .
*)
type filepath = string
exception System_exit of int
module Platform =
struct
type t = {
* OS , e.g. " Linux "
* OS version , e.g. " 3.10.3 - 1 - ARCH "
* CPU type , e.g. " x86_64 "
}
end
class type filesystem =
object
method with_open_in : open_flag list -> (in_channel -> 'a) -> filepath -> 'a
method with_open_out : open_flag list -> mode:Unix.file_perm -> (out_channel -> 'a) -> filepath -> 'a
method atomic_write : open_flag list -> mode:Unix.file_perm -> (out_channel -> 'a) -> filepath -> 'a
method mkdir : filepath -> Unix.file_perm -> unit
method file_exists : filepath -> bool
method lstat : filepath -> Unix.stats option
method stat : filepath -> Unix.stats option
method unlink : filepath -> unit
method rmdir : filepath -> unit
method getcwd : filepath
method chdir : filepath -> unit
method hardlink : filepath -> filepath -> unit
method rename : filepath -> filepath -> unit
method readdir : filepath -> (string array, exn) result
method chmod : filepath -> Unix.file_perm -> unit
method set_mtime : filepath -> float -> unit
method symlink : target:filepath -> newlink:filepath -> unit
method readlink : filepath -> filepath option
end
class type processes =
object
method exec : 'a. ?search_path:bool -> ?env:string array -> string list -> 'a
method spawn_detach : ?search_path:bool -> ?env:string array -> string list -> unit
method create_process : ?env:string array -> string list -> Unix.file_descr -> Unix.file_descr -> Unix.file_descr -> int
* [ reap_child ? kill_first : signal child_pid ] calls [ waitpid ] to collect the child .
@raise Safe_exn . T if it did n't exit with a status of 0 ( success ) .
@raise Safe_exn.T if it didn't exit with a status of 0 (success). *)
method reap_child : ?kill_first:int -> int -> unit
method waitpid_non_intr : int -> (int * Unix.process_status)
end
class type environment =
object
method getenv : Env.name -> string option
method environment : string array
end
class type windows_api =
object
method get_appdata : string
method get_local_appdata : string
method get_common_appdata : string
Reads from HKEY_LOCAL_MACHINE
Reads from HKEY_LOCAL_MACHINE
end
class type system =
object
inherit filesystem
inherit processes
inherit environment
method argv : string array
method time : float
method isatty : Unix.file_descr -> bool
method running_as_root : bool
method platform : Platform.t
method windows_api : windows_api option
method bypass_dryrun : system
end
let on_windows = Filename.dir_sep <> "/"
* The string used to separate paths ( " : " on Unix , " ; " on Windows ) .
let path_sep = if on_windows then ";" else ":"
* Join a relative path onto a base .
@raise Safe_exn . T if the second path is not relative .
@raise Safe_exn.T if the second path is not relative. *)
let (+/) a b =
if b = "" then
a
else if Filename.is_relative b then
Filename.concat a b
else
Safe_exn.failf "Attempt to append absolute path: %s + %s" a b
let log_debug = Logging.log_debug
let log_info = Logging.log_info
let log_warning = Logging.log_warning
let with_errors_logged note f =
Lwt.catch f
(fun ex ->
note (log_warning ~ex);
Lwt.return ()
)
let default d = function
| None -> d
| Some x -> x
let (|?) maybe default =
match maybe with
| Some v -> v
| None -> Lazy.force default
let if_some fn = function
| None -> ()
| Some x -> fn x
let pipe_some fn = function
| None -> None
| Some x -> fn x
let map_some fn = function
| None -> None
| Some x -> Some (fn x)
let (>>=) = Lwt.(>>=)
let (>|=) = Lwt.(>|=)
let return = Lwt.return
|
5ad40411c1a59586b35b4f68e5459cd5191f117cad43862e51e1b51298772c31 | robertluo/fun-map | fun_map_test.cljc | (ns robertluo.fun-map-test
(:require
[clojure.test :refer [deftest testing is]]
#?(:clj
[robertluo.fun-map :refer [fun-map? fnk fw fun-map closeable life-cycle-map touch halt! lookup]]
:cljs
[robertluo.fun-map
:as fm
:refer [fun-map? fun-map touch life-cycle-map closeable halt!]
:refer-macros [fw fnk]])))
(deftest fun-map-test
(testing "predict funmap"
(is (= true (fun-map? (fun-map {})))))
(testing "computed attribute of other attributes"
(is (= 10 (:c (fun-map {:a/a 3 :b 7 :c (fnk [:a/a b] (+ a b))}))))
(is (= 1000 (:c (fun-map {:a 10 :b (fnk [a] (* a a)) :c (fnk [b] (* 10 b))})))))
(testing "equiv test. ! Maybe very expensive"
(is (= {:a 3 :b 4} (fun-map {:a 3 :b (fnk [a] (inc a))})))
(is (= (fun-map {:a 3 :b (fnk [a] (inc a))}) {:a 3 :b 4})))
(testing "function will be invoked just once"
(let [f (let [z (atom 0)]
(fw {:keys [a]} (+ (swap! z inc) a)))
m (fun-map {:a 3 :b f})]
(is (= {:a 3 :b 4} m))
(is (= {:a 3 :b 4} m))))
(testing "merge fun-map with another map"
(is (= {:a 3 :b 4} (merge (fun-map {:a (fnk [] 3)})
(fun-map {:b (fnk [a] (inc a))})))))
(testing "merge fun-map with plain map"
(is (= {:a 5 :b 6}
(merge (fun-map {:b (fnk [a] (inc a))})
{:a 5}))))
(testing "dissoc"
(is (= {:a 3} (dissoc (fun-map {:a 3 :b 4}) :b))))
(testing "meta data support"
(is (= {:msg "ok"} (meta (with-meta (fun-map {:a 3}) {:msg "ok"}))))))
(deftest reassoc-test
(testing "when reassoc value to fun-map, its function can be re-invoked"
(is (= 11
(-> (fun-map {:a 2 :b (fnk [a] (inc a))}) (assoc :a 10) :b)))))
#?(:clj
(deftest dref-test
(testing "delay, future, delayed future value will be deref when accessed"
(is (= {:a 3 :b 4 :c 5}
(fun-map {:a (delay 3) :b (future 4) :c (delay (future 5))}))))
#_(testing "with :no-ref specified, ref value will not automatically dereffed"
(is (= 3 (-> (fun-map {:a (delay 3)} :no-ref true) :a deref))))))
(deftest trace-map-test
(testing "invocation record"
(let [traced (atom [])
m (fun-map {:a 5
:b (fnk [a] (inc a))
:c (fnk [b] (inc b))}
:trace-fn (fn [k v] (swap! traced conj [k v])))]
(is (= {:a 5 :b 6 :c 7} m))
(is (= [[:b 6] [:c 7]]
@traced)))))
(deftest normal-function-value-test
(testing "when a function hasn't :wrap meta, it will be stored as is"
(is (= "ok"
((-> (fun-map {:a (fn [] "ok")}) :a))))))
(deftest life-cycle-map-test
(testing "a life cycle map will halt! its components in order"
(let [close-order (atom [])
component (fn [k]
(closeable nil
(fn [] (swap! close-order conj k))))
sys (life-cycle-map
{:a (fnk [] (component :a)) :b (fnk [a] a (component :b))})]
(:b sys)
(halt! sys)
(is (= [:b :a] @close-order)))))
(deftest touch-test
(testing "touching a fun-map will call all functions inside"
(let [far (atom 0)
m (-> (array-map :a (fnk [] (swap! far inc)) :b (fnk [] (swap! far inc)))
fun-map
touch)]
(is (= 2 @far))
(is (= {:a 1 :b 2} m)))))
(deftest merge-trace-test
(testing "trace-fn should ok for merging"
(let [marker (atom {})
_ #(swap! marker assoc % %2)
a (fun-map {:a (fnk [] 0)} :trace-fn (fn [k v] (swap! marker conj [k v])))
b (fun-map {:b (fnk [a] (inc a))})
a (merge a b)]
(is (= {:a 0 :b 1} a))
(is (= {:a 0 :b 1} @marker)))))
(deftest closeable-test
(testing "put a closeable value into life cycle map will get closed"
(let [marker (atom 0)
m (touch (life-cycle-map
{:a (fnk [] (closeable 3 #(swap! marker inc)))}))]
(is (= {:a 3} m))
#?(:cljs (halt! m)
:clj (.close m))
(is (= 1 @marker)))))
(deftest fw-test
(testing "fw macro using normal destructure syntax to define wrapper"
(is (= {:a 3 :b 5}
(fun-map {:a (fw {} 3) :b (fw {:keys [a] :focus [a]} (+ a 2))})))))
(deftest fnk-focus-test
(testing "fnk automatically focus on its dependencies, re-run when dependencies change"
(let [input (atom 5)
a (fun-map {:a input :b (fnk [a] (* a 2))})]
(touch a)
(reset! input 7)
(is (= 14 (:b a)))))
(testing "if no focus function define, the function wrapper will just invoke once"
(let [input (atom [3 4])
a (fun-map {:a input :cnt (fw {:keys [a]} (count a))})]
(is (= 2 (:cnt a)))
(reset! input (range 10))
(is (= 2 (:cnt a))))))
(deftest naive-fw-test
(testing "choose empty wrappers, no value will be cached"
(let [a (atom 0)
m (fun-map {:a (fw {:wrappers []} (swap! a inc))})]
(is (= 1 (:a m)))
(is (= 2 (:a m))))))
#?(:clj
(deftest parallel-execution-test
(let [a (atom 5)
m (fun-map {:a (delay (Thread/sleep 200) @a)
:b (delay (Thread/sleep 200) 20)
:z (delay (Thread/sleep 100) (reset! a 10))
:c (fw {:keys [_ b z] :par? true} (* z b))})]
(is (= 200 (:c m))))))
(deftest idempotent-test
(is (= {} (merge (fun-map (fun-map {})) {}))))
#?(:clj
(deftest lookup-test
(is (= 3 (get (lookup identity) 3)))
(is (= [:foo :foo] (find (lookup identity) :foo)))))
| null | https://raw.githubusercontent.com/robertluo/fun-map/ea2d418dac2b77171f877c4d6fbc4d14d72ea04d/test/robertluo/fun_map_test.cljc | clojure | (ns robertluo.fun-map-test
(:require
[clojure.test :refer [deftest testing is]]
#?(:clj
[robertluo.fun-map :refer [fun-map? fnk fw fun-map closeable life-cycle-map touch halt! lookup]]
:cljs
[robertluo.fun-map
:as fm
:refer [fun-map? fun-map touch life-cycle-map closeable halt!]
:refer-macros [fw fnk]])))
(deftest fun-map-test
(testing "predict funmap"
(is (= true (fun-map? (fun-map {})))))
(testing "computed attribute of other attributes"
(is (= 10 (:c (fun-map {:a/a 3 :b 7 :c (fnk [:a/a b] (+ a b))}))))
(is (= 1000 (:c (fun-map {:a 10 :b (fnk [a] (* a a)) :c (fnk [b] (* 10 b))})))))
(testing "equiv test. ! Maybe very expensive"
(is (= {:a 3 :b 4} (fun-map {:a 3 :b (fnk [a] (inc a))})))
(is (= (fun-map {:a 3 :b (fnk [a] (inc a))}) {:a 3 :b 4})))
(testing "function will be invoked just once"
(let [f (let [z (atom 0)]
(fw {:keys [a]} (+ (swap! z inc) a)))
m (fun-map {:a 3 :b f})]
(is (= {:a 3 :b 4} m))
(is (= {:a 3 :b 4} m))))
(testing "merge fun-map with another map"
(is (= {:a 3 :b 4} (merge (fun-map {:a (fnk [] 3)})
(fun-map {:b (fnk [a] (inc a))})))))
(testing "merge fun-map with plain map"
(is (= {:a 5 :b 6}
(merge (fun-map {:b (fnk [a] (inc a))})
{:a 5}))))
(testing "dissoc"
(is (= {:a 3} (dissoc (fun-map {:a 3 :b 4}) :b))))
(testing "meta data support"
(is (= {:msg "ok"} (meta (with-meta (fun-map {:a 3}) {:msg "ok"}))))))
(deftest reassoc-test
(testing "when reassoc value to fun-map, its function can be re-invoked"
(is (= 11
(-> (fun-map {:a 2 :b (fnk [a] (inc a))}) (assoc :a 10) :b)))))
#?(:clj
(deftest dref-test
(testing "delay, future, delayed future value will be deref when accessed"
(is (= {:a 3 :b 4 :c 5}
(fun-map {:a (delay 3) :b (future 4) :c (delay (future 5))}))))
#_(testing "with :no-ref specified, ref value will not automatically dereffed"
(is (= 3 (-> (fun-map {:a (delay 3)} :no-ref true) :a deref))))))
(deftest trace-map-test
(testing "invocation record"
(let [traced (atom [])
m (fun-map {:a 5
:b (fnk [a] (inc a))
:c (fnk [b] (inc b))}
:trace-fn (fn [k v] (swap! traced conj [k v])))]
(is (= {:a 5 :b 6 :c 7} m))
(is (= [[:b 6] [:c 7]]
@traced)))))
(deftest normal-function-value-test
(testing "when a function hasn't :wrap meta, it will be stored as is"
(is (= "ok"
((-> (fun-map {:a (fn [] "ok")}) :a))))))
(deftest life-cycle-map-test
(testing "a life cycle map will halt! its components in order"
(let [close-order (atom [])
component (fn [k]
(closeable nil
(fn [] (swap! close-order conj k))))
sys (life-cycle-map
{:a (fnk [] (component :a)) :b (fnk [a] a (component :b))})]
(:b sys)
(halt! sys)
(is (= [:b :a] @close-order)))))
(deftest touch-test
(testing "touching a fun-map will call all functions inside"
(let [far (atom 0)
m (-> (array-map :a (fnk [] (swap! far inc)) :b (fnk [] (swap! far inc)))
fun-map
touch)]
(is (= 2 @far))
(is (= {:a 1 :b 2} m)))))
(deftest merge-trace-test
(testing "trace-fn should ok for merging"
(let [marker (atom {})
_ #(swap! marker assoc % %2)
a (fun-map {:a (fnk [] 0)} :trace-fn (fn [k v] (swap! marker conj [k v])))
b (fun-map {:b (fnk [a] (inc a))})
a (merge a b)]
(is (= {:a 0 :b 1} a))
(is (= {:a 0 :b 1} @marker)))))
(deftest closeable-test
(testing "put a closeable value into life cycle map will get closed"
(let [marker (atom 0)
m (touch (life-cycle-map
{:a (fnk [] (closeable 3 #(swap! marker inc)))}))]
(is (= {:a 3} m))
#?(:cljs (halt! m)
:clj (.close m))
(is (= 1 @marker)))))
(deftest fw-test
(testing "fw macro using normal destructure syntax to define wrapper"
(is (= {:a 3 :b 5}
(fun-map {:a (fw {} 3) :b (fw {:keys [a] :focus [a]} (+ a 2))})))))
(deftest fnk-focus-test
(testing "fnk automatically focus on its dependencies, re-run when dependencies change"
(let [input (atom 5)
a (fun-map {:a input :b (fnk [a] (* a 2))})]
(touch a)
(reset! input 7)
(is (= 14 (:b a)))))
(testing "if no focus function define, the function wrapper will just invoke once"
(let [input (atom [3 4])
a (fun-map {:a input :cnt (fw {:keys [a]} (count a))})]
(is (= 2 (:cnt a)))
(reset! input (range 10))
(is (= 2 (:cnt a))))))
(deftest naive-fw-test
(testing "choose empty wrappers, no value will be cached"
(let [a (atom 0)
m (fun-map {:a (fw {:wrappers []} (swap! a inc))})]
(is (= 1 (:a m)))
(is (= 2 (:a m))))))
#?(:clj
(deftest parallel-execution-test
(let [a (atom 5)
m (fun-map {:a (delay (Thread/sleep 200) @a)
:b (delay (Thread/sleep 200) 20)
:z (delay (Thread/sleep 100) (reset! a 10))
:c (fw {:keys [_ b z] :par? true} (* z b))})]
(is (= 200 (:c m))))))
(deftest idempotent-test
(is (= {} (merge (fun-map (fun-map {})) {}))))
#?(:clj
(deftest lookup-test
(is (= 3 (get (lookup identity) 3)))
(is (= [:foo :foo] (find (lookup identity) :foo)))))
| |
5457480c11d2d0936dfb9c0f88b014c48fb2584856d909efe305d9e01ac97eed | Palmik/data-store | TSSO.hs | module Main
where
import Control.DeepSeq (NFData(rnf))
import Common
import qualified Data.List
import qualified TS.B01
import qualified Data.Table as T
main :: IO ()
main = do
return $! rnf elements
return $! rnf test
test :: TS.B01.TS
test = fromList elements
elements :: [C01]
elements = map (\x -> C01 x (x `div` s) [x .. x + s]) [0 .. 40000]
where
s = 5
fromList :: [C01] -> TS.B01.TS
fromList = Data.List.foldl' (\acc x -> insert x $! acc) TS.B01.empty
insert :: C01 -> TS.B01.TS -> TS.B01.TS
insert t x = snd $! T.insert' t x
| null | https://raw.githubusercontent.com/Palmik/data-store/20131a9d6d310c29b57fd9e3b508f0335e1113b4/benchmarks/src/TSSO.hs | haskell | module Main
where
import Control.DeepSeq (NFData(rnf))
import Common
import qualified Data.List
import qualified TS.B01
import qualified Data.Table as T
main :: IO ()
main = do
return $! rnf elements
return $! rnf test
test :: TS.B01.TS
test = fromList elements
elements :: [C01]
elements = map (\x -> C01 x (x `div` s) [x .. x + s]) [0 .. 40000]
where
s = 5
fromList :: [C01] -> TS.B01.TS
fromList = Data.List.foldl' (\acc x -> insert x $! acc) TS.B01.empty
insert :: C01 -> TS.B01.TS -> TS.B01.TS
insert t x = snd $! T.insert' t x
| |
43c9522a9755e9bd1919c70ae16ad9f937ba739fc062917f7d4e3803e4ed7d5c | carotene/carotene | redis_subscriber.erl | -module(redis_subscriber).
-behaviour(gen_server).
-export([start_link/1, start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, handle_info/2, code_change/3]).
-record(state, {client, channel, reply_pid}).
start_link(Client) ->
Opts = [],
gen_server:start_link(?MODULE, [Client], Opts).
start(Client) ->
Opts = [],
gen_server:start(?MODULE, [Client], Opts).
stop(Pid) ->
gen_server:call(Pid, stop, infinity).
init([Client]) ->
{ok, #state{client = Client}}.
handle_info({'DOWN', _Ref, process, _Pid, _}, State) ->
{stop, normal, State};
handle_info({message, Msg}, #state{reply_pid = ReplyPid} = State) ->
ReplyPid ! {received_message, Msg},
{noreply, State};
handle_info(shutdown, State) ->
{stop, normal, State}.
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
handle_cast({subscribe, Channel, from, ReplyTo}, State = #state{client = Client}) ->
erlang:monitor(process, ReplyTo),
gen_server:cast(Client, {subscribe, [Channel], self()}),
{noreply, State#state{channel = Channel, reply_pid = ReplyTo}};
handle_cast(_Message, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
State.
| null | https://raw.githubusercontent.com/carotene/carotene/963ecad344ec1c318c173ad828a5af3c000ddbfc/src/redis_subscriber.erl | erlang | -module(redis_subscriber).
-behaviour(gen_server).
-export([start_link/1, start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, handle_info/2, code_change/3]).
-record(state, {client, channel, reply_pid}).
start_link(Client) ->
Opts = [],
gen_server:start_link(?MODULE, [Client], Opts).
start(Client) ->
Opts = [],
gen_server:start(?MODULE, [Client], Opts).
stop(Pid) ->
gen_server:call(Pid, stop, infinity).
init([Client]) ->
{ok, #state{client = Client}}.
handle_info({'DOWN', _Ref, process, _Pid, _}, State) ->
{stop, normal, State};
handle_info({message, Msg}, #state{reply_pid = ReplyPid} = State) ->
ReplyPid ! {received_message, Msg},
{noreply, State};
handle_info(shutdown, State) ->
{stop, normal, State}.
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
handle_cast({subscribe, Channel, from, ReplyTo}, State = #state{client = Client}) ->
erlang:monitor(process, ReplyTo),
gen_server:cast(Client, {subscribe, [Channel], self()}),
{noreply, State#state{channel = Channel, reply_pid = ReplyTo}};
handle_cast(_Message, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
State.
| |
ec6a0c65845839e12832acf1859249ef8b01876929e3305d81229038d9f6ccad | benrady/specific | test_double.clj | (ns specific.test-double
(:require [specific.gene :as gene]
[clojure.string :as string]
[clojure.test :as ctest]
[clojure.spec.test.alpha :as stest]
[clojure.spec.alpha :as spec]))
(defn- report-fail [m]
(ctest/do-report (assoc m :type :fail)))
(defn- record-calls [calls & args]
(let [arg-vec (vec (or args []))]
(swap! calls update-in [ctest/*testing-contexts*] (comp vec conj) arg-vec)
args))
(defn- expected-msg [{:keys [path pred val reason via in] :as problem}]
(:pred problem))
(defn- actual-msg [problem]
(or (:val problem) []))
(defn- spec-name [spec]
(:clojure.spec/name (meta spec)))
(defn- explain-str-data [data]
(with-out-str (spec/explain-out data)))
(defn- remove-in [exp-data]
(update exp-data :clojure.spec/problems (fn [p] (map #(assoc % :in []) p))))
(defn- file-and-line []
(let [s (first (drop-while
#(let [cl-name (.getClassName ^StackTraceElement %)]
(or (string/starts-with? cl-name "java.lang.")
(string/starts-with? cl-name "clojure.")
(string/starts-with? cl-name "specific.test_double$")))
(.getStackTrace (Thread/currentThread))))]
{:file (.getFileName s) :line (.getLineNumber s)}))
(defn- build-reports [exp-data]
(for [problem (:clojure.spec/problems exp-data)]
(merge (file-and-line)
{:message (explain-str-data exp-data)
:expected (expected-msg problem)
:actual (actual-msg problem)})))
(defn- check-args [via args-spec args]
(when-not (spec/valid? args-spec args)
(let [exp-data (remove-in (spec/explain-data* args-spec [:args] [via] [] args))]
(doall (map report-fail (build-reports exp-data))))))
(defn- validate-and-generate [fn-spec args]
(when-let [args-spec (:args fn-spec)]
(check-args (spec-name fn-spec) args-spec args))
(gene/det-sample (:ret fn-spec)))
(defn- add-meta [f calls]
(with-meta f {:specific-calls calls}))
(defn- no-spec-report [fn-sym]
{:type :fail
:message "No clojure.spec defined"
:expected (str "clojure.spec for " fn-sym)
:actual nil})
(defn spy-fn [f]
(let [fn-spec (spec/get-spec f)
calls (atom {})]
(add-meta (comp (partial apply f) (partial record-calls calls)) calls)))
(defn stub-fn
([] (stub-fn nil))
([retval]
(let [calls (atom {})
f (comp (constantly retval) (partial record-calls calls))]
(add-meta f calls))))
(defn mock-fn [fn-sym]
(let [fn-spec (spec/get-spec fn-sym)
calls (atom {})]
(add-meta (let [call-fn (partial record-calls calls)]
(if (nil? fn-spec)
(no-spec-report fn-sym)
(comp (partial validate-and-generate fn-spec) call-fn)))
calls)))
| null | https://raw.githubusercontent.com/benrady/specific/e1b790f2be2d577e49817472177bcc0136a2545a/src/specific/test_double.clj | clojure | (ns specific.test-double
(:require [specific.gene :as gene]
[clojure.string :as string]
[clojure.test :as ctest]
[clojure.spec.test.alpha :as stest]
[clojure.spec.alpha :as spec]))
(defn- report-fail [m]
(ctest/do-report (assoc m :type :fail)))
(defn- record-calls [calls & args]
(let [arg-vec (vec (or args []))]
(swap! calls update-in [ctest/*testing-contexts*] (comp vec conj) arg-vec)
args))
(defn- expected-msg [{:keys [path pred val reason via in] :as problem}]
(:pred problem))
(defn- actual-msg [problem]
(or (:val problem) []))
(defn- spec-name [spec]
(:clojure.spec/name (meta spec)))
(defn- explain-str-data [data]
(with-out-str (spec/explain-out data)))
(defn- remove-in [exp-data]
(update exp-data :clojure.spec/problems (fn [p] (map #(assoc % :in []) p))))
(defn- file-and-line []
(let [s (first (drop-while
#(let [cl-name (.getClassName ^StackTraceElement %)]
(or (string/starts-with? cl-name "java.lang.")
(string/starts-with? cl-name "clojure.")
(string/starts-with? cl-name "specific.test_double$")))
(.getStackTrace (Thread/currentThread))))]
{:file (.getFileName s) :line (.getLineNumber s)}))
(defn- build-reports [exp-data]
(for [problem (:clojure.spec/problems exp-data)]
(merge (file-and-line)
{:message (explain-str-data exp-data)
:expected (expected-msg problem)
:actual (actual-msg problem)})))
(defn- check-args [via args-spec args]
(when-not (spec/valid? args-spec args)
(let [exp-data (remove-in (spec/explain-data* args-spec [:args] [via] [] args))]
(doall (map report-fail (build-reports exp-data))))))
(defn- validate-and-generate [fn-spec args]
(when-let [args-spec (:args fn-spec)]
(check-args (spec-name fn-spec) args-spec args))
(gene/det-sample (:ret fn-spec)))
(defn- add-meta [f calls]
(with-meta f {:specific-calls calls}))
(defn- no-spec-report [fn-sym]
{:type :fail
:message "No clojure.spec defined"
:expected (str "clojure.spec for " fn-sym)
:actual nil})
(defn spy-fn [f]
(let [fn-spec (spec/get-spec f)
calls (atom {})]
(add-meta (comp (partial apply f) (partial record-calls calls)) calls)))
(defn stub-fn
([] (stub-fn nil))
([retval]
(let [calls (atom {})
f (comp (constantly retval) (partial record-calls calls))]
(add-meta f calls))))
(defn mock-fn [fn-sym]
(let [fn-spec (spec/get-spec fn-sym)
calls (atom {})]
(add-meta (let [call-fn (partial record-calls calls)]
(if (nil? fn-spec)
(no-spec-report fn-sym)
(comp (partial validate-and-generate fn-spec) call-fn)))
calls)))
| |
5bae2045fe9b48a7ba107ed50b4db8ab5a81152f28f7916caad2f0fa18592bb7 | hadolint/hadolint | DL3057.hs | module Hadolint.Rule.DL3057 (rule) where
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
data StageID = StageID
{ src :: Text.Text,
name :: Text.Text,
line :: Linenumber
} deriving (Show, Eq, Ord)
data Acc
= Acc StageID (Set.Set StageID) (Set.Set StageID)
| Empty
deriving (Show)
rule :: Rule args
rule = veryCustomRule check (emptyState Empty) markFailures
where
code = "DL3057"
severity = DLIgnoreC
message = "`HEALTHCHECK` instruction missing."
check line state (From BaseImage {image, alias = Just als}) =
state |> modify
( currentStage
(imageName image)
(StageID (imageName image) (unImageAlias als) line)
)
check line state (From BaseImage {image, alias = Nothing}) =
state |> modify
( currentStage
(imageName image)
(StageID (imageName image) (imageName image) line)
)
check _ state (Healthcheck _) = state |> modify goodStage
check _ state _ = state
markFailures :: State Acc -> Failures
markFailures (State fails (Acc _ _ b)) = Set.foldl' (Seq.|>) fails (Set.map makeFail b)
markFailures st = failures st
makeFail (StageID _ _ line) = CheckFailure {..}
# INLINEABLE rule #
currentStage :: Text.Text -> StageID -> Acc -> Acc
currentStage src stageid (Acc _ g b)
| not $ Set.null (Set.filter (predicate src) g) = Acc stageid (g |> Set.insert stageid) b
| otherwise = Acc stageid g (b |> Set.insert stageid)
where
predicate n0 StageID {name = n1} = n1 == n0
currentStage _ stageid Empty = Acc stageid Set.empty (Set.singleton stageid)
goodStage :: Acc -> Acc
goodStage (Acc stageid g b) = do
let nowGood = recurseGood b stageid
let good =
g
|> Set.union nowGood
|> Set.insert stageid
bad =
b
|> flip Set.difference nowGood
|> Set.delete stageid
in Acc
stageid
good
bad
where
predicate StageID { src = s1 } StageID { name = n1 } = n1 == s1
recurseGood :: Set.Set StageID -> StageID -> Set.Set StageID
recurseGood bad sid = do
let g1 = Set.filter (predicate sid) bad -- bad stages to be marked good
b1 = Set.difference bad g1 -- bad stages not to be marked good
in if Set.null g1
then g1
else Set.union g1 $ Set.unions $ Set.map (recurseGood b1) g1
goodStage Empty = Empty
| null | https://raw.githubusercontent.com/hadolint/hadolint/be6658f1a3a4f5f02d04f990e69fb28680491b71/src/Hadolint/Rule/DL3057.hs | haskell | bad stages to be marked good
bad stages not to be marked good | module Hadolint.Rule.DL3057 (rule) where
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
data StageID = StageID
{ src :: Text.Text,
name :: Text.Text,
line :: Linenumber
} deriving (Show, Eq, Ord)
data Acc
= Acc StageID (Set.Set StageID) (Set.Set StageID)
| Empty
deriving (Show)
rule :: Rule args
rule = veryCustomRule check (emptyState Empty) markFailures
where
code = "DL3057"
severity = DLIgnoreC
message = "`HEALTHCHECK` instruction missing."
check line state (From BaseImage {image, alias = Just als}) =
state |> modify
( currentStage
(imageName image)
(StageID (imageName image) (unImageAlias als) line)
)
check line state (From BaseImage {image, alias = Nothing}) =
state |> modify
( currentStage
(imageName image)
(StageID (imageName image) (imageName image) line)
)
check _ state (Healthcheck _) = state |> modify goodStage
check _ state _ = state
markFailures :: State Acc -> Failures
markFailures (State fails (Acc _ _ b)) = Set.foldl' (Seq.|>) fails (Set.map makeFail b)
markFailures st = failures st
makeFail (StageID _ _ line) = CheckFailure {..}
# INLINEABLE rule #
currentStage :: Text.Text -> StageID -> Acc -> Acc
currentStage src stageid (Acc _ g b)
| not $ Set.null (Set.filter (predicate src) g) = Acc stageid (g |> Set.insert stageid) b
| otherwise = Acc stageid g (b |> Set.insert stageid)
where
predicate n0 StageID {name = n1} = n1 == n0
currentStage _ stageid Empty = Acc stageid Set.empty (Set.singleton stageid)
goodStage :: Acc -> Acc
goodStage (Acc stageid g b) = do
let nowGood = recurseGood b stageid
let good =
g
|> Set.union nowGood
|> Set.insert stageid
bad =
b
|> flip Set.difference nowGood
|> Set.delete stageid
in Acc
stageid
good
bad
where
predicate StageID { src = s1 } StageID { name = n1 } = n1 == s1
recurseGood :: Set.Set StageID -> StageID -> Set.Set StageID
recurseGood bad sid = do
in if Set.null g1
then g1
else Set.union g1 $ Set.unions $ Set.map (recurseGood b1) g1
goodStage Empty = Empty
|
62058da252e1ce44e66c24f1554c945df2ff7633fc4cd62150d3ec3154aea106 | jeromesimeon/Galax | compile_annotate.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
(* Module: Compile_annotate
Description:
This module implements the walker to calculate annotations on the
algebra AST.
*)
(****************************)
(* Main annotation function *)
(****************************)
(* This function has a side-effect of filing out the
compile_annotations field for an operator. It should only be called
once. This condition is asserted inside the code. It will raise an
Internal_Error if this does not hold.
It calls itself on the entire sub-tree. This means it should only
be called once on the root of each sub-tree.
*)
val annotate_algebraic_expression : ('a, 'b) Xquery_algebra_ast.aalgop_expr -> unit
val reannotate_algebraic_expression : ('a, 'b) Xquery_algebra_ast.aalgop_expr -> unit
val annotate_algebraic_module : ('a, 'b, 'c) Xquery_algebra_ast.aalgop_xmodule -> unit
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/compile/compile_annotate.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Compile_annotate
Description:
This module implements the walker to calculate annotations on the
algebra AST.
**************************
Main annotation function
**************************
This function has a side-effect of filing out the
compile_annotations field for an operator. It should only be called
once. This condition is asserted inside the code. It will raise an
Internal_Error if this does not hold.
It calls itself on the entire sub-tree. This means it should only
be called once on the root of each sub-tree.
| Copyright 2001 - 2007 .
val annotate_algebraic_expression : ('a, 'b) Xquery_algebra_ast.aalgop_expr -> unit
val reannotate_algebraic_expression : ('a, 'b) Xquery_algebra_ast.aalgop_expr -> unit
val annotate_algebraic_module : ('a, 'b, 'c) Xquery_algebra_ast.aalgop_xmodule -> unit
|
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE CPP #-}
-- | Bit shifts
module Haskus.Binary.Bits.Shift
( ShiftableBits (..)
, SignedShiftableBits (..)
)
where
import Haskus.Number.Word
import Haskus.Number.Int
import GHC.Exts
import GHC.Num
#include "MachDeps.h"
#if !MIN_VERSION_GLASGOW_HASKELL (9,0,0,0)
wordToInt# :: Word -> Int#
wordToInt# (W# w) = word2Int# w
integerShiftL :: Integer -> Word -> Integer
integerShiftL x w = shiftLInteger x (wordToInt# w)
integerShiftR :: Integer -> Word -> Integer
integerShiftR x w = shiftRInteger x (wordToInt# w)
naturalShiftL :: Natural -> Word -> Natural
naturalShiftL x w = shiftLNatural x (fromIntegral w)
naturalShiftR :: Natural -> Word -> Natural
naturalShiftR x w = shiftRNatural x (fromIntegral w)
#endif
-- | Bit shifts
--
-- "Checked" means that there is an additional test to ensure that the shift
-- offset is valid (less than the bit count). If you are sure that the offset is
-- valid, use the "unchecked" version which should be faster.
--
-- To shift signed numbers, see `SignedShiftableBits` class methods.
class ShiftableBits a where
-- | Checked right shift
shiftR :: a -> Word -> a
-- | Checked left shift
shiftL :: a -> Word -> a
-- | Unchecked right shift
uncheckedShiftR :: a -> Word -> a
-- | Unchecked left shift
uncheckedShiftL :: a -> Word -> a
-- | Checked shift to the left if positive, to the right if negative
shift :: a -> Int -> a
shift a i
| i > 0 = shiftL a (fromIntegral i)
| i < 0 = shiftR a (fromIntegral (negate i))
| otherwise = a
-- | Unchecked shift to the left if positive, to the right if negative
uncheckedShift :: a -> Int -> a
uncheckedShift a i
| i > 0 = uncheckedShiftL a (fromIntegral i)
| i < 0 = uncheckedShiftR a (fromIntegral (negate i))
| otherwise = a
-- | Signed bit shifts
--
-- "Signed" means that the sign bit (the higher order bit):
-- - propagates to the right during right shifts and
-- - keeps its value during left shifts (except when all other bits are 0)
--
-- "Checked" means that there is an additional test to ensure that the shift
-- offset is valid (less than the bit count). If you are sure that the offset is
-- valid, use the "unchecked" version which should be faster.
class SignedShiftableBits a where
-- | Checked signed right shift
signedShiftR :: a -> Word -> a
-- | Checked signed left shift
signedShiftL :: a -> Word -> a
-- | Unchecked signed right shift
uncheckedSignedShiftR :: a -> Word -> a
-- | Unchecked signed left shift
uncheckedSignedShiftL :: a -> Word -> a
-- | Checked signed shift to the left if positive, to the right if negative
signedShift :: a -> Int -> a
signedShift a i
| i > 0 = signedShiftL a (fromIntegral i)
| i < 0 = signedShiftR a (fromIntegral (negate i))
| otherwise = a
-- | Unchecked signed shift to the left if positive, to the right if negative
uncheckedSignedShift :: a -> Int -> a
uncheckedSignedShift a i
| i > 0 = uncheckedSignedShiftL a (fromIntegral i)
| i < 0 = uncheckedSignedShiftR a (fromIntegral (negate i))
| otherwise = a
instance ShiftableBits Word where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = W# 0##
| otherwise = W# (x# `uncheckedShiftL#` word2Int# i#)
(W# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = W# 0##
| otherwise = W# (x# `uncheckedShiftRL#` word2Int# i#)
(W# x#) `uncheckedShiftL` (W# i#) = W# (x# `uncheckedShiftL#` word2Int# i#)
(W# x#) `uncheckedShiftR` (W# i#) = W# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word8 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W8# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 8##) = W8# 0##
| otherwise = W8# (narrow8Word# (x# `uncheckedShiftL#` word2Int# i#))
(W8# x#) `uncheckedShiftL` (W# i#) = W8# (narrow8Word# (x# `uncheckedShiftL#` word2Int# i#))
(W8# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 8##) = W8# 0##
| otherwise = W8# (x# `uncheckedShiftRL#` word2Int# i#)
(W8# x#) `uncheckedShiftR` (W# i#) = W8# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word16 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W16# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 16##) = W16# 0##
| otherwise = W16# (narrow16Word# (x# `uncheckedShiftL#` word2Int# i#))
(W16# x#) `uncheckedShiftL` (W# i#) = W16# (narrow16Word# (x# `uncheckedShiftL#` word2Int# i#))
(W16# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 16##) = W16# 0##
| otherwise = W16# (x# `uncheckedShiftRL#` word2Int# i#)
(W16# x#) `uncheckedShiftR` (W# i#) = W16# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word32 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W32# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 32##) = W32# 0##
| otherwise = W32# (narrow32Word# (x# `uncheckedShiftL#` word2Int# i#))
(W32# x#) `uncheckedShiftL` (W# i#) = W32# (narrow32Word# (x# `uncheckedShiftL#` word2Int# i#))
(W32# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 32##) = W32# 0##
| otherwise = W32# (x# `uncheckedShiftRL#` word2Int# i#)
(W32# x#) `uncheckedShiftR` (W# i#) = W32# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word64 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W64# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 64##) = W64# 0##
| otherwise = W64# (x# `uncheckedShiftL#` word2Int# i#)
(W64# x#) `uncheckedShiftL` (W# i#) = W64# (x# `uncheckedShiftL#` word2Int# i#)
(W64# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 64##) = W64# 0##
| otherwise = W64# (x# `uncheckedShiftRL#` word2Int# i#)
(W64# x#) `uncheckedShiftR` (W# i#) = W64# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Int where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = I# 0#
| otherwise = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `uncheckedShiftL` (W# i#) = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = I# 0#
| otherwise = I# (x# `uncheckedIShiftRL#` word2Int# i#)
(I# x#) `uncheckedShiftR` (W# i#) = I# (x# `uncheckedIShiftRL#` word2Int# i#)
instance ShiftableBits Int8 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I8# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 8##) = I8# 0#
| otherwise = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `uncheckedShiftL` (W# i#) = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 8##) = I8# 0#
| otherwise = I8# (word2Int# (narrow8Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I8# x#) `uncheckedShiftR` (W# i#) = I8# (word2Int# (narrow8Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int16 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I16# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 16##) = I16# 0#
| otherwise = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `uncheckedShiftL` (W# i#) = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 16##) = I16# 0#
| otherwise = I16# (word2Int# (narrow16Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I16# x#) `uncheckedShiftR` (W# i#) = I16# (word2Int# (narrow16Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int32 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I32# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 32##) = I32# 0#
| otherwise = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `uncheckedShiftL` (W# i#) = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 32##) = I32# 0#
| otherwise = I32# (word2Int# (narrow32Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I32# x#) `uncheckedShiftR` (W# i#) = I32# (word2Int# (narrow32Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int64 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I64# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 64##) = I64# 0#
| otherwise = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `uncheckedShiftL` (W# i#) = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 64##) = I64# 0#
| otherwise = I64# (word2Int# (int2Word# x# `uncheckedShiftRL#` word2Int# i#))
(I64# x#) `uncheckedShiftR` (W# i#) = I64# (word2Int# (int2Word# x# `uncheckedShiftRL#` word2Int# i#))
instance SignedShiftableBits Int where
(I# x#) `signedShiftL` (W# i#) = I# (x# `iShiftL#` word2Int# i#)
(I# x#) `signedShiftR` (W# i#) = I# (x# `iShiftRA#` word2Int# i#)
(I# x#) `uncheckedSignedShiftL` (W# i#) = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `uncheckedSignedShiftR` (W# i#) = I# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int8 where
(I8# x#) `signedShiftL` (W# i#) = I8# (narrow8Int# (x# `iShiftL#` word2Int# i#))
(I8# x#) `signedShiftR` (W# i#) = I8# (x# `iShiftRA#` word2Int# i#)
(I8# x#) `uncheckedSignedShiftL` (W# i#) = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `uncheckedSignedShiftR` (W# i#) = I8# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int16 where
(I16# x#) `signedShiftL` (W# i#) = I16# (narrow16Int# (x# `iShiftL#` word2Int# i#))
(I16# x#) `signedShiftR` (W# i#) = I16# (x# `iShiftRA#` word2Int# i#)
(I16# x#) `uncheckedSignedShiftL` (W# i#) = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `uncheckedSignedShiftR` (W# i#) = I16# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int32 where
(I32# x#) `signedShiftL` (W# i#) = I32# (narrow32Int# (x# `iShiftL#` word2Int# i#))
(I32# x#) `signedShiftR` (W# i#) = I32# (x# `iShiftRA#` word2Int# i#)
(I32# x#) `uncheckedSignedShiftL` (W# i#) = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `uncheckedSignedShiftR` (W# i#) = I32# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int64 where
(I64# x#) `signedShiftL` (W# i#) = I64# (x# `iShiftL#` word2Int# i#)
(I64# x#) `signedShiftR` (W# i#) = I64# (x# `iShiftRA#` word2Int# i#)
(I64# x#) `uncheckedSignedShiftL` (W# i#) = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `uncheckedSignedShiftR` (W# i#) = I64# (x# `uncheckedIShiftRA#` word2Int# i#)
instance ShiftableBits Integer where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
x `shiftL` w = integerShiftL x w
x `shiftR` w = integerShiftR x w
uncheckedShiftL = shiftL
uncheckedShiftR = shiftR
instance ShiftableBits Natural where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
x `shiftL` w = naturalShiftL x w
x `shiftR` w = naturalShiftR x w
uncheckedShiftL = shiftL
uncheckedShiftR = shiftR
| null | https://raw.githubusercontent.com/haskus/packages/7f013d16d984923668447a628598b7335408afd4/haskus-binary/src/lib/Haskus/Binary/Bits/Shift.hs | haskell | | Bit shifts
| Bit shifts
"Checked" means that there is an additional test to ensure that the shift
offset is valid (less than the bit count). If you are sure that the offset is
valid, use the "unchecked" version which should be faster.
To shift signed numbers, see `SignedShiftableBits` class methods.
| Checked right shift
| Checked left shift
| Unchecked right shift
| Unchecked left shift
| Checked shift to the left if positive, to the right if negative
| Unchecked shift to the left if positive, to the right if negative
| Signed bit shifts
"Signed" means that the sign bit (the higher order bit):
- propagates to the right during right shifts and
- keeps its value during left shifts (except when all other bits are 0)
"Checked" means that there is an additional test to ensure that the shift
offset is valid (less than the bit count). If you are sure that the offset is
valid, use the "unchecked" version which should be faster.
| Checked signed right shift
| Checked signed left shift
| Unchecked signed right shift
| Unchecked signed left shift
| Checked signed shift to the left if positive, to the right if negative
| Unchecked signed shift to the left if positive, to the right if negative | # LANGUAGE MagicHash #
# LANGUAGE CPP #
module Haskus.Binary.Bits.Shift
( ShiftableBits (..)
, SignedShiftableBits (..)
)
where
import Haskus.Number.Word
import Haskus.Number.Int
import GHC.Exts
import GHC.Num
#include "MachDeps.h"
#if !MIN_VERSION_GLASGOW_HASKELL (9,0,0,0)
wordToInt# :: Word -> Int#
wordToInt# (W# w) = word2Int# w
integerShiftL :: Integer -> Word -> Integer
integerShiftL x w = shiftLInteger x (wordToInt# w)
integerShiftR :: Integer -> Word -> Integer
integerShiftR x w = shiftRInteger x (wordToInt# w)
naturalShiftL :: Natural -> Word -> Natural
naturalShiftL x w = shiftLNatural x (fromIntegral w)
naturalShiftR :: Natural -> Word -> Natural
naturalShiftR x w = shiftRNatural x (fromIntegral w)
#endif
class ShiftableBits a where
shiftR :: a -> Word -> a
shiftL :: a -> Word -> a
uncheckedShiftR :: a -> Word -> a
uncheckedShiftL :: a -> Word -> a
shift :: a -> Int -> a
shift a i
| i > 0 = shiftL a (fromIntegral i)
| i < 0 = shiftR a (fromIntegral (negate i))
| otherwise = a
uncheckedShift :: a -> Int -> a
uncheckedShift a i
| i > 0 = uncheckedShiftL a (fromIntegral i)
| i < 0 = uncheckedShiftR a (fromIntegral (negate i))
| otherwise = a
class SignedShiftableBits a where
signedShiftR :: a -> Word -> a
signedShiftL :: a -> Word -> a
uncheckedSignedShiftR :: a -> Word -> a
uncheckedSignedShiftL :: a -> Word -> a
signedShift :: a -> Int -> a
signedShift a i
| i > 0 = signedShiftL a (fromIntegral i)
| i < 0 = signedShiftR a (fromIntegral (negate i))
| otherwise = a
uncheckedSignedShift :: a -> Int -> a
uncheckedSignedShift a i
| i > 0 = uncheckedSignedShiftL a (fromIntegral i)
| i < 0 = uncheckedSignedShiftR a (fromIntegral (negate i))
| otherwise = a
instance ShiftableBits Word where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = W# 0##
| otherwise = W# (x# `uncheckedShiftL#` word2Int# i#)
(W# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = W# 0##
| otherwise = W# (x# `uncheckedShiftRL#` word2Int# i#)
(W# x#) `uncheckedShiftL` (W# i#) = W# (x# `uncheckedShiftL#` word2Int# i#)
(W# x#) `uncheckedShiftR` (W# i#) = W# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word8 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W8# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 8##) = W8# 0##
| otherwise = W8# (narrow8Word# (x# `uncheckedShiftL#` word2Int# i#))
(W8# x#) `uncheckedShiftL` (W# i#) = W8# (narrow8Word# (x# `uncheckedShiftL#` word2Int# i#))
(W8# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 8##) = W8# 0##
| otherwise = W8# (x# `uncheckedShiftRL#` word2Int# i#)
(W8# x#) `uncheckedShiftR` (W# i#) = W8# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word16 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W16# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 16##) = W16# 0##
| otherwise = W16# (narrow16Word# (x# `uncheckedShiftL#` word2Int# i#))
(W16# x#) `uncheckedShiftL` (W# i#) = W16# (narrow16Word# (x# `uncheckedShiftL#` word2Int# i#))
(W16# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 16##) = W16# 0##
| otherwise = W16# (x# `uncheckedShiftRL#` word2Int# i#)
(W16# x#) `uncheckedShiftR` (W# i#) = W16# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word32 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W32# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 32##) = W32# 0##
| otherwise = W32# (narrow32Word# (x# `uncheckedShiftL#` word2Int# i#))
(W32# x#) `uncheckedShiftL` (W# i#) = W32# (narrow32Word# (x# `uncheckedShiftL#` word2Int# i#))
(W32# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 32##) = W32# 0##
| otherwise = W32# (x# `uncheckedShiftRL#` word2Int# i#)
(W32# x#) `uncheckedShiftR` (W# i#) = W32# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Word64 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(W64# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 64##) = W64# 0##
| otherwise = W64# (x# `uncheckedShiftL#` word2Int# i#)
(W64# x#) `uncheckedShiftL` (W# i#) = W64# (x# `uncheckedShiftL#` word2Int# i#)
(W64# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 64##) = W64# 0##
| otherwise = W64# (x# `uncheckedShiftRL#` word2Int# i#)
(W64# x#) `uncheckedShiftR` (W# i#) = W64# (x# `uncheckedShiftRL#` word2Int# i#)
instance ShiftableBits Int where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = I# 0#
| otherwise = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `uncheckedShiftL` (W# i#) = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` WORD_SIZE_IN_BITS##) = I# 0#
| otherwise = I# (x# `uncheckedIShiftRL#` word2Int# i#)
(I# x#) `uncheckedShiftR` (W# i#) = I# (x# `uncheckedIShiftRL#` word2Int# i#)
instance ShiftableBits Int8 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I8# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 8##) = I8# 0#
| otherwise = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `uncheckedShiftL` (W# i#) = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 8##) = I8# 0#
| otherwise = I8# (word2Int# (narrow8Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I8# x#) `uncheckedShiftR` (W# i#) = I8# (word2Int# (narrow8Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int16 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I16# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 16##) = I16# 0#
| otherwise = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `uncheckedShiftL` (W# i#) = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 16##) = I16# 0#
| otherwise = I16# (word2Int# (narrow16Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I16# x#) `uncheckedShiftR` (W# i#) = I16# (word2Int# (narrow16Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int32 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I32# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 32##) = I32# 0#
| otherwise = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `uncheckedShiftL` (W# i#) = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 32##) = I32# 0#
| otherwise = I32# (word2Int# (narrow32Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
(I32# x#) `uncheckedShiftR` (W# i#) = I32# (word2Int# (narrow32Word# (int2Word# x#) `uncheckedShiftRL#` word2Int# i#))
instance ShiftableBits Int64 where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
(I64# x#) `shiftL` (W# i#)
| isTrue# (i# `geWord#` 64##) = I64# 0#
| otherwise = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `uncheckedShiftL` (W# i#) = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `shiftR` (W# i#)
| isTrue# (i# `geWord#` 64##) = I64# 0#
| otherwise = I64# (word2Int# (int2Word# x# `uncheckedShiftRL#` word2Int# i#))
(I64# x#) `uncheckedShiftR` (W# i#) = I64# (word2Int# (int2Word# x# `uncheckedShiftRL#` word2Int# i#))
instance SignedShiftableBits Int where
(I# x#) `signedShiftL` (W# i#) = I# (x# `iShiftL#` word2Int# i#)
(I# x#) `signedShiftR` (W# i#) = I# (x# `iShiftRA#` word2Int# i#)
(I# x#) `uncheckedSignedShiftL` (W# i#) = I# (x# `uncheckedIShiftL#` word2Int# i#)
(I# x#) `uncheckedSignedShiftR` (W# i#) = I# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int8 where
(I8# x#) `signedShiftL` (W# i#) = I8# (narrow8Int# (x# `iShiftL#` word2Int# i#))
(I8# x#) `signedShiftR` (W# i#) = I8# (x# `iShiftRA#` word2Int# i#)
(I8# x#) `uncheckedSignedShiftL` (W# i#) = I8# (narrow8Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I8# x#) `uncheckedSignedShiftR` (W# i#) = I8# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int16 where
(I16# x#) `signedShiftL` (W# i#) = I16# (narrow16Int# (x# `iShiftL#` word2Int# i#))
(I16# x#) `signedShiftR` (W# i#) = I16# (x# `iShiftRA#` word2Int# i#)
(I16# x#) `uncheckedSignedShiftL` (W# i#) = I16# (narrow16Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I16# x#) `uncheckedSignedShiftR` (W# i#) = I16# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int32 where
(I32# x#) `signedShiftL` (W# i#) = I32# (narrow32Int# (x# `iShiftL#` word2Int# i#))
(I32# x#) `signedShiftR` (W# i#) = I32# (x# `iShiftRA#` word2Int# i#)
(I32# x#) `uncheckedSignedShiftL` (W# i#) = I32# (narrow32Int# (x# `uncheckedIShiftL#` word2Int# i#))
(I32# x#) `uncheckedSignedShiftR` (W# i#) = I32# (x# `uncheckedIShiftRA#` word2Int# i#)
instance SignedShiftableBits Int64 where
(I64# x#) `signedShiftL` (W# i#) = I64# (x# `iShiftL#` word2Int# i#)
(I64# x#) `signedShiftR` (W# i#) = I64# (x# `iShiftRA#` word2Int# i#)
(I64# x#) `uncheckedSignedShiftL` (W# i#) = I64# (x# `uncheckedIShiftL#` word2Int# i#)
(I64# x#) `uncheckedSignedShiftR` (W# i#) = I64# (x# `uncheckedIShiftRA#` word2Int# i#)
instance ShiftableBits Integer where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
x `shiftL` w = integerShiftL x w
x `shiftR` w = integerShiftR x w
uncheckedShiftL = shiftL
uncheckedShiftR = shiftR
instance ShiftableBits Natural where
# INLINABLE shiftR #
# INLINABLE shiftL #
# INLINABLE uncheckedShiftL #
# INLINABLE uncheckedShiftR #
x `shiftL` w = naturalShiftL x w
x `shiftR` w = naturalShiftR x w
uncheckedShiftL = shiftL
uncheckedShiftR = shiftR
|
ac223853516166a5698e96f202fec7651f01992305ae86290fd9ab4cb7499090 | basho/machi | seq0.erl | -module(seq0).
%% -------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2014 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-compile(export_all).
-include_lib("kernel/include/file.hrl").
-define(CONFIG_DIR, "./config").
-define(DATA_DIR, "./data").
seq(Server, Prefix, Size) when is_binary(Prefix), is_integer(Size), Size > -1 ->
Server ! {seq, self(), Prefix, Size},
receive
{assignment, File, Offset} ->
{File, Offset}
after 1*1000 ->
bummer
end.
seq_direct(Prefix, Size) when is_binary(Prefix), is_integer(Size), Size > -1 ->
RegName = make_regname(Prefix),
seq(RegName, Prefix, Size).
start_server() ->
start_server(?MODULE).
start_server(Name) ->
spawn_link(fun() -> run_server(Name) end).
run_server(Name) ->
register(Name, self()),
ets:new(?MODULE, [named_table, public, {write_concurrency, true}]),
server_loop().
server_loop() ->
receive
{seq, From, Prefix, Size} ->
spawn(fun() -> server_dispatch(From, Prefix, Size) end),
server_loop()
end.
server_dispatch(From, Prefix, Size) ->
RegName = make_regname(Prefix),
case whereis(RegName) of
undefined ->
start_prefix_server(Prefix),
timer:sleep(1),
server_dispatch(From, Prefix, Size);
Pid ->
Pid ! {seq, From, Prefix, Size}
end,
exit(normal).
start_prefix_server(Prefix) ->
spawn(fun() -> run_prefix_server(Prefix) end).
run_prefix_server(Prefix) ->
true = register(make_regname(Prefix), self()),
ok = filelib:ensure_dir(?CONFIG_DIR ++ "/unused"),
ok = filelib:ensure_dir(?DATA_DIR ++ "/unused"),
FileNum = read_max_filenum(Prefix) + 1,
ok = increment_max_filenum(Prefix),
prefix_server_loop(Prefix, FileNum).
prefix_server_loop(Prefix, FileNum) ->
File = make_data_filename(Prefix, FileNum),
prefix_server_loop(Prefix, File, FileNum, 0).
prefix_server_loop(Prefix, File, FileNum, Offset) ->
receive
{seq, From, Prefix, Size} ->
From ! {assignment, File, Offset},
prefix_server_loop(Prefix, File, FileNum, Offset + Size)
after 30*1000 ->
io:format("timeout: ~p server stopping\n", [Prefix]),
exit(normal)
end.
make_regname(Prefix) ->
erlang:binary_to_atom(Prefix, latin1).
make_config_filename(Prefix) ->
lists:flatten(io_lib:format("~s/~s", [?CONFIG_DIR, Prefix])).
make_data_filename(Prefix, FileNum) ->
erlang:iolist_to_binary(io_lib:format("~s/~s.~w",
[?DATA_DIR, Prefix, FileNum])).
read_max_filenum(Prefix) ->
case file:read_file_info(make_config_filename(Prefix)) of
{error, enoent} ->
0;
{ok, FI} ->
FI#file_info.size
end.
increment_max_filenum(Prefix) ->
{ok, FH} = file:open(make_config_filename(Prefix), [append]),
ok = file:write(FH, "x"),
%% ok = file:sync(FH),
ok = file:close(FH).
%%%%%%%%%%%%%%%%%
-define(SEQ, ?MODULE).
new(1) ->
start_server(),
timer:sleep(100),
{ok, unused};
new(_Id) ->
{ok, unused}.
run(null, _KeyGen, _ValgueGen, State) ->
{ok, State};
run(keygen_then_null, KeyGen, _ValgueGen, State) ->
_Prefix = KeyGen(),
{ok, State};
run(seq, KeyGen, _ValgueGen, State) ->
Prefix = KeyGen(),
{_, _} = ?SEQ:seq(?SEQ, Prefix, 1),
{ok, State};
run(seq_direct, KeyGen, _ValgueGen, State) ->
Prefix = KeyGen(),
Name = ?SEQ:make_regname(Prefix),
case get(Name) of
undefined ->
case whereis(Name) of
undefined ->
{_, _} = ?SEQ:seq(?SEQ, Prefix, 1);
Pid ->
put(Name, Pid),
{_, _} = ?SEQ:seq(Pid, Prefix, 1)
end;
Pid ->
{_, _} = ?SEQ:seq(Pid, Prefix, 1)
end,
{ok, State};
run(seq_ets, KeyGen, _ValgueGen, State) ->
Tab = ?MODULE,
Prefix = KeyGen(),
Res = try
BigNum = ets:update_counter(Tab, Prefix, 1),
BigBin = <<BigNum:80/big>>,
<<FileNum:32/big, Offset:48/big>> = BigBin,
if Offset rem 1000 = = 0 - >
io : format("~p,~p " , [ , Offset ] ) ;
%% true ->
%% ok
%% end,
{fakefake, FileNum, Offset}
catch error:badarg ->
FileNum2 = 1, Offset2 = 0,
FileBin = <<FileNum2:32/big>>,
OffsetBin = <<Offset2:48/big>>,
Glop = <<FileBin/binary, OffsetBin/binary>>,
<<Base:80/big>> = Glop,
if Prefix = = < < " 42 " > > - > io : format("base:~w\n " , [ Base ] ) ; true - > ok end ,
%% Base = 0,
case ets:insert_new(Tab, {Prefix, Base}) of
true ->
{<<"fakefakefake">>, Base};
false ->
Result2 = ets:update_counter(Tab, Prefix, 1),
{<<"fakefakefake">>, Result2}
end
end,
Res = Res,
{ok, State}.
| null | https://raw.githubusercontent.com/basho/machi/e87bd59a9777d805b00f9e9981467eb28e28390c/prototype/demo-day-hack/seq0.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
ok = file:sync(FH),
true ->
ok
end,
Base = 0, | -module(seq0).
Copyright ( c ) 2007 - 2014 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-compile(export_all).
-include_lib("kernel/include/file.hrl").
-define(CONFIG_DIR, "./config").
-define(DATA_DIR, "./data").
seq(Server, Prefix, Size) when is_binary(Prefix), is_integer(Size), Size > -1 ->
Server ! {seq, self(), Prefix, Size},
receive
{assignment, File, Offset} ->
{File, Offset}
after 1*1000 ->
bummer
end.
seq_direct(Prefix, Size) when is_binary(Prefix), is_integer(Size), Size > -1 ->
RegName = make_regname(Prefix),
seq(RegName, Prefix, Size).
start_server() ->
start_server(?MODULE).
start_server(Name) ->
spawn_link(fun() -> run_server(Name) end).
run_server(Name) ->
register(Name, self()),
ets:new(?MODULE, [named_table, public, {write_concurrency, true}]),
server_loop().
server_loop() ->
receive
{seq, From, Prefix, Size} ->
spawn(fun() -> server_dispatch(From, Prefix, Size) end),
server_loop()
end.
server_dispatch(From, Prefix, Size) ->
RegName = make_regname(Prefix),
case whereis(RegName) of
undefined ->
start_prefix_server(Prefix),
timer:sleep(1),
server_dispatch(From, Prefix, Size);
Pid ->
Pid ! {seq, From, Prefix, Size}
end,
exit(normal).
start_prefix_server(Prefix) ->
spawn(fun() -> run_prefix_server(Prefix) end).
run_prefix_server(Prefix) ->
true = register(make_regname(Prefix), self()),
ok = filelib:ensure_dir(?CONFIG_DIR ++ "/unused"),
ok = filelib:ensure_dir(?DATA_DIR ++ "/unused"),
FileNum = read_max_filenum(Prefix) + 1,
ok = increment_max_filenum(Prefix),
prefix_server_loop(Prefix, FileNum).
prefix_server_loop(Prefix, FileNum) ->
File = make_data_filename(Prefix, FileNum),
prefix_server_loop(Prefix, File, FileNum, 0).
prefix_server_loop(Prefix, File, FileNum, Offset) ->
receive
{seq, From, Prefix, Size} ->
From ! {assignment, File, Offset},
prefix_server_loop(Prefix, File, FileNum, Offset + Size)
after 30*1000 ->
io:format("timeout: ~p server stopping\n", [Prefix]),
exit(normal)
end.
make_regname(Prefix) ->
erlang:binary_to_atom(Prefix, latin1).
make_config_filename(Prefix) ->
lists:flatten(io_lib:format("~s/~s", [?CONFIG_DIR, Prefix])).
make_data_filename(Prefix, FileNum) ->
erlang:iolist_to_binary(io_lib:format("~s/~s.~w",
[?DATA_DIR, Prefix, FileNum])).
read_max_filenum(Prefix) ->
case file:read_file_info(make_config_filename(Prefix)) of
{error, enoent} ->
0;
{ok, FI} ->
FI#file_info.size
end.
increment_max_filenum(Prefix) ->
{ok, FH} = file:open(make_config_filename(Prefix), [append]),
ok = file:write(FH, "x"),
ok = file:close(FH).
-define(SEQ, ?MODULE).
new(1) ->
start_server(),
timer:sleep(100),
{ok, unused};
new(_Id) ->
{ok, unused}.
run(null, _KeyGen, _ValgueGen, State) ->
{ok, State};
run(keygen_then_null, KeyGen, _ValgueGen, State) ->
_Prefix = KeyGen(),
{ok, State};
run(seq, KeyGen, _ValgueGen, State) ->
Prefix = KeyGen(),
{_, _} = ?SEQ:seq(?SEQ, Prefix, 1),
{ok, State};
run(seq_direct, KeyGen, _ValgueGen, State) ->
Prefix = KeyGen(),
Name = ?SEQ:make_regname(Prefix),
case get(Name) of
undefined ->
case whereis(Name) of
undefined ->
{_, _} = ?SEQ:seq(?SEQ, Prefix, 1);
Pid ->
put(Name, Pid),
{_, _} = ?SEQ:seq(Pid, Prefix, 1)
end;
Pid ->
{_, _} = ?SEQ:seq(Pid, Prefix, 1)
end,
{ok, State};
run(seq_ets, KeyGen, _ValgueGen, State) ->
Tab = ?MODULE,
Prefix = KeyGen(),
Res = try
BigNum = ets:update_counter(Tab, Prefix, 1),
BigBin = <<BigNum:80/big>>,
<<FileNum:32/big, Offset:48/big>> = BigBin,
if Offset rem 1000 = = 0 - >
io : format("~p,~p " , [ , Offset ] ) ;
{fakefake, FileNum, Offset}
catch error:badarg ->
FileNum2 = 1, Offset2 = 0,
FileBin = <<FileNum2:32/big>>,
OffsetBin = <<Offset2:48/big>>,
Glop = <<FileBin/binary, OffsetBin/binary>>,
<<Base:80/big>> = Glop,
if Prefix = = < < " 42 " > > - > io : format("base:~w\n " , [ Base ] ) ; true - > ok end ,
case ets:insert_new(Tab, {Prefix, Base}) of
true ->
{<<"fakefakefake">>, Base};
false ->
Result2 = ets:update_counter(Tab, Prefix, 1),
{<<"fakefakefake">>, Result2}
end
end,
Res = Res,
{ok, State}.
|
8baa83d2b9229454d976d0fa1981379a29724cc363bd3266a517cbb931926cbe | kkinnear/zprint | optionfn.cljc | (ns ^:no-doc zprint.optionfn
#?@(:cljs [[:require-macros
[zprint.macros :refer
[dbg dbg-s dbg-pr dbg-s-pr dbg-form dbg-print zfuture]]]])
(:require #?@(:clj [[zprint.macros :refer
[dbg-pr dbg-s-pr dbg dbg-s dbg-form dbg-print zfuture]]])
[zprint.rewrite :refer [sort-dependencies]]
[zprint.util :refer [column-alignment cumulative-alignment]]))
;;
;; Contains functions which can be called with {:option-fn <fn>} to produce
a new options map . Option - fns which produce a " guide " are in guide.cljc .
;; The optionfns here are called the same way, but just produce a basic
;; option map.
;;
(defn rodfn
"Given a structure which starts with defn or fn format it using the
'rules of defn'."
([] "rodfn")
; If you call an option-fn with partial because it has its own options map,
; the "no-argument" arity must include the options map!
([rod-options] "rodfn")
; Since we have released this before, we will also allow it to be called
; without rod-options (since this is a drop-in replacement for rodguide).
([options len sexpr] (rodfn {} options len sexpr))
([rod-options options len sexpr]
(let [multi-arity-nl? (get rod-options :multi-arity-nl? true)
fn-name? (symbol? (second sexpr))
docstring? (string? (nth sexpr (if fn-name? 2 1)))
multi-arity? (not (vector? (nth sexpr
(cond (and fn-name? docstring?) 3
(or fn-name? docstring?) 2
:else 1))))
nl-count (cond (and multi-arity? multi-arity-nl? docstring?) [1 2]
(and multi-arity? multi-arity-nl?) [2]
:else [1])
option-map {:list {:nl-count nl-count},
:next-inner {:list {:option-fn nil}},
:next-inner-restore [[:list :nl-count]]}
option-map (cond (and fn-name? docstring?)
(assoc option-map :fn-style :arg1-force-nl-body)
(and fn-name? (not multi-arity?))
(assoc option-map :fn-style :arg2-force-nl-body)
fn-name? (assoc option-map
:fn-style :arg1-force-nl-body)
(not multi-arity?) (assoc option-map
:fn-style :arg1-force-nl-body)
:else (assoc option-map :fn-style :flow-body))]
(if multi-arity?
(assoc option-map
:next-inner {:list {:option-fn nil},
:fn-map {:vector :force-nl},
:next-inner-restore [[:fn-map :vector]]})
option-map))))
; Use this to use the above:
;
; (czprint rod4
; {:parse-string? true
; :fn-map {"defn" [:none {:list {:option-fn rodfn}}]}})
(defn meta-base-fn
"Look at a list, and if it has metadata, then based on the kind of
metadata, try to do it differently than the normal metadata output."
([] "meta-base-fn")
([opts n exprs]
(when (meta (second exprs))
#_(println (meta (second exprs)))
(let [zfn-map (:zfn-map opts)
zloc-seq-nc ((:zmap-no-comment zfn-map) identity (:zloc opts))
meta-zloc (second zloc-seq-nc)
#_(println "tag:" ((:ztag zfn-map) meta))
meta-seq ((:zmap-no-comment zfn-map) identity meta-zloc)
#_(println "count meta-seq:" (count meta-seq)
"meta-seq:" (map (:zstring zfn-map) meta-seq)
"meta-seq-tag:" (map (:ztag zfn-map) meta-seq))]
(if (= :meta ((:ztag zfn-map) (second meta-seq)))
; Figure out next-inner restore
nil
{:meta {:split? true},
:list {:hang-expand 0},
:fn-style (if (and (map? (meta (second exprs)))
(> (count (keys (meta (second exprs)))) 1))
:arg1-body
:arg2),
:next-inner-restore [[:list :hang-expand]]})))))
;;
;; When given (fn* ...), turn it back into #(...%...).
;;
(defn fn*->%
"Given a structure starting with fn*, turn it back into a #(...) anon fn."
([] "fn*->%")
([options n exprs]
#_(println zloc)
(when (= (:ztype options) :sexpr)
; We know we've got a struct
(let [caller (:caller options)
zloc (:zloc options)
l-str (:l-str options)
arg-vec (second zloc)
arg-count (count arg-vec)
[arg-vec final-value]
(if (and (>= arg-count 2) (= (nth arg-vec (- arg-count 2)) '&))
[(conj (into [] (take (- arg-count 2) arg-vec))
(nth arg-vec (dec arg-count))) "&"]
[arg-vec nil])
arg-count (count arg-vec)
replace-map (zipmap arg-vec
(mapv (comp symbol (partial str "%"))
(if (= arg-count 1)
[""]
(if final-value
(conj (mapv inc (range (dec arg-count)))
final-value)
(mapv inc (range arg-count))))))
new-zloc (clojure.walk/prewalk-replace replace-map (nth zloc 2))]
{:list {:option-fn nil},
:new-zloc new-zloc,
:new-l-str (str "#" l-str)}))))
(defn sort-deps
"option-fn interface to sort-dependencies"
([] "sort-deps")
([options n exprs]
(when (= (:ztype options) :zipper)
(let [caller (:caller options)
zloc (:zloc options)
new-zloc (sort-dependencies caller options zloc)]
{:new-zloc new-zloc, :list {:option-fn nil}}))))
(defn regexfn
"Match functions that are not found in the :fn-map against a
series of regular expression rules. These rules are supplied as
a set of pairs in a vector as the first argument. Each pair
should be a regular expression paired with an options map. If
the regex matches, will return the associated options map.
Process the pairs in the order they appear in the vector. If
none of the regex expressions match, return nil."
([rules-vec] "regexfn")
([rules-vec options len sexpr]
(let [fn-name (first sexpr)
fn-str (str fn-name)
rule-pairs (partition 2 2 (repeat nil) rules-vec)
result (reduce #(when (re-find (first %2) fn-str)
(reduced (second %2)))
nil
rule-pairs)]
result)))
(defn rulesfn
"Match functions that are not found in the :fn-map against a
series of rules. These rules are supplied as a set of pairs in
a vector as the first argument to rulesfn. Each pair could be a
regular expression paired with an options map or a function paired
with an options map. If the left-hand-side of the pair is a
regex, and the regex matches the string representation of the
first element in the list, return the associated options map. If
the left-hand-side of the pair is a function, supply the string
representation of the first element of the list as the single
argument to the function. If the function returns a non-nil
result, return the options map from that pair. Process the pairs
in the order they appear in the vector. If none of the regex
expressions match or functions return non-nil, return nil."
([rules-vec] "rulesfn")
([rules-vec options len sexpr]
(let [fn-name (first sexpr)
fn-str (str fn-name)
rule-pairs (partition 2 2 (repeat nil) rules-vec)
result (reduce #(let [lhs (first %2)]
(cond (fn? lhs) (when (lhs fn-str)
(reduced (second %2)))
:else (when (re-find (first %2) fn-str)
(reduced (second %2)))))
nil
rule-pairs)]
result)))
| null | https://raw.githubusercontent.com/kkinnear/zprint/c5806b5c7a7455f626a0fd26a36367da94af09a6/src/zprint/optionfn.cljc | clojure |
Contains functions which can be called with {:option-fn <fn>} to produce
The optionfns here are called the same way, but just produce a basic
option map.
If you call an option-fn with partial because it has its own options map,
the "no-argument" arity must include the options map!
Since we have released this before, we will also allow it to be called
without rod-options (since this is a drop-in replacement for rodguide).
Use this to use the above:
(czprint rod4
{:parse-string? true
:fn-map {"defn" [:none {:list {:option-fn rodfn}}]}})
Figure out next-inner restore
When given (fn* ...), turn it back into #(...%...).
We know we've got a struct | (ns ^:no-doc zprint.optionfn
#?@(:cljs [[:require-macros
[zprint.macros :refer
[dbg dbg-s dbg-pr dbg-s-pr dbg-form dbg-print zfuture]]]])
(:require #?@(:clj [[zprint.macros :refer
[dbg-pr dbg-s-pr dbg dbg-s dbg-form dbg-print zfuture]]])
[zprint.rewrite :refer [sort-dependencies]]
[zprint.util :refer [column-alignment cumulative-alignment]]))
a new options map . Option - fns which produce a " guide " are in guide.cljc .
(defn rodfn
"Given a structure which starts with defn or fn format it using the
'rules of defn'."
([] "rodfn")
([rod-options] "rodfn")
([options len sexpr] (rodfn {} options len sexpr))
([rod-options options len sexpr]
(let [multi-arity-nl? (get rod-options :multi-arity-nl? true)
fn-name? (symbol? (second sexpr))
docstring? (string? (nth sexpr (if fn-name? 2 1)))
multi-arity? (not (vector? (nth sexpr
(cond (and fn-name? docstring?) 3
(or fn-name? docstring?) 2
:else 1))))
nl-count (cond (and multi-arity? multi-arity-nl? docstring?) [1 2]
(and multi-arity? multi-arity-nl?) [2]
:else [1])
option-map {:list {:nl-count nl-count},
:next-inner {:list {:option-fn nil}},
:next-inner-restore [[:list :nl-count]]}
option-map (cond (and fn-name? docstring?)
(assoc option-map :fn-style :arg1-force-nl-body)
(and fn-name? (not multi-arity?))
(assoc option-map :fn-style :arg2-force-nl-body)
fn-name? (assoc option-map
:fn-style :arg1-force-nl-body)
(not multi-arity?) (assoc option-map
:fn-style :arg1-force-nl-body)
:else (assoc option-map :fn-style :flow-body))]
(if multi-arity?
(assoc option-map
:next-inner {:list {:option-fn nil},
:fn-map {:vector :force-nl},
:next-inner-restore [[:fn-map :vector]]})
option-map))))
(defn meta-base-fn
"Look at a list, and if it has metadata, then based on the kind of
metadata, try to do it differently than the normal metadata output."
([] "meta-base-fn")
([opts n exprs]
(when (meta (second exprs))
#_(println (meta (second exprs)))
(let [zfn-map (:zfn-map opts)
zloc-seq-nc ((:zmap-no-comment zfn-map) identity (:zloc opts))
meta-zloc (second zloc-seq-nc)
#_(println "tag:" ((:ztag zfn-map) meta))
meta-seq ((:zmap-no-comment zfn-map) identity meta-zloc)
#_(println "count meta-seq:" (count meta-seq)
"meta-seq:" (map (:zstring zfn-map) meta-seq)
"meta-seq-tag:" (map (:ztag zfn-map) meta-seq))]
(if (= :meta ((:ztag zfn-map) (second meta-seq)))
nil
{:meta {:split? true},
:list {:hang-expand 0},
:fn-style (if (and (map? (meta (second exprs)))
(> (count (keys (meta (second exprs)))) 1))
:arg1-body
:arg2),
:next-inner-restore [[:list :hang-expand]]})))))
(defn fn*->%
"Given a structure starting with fn*, turn it back into a #(...) anon fn."
([] "fn*->%")
([options n exprs]
#_(println zloc)
(when (= (:ztype options) :sexpr)
(let [caller (:caller options)
zloc (:zloc options)
l-str (:l-str options)
arg-vec (second zloc)
arg-count (count arg-vec)
[arg-vec final-value]
(if (and (>= arg-count 2) (= (nth arg-vec (- arg-count 2)) '&))
[(conj (into [] (take (- arg-count 2) arg-vec))
(nth arg-vec (dec arg-count))) "&"]
[arg-vec nil])
arg-count (count arg-vec)
replace-map (zipmap arg-vec
(mapv (comp symbol (partial str "%"))
(if (= arg-count 1)
[""]
(if final-value
(conj (mapv inc (range (dec arg-count)))
final-value)
(mapv inc (range arg-count))))))
new-zloc (clojure.walk/prewalk-replace replace-map (nth zloc 2))]
{:list {:option-fn nil},
:new-zloc new-zloc,
:new-l-str (str "#" l-str)}))))
(defn sort-deps
"option-fn interface to sort-dependencies"
([] "sort-deps")
([options n exprs]
(when (= (:ztype options) :zipper)
(let [caller (:caller options)
zloc (:zloc options)
new-zloc (sort-dependencies caller options zloc)]
{:new-zloc new-zloc, :list {:option-fn nil}}))))
(defn regexfn
"Match functions that are not found in the :fn-map against a
series of regular expression rules. These rules are supplied as
a set of pairs in a vector as the first argument. Each pair
should be a regular expression paired with an options map. If
the regex matches, will return the associated options map.
Process the pairs in the order they appear in the vector. If
none of the regex expressions match, return nil."
([rules-vec] "regexfn")
([rules-vec options len sexpr]
(let [fn-name (first sexpr)
fn-str (str fn-name)
rule-pairs (partition 2 2 (repeat nil) rules-vec)
result (reduce #(when (re-find (first %2) fn-str)
(reduced (second %2)))
nil
rule-pairs)]
result)))
(defn rulesfn
"Match functions that are not found in the :fn-map against a
series of rules. These rules are supplied as a set of pairs in
a vector as the first argument to rulesfn. Each pair could be a
regular expression paired with an options map or a function paired
with an options map. If the left-hand-side of the pair is a
regex, and the regex matches the string representation of the
first element in the list, return the associated options map. If
the left-hand-side of the pair is a function, supply the string
representation of the first element of the list as the single
argument to the function. If the function returns a non-nil
result, return the options map from that pair. Process the pairs
in the order they appear in the vector. If none of the regex
expressions match or functions return non-nil, return nil."
([rules-vec] "rulesfn")
([rules-vec options len sexpr]
(let [fn-name (first sexpr)
fn-str (str fn-name)
rule-pairs (partition 2 2 (repeat nil) rules-vec)
result (reduce #(let [lhs (first %2)]
(cond (fn? lhs) (when (lhs fn-str)
(reduced (second %2)))
:else (when (re-find (first %2) fn-str)
(reduced (second %2)))))
nil
rule-pairs)]
result)))
|
36c2b6b34b079a44844e5ba82e9e83aa4a44dd7ed2e8d8d547e48da4d91f7b8a | staples-sparx/clj-schema | contracts.clj | (ns clj-schema.contracts
"Unobtrusively apply contracts to functions vars"
(:use [clj-schema.schema :refer [def-map-schema optional-path schema? sequence-of]]
[clj-schema.simple-schemas :refer [Anything]]
[clj-schema.validation :refer [validation-errors]])
(:require [robert.hooke :as hooke]))
(def-map-schema ^:private contract-schema :loose
[[:var] var?
(optional-path [:sampling-rate]) [:or nil fn? [number? #(>= % 0) #(<= % 100)]]
(optional-path [:input-schema]) Anything
(optional-path [:input-schema-on-failure]) [:or nil fn?]
(optional-path [:output-schema]) Anything
(optional-path [:output-schema-on-failure]) [:or nil fn?]])
(defn- check? [sampling-rate args]
(cond (not sampling-rate) true
(fn? sampling-rate) (> (apply sampling-rate args) (rand 100))
:else (> sampling-rate (rand 100))))
(defn- schema-checker-fn [{:keys [var
sampling-rate
input-schema
input-schema-on-failure
input-schema-on-success
output-schema
output-schema-on-failure
output-schema-on-success]}]
(fn [f & args]
(let [check? (check? sampling-rate args)]
(when check?
(let [errors (and input-schema (validation-errors input-schema args))]
(if (seq errors)
(if input-schema-on-failure
(input-schema-on-failure var (vec args) errors)
(throw (Exception. (str "Errors found in inputs, " (vec args) ", to " var ": " errors))))
(when input-schema-on-success
(input-schema-on-success var (vec args))))))
(let [result (apply f args)]
(when check?
(let [errors (and output-schema (validation-errors output-schema result))]
(if (seq errors)
(if output-schema-on-failure
(output-schema-on-failure var result errors)
(throw (Exception. (str "Errors found in outputs, " result ", from " var ": " errors))))
(when output-schema-on-success
(output-schema-on-success var result)))))
result))))
(defn add-contracts!
"Wrap vars specified in contract maps such that they check
inputs and outputs against supplied schemas.
Example fully-decked-out contract:
{:var #'f
0 - 100 ( percent )
;; or ... :sampling-rate (fn [a b c] (sampling-rate a b c))
;; ... can take a fn here that gets the args sent to the fn (#'f)
:input-schema (schema/sequence-of [:or String clojure.lang.Keyword])
:input-schema-on-failure (fn [f input errors]
(log/error [f input errors]))
:input-schema-on-success (fn [f input]
(log/info [f input]))
:output-schema String
:output-schema-on-failure (fn [f result errors]
(log/error [f result errors]))
:output-schema-on-success (fn [f result]
(log/info [f result]))}"
[contracts]
(when-let [errors (seq (validation-errors (sequence-of contract-schema) contracts))]
(throw (Exception. (str "contracts were not valid: " contracts errors))))
(doseq [c contracts]
(hooke/add-hook (:var c) ::contract (schema-checker-fn c))))
(defn remove-contracts!
"Removes all contracts that were added by calling clj-schema.contracts/add-contracts!"
[contracts]
(when-let [errors (seq (validation-errors (sequence-of contract-schema) contracts))]
(throw (Exception. (str "contracts were not valid: " contracts errors))))
(doseq [c contracts]
(hooke/remove-hook (:var c) ::contract)))
| null | https://raw.githubusercontent.com/staples-sparx/clj-schema/398cd9cbd40f6bdcf3304ebf471e989497541471/src/clj_schema/contracts.clj | clojure | or ... :sampling-rate (fn [a b c] (sampling-rate a b c))
... can take a fn here that gets the args sent to the fn (#'f) | (ns clj-schema.contracts
"Unobtrusively apply contracts to functions vars"
(:use [clj-schema.schema :refer [def-map-schema optional-path schema? sequence-of]]
[clj-schema.simple-schemas :refer [Anything]]
[clj-schema.validation :refer [validation-errors]])
(:require [robert.hooke :as hooke]))
(def-map-schema ^:private contract-schema :loose
[[:var] var?
(optional-path [:sampling-rate]) [:or nil fn? [number? #(>= % 0) #(<= % 100)]]
(optional-path [:input-schema]) Anything
(optional-path [:input-schema-on-failure]) [:or nil fn?]
(optional-path [:output-schema]) Anything
(optional-path [:output-schema-on-failure]) [:or nil fn?]])
(defn- check? [sampling-rate args]
(cond (not sampling-rate) true
(fn? sampling-rate) (> (apply sampling-rate args) (rand 100))
:else (> sampling-rate (rand 100))))
(defn- schema-checker-fn [{:keys [var
sampling-rate
input-schema
input-schema-on-failure
input-schema-on-success
output-schema
output-schema-on-failure
output-schema-on-success]}]
(fn [f & args]
(let [check? (check? sampling-rate args)]
(when check?
(let [errors (and input-schema (validation-errors input-schema args))]
(if (seq errors)
(if input-schema-on-failure
(input-schema-on-failure var (vec args) errors)
(throw (Exception. (str "Errors found in inputs, " (vec args) ", to " var ": " errors))))
(when input-schema-on-success
(input-schema-on-success var (vec args))))))
(let [result (apply f args)]
(when check?
(let [errors (and output-schema (validation-errors output-schema result))]
(if (seq errors)
(if output-schema-on-failure
(output-schema-on-failure var result errors)
(throw (Exception. (str "Errors found in outputs, " result ", from " var ": " errors))))
(when output-schema-on-success
(output-schema-on-success var result)))))
result))))
(defn add-contracts!
"Wrap vars specified in contract maps such that they check
inputs and outputs against supplied schemas.
Example fully-decked-out contract:
{:var #'f
0 - 100 ( percent )
:input-schema (schema/sequence-of [:or String clojure.lang.Keyword])
:input-schema-on-failure (fn [f input errors]
(log/error [f input errors]))
:input-schema-on-success (fn [f input]
(log/info [f input]))
:output-schema String
:output-schema-on-failure (fn [f result errors]
(log/error [f result errors]))
:output-schema-on-success (fn [f result]
(log/info [f result]))}"
[contracts]
(when-let [errors (seq (validation-errors (sequence-of contract-schema) contracts))]
(throw (Exception. (str "contracts were not valid: " contracts errors))))
(doseq [c contracts]
(hooke/add-hook (:var c) ::contract (schema-checker-fn c))))
(defn remove-contracts!
"Removes all contracts that were added by calling clj-schema.contracts/add-contracts!"
[contracts]
(when-let [errors (seq (validation-errors (sequence-of contract-schema) contracts))]
(throw (Exception. (str "contracts were not valid: " contracts errors))))
(doseq [c contracts]
(hooke/remove-hook (:var c) ::contract)))
|
47bfc806fe074378aaf9bf27b53331a2720da5ae3be68b31dbed8acb6a8461f6 | imandra-ai/ocaml-cimgui | types_gen_c.ml |
let prelude = {|
#include <stddef.h>
//#include "imgui.h"
#include "cimgui.h"
#include "cimgui_impl.h"
union anon_union1 {
int val_i; float val_f; void* val_p;
};
|}
let () =
print_endline prelude;
Cstubs_structs.write_c Format.std_formatter (module Imgui_generated_types.Make);
()
| null | https://raw.githubusercontent.com/imandra-ai/ocaml-cimgui/55c71078705099ea60d91630610f6d750fd07abb/src/bindgen/types_gen_c.ml | ocaml |
let prelude = {|
#include <stddef.h>
//#include "imgui.h"
#include "cimgui.h"
#include "cimgui_impl.h"
union anon_union1 {
int val_i; float val_f; void* val_p;
};
|}
let () =
print_endline prelude;
Cstubs_structs.write_c Format.std_formatter (module Imgui_generated_types.Make);
()
| |
f60139c7fb6a722d0dab9307e812c063f15b8e0854e4ca9aa8db917509136bf6 | stepcut/plugins | Main.hs |
import System.Plugins
import System.Directory
a = "Foo.hs" -- uesr code
b = "Bar.hs" -- trusted code. Result is "Bar.o"
c = "Out.hs"
main = do
status <- mergeTo a b c
f <- case status of
MergeFailure e -> error "mergeto failure"
MergeSuccess _ _ f -> return f
print $ f == c
status <- mergeTo a b c
f' <- case status of
MergeFailure e -> error "mergeto failure"
MergeSuccess ReComp _ f -> error "unnec. mergeto"
MergeSuccess NotReq _ f -> return f -- good, not req
print $ f == f' && f == c
status <- make f' []
o <- case status of
MakeFailure e -> error "make failed"
MakeSuccess _ o -> return o
m_v <- load o [] [] "resource"
v <- case m_v of
LoadSuccess _ v -> return v
_ -> error "load failed"
putStrLn $ show $ (v :: Int)
makeCleaner c
| null | https://raw.githubusercontent.com/stepcut/plugins/52c660b5bc71182627d14c1d333d0234050cac01/testsuite/makewith/mergeto0/Main.hs | haskell | uesr code
trusted code. Result is "Bar.o"
good, not req |
import System.Plugins
import System.Directory
c = "Out.hs"
main = do
status <- mergeTo a b c
f <- case status of
MergeFailure e -> error "mergeto failure"
MergeSuccess _ _ f -> return f
print $ f == c
status <- mergeTo a b c
f' <- case status of
MergeFailure e -> error "mergeto failure"
MergeSuccess ReComp _ f -> error "unnec. mergeto"
print $ f == f' && f == c
status <- make f' []
o <- case status of
MakeFailure e -> error "make failed"
MakeSuccess _ o -> return o
m_v <- load o [] [] "resource"
v <- case m_v of
LoadSuccess _ v -> return v
_ -> error "load failed"
putStrLn $ show $ (v :: Int)
makeCleaner c
|
7ddae65d9fa6df8d20fa07104ae56a1d6a18b3633edf22d3ca84b2393a56de68 | basho/riak_test | ts_cluster_riak_shell_regression_log.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2016 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ts_cluster_riak_shell_regression_log).
-behavior(riak_test).
-include_lib("eunit/include/eunit.hrl").
-export([confirm/0]).
-define(DONT_INCREMENT_PROMPT, false).
-define(LOG_FILE, "priv/riak_shell/riak_shell_regression1.log").
%% we cant run the test in this process as it receives various messages
%% and the running test interprets then as being messages to the shell
confirm() ->
Nodes = ts_setup:start_cluster(3),
_Conn = ts_setup:conn(Nodes),
lager:info("Built a cluster of ~p~n", [Nodes]),
Self = self(),
_Pid = spawn_link(fun() -> load_log_file(Self) end),
Got1 = riak_shell_test_util:loop(),
Result = ts_data:assert("Regression Log", pass, Got1),
ts_data:results([
Result
]),
pass.
load_log_file(Pid) ->
State = riak_shell_test_util:shell_init(),
lager:info("~n~nLoad the log -------------------------", []),
Cmds = [
{{match, "No Regression Errors."},
ts_data:flat_format("regression_log \"~s\";", [?LOG_FILE])}
],
Result = riak_shell_test_util:run_commands(Cmds, State,
?DONT_INCREMENT_PROMPT),
lager:info("~n~n------------------------------------------------------", []),
Pid ! Result.
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/ts_cluster_riak_shell_regression_log.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
we cant run the test in this process as it receives various messages
and the running test interprets then as being messages to the shell | Copyright ( c ) 2016 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(ts_cluster_riak_shell_regression_log).
-behavior(riak_test).
-include_lib("eunit/include/eunit.hrl").
-export([confirm/0]).
-define(DONT_INCREMENT_PROMPT, false).
-define(LOG_FILE, "priv/riak_shell/riak_shell_regression1.log").
confirm() ->
Nodes = ts_setup:start_cluster(3),
_Conn = ts_setup:conn(Nodes),
lager:info("Built a cluster of ~p~n", [Nodes]),
Self = self(),
_Pid = spawn_link(fun() -> load_log_file(Self) end),
Got1 = riak_shell_test_util:loop(),
Result = ts_data:assert("Regression Log", pass, Got1),
ts_data:results([
Result
]),
pass.
load_log_file(Pid) ->
State = riak_shell_test_util:shell_init(),
lager:info("~n~nLoad the log -------------------------", []),
Cmds = [
{{match, "No Regression Errors."},
ts_data:flat_format("regression_log \"~s\";", [?LOG_FILE])}
],
Result = riak_shell_test_util:run_commands(Cmds, State,
?DONT_INCREMENT_PROMPT),
lager:info("~n~n------------------------------------------------------", []),
Pid ! Result.
|
435bf9ad5a1e3c419704e2dcaedbc0fe5c41326d82becac96aa75f45ee05d3ee | sky-big/RabbitMQ | rabbit_mgmt_wm_bindings.erl | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
%% compliance with the License. You may obtain a copy of the License at
%% /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
%% License for the specific language governing rights and limitations
%% under the License.
%%
The Original Code is RabbitMQ Management Plugin .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_mgmt_wm_bindings).
-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
-export([allowed_methods/2, post_is_create/2, create_path/2]).
-export([content_types_accepted/2, accept_content/2, resource_exists/2]).
-export([basic/1, augmented/2]).
-include("rabbit_mgmt.hrl").
-include("webmachine.hrl").
-include("amqp_client.hrl").
%%--------------------------------------------------------------------
init([Mode]) ->
{ok, {Mode, #context{}}}.
content_types_provided(ReqData, Context) ->
{[{"application/json", to_json}], ReqData, Context}.
resource_exists(ReqData, {Mode, Context}) ->
{case list_bindings(Mode, ReqData) of
vhost_not_found -> false;
_ -> true
end, ReqData, {Mode, Context}}.
content_types_accepted(ReqData, Context) ->
{[{"application/json", accept_content}], ReqData, Context}.
allowed_methods(ReqData, {Mode, Context}) ->
{case Mode of
source_destination -> ['HEAD', 'GET', 'POST'];
_ -> ['HEAD', 'GET']
end, ReqData, {Mode, Context}}.
post_is_create(ReqData, Context) ->
{true, ReqData, Context}.
to_json(ReqData, {Mode, Context}) ->
Bs = [rabbit_mgmt_format:binding(B) || B <- list_bindings(Mode, ReqData)],
rabbit_mgmt_util:reply_list(
rabbit_mgmt_util:filter_vhost(Bs, ReqData, Context),
["vhost", "source", "type", "destination",
"routing_key", "properties_key"],
ReqData, {Mode, Context}).
create_path(ReqData, Context) ->
{"dummy", ReqData, Context}.
accept_content(ReqData, {_Mode, Context}) ->
Source = rabbit_mgmt_util:id(source, ReqData),
Dest = rabbit_mgmt_util:id(destination, ReqData),
DestType = rabbit_mgmt_util:id(dtype, ReqData),
VHost = rabbit_mgmt_util:vhost(ReqData),
{ok, Props} = rabbit_mgmt_util:decode(wrq:req_body(ReqData)),
{Method, Key, Args} = method_key_args(DestType, Source, Dest, Props),
Response = rabbit_mgmt_util:amqp_request(VHost, ReqData, Context, Method),
case Response of
{{halt, _}, _, _} = Res ->
Res;
{true, ReqData, Context2} ->
Loc = rabbit_web_dispatch_util:relativise(
wrq:path(ReqData),
binary_to_list(
rabbit_mgmt_format:url(
"/api/bindings/~s/e/~s/~s/~s/~s",
[VHost, Source, DestType, Dest,
rabbit_mgmt_format:pack_binding_props(Key, Args)]))),
{true, rabbit_mgmt_util:set_resp_header("Location", Loc, ReqData),
Context2}
end.
is_authorized(ReqData, {Mode, Context}) ->
{Res, RD2, C2} = rabbit_mgmt_util:is_authorized_vhost(ReqData, Context),
{Res, RD2, {Mode, C2}}.
%%--------------------------------------------------------------------
basic(ReqData) ->
[rabbit_mgmt_format:binding(B) ||
B <- list_bindings(all, ReqData)].
augmented(ReqData, Context) ->
rabbit_mgmt_util:filter_vhost(basic(ReqData), ReqData, Context).
method_key_args(<<"q">>, Source, Dest, Props) ->
M = #'queue.bind'{routing_key = K, arguments = A} =
rabbit_mgmt_util:props_to_method(
'queue.bind', Props,
[], [{exchange, Source}, {queue, Dest}]),
{M, K, A};
method_key_args(<<"e">>, Source, Dest, Props) ->
M = #'exchange.bind'{routing_key = K, arguments = A} =
rabbit_mgmt_util:props_to_method(
'exchange.bind', Props,
[], [{source, Source}, {destination, Dest}]),
{M, K, A}.
%%--------------------------------------------------------------------
list_bindings(all, ReqData) ->
rabbit_mgmt_util:all_or_one_vhost(ReqData,
fun (VHost) ->
rabbit_binding:list(VHost)
end);
list_bindings(exchange_source, ReqData) ->
rabbit_binding:list_for_source(r(exchange, exchange, ReqData));
list_bindings(exchange_destination, ReqData) ->
rabbit_binding:list_for_destination(r(exchange, exchange, ReqData));
list_bindings(queue, ReqData) ->
rabbit_binding:list_for_destination(r(queue, destination, ReqData));
list_bindings(source_destination, ReqData) ->
DestType = rabbit_mgmt_util:destination_type(ReqData),
rabbit_binding:list_for_source_and_destination(
r(exchange, source, ReqData),
r(DestType, destination, ReqData)).
r(Type, Name, ReqData) ->
rabbit_misc:r(rabbit_mgmt_util:vhost(ReqData), Type,
rabbit_mgmt_util:id(Name, ReqData)).
| null | https://raw.githubusercontent.com/sky-big/RabbitMQ/d7a773e11f93fcde4497c764c9fa185aad049ce2/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_bindings.erl | erlang | compliance with the License. You may obtain a copy of the License at
/
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations
under the License.
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ Management Plugin .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_mgmt_wm_bindings).
-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
-export([allowed_methods/2, post_is_create/2, create_path/2]).
-export([content_types_accepted/2, accept_content/2, resource_exists/2]).
-export([basic/1, augmented/2]).
-include("rabbit_mgmt.hrl").
-include("webmachine.hrl").
-include("amqp_client.hrl").
init([Mode]) ->
{ok, {Mode, #context{}}}.
content_types_provided(ReqData, Context) ->
{[{"application/json", to_json}], ReqData, Context}.
resource_exists(ReqData, {Mode, Context}) ->
{case list_bindings(Mode, ReqData) of
vhost_not_found -> false;
_ -> true
end, ReqData, {Mode, Context}}.
content_types_accepted(ReqData, Context) ->
{[{"application/json", accept_content}], ReqData, Context}.
allowed_methods(ReqData, {Mode, Context}) ->
{case Mode of
source_destination -> ['HEAD', 'GET', 'POST'];
_ -> ['HEAD', 'GET']
end, ReqData, {Mode, Context}}.
post_is_create(ReqData, Context) ->
{true, ReqData, Context}.
to_json(ReqData, {Mode, Context}) ->
Bs = [rabbit_mgmt_format:binding(B) || B <- list_bindings(Mode, ReqData)],
rabbit_mgmt_util:reply_list(
rabbit_mgmt_util:filter_vhost(Bs, ReqData, Context),
["vhost", "source", "type", "destination",
"routing_key", "properties_key"],
ReqData, {Mode, Context}).
create_path(ReqData, Context) ->
{"dummy", ReqData, Context}.
accept_content(ReqData, {_Mode, Context}) ->
Source = rabbit_mgmt_util:id(source, ReqData),
Dest = rabbit_mgmt_util:id(destination, ReqData),
DestType = rabbit_mgmt_util:id(dtype, ReqData),
VHost = rabbit_mgmt_util:vhost(ReqData),
{ok, Props} = rabbit_mgmt_util:decode(wrq:req_body(ReqData)),
{Method, Key, Args} = method_key_args(DestType, Source, Dest, Props),
Response = rabbit_mgmt_util:amqp_request(VHost, ReqData, Context, Method),
case Response of
{{halt, _}, _, _} = Res ->
Res;
{true, ReqData, Context2} ->
Loc = rabbit_web_dispatch_util:relativise(
wrq:path(ReqData),
binary_to_list(
rabbit_mgmt_format:url(
"/api/bindings/~s/e/~s/~s/~s/~s",
[VHost, Source, DestType, Dest,
rabbit_mgmt_format:pack_binding_props(Key, Args)]))),
{true, rabbit_mgmt_util:set_resp_header("Location", Loc, ReqData),
Context2}
end.
is_authorized(ReqData, {Mode, Context}) ->
{Res, RD2, C2} = rabbit_mgmt_util:is_authorized_vhost(ReqData, Context),
{Res, RD2, {Mode, C2}}.
basic(ReqData) ->
[rabbit_mgmt_format:binding(B) ||
B <- list_bindings(all, ReqData)].
augmented(ReqData, Context) ->
rabbit_mgmt_util:filter_vhost(basic(ReqData), ReqData, Context).
method_key_args(<<"q">>, Source, Dest, Props) ->
M = #'queue.bind'{routing_key = K, arguments = A} =
rabbit_mgmt_util:props_to_method(
'queue.bind', Props,
[], [{exchange, Source}, {queue, Dest}]),
{M, K, A};
method_key_args(<<"e">>, Source, Dest, Props) ->
M = #'exchange.bind'{routing_key = K, arguments = A} =
rabbit_mgmt_util:props_to_method(
'exchange.bind', Props,
[], [{source, Source}, {destination, Dest}]),
{M, K, A}.
list_bindings(all, ReqData) ->
rabbit_mgmt_util:all_or_one_vhost(ReqData,
fun (VHost) ->
rabbit_binding:list(VHost)
end);
list_bindings(exchange_source, ReqData) ->
rabbit_binding:list_for_source(r(exchange, exchange, ReqData));
list_bindings(exchange_destination, ReqData) ->
rabbit_binding:list_for_destination(r(exchange, exchange, ReqData));
list_bindings(queue, ReqData) ->
rabbit_binding:list_for_destination(r(queue, destination, ReqData));
list_bindings(source_destination, ReqData) ->
DestType = rabbit_mgmt_util:destination_type(ReqData),
rabbit_binding:list_for_source_and_destination(
r(exchange, source, ReqData),
r(DestType, destination, ReqData)).
r(Type, Name, ReqData) ->
rabbit_misc:r(rabbit_mgmt_util:vhost(ReqData), Type,
rabbit_mgmt_util:id(Name, ReqData)).
|
68572567ae237358f1aa269d8ae7a092347d4e5db1dac942c0f4c9cce1464790 | GrammaTech/sel | html.lisp | (defpackage :software-evolution-library/software/html
(:nicknames :sel/software/html :sel/sw/html)
(:use :gt/full
:software-evolution-library
:software-evolution-library/software/tree-sitter-base
:software-evolution-library/software/template))
(in-package :software-evolution-library/software/tree-sitter)
(in-readtable :curry-compose-reader-macros)
;;;===================================================
;;; Generate the language definitions
;;;===================================================
(create-tree-sitter-language-cache "html")
;;;===================================================
(define-language-alias-mappings html ("html"))
#+:TREE-SITTER-HTML
(progn
) ; #+:TREE-SITTER-HTML
| null | https://raw.githubusercontent.com/GrammaTech/sel/d7b297361ff8418805a228504a5365cd05ed55f8/software/html.lisp | lisp | ===================================================
Generate the language definitions
===================================================
===================================================
#+:TREE-SITTER-HTML | (defpackage :software-evolution-library/software/html
(:nicknames :sel/software/html :sel/sw/html)
(:use :gt/full
:software-evolution-library
:software-evolution-library/software/tree-sitter-base
:software-evolution-library/software/template))
(in-package :software-evolution-library/software/tree-sitter)
(in-readtable :curry-compose-reader-macros)
(create-tree-sitter-language-cache "html")
(define-language-alias-mappings html ("html"))
#+:TREE-SITTER-HTML
(progn
|
a9b7b17c2f38a85ead20a82dfc9febac1e08a26dba0f643c133e2940fe463ba8 | pallet/pallet | providers.clj | (ns pallet.task.providers
"Provide information on the supported and enabled providers."
(:require
[pallet.compute :refer [supported-providers]]))
(defn providers
"Provide information on the supported and enabled providers."
{:no-service-required true}
[& _]
(println "Pallet uses its own and jcloud's providers.\n")
(doseq [name (supported-providers)]
(println (format " %s" name)))
(println "\nProviders can be enabled by adding a dependency on a pallet or\n")
(println "jclouds provider into your project.clj or pom.xml."))
| null | https://raw.githubusercontent.com/pallet/pallet/30226008d243c1072dcfa1f27150173d6d71c36d/src/pallet/task/providers.clj | clojure | (ns pallet.task.providers
"Provide information on the supported and enabled providers."
(:require
[pallet.compute :refer [supported-providers]]))
(defn providers
"Provide information on the supported and enabled providers."
{:no-service-required true}
[& _]
(println "Pallet uses its own and jcloud's providers.\n")
(doseq [name (supported-providers)]
(println (format " %s" name)))
(println "\nProviders can be enabled by adding a dependency on a pallet or\n")
(println "jclouds provider into your project.clj or pom.xml."))
| |
20ec81a005c281c0e7c1013b020357248cf8cd5857bfd2bf11ce3abc89b565aa | mk270/archipelago | test_main.ml |
Archipelago , a multi - user dungeon ( MUD ) server , by ( C ) 2009 - 2012
This programme is free software ; you may redistribute and/or modify
it under the terms of the GNU Affero General Public Licence as published by
the Free Software Foundation , either version 3 of said Licence , or
( at your option ) any later version .
Archipelago, a multi-user dungeon (MUD) server, by Martin Keegan
Copyright (C) 2009-2012 Martin Keegan
This programme is free software; you may redistribute and/or modify
it under the terms of the GNU Affero General Public Licence as published by
the Free Software Foundation, either version 3 of said Licence, or
(at your option) any later version.
*)
(* let room_name = "Main Room", Name.Definite, Name.Singular in
let room = Model.Create.create_room room_name ~desc:"A room.\n" in
*)
(* Render an actor-verb sentence for a proper-noun item ("Zach") and
   print it so the output can be inspected by eye. *)
let test_grammar2 () =
  let actor_name = ("Zach", Name.NoAdam, Name.Singular) in
  let actor =
    Model.Create.create_item actor_name ~desc:"Zach the player"
      ~flags:[] ~stats:[]
  in
  print_endline (Grammar.render ~actor "%Av %vperf:log on.")
(* Render a patient-verb sentence for a mass noun ("dental floss"),
   forcing second-person agreement, and print the result. *)
let test_grammar1 () =
  let patient_name = ("dental floss", Name.Mass, Name.Singular) in
  let patient =
    Model.Create.create_item patient_name ~desc:"A bit of floss"
      ~flags:[] ~stats:[]
  in
  print_endline
    (Grammar.render "You %vperf:pick up %pu." ~patient ~person:Grammar.Second)
(* Entry point for the ad-hoc grammar tests: initialise the lexicon
   first (rendering depends on it), run both checks, then flush all
   channels so the output is visible even on abrupt exit. *)
let test_main () =
  print_endline "running tests";
  Lexicon.init ();
  test_grammar1 ();
  test_grammar2 ();
  flush_all ()
(*
let _ = test_main ()
*)
| null | https://raw.githubusercontent.com/mk270/archipelago/4241bdc994da6d846637bcc079051405ee905c9b/src/main/test_main.ml | ocaml | let room_name = "Main Room", Name.Definite, Name.Singular in
let room = Model.Create.create_room room_name ~desc:"A room.\n" in
let _ = test_main ()
|
Archipelago , a multi - user dungeon ( MUD ) server , by ( C ) 2009 - 2012
This programme is free software ; you may redistribute and/or modify
it under the terms of the GNU Affero General Public Licence as published by
the Free Software Foundation , either version 3 of said Licence , or
( at your option ) any later version .
Archipelago, a multi-user dungeon (MUD) server, by Martin Keegan
Copyright (C) 2009-2012 Martin Keegan
This programme is free software; you may redistribute and/or modify
it under the terms of the GNU Affero General Public Licence as published by
the Free Software Foundation, either version 3 of said Licence, or
(at your option) any later version.
*)
let test_grammar2 () =
let obj_name = "Zach", Name.NoAdam, Name.Singular in
let obj = Model.Create.create_item obj_name ~desc:"Zach the player"
~flags:[] ~stats:[] in
print_endline (Grammar.render ~actor:obj "%Av %vperf:log on.")
let test_grammar1 () =
let obj_name = "dental floss", Name.Mass, Name.Singular in
let obj = Model.Create.create_item obj_name ~desc:"A bit of floss"
~flags:[] ~stats:[] in
print_endline (Grammar.render "You %vperf:pick up %pu." ~patient:obj ~person:Grammar.Second)
let test_main () =
print_endline "running tests";
Lexicon.init ();
test_grammar1 ();
test_grammar2 ();
flush_all ()
|
c83e6f9d924409a3ad1fcc2bd31963216b5b87f02f1578731cb435e1dc8b96ed | imitator-model-checker/imitator | ModelConverter.mli | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Laboratoire Spécification et Vérification ( ENS Cachan & CNRS , France )
* Université Paris 13 , LIPN , CNRS , France
* Université de Lorraine , CNRS , , LORIA , Nancy , France
*
* Module description : Convert a parsing structure into an abstract model
*
* File contributors : ,
* Created : 2009/09/09
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Laboratoire Spécification et Vérification (ENS Cachan & CNRS, France)
* Université Paris 13, LIPN, CNRS, France
* Université de Lorraine, CNRS, Inria, LORIA, Nancy, France
*
* Module description: Convert a parsing structure into an abstract model
*
* File contributors : Étienne André, Jaime Arias
* Created : 2009/09/09
*
************************************************************)
(****************************************************************)
(** Modules *)
(****************************************************************)
(****************************************************************)
(** Exceptions *)
(****************************************************************)
(** Raised when a parsed property is ill-formed or cannot be converted.
    NOTE(review): exact trigger inferred from the name — confirm against
    the converter implementation. *)
exception InvalidProperty
(****************************************************************)
(** Types *)
(****************************************************************)
(****************************************************************)
(** Conversion functions *)
(****************************************************************)
(** Convert the parsed model — and, when supplied, the parsed property —
    into an abstract model and an optional abstract property. *)
val abstract_structures_of_parsing_structures : Options.imitator_options -> ParsingStructure.parsed_model -> (ParsingStructure.parsed_property option) -> AbstractModel.abstract_model * (AbstractProperty.abstract_property option)
(** Check and convert the parsing structure into an abstract property *)
val abstract_model_of_parsed_property : Options.imitator_options - > AbstractModel.abstract_model * useful_parsing_model_information - > ParsingStructure.parsed_property - > ImitatorUtilities.synthesis_algorithm
(*(** Check and convert the parsed reference parameter valuation into an abstract representation *)
Options.imitator_options - >
(** Check and convert the parsed hyper-rectangle into an abstract representation *)
Options.imitator_options - >
(** Return the indices of the clocks occurring in the given updates. *)
val get_clocks_in_updates : AbstractModel.updates -> Automaton.clock_index list
| null | https://raw.githubusercontent.com/imitator-model-checker/imitator/105408ae2bd8c3e3291f286e4d127defd492a58b/src/ModelConverter.mli | ocaml | **************************************************************
* Modules
**************************************************************
**************************************************************
* Exceptions
**************************************************************
**************************************************************
* Types
**************************************************************
**************************************************************
* Conversion functions
**************************************************************
* Convert the parsed model and the parsed property into an abstract model and an abstract property
* Check and convert the parsing structure into an abstract property
(** Check and convert the parsed reference parameter valuation into an abstract representation
* Check and convert the parsed hyper-rectangle into an abstract representation
* Get clocks index used on the updates | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Laboratoire Spécification et Vérification ( ENS Cachan & CNRS , France )
* Université Paris 13 , LIPN , CNRS , France
* Université de Lorraine , CNRS , , LORIA , Nancy , France
*
* Module description : Convert a parsing structure into an abstract model
*
* File contributors : ,
* Created : 2009/09/09
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Laboratoire Spécification et Vérification (ENS Cachan & CNRS, France)
* Université Paris 13, LIPN, CNRS, France
* Université de Lorraine, CNRS, Inria, LORIA, Nancy, France
*
* Module description: Convert a parsing structure into an abstract model
*
* File contributors : Étienne André, Jaime Arias
* Created : 2009/09/09
*
************************************************************)
exception InvalidProperty
val abstract_structures_of_parsing_structures : Options.imitator_options -> ParsingStructure.parsed_model -> (ParsingStructure.parsed_property option) -> AbstractModel.abstract_model * (AbstractProperty.abstract_property option)
val abstract_model_of_parsed_property : Options.imitator_options - > AbstractModel.abstract_model * useful_parsing_model_information - > ParsingStructure.parsed_property - > ImitatorUtilities.synthesis_algorithm
Options.imitator_options - >
Options.imitator_options - >
val get_clocks_in_updates : AbstractModel.updates -> Automaton.clock_index list
|
822875dbd328eef4d99f4be88135468c3ba17f33ef4b30167f629450a8fcacc5 | grin-compiler/grin | AST.hs | # LANGUAGE TupleSections , LambdaCase , OverloadedStrings #
module Grin.ExtendedSyntax.Parse.AST
( parseGrin
, parseProg
, parseDef
, parseExpr
) where
import Data.Char
import Data.Void
import Data.Text (Text)
import qualified Data.Text as T
import Control.Applicative (empty)
import Control.Monad (void, mzero)
import Text.Megaparsec
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Megaparsec.Char as C
import qualified Data.Set as Set
import Grin.ExtendedSyntax.Grin
import Grin.ExtendedSyntax.Parse.Basic
import Grin.ExtendedSyntax.Parse.TypeEnv
-- grin syntax
-- | One top-level GRIN definition: @f a1 .. an = body@.  The name must
-- start at column 1; the body must be indented past it.
def :: Parser Def
def = Def <$> try (L.indentGuard sc EQ pos1 *> var) <*> many var <* op "=" <*> (L.indentGuard sc GT pos1 >>= expr)
-- | Expression at indentation level @i@: either a monadic bind
-- (@pat <- simpleExp@ followed by the rest of the block) or a bare
-- simple expression.
expr :: Pos -> Parser Exp
expr i = L.indentGuard sc EQ i >>
  try ((\pat e b -> EBind e pat b) <$> try (bindingPat <* op "<-") <*> simpleExp i <*> expr i ) <|>
  simpleExp i
-- | One "simple" (non-binding) expression at indentation level @i@.
-- Alternatives are tried in order; function/primop application comes
-- last because it begins with a bare name.
simpleExp :: Pos -> Parser SimpleExp
simpleExp i = SReturn <$ kw "pure" <*> value <|>
              ECase <$ kw "case" <*> var <* kw "of" <*> (L.indentGuard sc GT i >>= some . alternative) <|>
              SStore <$ kw "store" <*> var <|>
              SFetch <$ kw "fetch" <*> var <|>
              SUpdate <$ kw "update" <*> var <*> var <|>
              SBlock <$ kw "do" <*> (L.indentGuard sc GT i >>= expr) <|>
              -- FIXME: remove '$' from app syntax, fix 'value' parser with using 'lineFold' instead
              SApp <$> primNameOrDefName <* (optional $ op "$") <*> many var
-- | A callee name: a leading underscore marks a primop and is kept as
-- part of the returned name; otherwise it is an ordinary definition name.
primNameOrDefName :: Parser Name
primNameOrDefName = (char '_' *> (nMap ("_" <>) <$> var)) <|> var
-- | One case alternative at level @i@: @pat \@ name -> body@.
alternative :: Pos -> Parser Alt
alternative i = Alt <$> try (L.indentGuard sc EQ i *> altPat) <*> (op "@" *> var) <* op "->" <*> (L.indentGuard sc GT i >>= expr)
-- NOTE: The parser `value` already handles the parentheses around "complex" values,
-- and we don't want to parenthesize variables, literals and units.
-- | Left-hand side of a bind: a bare variable, or a parenthesised node
-- pattern with an @-alias, e.g. @(CCons x xs) \@ v@.
bindingPat :: Parser BPat
bindingPat =
  VarPat <$> var <|>
  mkAsPat <$> parens ((,) <$> tag <*> many var) <*> (op "@" *> var)
  where mkAsPat (tag, fields) var = AsPat tag fields var
-- | Case pattern: a node pattern, the @#default@ catch-all, or a literal.
altPat :: Parser CPat
altPat = parens (NodePat <$> tag <*> many var) <|>
         DefaultPat <$ kw "#default" <|>
         LitPat <$> literal
-- #undefined can hold simple types as well as node types
-- | A value: literal, variable, node constructor application, unit, or a
-- type-annotated @#undefined@.
value :: Parser Val
value = Lit <$> literal <|>
        Var <$> var <|>
        try (parens $ ConstTagNode <$> tag <*> many var) <|>
        try (Unit <$ op "()") <|>
        Undefined <$> parens (kw "#undefined" *> op "::" *> typeAnnot)
-- | Literals; the suffix/prefix picks the type: trailing @u@ = word,
-- leading @#@ = bool/string/char, otherwise float or int.
literal :: Parser Lit
literal = (try $ LFloat . realToFrac <$> signedFloat) <|>
          (try $ LWord64 . fromIntegral <$> lexeme (L.decimal <* C.char 'u')) <|>
          (try $ LInt64 . fromIntegral <$> signedInteger) <|>
          (try $ LBool <$> (True <$ kw "#True" <|> False <$ kw "#False")) <|>
          (try $ LString <$> lexeme (C.char '#' *> quotedString)) <|>
          (try $ LChar <$> lexeme (C.string "#'" *> (escaped <|> anySingle) <* C.char '\''))
-- | Run @parser@, then fail (via 'mzero') unless the produced value
-- satisfies @pred@.
satisfyM :: (a -> Bool) -> Parser a -> Parser a
satisfyM pred parser = parser >>= \x -> if pred x then pure x else mzero
-- externals
-- | A block of external declarations: a @primop|ffi pure|effectful@
-- header, optional per-OS library lines, then one declaration per line.
externalBlock :: Parser [External]
externalBlock = do
  L.indentGuard sc EQ pos1
  ext <- const PrimOp <$> kw "primop" <|> const FFI <$> kw "ffi"
  eff <- const False <$> kw "pure" <|> const True <$> kw "effectful"
  i <- L.indentGuard sc GT pos1
  libs <- externalLibs i
  some $ try (external ext eff i libs)
-- | Operating-system tag used by per-OS library annotations.
externOS :: Parser OS
externOS = choice
  [ Darwin <$ kw "darwin"
  , FreeBSD <$ kw "freebsd"
  , Linux <$ kw "linux"
  , Android <$ kw "android"
  , MinGW <$ kw "mingw"
  , Win <$ kw "win"
  , NetBSD <$ kw "netbsd"
  , OpenBSD <$ kw "openbsd"
  ]
-- | Zero or more @os "library"@ lines, each starting exactly at level @i@.
externalLibs :: Pos -> Parser [(OS, Text)]
externalLibs i =
  many
    $ L.indentGuard sc EQ i
    *> L.lexeme sc
      ((,)
        <$> externOS
        <*> between
          (char '"')
          (char '"')
          (takeWhile1P (Just "library") (/= '"')))
-- | Parse one external declaration: @name :: t1 -> ... -> ret@.  The
-- declaration starts exactly at column @i@; continuation lines must be
-- indented further.  The type list is parsed left-to-right and reversed
-- so the head is the return type.
external :: ExternalKind -> Bool -> Pos -> [(OS, Text)] -> Parser External
external ext eff i libs = do
  L.indentGuard sc EQ i
  name <- var
  L.indentGuard sc GT i >> op "::"
  ty <- reverse <$> sepBy1 (L.indentGuard sc GT i >> L.lexeme sc tyP ) (L.indentGuard sc GT i >> op "->")
  -- 'sepBy1' guarantees a non-empty list, so the [] branch is
  -- unreachable; an explicit total case (instead of the previous
  -- irrefutable let-pattern) silences -Wincomplete-uni-patterns.
  case ty of
    retTy : argTyRev ->
      pure External
        { eName = name
        , eRetType = retTy
        , eArgsType = reverse argTyRev
        , eEffectful = eff
        , eKind = ext
        , eLibs = libs
        }
    [] -> fail "external: impossible, sepBy1 returned no types"
-- | A type: a type variable (@%a@), a braced type-constructor
-- application, or a simple type.
tyP :: Parser Ty
tyP =
  TyVar <$ C.char '%' <*> var <|>
  braces (TyCon <$> var <*> many tyP) <|>
  TySimple <$> try simpleType
-- top-level API
-- | A whole module: external blocks followed by definitions, to EOF.
grinModule :: Parser Exp
grinModule = Program <$> (concat <$> many (try externalBlock)) <*> many def <* sc <* eof
-- | Parse a module; type-annotation lines are blanked out first.
parseGrin :: String -> Text -> Either (ParseErrorBundle Text Void) Exp
parseGrin filename content = runParser grinModule filename (withoutTypeAnnots content)
-- | Like 'parseGrin' but throws on parse error.  (Annotations are
-- stripped here *and* inside 'parseGrin'; the second pass is a no-op.)
parseProg :: Text -> Exp
parseProg src = either (error . errorBundlePretty) id . parseGrin "" $ withoutTypeAnnots src
-- | Parse a single definition; throws on parse error.
parseDef :: Text -> Exp
parseDef src = either (error . errorBundlePretty) id . runParser (def <* sc <* eof) "" $ withoutTypeAnnots src
-- | Parse a single expression; throws on parse error.
parseExpr :: Text -> Exp
parseExpr src = either (error . errorBundlePretty) id . runParser (expr pos1 <* sc <* eof) "" $ withoutTypeAnnots src
-- | Blank out every line whose first non-space character is \'%\' (type
-- annotations), keeping the line count intact so parser positions still
-- match the original input.
withoutTypeAnnots :: Text -> Text
withoutTypeAnnots src = T.unlines (map blankAnnot (T.lines src))
  where
    blankAnnot line =
      case T.uncons (T.dropWhile isSpace line) of
        Just ('%', _) -> ""
        _             -> line
| null | https://raw.githubusercontent.com/grin-compiler/grin/572fc24b2a89ad15c058b363aa26c6913b1b1765/grin/src/Grin/ExtendedSyntax/Parse/AST.hs | haskell | grin syntax
FIXME: remove '$' from app syntax, fix 'value' parser with using 'lineFold' instead
NOTE: The parser `value` already handles the parentheses around "complex" values,
and we don't want to parenthesize variables, literals and units.
#undefined can hold simple types as well as node types
externals
top-level API | # LANGUAGE TupleSections , LambdaCase , OverloadedStrings #
module Grin.ExtendedSyntax.Parse.AST
( parseGrin
, parseProg
, parseDef
, parseExpr
) where
import Data.Char
import Data.Void
import Data.Text (Text)
import qualified Data.Text as T
import Control.Applicative (empty)
import Control.Monad (void, mzero)
import Text.Megaparsec
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Megaparsec.Char as C
import qualified Data.Set as Set
import Grin.ExtendedSyntax.Grin
import Grin.ExtendedSyntax.Parse.Basic
import Grin.ExtendedSyntax.Parse.TypeEnv
def :: Parser Def
def = Def <$> try (L.indentGuard sc EQ pos1 *> var) <*> many var <* op "=" <*> (L.indentGuard sc GT pos1 >>= expr)
expr :: Pos -> Parser Exp
expr i = L.indentGuard sc EQ i >>
try ((\pat e b -> EBind e pat b) <$> try (bindingPat <* op "<-") <*> simpleExp i <*> expr i ) <|>
simpleExp i
simpleExp :: Pos -> Parser SimpleExp
simpleExp i = SReturn <$ kw "pure" <*> value <|>
ECase <$ kw "case" <*> var <* kw "of" <*> (L.indentGuard sc GT i >>= some . alternative) <|>
SStore <$ kw "store" <*> var <|>
SFetch <$ kw "fetch" <*> var <|>
SUpdate <$ kw "update" <*> var <*> var <|>
SBlock <$ kw "do" <*> (L.indentGuard sc GT i >>= expr) <|>
SApp <$> primNameOrDefName <* (optional $ op "$") <*> many var
primNameOrDefName :: Parser Name
primNameOrDefName = nMap ("_"<>) <$ char '_' <*> var <|> var
alternative :: Pos -> Parser Alt
alternative i = Alt <$> try (L.indentGuard sc EQ i *> altPat) <*> (op "@" *> var) <* op "->" <*> (L.indentGuard sc GT i >>= expr)
bindingPat :: Parser BPat
bindingPat =
VarPat <$> var <|>
mkAsPat <$> parens ((,) <$> tag <*> many var) <*> (op "@" *> var)
where mkAsPat (tag, fields) var = AsPat tag fields var
altPat :: Parser CPat
altPat = parens (NodePat <$> tag <*> many var) <|>
DefaultPat <$ kw "#default" <|>
LitPat <$> literal
value :: Parser Val
value = Lit <$> literal <|>
Var <$> var <|>
try (parens $ ConstTagNode <$> tag <*> many var) <|>
try (Unit <$ op "()") <|>
Undefined <$> parens (kw "#undefined" *> op "::" *> typeAnnot)
literal :: Parser Lit
literal = (try $ LFloat . realToFrac <$> signedFloat) <|>
(try $ LWord64 . fromIntegral <$> lexeme (L.decimal <* C.char 'u')) <|>
(try $ LInt64 . fromIntegral <$> signedInteger) <|>
(try $ LBool <$> (True <$ kw "#True" <|> False <$ kw "#False")) <|>
(try $ LString <$> lexeme (C.char '#' *> quotedString)) <|>
(try $ LChar <$> lexeme (C.string "#'" *> (escaped <|> anySingle) <* C.char '\''))
satisfyM :: (a -> Bool) -> Parser a -> Parser a
satisfyM pred parser = do
x <- parser
if pred x
then pure x
else mzero
externalBlock :: Parser [External]
externalBlock = do
L.indentGuard sc EQ pos1
ext <- const PrimOp <$> kw "primop" <|> const FFI <$> kw "ffi"
eff <- const False <$> kw "pure" <|> const True <$> kw "effectful"
i <- L.indentGuard sc GT pos1
libs <- externalLibs i
some $ try (external ext eff i libs)
externOS :: Parser OS
externOS = choice
[ Darwin <$ kw "darwin"
, FreeBSD <$ kw "freebsd"
, Linux <$ kw "linux"
, Android <$ kw "android"
, MinGW <$ kw "mingw"
, Win <$ kw "win"
, NetBSD <$ kw "netbsd"
, OpenBSD <$ kw "openbsd"
]
externalLibs :: Pos -> Parser [(OS, Text)]
externalLibs i =
many
$ L.indentGuard sc EQ i
*> L.lexeme sc
((,)
<$> externOS
<*> between
(char '"')
(char '"')
(takeWhile1P (Just "library") (/= '"')))
external :: ExternalKind -> Bool -> Pos -> [(OS, Text)] -> Parser External
external ext eff i libs = do
L.indentGuard sc EQ i
name <- var
L.indentGuard sc GT i >> op "::"
ty <- reverse <$> sepBy1 (L.indentGuard sc GT i >> L.lexeme sc tyP ) (L.indentGuard sc GT i >> op "->")
let (retTy:argTyRev) = ty
pure External
{ eName = name
, eRetType = retTy
, eArgsType = reverse argTyRev
, eEffectful = eff
, eKind = ext
, eLibs = libs
}
tyP :: Parser Ty
tyP =
TyVar <$ C.char '%' <*> var <|>
braces (TyCon <$> var <*> many tyP) <|>
TySimple <$> try simpleType
grinModule :: Parser Exp
grinModule = Program <$> (concat <$> many (try externalBlock)) <*> many def <* sc <* eof
parseGrin :: String -> Text -> Either (ParseErrorBundle Text Void) Exp
parseGrin filename content = runParser grinModule filename (withoutTypeAnnots content)
parseProg :: Text -> Exp
parseProg src = either (error . errorBundlePretty) id . parseGrin "" $ withoutTypeAnnots src
parseDef :: Text -> Exp
parseDef src = either (error . errorBundlePretty) id . runParser (def <* sc <* eof) "" $ withoutTypeAnnots src
parseExpr :: Text -> Exp
parseExpr src = either (error . errorBundlePretty) id . runParser (expr pos1 <* sc <* eof) "" $ withoutTypeAnnots src
withoutTypeAnnots :: Text -> Text
withoutTypeAnnots = T.unlines
. map skipIfAnnot
. T.lines
where skipIfAnnot line
| Just ('%',_) <- T.uncons . T.dropWhile isSpace $ line = ""
| otherwise = line
|
ce4cf788194cdeabdf435c41ee938a8038fa3e1444b4163ee38a08803d39f64a | tarides/dune-release | config.ml | open Dune_release
let invalid_config_key key =
Rresult.R.error_msgf "%S is not a valid global config field" key
let show_val = function None -> "<unset>" | Some x -> x
let log_val s =
Logs.app (fun l -> l "%s" s);
Ok ()
let log_val_opt string_opt =
Logs.app (fun l -> l "%s" (show_val string_opt));
Ok ()
let no_config_message =
"You don't have a dune-release config file yet. You can create one by \
running `dune-release config create` or simply wait for dune-release to \
prompt you when it will actually need it."
let show key =
let open Rresult.R.Infix in
Config.load () >>= function
| None ->
App_log.status (fun l -> l "%s" no_config_message);
Ok ()
| Some config -> (
match key with
| None ->
let pretty_fields = Config.pretty_fields config in
StdLabels.List.iter pretty_fields ~f:(fun (key, value) ->
Logs.app (fun l -> l "%s: %s" key (show_val value)));
Ok ()
| Some "user" ->
Logs.warn (fun l -> l "%s" Deprecate.Config_user.config_field_use);
log_val_opt config.user
| Some "remote" -> log_val config.remote
| Some "local" -> log_val (Fpath.to_string config.local)
| Some "keep-v" ->
log_val_opt (Stdext.Option.map ~f:string_of_bool config.keep_v)
| Some "auto-open" ->
log_val_opt (Stdext.Option.map ~f:string_of_bool config.auto_open)
| Some key -> invalid_config_key key)
let to_bool ~field value =
match String.lowercase_ascii value with
| "true" -> Ok true
| "false" -> Ok false
| _ -> Rresult.R.error_msgf "Invalid value %S for field %s" value field
let set key value =
let open Rresult.R.Infix in
Config.load () >>= function
| None -> Rresult.R.error_msgf "%s" no_config_message
| Some config ->
let updated =
match key with
| "user" ->
App_log.unhappy (fun l ->
l "%s" Deprecate.Config_user.config_field_use);
Ok { config with user = Some value }
| "remote" -> Ok { config with remote = value }
| "local" ->
Fpath.of_string value >>| fun v -> { config with local = v }
| "keep-v" ->
to_bool ~field:key value >>| fun v ->
{ config with keep_v = Some v }
| "auto-open" ->
to_bool ~field:key value >>| fun v ->
{ config with auto_open = Some v }
| _ -> invalid_config_key key
in
updated >>= Config.save >>= fun () -> Ok ()
let create () =
let open Rresult.R.Infix in
Config.load () >>= function
| None -> Config.create ()
| Some _ ->
App_log.status (fun l ->
l
"You already have a dune-release configuration file. Use \
`dune-release config set` to modify it.");
Ok ()
let default_usage ?raw () =
let cmd = "dune-release config" in
match raw with Some () -> cmd | None -> Printf.sprintf "$(b,%s)" cmd
let show_usage ?raw () =
let cmd = "dune-release config show" in
let key = "KEY" in
match raw with
| Some () -> Printf.sprintf "%s [%s]" cmd key
| None -> Printf.sprintf "$(b,%s) [$(i,%s)]" cmd key
let set_usage ?raw () =
let cmd = "dune-release config set" in
let key = "KEY" in
let value = "VALUE" in
match raw with
| Some () -> Printf.sprintf "%s %s %s" cmd key value
| None -> Printf.sprintf "$(b,%s) $(i,%s) $(i,%s)" cmd key value
let create_usage ?raw () =
let cmd = "dune-release config create" in
match raw with Some () -> cmd | None -> Printf.sprintf "$(b,%s)" cmd
let invalid_usage () =
Rresult.R.error_msgf
"Invalid dune-release config invocation. Usage:\n%s\n%s\n%s"
(default_usage ~raw:() ()) (show_usage ~raw:() ()) (set_usage ~raw:() ())
let run action key_opt value_opt =
let open Rresult in
(let res =
match (action, key_opt, value_opt) with
| "show", key, None -> show key
| "set", Some key, Some value -> set key value
| "create", None, None -> create ()
| _ -> invalid_usage ()
in
res >>= fun () -> Ok 0)
|> Cli.handle_error
let man =
let open Cmdliner in
[
`S Manpage.s_synopsis;
`P (default_usage ());
`P (show_usage ());
`P (set_usage ());
`P (create_usage ());
`S "GLOBAL CONFIGURATION FIELDS";
`P
"Here are the existing fields of dune-release's global config file. Only \
those values should be used as $(i,KEY):";
`P
("$(b,user): The Github username of the opam-repository fork. Used to \
open the final PR to opam-repository."
^ Deprecate.Config_user.config_field_doc);
`P
"$(b,remote): The URL to your remote Github opam-repository fork. Used \
to open the final PR to opam-repository.";
`P
"$(b,local): The path to your local clone of opam-repository. Used to \
open the final PR to opam-repository.";
`P
"$(b,keep-v): Whether or not the 'v' prefix in git tags should make it \
to the final version number.";
`P
"$(b,auto-open): Whether dune-release should open your browser to the \
newly created opam-repository PR or not.";
]
let action =
let docv = "ACTION" in
let doc =
"The action to perform, either $(b,show) the config or $(b,set) a config \
field"
in
Cmdliner.Arg.(value & pos 0 string "show" & info ~doc ~docv [])
let key =
let docv = "KEY" in
let doc =
"The configuration field to set or print. For $(b,show), if no key is \
provided, the entire config will be printed."
in
Cmdliner.Arg.(value & pos 1 (some string) None & info ~doc ~docv [])
let value =
let docv = "VALUE" in
let doc = "The new field value" in
Cmdliner.Arg.(value & pos 2 (some string) None & info ~doc ~docv [])
let term = Cmdliner.Term.(const run $ action $ key $ value)
let info =
let doc = "Displays or update dune-release global configuration" in
Cmdliner.Cmd.info ~doc ~man "config"
let cmd = Cmdliner.Cmd.v info term
| null | https://raw.githubusercontent.com/tarides/dune-release/6bfed0f299b82c0931c78d4e216fd0efedff0673/bin/config.ml | ocaml | open Dune_release
let invalid_config_key key =
Rresult.R.error_msgf "%S is not a valid global config field" key
let show_val = function None -> "<unset>" | Some x -> x
let log_val s =
Logs.app (fun l -> l "%s" s);
Ok ()
let log_val_opt string_opt =
Logs.app (fun l -> l "%s" (show_val string_opt));
Ok ()
let no_config_message =
"You don't have a dune-release config file yet. You can create one by \
running `dune-release config create` or simply wait for dune-release to \
prompt you when it will actually need it."
let show key =
let open Rresult.R.Infix in
Config.load () >>= function
| None ->
App_log.status (fun l -> l "%s" no_config_message);
Ok ()
| Some config -> (
match key with
| None ->
let pretty_fields = Config.pretty_fields config in
StdLabels.List.iter pretty_fields ~f:(fun (key, value) ->
Logs.app (fun l -> l "%s: %s" key (show_val value)));
Ok ()
| Some "user" ->
Logs.warn (fun l -> l "%s" Deprecate.Config_user.config_field_use);
log_val_opt config.user
| Some "remote" -> log_val config.remote
| Some "local" -> log_val (Fpath.to_string config.local)
| Some "keep-v" ->
log_val_opt (Stdext.Option.map ~f:string_of_bool config.keep_v)
| Some "auto-open" ->
log_val_opt (Stdext.Option.map ~f:string_of_bool config.auto_open)
| Some key -> invalid_config_key key)
let to_bool ~field value =
match String.lowercase_ascii value with
| "true" -> Ok true
| "false" -> Ok false
| _ -> Rresult.R.error_msgf "Invalid value %S for field %s" value field
let set key value =
let open Rresult.R.Infix in
Config.load () >>= function
| None -> Rresult.R.error_msgf "%s" no_config_message
| Some config ->
let updated =
match key with
| "user" ->
App_log.unhappy (fun l ->
l "%s" Deprecate.Config_user.config_field_use);
Ok { config with user = Some value }
| "remote" -> Ok { config with remote = value }
| "local" ->
Fpath.of_string value >>| fun v -> { config with local = v }
| "keep-v" ->
to_bool ~field:key value >>| fun v ->
{ config with keep_v = Some v }
| "auto-open" ->
to_bool ~field:key value >>| fun v ->
{ config with auto_open = Some v }
| _ -> invalid_config_key key
in
updated >>= Config.save >>= fun () -> Ok ()
let create () =
let open Rresult.R.Infix in
Config.load () >>= function
| None -> Config.create ()
| Some _ ->
App_log.status (fun l ->
l
"You already have a dune-release configuration file. Use \
`dune-release config set` to modify it.");
Ok ()
let default_usage ?raw () =
let cmd = "dune-release config" in
match raw with Some () -> cmd | None -> Printf.sprintf "$(b,%s)" cmd
let show_usage ?raw () =
let cmd = "dune-release config show" in
let key = "KEY" in
match raw with
| Some () -> Printf.sprintf "%s [%s]" cmd key
| None -> Printf.sprintf "$(b,%s) [$(i,%s)]" cmd key
let set_usage ?raw () =
let cmd = "dune-release config set" in
let key = "KEY" in
let value = "VALUE" in
match raw with
| Some () -> Printf.sprintf "%s %s %s" cmd key value
| None -> Printf.sprintf "$(b,%s) $(i,%s) $(i,%s)" cmd key value
let create_usage ?raw () =
let cmd = "dune-release config create" in
match raw with Some () -> cmd | None -> Printf.sprintf "$(b,%s)" cmd
let invalid_usage () =
Rresult.R.error_msgf
"Invalid dune-release config invocation. Usage:\n%s\n%s\n%s"
(default_usage ~raw:() ()) (show_usage ~raw:() ()) (set_usage ~raw:() ())
let run action key_opt value_opt =
let open Rresult in
(let res =
match (action, key_opt, value_opt) with
| "show", key, None -> show key
| "set", Some key, Some value -> set key value
| "create", None, None -> create ()
| _ -> invalid_usage ()
in
res >>= fun () -> Ok 0)
|> Cli.handle_error
let man =
let open Cmdliner in
[
`S Manpage.s_synopsis;
`P (default_usage ());
`P (show_usage ());
`P (set_usage ());
`P (create_usage ());
`S "GLOBAL CONFIGURATION FIELDS";
`P
"Here are the existing fields of dune-release's global config file. Only \
those values should be used as $(i,KEY):";
`P
("$(b,user): The Github username of the opam-repository fork. Used to \
open the final PR to opam-repository."
^ Deprecate.Config_user.config_field_doc);
`P
"$(b,remote): The URL to your remote Github opam-repository fork. Used \
to open the final PR to opam-repository.";
`P
"$(b,local): The path to your local clone of opam-repository. Used to \
open the final PR to opam-repository.";
`P
"$(b,keep-v): Whether or not the 'v' prefix in git tags should make it \
to the final version number.";
`P
"$(b,auto-open): Whether dune-release should open your browser to the \
newly created opam-repository PR or not.";
]
let action =
let docv = "ACTION" in
let doc =
"The action to perform, either $(b,show) the config or $(b,set) a config \
field"
in
Cmdliner.Arg.(value & pos 0 string "show" & info ~doc ~docv [])
let key =
let docv = "KEY" in
let doc =
"The configuration field to set or print. For $(b,show), if no key is \
provided, the entire config will be printed."
in
Cmdliner.Arg.(value & pos 1 (some string) None & info ~doc ~docv [])
let value =
let docv = "VALUE" in
let doc = "The new field value" in
Cmdliner.Arg.(value & pos 2 (some string) None & info ~doc ~docv [])
let term = Cmdliner.Term.(const run $ action $ key $ value)
let info =
let doc = "Displays or update dune-release global configuration" in
Cmdliner.Cmd.info ~doc ~man "config"
let cmd = Cmdliner.Cmd.v info term
| |
07e346293a14d603dadf37cfd999a49022854c036c9d484fff98892478ee9558 | SuzanneSoy/type-expander | main.rkt | #lang racket/base
;; Re-export everything from Typed Racket plus the type-expander forms.
;; `subtract-in` removes the typed/racket exports that type-expander
;; overrides, so the combined export set has no conflicts.
(require racket/require
         (subtract-in typed/racket type-expander)
         type-expander)
;; Expose both libraries' bindings as this #lang's surface.
(provide (all-from-out typed/racket
                       type-expander))
(module reader syntax/module-reader
type-expander/lang/main) | null | https://raw.githubusercontent.com/SuzanneSoy/type-expander/b182b9422083bf8adee71d6543f78372ad801ede/lang/main.rkt | racket | #lang racket/base
(require racket/require
(subtract-in typed/racket type-expander)
type-expander)
(provide (all-from-out typed/racket
type-expander))
(module reader syntax/module-reader
type-expander/lang/main) | |
63b2aabb3bff9b96096c64567644a87d4a33ed7734b460e5bf710e5bdbca6687 | runtimeverification/haskell-backend | InfoExecBreadth.hs | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
|
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Log.InfoExecBreadth (
InfoExecBreadth,
ExecBreadth (..),
infoExecBreadth,
) where
import Log
import Numeric.Natural
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
-- | Number of branches being explored concurrently at the moment the
-- entry is logged.
newtype ExecBreadth = ExecBreadth {getExecBreadth :: Natural}
    deriving stock (Show)
instance Pretty ExecBreadth where
    pretty = Pretty.pretty . getExecBreadth
-- | Log entry wrapping an 'ExecBreadth' measurement.
newtype InfoExecBreadth = InfoExecBreadth {breadth :: ExecBreadth}
    deriving stock (Show)
instance Pretty InfoExecBreadth where
    pretty (InfoExecBreadth breadth) =
        Pretty.hsep
            [ "number of concurrent branches:"
            , Pretty.pretty breadth
            ]
-- | Logged at 'Info' severity; the one-line form is just the number.
instance Entry InfoExecBreadth where
    entrySeverity _ = Info
    oneLineDoc (InfoExecBreadth (ExecBreadth execBreadth)) =
        Pretty.pretty execBreadth
    helpDoc _ = "log number of concurrent branches"
-- | Emit an 'InfoExecBreadth' entry for the given breadth.
infoExecBreadth :: MonadLog log => ExecBreadth -> log ()
infoExecBreadth execBreadth = logEntry (InfoExecBreadth execBreadth)
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/c86e9d2c0a2d7800a3cb49443d962463f88175d1/kore/src/Kore/Log/InfoExecBreadth.hs | haskell | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
|
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Log.InfoExecBreadth (
InfoExecBreadth,
ExecBreadth (..),
infoExecBreadth,
) where
import Log
import Numeric.Natural
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
newtype ExecBreadth = ExecBreadth {getExecBreadth :: Natural}
deriving stock (Show)
instance Pretty ExecBreadth where
pretty = Pretty.pretty . getExecBreadth
newtype InfoExecBreadth = InfoExecBreadth {breadth :: ExecBreadth}
deriving stock (Show)
instance Pretty InfoExecBreadth where
pretty (InfoExecBreadth breadth) =
Pretty.hsep
[ "number of concurrent branches:"
, Pretty.pretty breadth
]
instance Entry InfoExecBreadth where
entrySeverity _ = Info
oneLineDoc (InfoExecBreadth (ExecBreadth execBreadth)) =
Pretty.pretty execBreadth
helpDoc _ = "log number of concurrent branches"
infoExecBreadth :: MonadLog log => ExecBreadth -> log ()
infoExecBreadth = logEntry . InfoExecBreadth
| |
f7d7a2f58d3f466be24768a26d009dcebbe6fdd4316721c77ea5fc209352b432 | 3b/cl-vulkan | vkinfo.lisp | (in-package #:vk)
(vk:with-instance (instance)
(when instance
(let ((devices (enumerate-physical-devices instance)))
(format t "~&instance layers: ~s~%"
(enumerate-instance-layer-properties))
(format t "~&instance extensions: ~s~%"
(enumerate-instance-extension-properties ""))
(format t "~&got ~d devices~%" (length devices))
(loop for device in devices
for i from 0
for props = (get-physical-device-properties device)
do (format t "device ~d: ~a~%"
i (getf props :device-name))
(format t "device layers: ~s~%"
(enumerate-device-layer-properties device))
(format t "device extensions: ~s~%"
(enumerate-device-extension-properties device ""))
(format t "queue families: ~s~%"
(get-physical-device-queue-family-properties device))
(format t " limits:~% ~{~s ~s~^~% ~}~%" (getf props :limits))
(remf props :limits)
(format t " properties:~% ~{~s ~s~^~% ~}~%" props)
(format t " features:~% ~{~s ~S~^~% ~}~%"
(get-physical-device-features device))
(let ((format :r8-snorm))
(format t " properties of format ~s :~%~{ ~s ~s~%~}" format
(get-physical-device-format-properties device format)))
(format t " physical device memory properties:~%~{ ~s ~s~%~}"
(get-physical-device-memory-properties device))))))
| null | https://raw.githubusercontent.com/3b/cl-vulkan/6514a1dfd168ac8d50acd923044bea79881a2685/examples/vkinfo.lisp | lisp | (in-package #:vk)
(vk:with-instance (instance)
(when instance
(let ((devices (enumerate-physical-devices instance)))
(format t "~&instance layers: ~s~%"
(enumerate-instance-layer-properties))
(format t "~&instance extensions: ~s~%"
(enumerate-instance-extension-properties ""))
(format t "~&got ~d devices~%" (length devices))
(loop for device in devices
for i from 0
for props = (get-physical-device-properties device)
do (format t "device ~d: ~a~%"
i (getf props :device-name))
(format t "device layers: ~s~%"
(enumerate-device-layer-properties device))
(format t "device extensions: ~s~%"
(enumerate-device-extension-properties device ""))
(format t "queue families: ~s~%"
(get-physical-device-queue-family-properties device))
(format t " limits:~% ~{~s ~s~^~% ~}~%" (getf props :limits))
(remf props :limits)
(format t " properties:~% ~{~s ~s~^~% ~}~%" props)
(format t " features:~% ~{~s ~S~^~% ~}~%"
(get-physical-device-features device))
(let ((format :r8-snorm))
(format t " properties of format ~s :~%~{ ~s ~s~%~}" format
(get-physical-device-format-properties device format)))
(format t " physical device memory properties:~%~{ ~s ~s~%~}"
(get-physical-device-memory-properties device))))))
| |
1d55f89becd7151eb3e8e677134c8045562f06f5081f11a188da3c633e9dc36d | lgessler/glam | session.cljs | (ns glam.models.session
(:require
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.ui-state-machines :as sm]
[com.fulcrologic.guardrails.core :refer [>defn => | ?]]
[glam.client.application :refer [SPA]]
[glam.client.router :as r]
[glam.models.user :refer [valid-email valid-password]]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.algorithms.form-state :as fs]
[com.fulcrologic.fulcro.algorithms.denormalize :as fdn]
[com.fulcrologic.fulcro.data-fetch :as df]))
(def session-ident [:component/id :session])
query - only defsc for normalization
(defsc Session
[_ {:keys [:session/valid? :user/email :user/id :session/server-error-msg :user/admin?]}]
{:query [:session/valid? :user/admin? :session/server-error-msg :user/email :user/id :ui/loading?
:ui/browser-state]
:ident (fn [] session-ident)
:pre-merge (fn [{:keys [data-tree]}]
(merge
{:session/valid? false
:user/admin? false
:user/email ""
:user/id nil
:session/server-error-msg nil
:ui/browser-state {}}
data-tree))
:initial-state {:session/valid? false :user/admin? false :user/email "" :user/id nil :session/server-error-msg nil}})
;; For local state that doesn't need to go to the server
(defn session-get [session-map k]
(get-in session-map [:ui/browser-state k]))
(defn session-assoc [session-map k data]
(-> session-map
(update :ui/browser-state #(if (associative? %) % {}))
(assoc-in [:ui/browser-state k] data)))
(defn session-update [session-map k fn & args]
(apply (partial update-in (update session-map :ui/browser-state #(if (associative? %) % {}))
[:ui/browser-state k]
fn)
args))
(def session-join {session-ident (comp/get-query Session)})
(defn get-session [props] (get props session-ident))
(defn valid-session? [props] (:session/valid? (get props session-ident)))
(defn admin? [props] (:user/admin? (get props session-ident)))
(defn clear [env]
(sm/assoc-aliased env :error ""))
(defn logout [env]
(log/info "logout env: " env)
(let [env
(-> env
(clear)
(sm/assoc-aliased :username "" :session-valid? false :current-user "")
(sm/trigger-remote-mutation :actor/login-form `logout {::sm/mutation-remote :session})
(sm/activate :state/logged-out))]
(r/route-to! :home)
env))
(defn login [{::sm/keys [event-data] :as env}]
(log/info "Logging in")
(-> env
(clear)
(sm/trigger-remote-mutation :actor/login-form 'glam.models.session/login
{:username (:username event-data)
:password (:password event-data)
::m/returning (sm/actor-class env :actor/current-session)
::sm/ok-event :event/complete
::sm/error-event :event/failed
::sm/mutation-remote :session})
(sm/activate :state/checking-session)))
(defn process-session-result
"Called on app boot and to validate logging in. See if we have a session from the backend."
[env error-message chroute?]
(let [success? (sm/alias-value env :session-valid?)]
(log/info "PROCESS SESSION RESULT , CHROUTE? " chroute? ", SUCCESS? " success?)
(cond
(and chroute? success?)
(r/route-to! :projects)
(not success?)
(r/route-to! :home))
(cond-> (clear env)
success? (->
(sm/assoc-aliased :modal-open? false)
(sm/activate :state/logged-in))
(not success?) (->
(sm/assoc-aliased :error error-message)
(sm/activate :state/logged-out)))))
(defn initial-load [env]
(sm/load env
::current-session
:actor/current-session
{::sm/ok-event :event/complete
::sm/error-event :event/failed}))
(def global-events
{:event/close-modal {::sm/handler (fn [env] (sm/assoc-aliased env :modal-open? false))}
:event/toggle-modal {::sm/handler (fn [env] (sm/update-aliased env :modal-open? not))}})
(defn get-server-mutation-err
[result-or-env]
(let [result (or (some-> result-or-env ::sm/event-data ::sm/mutation-result) result-or-env)
body (:body result)
mutation-sym (-> body keys first)]
(let [error (-> body mutation-sym :server/message)]
(if (nil? error)
"There was an error sending your request."
error))))
;; todo adapt this to load all app start data - including session
(sm/defstatemachine session-machine
{::sm/actors
#{:actor/login-form :actor/current-session}
::sm/aliases
{:username [:actor/login-form :user/email]
:error [:actor/login-form :ui/error]
:modal-open? [:actor/login-form :ui/open?]
:session-valid? [:actor/current-session :session/valid?]
:current-user [:actor/current-session :user/email]}
::sm/states
{:initial
{::sm/target-states #{:state/logged-in :state/logged-out}
::sm/events {::sm/started {::sm/handler #(-> % (sm/assoc-aliased :error "") initial-load)}
:event/failed {::sm/target-state :state/logged-out}
:event/complete {::sm/target-states #{:state/logged-in :state/logged-out}
handles the first session request on app boot
::sm/handler #(process-session-result % "" false)}}}
:state/checking-session
{::sm/events (merge global-events
{:event/failed {::sm/target-states #{:state/logged-out}
::sm/handler (fn [env]
(-> env
(clear)
(sm/activate :state/logged-out)
(sm/assoc-aliased :error (get-server-mutation-err env))))}
:event/complete {::sm/target-states #{:state/logged-out :state/logged-in}
::sm/handler #(process-session-result % "Invalid Credentials." true)}})}
:state/logged-in
{::sm/events (merge global-events
{:event/logout {::sm/target-states #{:state/logged-out}
::sm/handler logout}})}
:state/logged-out
{::sm/events (merge global-events
{:event/signup-success {::sm/target-state :state/logged-in}
:event/login {::sm/target-states #{:state/checking-session}
::sm/handler login}})}}})
;; signup
(def signup-ident [:component/id :signup])
(defmutation signup [_]
(action [{:keys [state]}]
(log/info "Starting signup mutation")
(swap! state
(fn [s]
(-> s
(fs/mark-complete* signup-ident)
(assoc-in [df/marker-table ::signup] {:status :loading})))))
(ok-action [{:keys [app state result]}]
(let [state @state
session (fdn/db->tree (comp/get-query Session) session-ident state)]
(log/info "Signup success result: " result)
(df/remove-load-marker! app ::signup)
(when (:session/valid? session)
(r/route-to! :projects)
(sm/trigger! app ::session :event/signup-success))))
(error-action [{:keys [app]}]
(df/remove-load-marker! app ::signup))
(session [{:keys [state] :as env}]
(let [{:account/keys [email password password-again]} (get-in @state signup-ident)]
(let [valid? (boolean (and (valid-email email) (valid-password password)
(= password password-again)))]
(when valid?
(-> env (m/returning Session)))))))
| null | https://raw.githubusercontent.com/lgessler/glam/9789168d7fe46814d2b09ad2f2e980f96c2b3633/src/main/glam/models/session.cljs | clojure | For local state that doesn't need to go to the server
todo adapt this to load all app start data - including session
signup | (ns glam.models.session
(:require
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.ui-state-machines :as sm]
[com.fulcrologic.guardrails.core :refer [>defn => | ?]]
[glam.client.application :refer [SPA]]
[glam.client.router :as r]
[glam.models.user :refer [valid-email valid-password]]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.algorithms.form-state :as fs]
[com.fulcrologic.fulcro.algorithms.denormalize :as fdn]
[com.fulcrologic.fulcro.data-fetch :as df]))
(def session-ident [:component/id :session])
query - only defsc for normalization
(defsc Session
[_ {:keys [:session/valid? :user/email :user/id :session/server-error-msg :user/admin?]}]
{:query [:session/valid? :user/admin? :session/server-error-msg :user/email :user/id :ui/loading?
:ui/browser-state]
:ident (fn [] session-ident)
:pre-merge (fn [{:keys [data-tree]}]
(merge
{:session/valid? false
:user/admin? false
:user/email ""
:user/id nil
:session/server-error-msg nil
:ui/browser-state {}}
data-tree))
:initial-state {:session/valid? false :user/admin? false :user/email "" :user/id nil :session/server-error-msg nil}})
(defn session-get [session-map k]
(get-in session-map [:ui/browser-state k]))
(defn session-assoc [session-map k data]
(-> session-map
(update :ui/browser-state #(if (associative? %) % {}))
(assoc-in [:ui/browser-state k] data)))
(defn session-update [session-map k fn & args]
(apply (partial update-in (update session-map :ui/browser-state #(if (associative? %) % {}))
[:ui/browser-state k]
fn)
args))
(def session-join {session-ident (comp/get-query Session)})
(defn get-session [props] (get props session-ident))
(defn valid-session? [props] (:session/valid? (get props session-ident)))
(defn admin? [props] (:user/admin? (get props session-ident)))
(defn clear [env]
(sm/assoc-aliased env :error ""))
(defn logout [env]
(log/info "logout env: " env)
(let [env
(-> env
(clear)
(sm/assoc-aliased :username "" :session-valid? false :current-user "")
(sm/trigger-remote-mutation :actor/login-form `logout {::sm/mutation-remote :session})
(sm/activate :state/logged-out))]
(r/route-to! :home)
env))
(defn login [{::sm/keys [event-data] :as env}]
(log/info "Logging in")
(-> env
(clear)
(sm/trigger-remote-mutation :actor/login-form 'glam.models.session/login
{:username (:username event-data)
:password (:password event-data)
::m/returning (sm/actor-class env :actor/current-session)
::sm/ok-event :event/complete
::sm/error-event :event/failed
::sm/mutation-remote :session})
(sm/activate :state/checking-session)))
(defn process-session-result
"Called on app boot and to validate logging in. See if we have a session from the backend."
[env error-message chroute?]
(let [success? (sm/alias-value env :session-valid?)]
(log/info "PROCESS SESSION RESULT , CHROUTE? " chroute? ", SUCCESS? " success?)
(cond
(and chroute? success?)
(r/route-to! :projects)
(not success?)
(r/route-to! :home))
(cond-> (clear env)
success? (->
(sm/assoc-aliased :modal-open? false)
(sm/activate :state/logged-in))
(not success?) (->
(sm/assoc-aliased :error error-message)
(sm/activate :state/logged-out)))))
(defn initial-load [env]
(sm/load env
::current-session
:actor/current-session
{::sm/ok-event :event/complete
::sm/error-event :event/failed}))
(def global-events
{:event/close-modal {::sm/handler (fn [env] (sm/assoc-aliased env :modal-open? false))}
:event/toggle-modal {::sm/handler (fn [env] (sm/update-aliased env :modal-open? not))}})
(defn get-server-mutation-err
[result-or-env]
(let [result (or (some-> result-or-env ::sm/event-data ::sm/mutation-result) result-or-env)
body (:body result)
mutation-sym (-> body keys first)]
(let [error (-> body mutation-sym :server/message)]
(if (nil? error)
"There was an error sending your request."
error))))
(sm/defstatemachine session-machine
{::sm/actors
#{:actor/login-form :actor/current-session}
::sm/aliases
{:username [:actor/login-form :user/email]
:error [:actor/login-form :ui/error]
:modal-open? [:actor/login-form :ui/open?]
:session-valid? [:actor/current-session :session/valid?]
:current-user [:actor/current-session :user/email]}
::sm/states
{:initial
{::sm/target-states #{:state/logged-in :state/logged-out}
::sm/events {::sm/started {::sm/handler #(-> % (sm/assoc-aliased :error "") initial-load)}
:event/failed {::sm/target-state :state/logged-out}
:event/complete {::sm/target-states #{:state/logged-in :state/logged-out}
handles the first session request on app boot
::sm/handler #(process-session-result % "" false)}}}
:state/checking-session
{::sm/events (merge global-events
{:event/failed {::sm/target-states #{:state/logged-out}
::sm/handler (fn [env]
(-> env
(clear)
(sm/activate :state/logged-out)
(sm/assoc-aliased :error (get-server-mutation-err env))))}
:event/complete {::sm/target-states #{:state/logged-out :state/logged-in}
::sm/handler #(process-session-result % "Invalid Credentials." true)}})}
:state/logged-in
{::sm/events (merge global-events
{:event/logout {::sm/target-states #{:state/logged-out}
::sm/handler logout}})}
:state/logged-out
{::sm/events (merge global-events
{:event/signup-success {::sm/target-state :state/logged-in}
:event/login {::sm/target-states #{:state/checking-session}
::sm/handler login}})}}})
(def signup-ident [:component/id :signup])
(defmutation signup [_]
(action [{:keys [state]}]
(log/info "Starting signup mutation")
(swap! state
(fn [s]
(-> s
(fs/mark-complete* signup-ident)
(assoc-in [df/marker-table ::signup] {:status :loading})))))
(ok-action [{:keys [app state result]}]
(let [state @state
session (fdn/db->tree (comp/get-query Session) session-ident state)]
(log/info "Signup success result: " result)
(df/remove-load-marker! app ::signup)
(when (:session/valid? session)
(r/route-to! :projects)
(sm/trigger! app ::session :event/signup-success))))
(error-action [{:keys [app]}]
(df/remove-load-marker! app ::signup))
(session [{:keys [state] :as env}]
(let [{:account/keys [email password password-again]} (get-in @state signup-ident)]
(let [valid? (boolean (and (valid-email email) (valid-password password)
(= password password-again)))]
(when valid?
(-> env (m/returning Session)))))))
|
854aba5d5d822afab023506e127bbccfaa80d63259d8803bb762686c4e2a41af | arcfide/oleg | packages.scm | Interface definitions first
Utilities
(define-interface parser-errors-interface
(export parser-error
parser-error?))
(define-interface input-parses-interface
(export peek-next-char
assert-curr-char
skip-until skip-while
next-token next-token-of
read-text-line
read-string
parser-error))
(define-interface ssax-warnings-interface
(export ssax:warn))
(define-interface assertions-interface
(export ((assert assure) :syntax)))
(define-interface coutputs-interface
(export cout cerr nl))
(define-interface ppretty-prints-interface
(export pp))
(define-interface crementing-interface
(export inc dec))
(define-interface oleg-utils-interface
(export any?
list-intersperse list-intersperse!
list-tail-diff
string-rindex
substring?
string->integer
string-split
make-char-quotator))
(define-interface control-flows-interface
(export (when :syntax)
(begin0 :syntax)))
(define-interface find-strings-interface
(export find-string-from-port?))
(define-interface catch-errors-interface
(export (failed? :syntax)))
(define-interface char-encodings-interface
(export ucscode->char
char-return
char-tab
char-newline))
(define-interface lookup-defs-interface
(export (lookup-def :syntax)))
;; The Meat
(define-interface sxml-tree-trans-interface
(export SRV:send-reply
post-order pre-post-order pre-post-order-splice replace-range))
(define-interface sxml-to-html-interface
(export SXML->HTML
enattr
entag
string->goodHTML))
(define-interface sxml-to-html-ext-interface
(export make-header
make-navbar
make-footer
universal-conversion-rules
universal-protected-rules
alist-conv-rules))
(define-interface ssax-interface
(export xml-token? xml-token-kind xml-token-head
make-empty-attlist attlist-add
attlist-null?
attlist-remove-top
attlist->alist attlist-fold
ssax:uri-string->symbol
ssax:skip-internal-dtd
ssax:read-pi-body-as-string
ssax:reverse-collect-str-drop-ws
ssax:read-markup-token
ssax:read-cdata-body
ssax:read-char-ref
ssax:read-attributes
ssax:complete-start-tag
ssax:read-external-id
ssax:read-char-data
((ssax:make-parser ssax:make-pi-parser ssax:make-elem-parser) :syntax)
ssax:xml->sxml))
(define-interface sxpath-interface
(export nodeset?
node-typeof?
map-union
sxpath))
;; Structures
Utilities
(define-structure define-opt (export (define-opt :syntax))
(open scheme
srfi-23)
(files define-opt))
(define-structure parser-errors-vanilla parser-errors-interface
(open scheme exceptions conditions formats)
(begin
(define-condition-type &parser-error &error
parser-error?)
(define (format-list list)
(apply string-append (map format-x list)))
(define (format-x thing)
(format #f "~A" thing))
(define (parser-error port message . rest)
(raise
(condition
(&parser-error)
(&message
(message (format-list (cons message rest))))
(&irritants
(values (list port ))))))))
(define (make-input-parses parser-errors-structure)
(structure input-parses-interface
(open scheme
ascii
(subset srfi-13 (string-concatenate-reverse))
define-opt
crementing
char-encodings
parser-errors-structure)
(files input-parse)))
(define input-parses-vanilla (make-input-parses parser-errors-vanilla))
(define-structure assertions assertions-interface
(open scheme
big-util)
(files assert))
(define-structure coutputs coutputs-interface
(open scheme i/o)
(files output))
(define-structure ppretty-prints ppretty-prints-interface
(open scheme pp)
(begin
(define pp p)))
(define-structure crementing crementing-interface
(open scheme)
(begin
(define (inc n) (+ n 1))
(define (dec n) (- n 1))))
(define-structure oleg-utils oleg-utils-interface
(open scheme
(subset srfi-13 (string-index-right string-contains string-null?))
srfi-23
crementing)
(files util))
(define-structure char-encodings char-encodings-interface
(open scheme
ascii)
(begin
(define ucscode->char ascii->char)
(define char-return (ascii->char 13))
(define char-tab (ascii->char 9))
(define char-newline (ascii->char 10))))
(define-structure lookup-defs lookup-defs-interface
(open scheme
coutputs
srfi-23) ; ERROR
(files lookup-def))
(define-structure oleg-string-ports (export with-output-to-string
call-with-input-string
with-input-from-string)
(open scheme extended-ports i/o-internal)
(begin
(define (with-output-to-string thunk)
(call-with-string-output-port
(lambda (port)
(call-with-current-output-port port thunk))))
(define (call-with-input-string string proc)
(proc (make-string-input-port string)))
(define with-input-from-string call-with-input-string)))
(define-structure control-flows control-flows-interface
(open scheme)
(files control))
(define-structure find-strings find-strings-interface
(open scheme
crementing)
(files look-for-str))
(define-structure catch-errors catch-errors-interface
(open scheme handle)
(begin
(define-syntax failed?
(syntax-rules ()
((failed? stmts ...)
(thunk-failed? (lambda () stmts ...)))))
(define (thunk-failed? thunk)
(call-with-current-continuation
(lambda (return)
(with-handler
(lambda (condition more)
(return #t))
(lambda ()
(thunk)
#f)))))))
;; The Meat
(define-structure sxml-tree-trans sxml-tree-trans-interface
(open scheme
assertions
LET*-VALUES
srfi-23) ; ERROR
(files "SXML-tree-trans.scm"))
(define-structure sxml-to-html sxml-to-html-interface
(open scheme
coutputs assertions
oleg-utils
sxml-tree-trans)
(files "SXML-to-HTML.scm"))
(define-structure sxml-to-html-ext sxml-to-html-ext-interface
(open scheme
srfi-23
oleg-utils
coutputs
assertions
crementing
lookup-defs
sxml-to-html
sxml-tree-trans
posix-files)
(begin
(define (OS:file-length path)
(file-info-size (get-file-info path))))
(files "SXML-to-HTML-ext.scm"))
(define (make-ssax input-parses-structure ssax-warnings-structure)
(structure ssax-interface
(open scheme
oleg-utils control-flows find-strings
ascii
assertions
coutputs catch-errors
oleg-string-ports
input-parses-structure
ssax-warnings-structure
char-encodings
crementing
(subset srfi-1 (cons*))
srfi-6 ; OPEN-INPUT-STRING
srfi-11 ; LET-VALUES
(subset srfi-13 (string-index
string-null?
string-concatenate-reverse/shared
string-concatenate/shared))
srfi-23
ppretty-prints)
(files "SSAX-code.scm")))
(define-structure ssax-warnings-vanilla ssax-warnings-interface
(open scheme
coutputs)
(files ssax-warn-vanilla))
(define ssax-vanilla (make-ssax input-parses-vanilla
ssax-warnings-vanilla))
(define-structure sxpath sxpath-interface
(open scheme
crementing
assertions
coutputs
pp
srfi-23) ; ERROR
(begin
(define pretty-print p))
(files "SXPath-old.scm"))
| null | https://raw.githubusercontent.com/arcfide/oleg/c6826870436925fd4c873c01d7fcc24a7a7f95dc/ssax/lib/packages.scm | scheme | The Meat
Structures
ERROR
The Meat
ERROR
OPEN-INPUT-STRING
LET-VALUES
ERROR | Interface definitions first
Utilities
(define-interface parser-errors-interface
(export parser-error
parser-error?))
(define-interface input-parses-interface
(export peek-next-char
assert-curr-char
skip-until skip-while
next-token next-token-of
read-text-line
read-string
parser-error))
(define-interface ssax-warnings-interface
(export ssax:warn))
(define-interface assertions-interface
(export ((assert assure) :syntax)))
(define-interface coutputs-interface
(export cout cerr nl))
(define-interface ppretty-prints-interface
(export pp))
(define-interface crementing-interface
(export inc dec))
(define-interface oleg-utils-interface
(export any?
list-intersperse list-intersperse!
list-tail-diff
string-rindex
substring?
string->integer
string-split
make-char-quotator))
(define-interface control-flows-interface
(export (when :syntax)
(begin0 :syntax)))
(define-interface find-strings-interface
(export find-string-from-port?))
(define-interface catch-errors-interface
(export (failed? :syntax)))
(define-interface char-encodings-interface
(export ucscode->char
char-return
char-tab
char-newline))
(define-interface lookup-defs-interface
(export (lookup-def :syntax)))
(define-interface sxml-tree-trans-interface
(export SRV:send-reply
post-order pre-post-order pre-post-order-splice replace-range))
(define-interface sxml-to-html-interface
(export SXML->HTML
enattr
entag
string->goodHTML))
(define-interface sxml-to-html-ext-interface
(export make-header
make-navbar
make-footer
universal-conversion-rules
universal-protected-rules
alist-conv-rules))
(define-interface ssax-interface
(export xml-token? xml-token-kind xml-token-head
make-empty-attlist attlist-add
attlist-null?
attlist-remove-top
attlist->alist attlist-fold
ssax:uri-string->symbol
ssax:skip-internal-dtd
ssax:read-pi-body-as-string
ssax:reverse-collect-str-drop-ws
ssax:read-markup-token
ssax:read-cdata-body
ssax:read-char-ref
ssax:read-attributes
ssax:complete-start-tag
ssax:read-external-id
ssax:read-char-data
((ssax:make-parser ssax:make-pi-parser ssax:make-elem-parser) :syntax)
ssax:xml->sxml))
(define-interface sxpath-interface
(export nodeset?
node-typeof?
map-union
sxpath))
Utilities
(define-structure define-opt (export (define-opt :syntax))
(open scheme
srfi-23)
(files define-opt))
(define-structure parser-errors-vanilla parser-errors-interface
(open scheme exceptions conditions formats)
(begin
(define-condition-type &parser-error &error
parser-error?)
(define (format-list list)
(apply string-append (map format-x list)))
(define (format-x thing)
(format #f "~A" thing))
(define (parser-error port message . rest)
(raise
(condition
(&parser-error)
(&message
(message (format-list (cons message rest))))
(&irritants
(values (list port ))))))))
(define (make-input-parses parser-errors-structure)
(structure input-parses-interface
(open scheme
ascii
(subset srfi-13 (string-concatenate-reverse))
define-opt
crementing
char-encodings
parser-errors-structure)
(files input-parse)))
(define input-parses-vanilla (make-input-parses parser-errors-vanilla))
(define-structure assertions assertions-interface
(open scheme
big-util)
(files assert))
(define-structure coutputs coutputs-interface
(open scheme i/o)
(files output))
(define-structure ppretty-prints ppretty-prints-interface
(open scheme pp)
(begin
(define pp p)))
(define-structure crementing crementing-interface
(open scheme)
(begin
(define (inc n) (+ n 1))
(define (dec n) (- n 1))))
(define-structure oleg-utils oleg-utils-interface
(open scheme
(subset srfi-13 (string-index-right string-contains string-null?))
srfi-23
crementing)
(files util))
(define-structure char-encodings char-encodings-interface
(open scheme
ascii)
(begin
(define ucscode->char ascii->char)
(define char-return (ascii->char 13))
(define char-tab (ascii->char 9))
(define char-newline (ascii->char 10))))
(define-structure lookup-defs lookup-defs-interface
(open scheme
coutputs
(files lookup-def))
(define-structure oleg-string-ports (export with-output-to-string
call-with-input-string
with-input-from-string)
(open scheme extended-ports i/o-internal)
(begin
(define (with-output-to-string thunk)
(call-with-string-output-port
(lambda (port)
(call-with-current-output-port port thunk))))
(define (call-with-input-string string proc)
(proc (make-string-input-port string)))
(define with-input-from-string call-with-input-string)))
(define-structure control-flows control-flows-interface
(open scheme)
(files control))
(define-structure find-strings find-strings-interface
(open scheme
crementing)
(files look-for-str))
(define-structure catch-errors catch-errors-interface
(open scheme handle)
(begin
(define-syntax failed?
(syntax-rules ()
((failed? stmts ...)
(thunk-failed? (lambda () stmts ...)))))
(define (thunk-failed? thunk)
(call-with-current-continuation
(lambda (return)
(with-handler
(lambda (condition more)
(return #t))
(lambda ()
(thunk)
#f)))))))
(define-structure sxml-tree-trans sxml-tree-trans-interface
(open scheme
assertions
LET*-VALUES
(files "SXML-tree-trans.scm"))
(define-structure sxml-to-html sxml-to-html-interface
(open scheme
coutputs assertions
oleg-utils
sxml-tree-trans)
(files "SXML-to-HTML.scm"))
(define-structure sxml-to-html-ext sxml-to-html-ext-interface
(open scheme
srfi-23
oleg-utils
coutputs
assertions
crementing
lookup-defs
sxml-to-html
sxml-tree-trans
posix-files)
(begin
(define (OS:file-length path)
(file-info-size (get-file-info path))))
(files "SXML-to-HTML-ext.scm"))
(define (make-ssax input-parses-structure ssax-warnings-structure)
(structure ssax-interface
(open scheme
oleg-utils control-flows find-strings
ascii
assertions
coutputs catch-errors
oleg-string-ports
input-parses-structure
ssax-warnings-structure
char-encodings
crementing
(subset srfi-1 (cons*))
(subset srfi-13 (string-index
string-null?
string-concatenate-reverse/shared
string-concatenate/shared))
srfi-23
ppretty-prints)
(files "SSAX-code.scm")))
(define-structure ssax-warnings-vanilla ssax-warnings-interface
(open scheme
coutputs)
(files ssax-warn-vanilla))
(define ssax-vanilla (make-ssax input-parses-vanilla
ssax-warnings-vanilla))
(define-structure sxpath sxpath-interface
(open scheme
crementing
assertions
coutputs
pp
(begin
(define pretty-print p))
(files "SXPath-old.scm"))
|
af0d17c69737370022b68dbee614b3018861e48890c88bf43cec0c4ba5deb249 | JustusAdam/schedule-planner | App.hs | # LANGUAGE FlexibleContexts #
|
Module : $ Header$
Description : Connector from IO to logic
Copyright : ( c ) , 2015
License : LGPL-3
Maintainer :
Stability : experimental
Portability : POSIX
Sort of the Main script for all the common operations , independant of the
program instance ( webservice , command line )
Module : $Header$
Description : Connector from IO to logic
Copyright : (c) Justus Adam, 2015
License : LGPL-3
Maintainer :
Stability : experimental
Portability : POSIX
Sort of the Main script for all the common operations, independant of the
program instance (webservice, command line)
-}
module SchedulePlanner.App
( reportAndPrint
, reportAndExecute
, serverCalculation
) where
import Control.Arrow ((&&&))
import Control.Monad (void)
import Control.Monad.Writer (Writer, runWriter, tell, when)
import Data.Aeson (eitherDecode, encode)
import Data.ByteString.Lazy as LBS (ByteString, toStrict)
import qualified Data.Map as Map (elems, keys)
import Data.Maybe (isNothing)
import Data.Monoid.Unicode
import Data.String (fromString)
import Prelude.Unicode
import SchedulePlanner.Calculator (MappedLessons (..),
MappedSchedule (..), calcFromMap,
mapToSubject, weigh)
import SchedulePlanner.Serialize (DataFile (DataFile),
formatSchedule, scheduleToJson,
shortSubject)
import ClassyPrelude
-- |Print a string if debug is enabled
printDebug :: (MonadWriter Text m, Show a) => Bool -> a -> m Text ()
printDebug debugMode = when debugMode . tell . pack . show
{-|
Calculation on internal data structures.
-}
calculate :: DataFile -> Maybe [MappedSchedule Text]
calculate (DataFile rules lessons) =
calcFromMap $ mapToSubject $ weigh rules lessons
{-|
Calculation wrapped into server I/O compatible data structures.
-}
serverCalculation :: ByteString -> ByteString
serverCalculation =
either
(fromString . ("Error:" ++) . show)
(maybe
"\"No schedule could be calculated\""
(encode . map scheduleToJson)
. calculate)
. eitherDecode
{-|
Evaluates the transformed json, compiles (useful) error messages, runs the algorithm
and writes any produced output to the underlying writer.

The (case insensitive) output format selects the report style:

  * @"print"@ - human readable schedules, with extra debug output if enabled
  * @"json"@  - all calculated schedules encoded as JSON
  * anything else produces an error message
-}
-- NOTE(review): the original result type was @m Text ()@, which is
-- ill-kinded — @MonadWriter Text m@ fixes @m@ at kind @* -> *@, so the
-- result must be @m ()@.  Body is otherwise unchanged.
reportAndExecute :: MonadWriter Text m => Text -> Bool -> DataFile -> m ()
reportAndExecute outputFormat debugMode (DataFile rules lessons)
  -- no valid schedule exists at all -> report and stop
  | isNothing calculated = tell "Calculation failed, no valid schedule possible"
  | outputFormat' == "print" = do
      tell "\n"
      _ <- mapM (printDebug debugMode) rules
      tell "\n"
      tell "\n"
      _ <- mapM (printDebug debugMode) weighted
      tell "\n"
      tell "Legend:"
      -- map each subject to its shortened display form for the legend
      _ <- mapM (tell . pack . show . (shortSubject &&& id) ) (Map.keys mlRaw)
      tell "\n"
      -- 'calculated' is guaranteed 'Just' here (first guard already
      -- handled 'Nothing'); the 'error' is a defensive fallback only
      void $ maybe (error "Unexpected missing result") pc calculated
  | outputFormat' == "json" =
      void $ maybe (error "unexpected missing result") (tell . decodeUtf8 . toStrict . encode . concatMap (Map.elems . unMapSchedule)) calculated
  | otherwise = tell "invalid output format"
  where
    outputFormat' = toLower outputFormat
    weighted = weigh rules lessons
    mappedLessons@(MappedLessons mlRaw) = mapToSubject weighted
    -- pretty-print every schedule, separated by blank lines
    pc = mapM (tell . ("\n\n" ⊕) . formatSchedule)
    calculated = calcFromMap mappedLessons
{-|
Perform the calculation and emit the result: decode the raw input,
run 'reportAndExecute' (collecting its writer output), then either
print the text to stdout or write it to the given file.
-}
reportAndPrint :: Text -> Bool -> Maybe String -> ByteString -> IO()
reportAndPrint outputFormat debugMode outFile =
    output . render . eitherDecode
  where
    -- stdout when no target file was given, otherwise write to it
    output = maybe putStrLn writeFile outFile
    -- decode failure -> error text; success -> collected writer output
    render = either
      (pack . ("Stopped execution due to a severe problem with the input data:" ++) . show)
      (snd . runWriter . reportAndExecute outputFormat debugMode)
| null | https://raw.githubusercontent.com/JustusAdam/schedule-planner/ba501c4c8332b5e11379fb500d867fdc8082d482/src/SchedulePlanner/App.hs | haskell | |Print a string if debug is enabled
|
Calculation on internal data structures.
|
Calculation wrapped into server I/O compatible data structures.
|
Evaluates the transformed json, compiles (useful) error messages, runs the algorithm
and returns a writer of any output created.
|
perform the calculation and print the result to the command line
| # LANGUAGE FlexibleContexts #
|
Module : $ Header$
Description : Connector from IO to logic
Copyright : ( c ) , 2015
License : LGPL-3
Maintainer :
Stability : experimental
Portability : POSIX
Sort of the Main script for all the common operations , independant of the
program instance ( webservice , command line )
Module : $Header$
Description : Connector from IO to logic
Copyright : (c) Justus Adam, 2015
License : LGPL-3
Maintainer :
Stability : experimental
Portability : POSIX
Sort of the Main script for all the common operations, independant of the
program instance (webservice, command line)
-}
module SchedulePlanner.App
( reportAndPrint
, reportAndExecute
, serverCalculation
) where
import Control.Arrow ((&&&))
import Control.Monad (void)
import Control.Monad.Writer (Writer, runWriter, tell, when)
import Data.Aeson (eitherDecode, encode)
import Data.ByteString.Lazy as LBS (ByteString, toStrict)
import qualified Data.Map as Map (elems, keys)
import Data.Maybe (isNothing)
import Data.Monoid.Unicode
import Data.String (fromString)
import Prelude.Unicode
import SchedulePlanner.Calculator (MappedLessons (..),
MappedSchedule (..), calcFromMap,
mapToSubject, weigh)
import SchedulePlanner.Serialize (DataFile (DataFile),
formatSchedule, scheduleToJson,
shortSubject)
import ClassyPrelude
printDebug :: (MonadWriter Text m, Show a) => Bool -> a -> m Text ()
printDebug debugMode = when debugMode . tell . pack . show
calculate :: DataFile -> Maybe [MappedSchedule Text]
calculate (DataFile rules lessons) =
calcFromMap $ mapToSubject $ weigh rules lessons
serverCalculation :: ByteString -> ByteString
serverCalculation =
either
(fromString . ("Error:" ++) . show)
(maybe
"\"No schedule could be calculated\""
(encode . map scheduleToJson)
. calculate)
. eitherDecode
reportAndExecute :: MonadWriter Text m => Text -> Bool -> DataFile -> m Text ()
reportAndExecute outputFormat debugMode (DataFile rules lessons)
| isNothing calculated = tell "Calculation failed, no valid schedule possible"
| outputFormat' == "print" = do
tell "\n"
_ <- mapM (printDebug debugMode) rules
tell "\n"
tell "\n"
_ <- mapM (printDebug debugMode) weighted
tell "\n"
tell "Legend:"
_ <- mapM (tell . pack . show . (shortSubject &&& id) ) (Map.keys mlRaw)
tell "\n"
void $ maybe (error "Unexpected missing result") pc calculated
| outputFormat' == "json" =
void $ maybe (error "unexpected missing result") (tell . decodeUtf8 . toStrict . encode . concatMap (Map.elems . unMapSchedule)) calculated
| otherwise = tell "invalid output format"
where
outputFormat' = toLower outputFormat
weighted = weigh rules lessons
mappedLessons@(MappedLessons mlRaw) = mapToSubject weighted
pc = mapM (tell . ("\n\n" ⊕) . formatSchedule)
calculated = calcFromMap mappedLessons
reportAndPrint :: Text -> Bool -> Maybe String -> ByteString -> IO()
reportAndPrint outputFormat debugMode outFile =
maybe putStrLn writeFile outFile . either
(pack . ("Stopped execution due to a severe problem with the input data:" ++) . show)
(snd . runWriter . reportAndExecute outputFormat debugMode)
. eitherDecode
|
2a2e0d0b1934931dbe91f0d80f645d401dff08ad6e80d64eaab05bf9fb70452e | BardurArantsson/cqrs | PersistedEvent.hs | module Data.CQRS.Types.PersistedEvent
( PersistedEvent(..)
, PersistedEvent'(..)
) where
import Data.CQRS.Internal.PersistedEvent
| null | https://raw.githubusercontent.com/BardurArantsson/cqrs/2491d83e2bcd68c883aaea33cdce6c5ea8c0cd1a/cqrs-core/src/Data/CQRS/Types/PersistedEvent.hs | haskell | module Data.CQRS.Types.PersistedEvent
( PersistedEvent(..)
, PersistedEvent'(..)
) where
import Data.CQRS.Internal.PersistedEvent
| |
ae8c2ce61d3d86c45ae257dc689ea0d37e2727b82249e2bbb20ddd1a282da77b | ajnsit/wai-routes | Types.hs | # LANGUAGE DeriveFunctor #
{-# LANGUAGE DeriveLift #-}
-- | Warning! This module is considered internal and may have breaking changes
module Routes.TH.Types
( -- * Data types
Resource (..)
, ResourceTree (..)
, Piece (..)
, Dispatch (..)
, CheckOverlap
, FlatResource (..)
-- ** Helper functions
, resourceMulti
, resourceTreePieces
, resourceTreeName
, flatten
) where
import Language.Haskell.TH.Syntax
data ResourceTree typ
= ResourceLeaf (Resource typ)
| ResourceParent String CheckOverlap [Piece typ] [ResourceTree typ]
deriving (Lift, Show, Functor)
resourceTreePieces :: ResourceTree typ -> [Piece typ]
resourceTreePieces (ResourceLeaf r) = resourcePieces r
resourceTreePieces (ResourceParent _ _ x _) = x
resourceTreeName :: ResourceTree typ -> String
resourceTreeName (ResourceLeaf r) = resourceName r
resourceTreeName (ResourceParent x _ _ _) = x
data Resource typ = Resource
{ resourceName :: String
, resourcePieces :: [Piece typ]
, resourceDispatch :: Dispatch typ
, resourceAttrs :: [String]
, resourceCheck :: CheckOverlap
}
deriving (Lift, Show, Functor)
type CheckOverlap = Bool
data Piece typ = Static String | Dynamic typ
deriving (Lift, Show)
instance Functor Piece where
fmap _ (Static s) = Static s
fmap f (Dynamic t) = Dynamic (f t)
data Dispatch typ =
Methods
{ methodsMulti :: Maybe typ -- ^ type of the multi piece at the end
, methodsMethods :: [String] -- ^ supported request methods
}
| Subsite
{ subsiteType :: typ
, subsiteFunc :: String
}
deriving (Lift, Show)
instance Functor Dispatch where
fmap f (Methods a b) = Methods (fmap f a) b
fmap f (Subsite a b) = Subsite (f a) b
resourceMulti :: Resource typ -> Maybe typ
resourceMulti Resource { resourceDispatch = Methods (Just t) _ } = Just t
resourceMulti _ = Nothing
data FlatResource a = FlatResource
{ frParentPieces :: [(String, [Piece a])]
, frName :: String
, frPieces :: [Piece a]
, frDispatch :: Dispatch a
, frCheck :: Bool
} deriving (Show)
flatten :: [ResourceTree a] -> [FlatResource a]
flatten =
concatMap (go id True)
where
go front check' (ResourceLeaf (Resource a b c _ check)) = [FlatResource (front []) a b c (check' && check)]
go front check' (ResourceParent name check pieces children) =
concatMap (go (front . ((name, pieces):)) (check && check')) children
| null | https://raw.githubusercontent.com/ajnsit/wai-routes/4d6b240af57a95353373ddba81c4905db0234459/src/Routes/TH/Types.hs | haskell | # LANGUAGE DeriveLift #
| Warning! This module is considered internal and may have breaking changes
* Data types
** Helper functions
^ type of the multi piece at the end
^ supported request methods | # LANGUAGE DeriveFunctor #
module Routes.TH.Types
Resource (..)
, ResourceTree (..)
, Piece (..)
, Dispatch (..)
, CheckOverlap
, FlatResource (..)
, resourceMulti
, resourceTreePieces
, resourceTreeName
, flatten
) where
import Language.Haskell.TH.Syntax
data ResourceTree typ
= ResourceLeaf (Resource typ)
| ResourceParent String CheckOverlap [Piece typ] [ResourceTree typ]
deriving (Lift, Show, Functor)
resourceTreePieces :: ResourceTree typ -> [Piece typ]
resourceTreePieces (ResourceLeaf r) = resourcePieces r
resourceTreePieces (ResourceParent _ _ x _) = x
resourceTreeName :: ResourceTree typ -> String
resourceTreeName (ResourceLeaf r) = resourceName r
resourceTreeName (ResourceParent x _ _ _) = x
data Resource typ = Resource
{ resourceName :: String
, resourcePieces :: [Piece typ]
, resourceDispatch :: Dispatch typ
, resourceAttrs :: [String]
, resourceCheck :: CheckOverlap
}
deriving (Lift, Show, Functor)
type CheckOverlap = Bool
data Piece typ = Static String | Dynamic typ
deriving (Lift, Show)
instance Functor Piece where
fmap _ (Static s) = Static s
fmap f (Dynamic t) = Dynamic (f t)
data Dispatch typ =
Methods
}
| Subsite
{ subsiteType :: typ
, subsiteFunc :: String
}
deriving (Lift, Show)
instance Functor Dispatch where
fmap f (Methods a b) = Methods (fmap f a) b
fmap f (Subsite a b) = Subsite (f a) b
resourceMulti :: Resource typ -> Maybe typ
resourceMulti Resource { resourceDispatch = Methods (Just t) _ } = Just t
resourceMulti _ = Nothing
data FlatResource a = FlatResource
{ frParentPieces :: [(String, [Piece a])]
, frName :: String
, frPieces :: [Piece a]
, frDispatch :: Dispatch a
, frCheck :: Bool
} deriving (Show)
flatten :: [ResourceTree a] -> [FlatResource a]
flatten =
concatMap (go id True)
where
go front check' (ResourceLeaf (Resource a b c _ check)) = [FlatResource (front []) a b c (check' && check)]
go front check' (ResourceParent name check pieces children) =
concatMap (go (front . ((name, pieces):)) (check && check')) children
|
9435f7368adb1a6cc6323484f1a7380a9a598e9696622c822b09734a17efb58f | fission-codes/fission | Types.hs | module Fission.Error.Types
( module Fission.Error.NotFound.Types
, module Fission.Error.AlreadyExists.Types
, module Fission.Error.InvalidURL.Types
, module Fission.Error.Mismatch.Types
, module Fission.Error.ParseError.Types
) where
import Fission.Error.AlreadyExists.Types
import Fission.Error.InvalidURL.Types
import Fission.Error.Mismatch.Types
import Fission.Error.NotFound.Types
import Fission.Error.ParseError.Types
| null | https://raw.githubusercontent.com/fission-codes/fission/afaae0dc5f83f4e35a3d4fdbdea2608a8d49bef8/fission-core/library/Fission/Error/Types.hs | haskell | module Fission.Error.Types
( module Fission.Error.NotFound.Types
, module Fission.Error.AlreadyExists.Types
, module Fission.Error.InvalidURL.Types
, module Fission.Error.Mismatch.Types
, module Fission.Error.ParseError.Types
) where
import Fission.Error.AlreadyExists.Types
import Fission.Error.InvalidURL.Types
import Fission.Error.Mismatch.Types
import Fission.Error.NotFound.Types
import Fission.Error.ParseError.Types
| |
8987fb4d489b4c93613982990597c4ed22e632960cce47bfa9ac2807c06c27ab | theodormoroianu/SecondYearCourses | HaskellChurch_20210415103439.hs | {-# LANGUAGE RankNTypes #-}
module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
instance Show CBool where
show b = show $ cIf b True False
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/HaskellChurch_20210415103439.hs | haskell | # LANGUAGE RankNTypes # | module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
instance Show CBool where
show b = show $ cIf b True False
|
8aa890c314487c7422664be269c91d4961762e3d46b6c8abaa5465ec190d462c | sol/hpack | Haskell.hs | module Hpack.Haskell (
isModule
, isModuleNameComponent
, isQualifiedIdentifier
, isIdentifier
) where
import Data.Char
isModule :: [String] -> Bool
isModule name = (not . null) name && all isModuleNameComponent name
isModuleNameComponent :: String -> Bool
isModuleNameComponent name = case name of
x : xs -> isUpper x && all isIdChar xs
_ -> False
isQualifiedIdentifier :: [String] -> Bool
isQualifiedIdentifier name = case reverse name of
x : xs -> isIdentifier x && isModule xs
_ -> False
isIdentifier :: String -> Bool
isIdentifier name = case name of
x : xs -> isLower x && all isIdChar xs && name `notElem` reserved
_ -> False
reserved :: [String]
reserved = [
"case"
, "class"
, "data"
, "default"
, "deriving"
, "do"
, "else"
, "foreign"
, "if"
, "import"
, "in"
, "infix"
, "infixl"
, "infixr"
, "instance"
, "let"
, "module"
, "newtype"
, "of"
, "then"
, "type"
, "where"
, "_"
]
isIdChar :: Char -> Bool
isIdChar c = isAlphaNum c || c == '_' || c == '\''
| null | https://raw.githubusercontent.com/sol/hpack/f1aa0c48eb696d66be8c1bcf68528253eab09178/src/Hpack/Haskell.hs | haskell | module Hpack.Haskell (
isModule
, isModuleNameComponent
, isQualifiedIdentifier
, isIdentifier
) where
import Data.Char
isModule :: [String] -> Bool
isModule name = (not . null) name && all isModuleNameComponent name
isModuleNameComponent :: String -> Bool
isModuleNameComponent name = case name of
x : xs -> isUpper x && all isIdChar xs
_ -> False
isQualifiedIdentifier :: [String] -> Bool
isQualifiedIdentifier name = case reverse name of
x : xs -> isIdentifier x && isModule xs
_ -> False
isIdentifier :: String -> Bool
isIdentifier name = case name of
x : xs -> isLower x && all isIdChar xs && name `notElem` reserved
_ -> False
reserved :: [String]
reserved = [
"case"
, "class"
, "data"
, "default"
, "deriving"
, "do"
, "else"
, "foreign"
, "if"
, "import"
, "in"
, "infix"
, "infixl"
, "infixr"
, "instance"
, "let"
, "module"
, "newtype"
, "of"
, "then"
, "type"
, "where"
, "_"
]
isIdChar :: Char -> Bool
isIdChar c = isAlphaNum c || c == '_' || c == '\''
| |
77127a6770b0615aa37acd26a4f2d50e0ef52477a1697bb79eefeeaa619662e3 | kanaka/instacheck | reduce.cljc | (ns instacheck.reduce
(:require [clojure.pprint :refer [pprint]]
[clojure.set :as set]
[instacheck.util :as util]
[instacheck.grammar :as grammar]
[instacheck.weights :as weights]
[instacheck.codegen :as codegen]))
(def memoized-distance-trek
(memoize weights/distance-trek))
Weight reducer functions . If parsed - weight is zero ignore ( return
;; start-weight). Must eventually return 0.
(defn reducer-zero
"If parsed-weight > 0 then returns 0"
[start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
0))
(defn reducer-half
"If parsed-weight > 0 then returns start-weight divided in two and
rounded down."
[start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(int (/ start-weight 2))))
(defn reducer-div
"If parsed-weight > 0 then returns the start-weight divided by the
divisor:
(partial reducer-div 4)"
[divisor start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(int (/ start-weight divisor))))
(defn reducer-ladder
"If parsed-weight > 0 then returns the next weight in seq-ladder
that is lower than start-weight. Designed to be used as a partial
like this:
(partial reducer-ladder [30 10 3 1])
The values in the ladder will be sorted in descending order and an
implicit zero is added to the end."
[seq-ladder start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(let [norm-ladder (-> seq-ladder set (conj 0) sort reverse)]
(or (some #(if (< % start-weight) % nil) norm-ladder) 0))))
(defn reduce-wtrek
"Takes a grammar and wtrek and returns a new reduced wtrek with
weights reduced/propagated according to reduce-mode.
If the optional reduced-subset node set is then only those nodes
will be propagated. If reduced-subset is not specified then all
reducible/weighted nodes will be considered. The former may result
in a wtrek that is not fully reduced but the latter can take a while
for large grammars/wtreks.
The way that weights are reduced/propagated depends on reduce-mode:
:zero
If all siblings of a node have a zero weight, reduce parent edge
weights to zero.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- if mcw > 0: continue at while
- foreach pnode of parents(node):
- push(pend, pnode)
- wtrek[pnode] := mcw
:max-child:
If all siblings of a node have a weight that is less
than parent edge weight then reduce the parent edge weights to
the largest sibling weight.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- foreach pnode of parents(node):
- if pnode child weight towards node > mcw
- then:
- push(pend, pnode)
- wtrek[pnode] := mcw
:reducer:
When all siblings of a node are zero, reduce parent edge weights
by reducer-fn function and distribute the removed weights to
valid (no removed descendant) child edges of node.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- if mcw > 0: continue at while
- acc := 0
- foreach pnode of parents(node):
- tmp := wtrek[pnode]
- wtrek[pnode] := reducer-fn(wtrek[pnode])
- acc += tmp - wtrek[pnode]
- if max-child-weight(pnode) == 0:
- push(pend, pnode)
- cnodes := children-with-no-removed-descendants(node)
- foreach cnode of cnodes:
- wtrek[code] += acc / count(cnodes)
Any zero weights in the :wtrek map represent a node edge that has
been removed. If all edges of a node are 0 then this is represents
a node that has been removed and the removal must be propagated up
the tree to the next weighted node edge. If the propagation of
removed nodes (0 weights) reaches the root/start of the grammar and
cannot propagate further then an exception is thrown because this
represents an invalid weighted grammar: grammar productions could
reach the removed node from the root/start rule (a removed node does
not exist in the sense that epsilon does).
The propagation of node removals continues until there are no more
pending node to remove. A node may have more than one parent which
means the number of nodes being considered during propagation may
increase temporarily but already removed nodes will not be added
again so the process will eventually terminate."
[grammar start-wtrek & [{:keys [reduced-subset reduce-mode reducer-fn]
:or {reduce-mode :zero
reducer-fn reducer-zero}
:as opts}]]
(assert (#{:zero :max-child :reducer} reduce-mode)
(str "Invalid :reduce-mode " reduce-mode))
(when (= :reducer reduce-mode)
(assert reducer-fn ":reducer reduce-mode requires reducer-fn"))
(let [grammar-start [(:start (meta grammar))]
start-pend (set (filter #(grammar/WEIGHTED (last %))
(map pop
(or reduced-subset
(keys (weights/wtrek grammar))))))]
(loop [wtrek start-wtrek
pend start-pend]
(if (not (seq pend))
wtrek
(let [[node & pend-left] pend
kids (grammar/children-of-node grammar node)
kid-weights (vals (select-keys wtrek kids))
;; _ (prn :node node :kids kids :kid-weights kid-weights)
max-kid-w (apply max kid-weights)
;; nparents is weighted child edges of parents leading
;; to node. all-nparents is the same but includes the
;; grammar root path if there is no weighted path
;; between node and the root.
all-nparents (grammar/get-ancestors
grammar node #(or (= grammar-start %)
(and (grammar/WEIGHTED (last (pop %)))
(contains? wtrek %))))
nparents (disj all-nparents grammar-start)
;; big-nparents is reduction candidates. max-kid-w being
;; greater than 0 only applies in the :max-child case.
big-nparents (set (filter #(> (get wtrek %) max-kid-w)
nparents))]
;;(prn :node node :max-kid-w max-kid-w :nparents nparents :big-parent big-nparents)
;; If node is removed and there are no weighted nodes
;; between it and the grammar start/root then it's an
;; invalid state.
(when (and (= 0 max-kid-w)
(contains? all-nparents grammar-start))
(throw (ex-info
(str "Node " node " removed, has root as parent")
{:type :reduce-wtrek
:cause :no-parents
:grammar grammar
:start-wtrek start-wtrek
:opts opts
:start-pend start-pend
:node node
:wtrek wtrek})))
(cond
: zero and : reducer reduce - mode only apply when all
children are zero . : max - child reduce - mode applies
;; whenever the largest child is less than the parent.
(and (#{:zero :reducer} reduce-mode)
(not= 0 max-kid-w))
(recur wtrek pend-left)
(#{:zero :max-child} reduce-mode)
(let [new-pend (set/union pend-left
(set (map pop big-nparents)))
new-wtrek (reduce (fn [tk p] (assoc tk p max-kid-w))
wtrek
big-nparents)]
(recur new-wtrek new-pend))
:reducer
All children of node are zero at this point .
(let [new-wtrek1 (reduce (fn [tk p]
(let [w (get tk p)]
(assoc tk p (reducer-fn w w))))
wtrek
big-nparents)
zerod-parents (set (filter #(and (= 0 (get new-wtrek1 %))
(not= (get wtrek %)
(get new-wtrek1 %)))
big-nparents))
;; we need to recur to check zero'd parents
new-pend (set/union pend-left
(set (map pop zerod-parents)))
;; accumulate the total reduction (might be multiple
;; parents reduced)
acc-weights (reduce
#(+ %1 (- (get wtrek %2) (get new-wtrek1 %2)))
0
big-nparents)
;; only consider kids with no removed descendants
removed? (partial weights/removed-node? grammar new-wtrek1)
valid-kids (filter (fn [k]
(empty? (grammar/get-descendants
grammar k removed?)))
kids)
;;_ (prn :zerod-parents zerod-parents :acc-weights acc-weights)
;; Distribute weight evenly to the valid children
new-wtrek2 (reduce
(fn [tk kid]
(assoc tk kid
(int (Math/ceil
(/ acc-weights
(count valid-kids))))))
new-wtrek1
valid-kids)]
;; (prn :reducer :node node :big-nparents big-nparents :zerod-parents zerod-parents)
(recur new-wtrek2 new-pend))))))))
;; ---------
(defn reduce-wtrek-with-weights
"Takes a grammar, wtrek, a weights-to-reduce map, a reduce-mode
keyword, and a reducer-fn. A path from weights-to-reduce is selected
based on pick-mode. For that path the reducer-fn is called with the
weight for the path from wtrek and the weight for the path from
weights-to-reduce. Based on those two values the reducer-fn should
return a new value to be updated in the wtrek.
pick-mode values:
:weight - randomly pick a node weighted by node weights.
:dist - randomly pick a node weighted by node distances
from the start node
:weight-dist - randomly pick a node weighted by node weights
multiplied by node distances from the start node.
The resulting wtrek will then be passed to the reduce-wtrek function
to propogate the weight reduction according reduce-mode."
[grammar wtrek weights-to-reduce
& [{:keys [reduce-mode reducer-fn pick-mode pick-pred rnd-obj]
:or {reduce-mode :zero
reducer-fn reducer-half
pick-mode :weight-dist
pick-pred identity}
:as opts}]]
(assert (#{:weight :dist :weight-dist} pick-mode)
(str "Invalid :pick-mode " pick-mode))
(let [big? #(and % (> % 0))
bigs (filter #(and (big? (get weights-to-reduce %))
(big? (get wtrek %)))
(keys weights-to-reduce))
;; _ (prn :bigs)
;; _ (pprint bigs)
distances (memoized-distance-trek grammar)
grouped (group-by #(or (get wtrek %) 0)
bigs)
;; _ (prn :distances distances)
;; _ (prn :grouped grouped)
;; _ (pprint (sort-by key grouped))
weighted-paths (for [[w ps] grouped
p ps
:when (pick-pred p)]
[p (condp = pick-mode
:weight w
:dist (get distances p)
:weight-dist (* w (get distances p)))])
rpath (when (seq weighted-paths)
(util/weighted-rand-nth weighted-paths rnd-obj))]
( prn : rpath rpath : wtrek - w ( get wtrek rpath ) : wtr - w ( get weights - to - reduce rpath ) )
(if rpath
(let [new-wtrek (assoc wtrek rpath (reducer-fn
(get wtrek rpath)
(get weights-to-reduce rpath)))
rsubset #{rpath}]
(reduce-wtrek grammar new-wtrek (assoc opts :reduced-subset rsubset)))
(do
;; (println "******************* no rpath *******************")
wtrek))))
;; ---
(defn- prune-node*
"Internal: Used by prune-node* to prune rule bodies/productions
based on :wtrek"
[node wtrek cur-path]
(let [epsilon? #(= :epsilon (:tag %))
tag (:tag node)]
(cond
(and (grammar/CHILD-EDGE (last cur-path))
(grammar/WEIGHTED (last (pop cur-path)))
(contains? #{0 nil} (get wtrek cur-path)))
{:tag :epsilon}
(:parsers node) ;; :alt, :cat
(let [ps (filter
#(not (epsilon? %))
(map-indexed
(fn [idx n]
(prune-node* n wtrek (conj cur-path tag idx)))
(:parsers node)))]
(cond
(= 0 (count ps)) {:tag :epsilon}
(= 1 (count ps)) (first ps)
:else (assoc node :parsers ps)))
(:parser2 node) ;; :ord
(let [p1 (prune-node* (:parser1 node) wtrek (conj cur-path tag 0))
p2 (prune-node* (:parser1 node) wtrek (conj cur-path tag 1))]
(cond (and (epsilon? p1)
(epsilon? p2)) {:tag :epsilon}
(epsilon? p1) p2
(epsilon? p2) p1
:else (merge node {:parser1 p1 :parser2 p2})))
(:parser node) ;; :opt, :start, :plus
(let [n (prune-node* (:parser node) wtrek (conj cur-path tag 0))]
(if (epsilon? n)
n
(assoc node :parser n)))
:else ;; :nt, :string, :regexp, :epsilon
node)))
(defn prune-grammar
"Takes a grammar and returns a pruned grammar based on keys
specified in the options map. Three different prune operations are
performed:
- Removes rules listed in :removed
- Prune rule bodies/productions based on :wtrek
- If :start is specified in the options or is on the meta of the
grammar, then rules are removed that cannot be reached (directly
or indirectly) from the start rule/production.."
[grammar {:keys [wtrek start removed] :as ctx}]
(let [wtrek (or wtrek (weights/wtrek grammar 100))
start (or start (:start (meta grammar)))
;; Remove rules listed in removed
g1 (select-keys
grammar
(set/difference (set (keys grammar)) (set removed)))
Prune rule bodies using wtrek
g2 (reduce
(fn [g [r n]] (assoc g r (prune-node* n wtrek [r])))
g1
g1)
;; Remove rules that are never reached from start rule
used (if start
(let [deps (util/tree-deps g2)]
(loop [used #{}
pend #{start}]
(if (seq pend)
(let [new-used (set/union used pend)
pend-deps (apply set/union (vals (select-keys deps pend)))
new-pend (set/difference pend-deps new-used)]
(recur new-used new-pend))
used)))
(set (keys g2)))
g3 (select-keys g2 used)]
g3))
(defn prune-grammar->sorted-ebnf
[grammar {:keys [wtrek cycle-set] :as ctx}]
(let [red-grammar (prune-grammar grammar {:wtrek wtrek})
acyclic-grammar (apply dissoc red-grammar cycle-set)
rule-order (codegen/check-and-order-rules acyclic-grammar)
ordered (concat
(map #(vector % (get acyclic-grammar %)) rule-order)
(select-keys red-grammar cycle-set))]
(grammar/grammar->ebnf (reverse ordered))))
| null | https://raw.githubusercontent.com/kanaka/instacheck/ba2095f9b9691812342c54100cd7de83a5b9a74d/src/instacheck/reduce.cljc | clojure | start-weight). Must eventually return 0.
_ (prn :node node :kids kids :kid-weights kid-weights)
nparents is weighted child edges of parents leading
to node. all-nparents is the same but includes the
grammar root path if there is no weighted path
between node and the root.
big-nparents is reduction candidates. max-kid-w being
greater than 0 only applies in the :max-child case.
(prn :node node :max-kid-w max-kid-w :nparents nparents :big-parent big-nparents)
If node is removed and there are no weighted nodes
between it and the grammar start/root then it's an
invalid state.
whenever the largest child is less than the parent.
we need to recur to check zero'd parents
accumulate the total reduction (might be multiple
parents reduced)
only consider kids with no removed descendants
_ (prn :zerod-parents zerod-parents :acc-weights acc-weights)
Distribute weight evenly to the valid children
(prn :reducer :node node :big-nparents big-nparents :zerod-parents zerod-parents)
---------
_ (prn :bigs)
_ (pprint bigs)
_ (prn :distances distances)
_ (prn :grouped grouped)
_ (pprint (sort-by key grouped))
(println "******************* no rpath *******************")
---
:alt, :cat
:ord
:opt, :start, :plus
:nt, :string, :regexp, :epsilon
Remove rules listed in removed
Remove rules that are never reached from start rule | (ns instacheck.reduce
(:require [clojure.pprint :refer [pprint]]
[clojure.set :as set]
[instacheck.util :as util]
[instacheck.grammar :as grammar]
[instacheck.weights :as weights]
[instacheck.codegen :as codegen]))
(def memoized-distance-trek
(memoize weights/distance-trek))
Weight reducer functions . If parsed - weight is zero ignore ( return
(defn reducer-zero
"If parsed-weight > 0 then returns 0"
[start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
0))
(defn reducer-half
"If parsed-weight > 0 then returns start-weight divided in two and
rounded down."
[start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(int (/ start-weight 2))))
(defn reducer-div
"If parsed-weight > 0 then returns the start-weight divided by the
divisor:
(partial reducer-div 4)"
[divisor start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(int (/ start-weight divisor))))
(defn reducer-ladder
"If parsed-weight > 0 then returns the next weight in seq-ladder
that is lower than start-weight. Designed to be used as a partial
like this:
(partial reducer-ladder [30 10 3 1])
The values in the ladder will be sorted in descending order and an
implicit zero is added to the end."
[seq-ladder start-weight parsed-weight]
(if (= parsed-weight 0)
start-weight
(let [norm-ladder (-> seq-ladder set (conj 0) sort reverse)]
(or (some #(if (< % start-weight) % nil) norm-ladder) 0))))
(defn reduce-wtrek
"Takes a grammar and wtrek and returns a new reduced wtrek with
weights reduced/propagated according to reduce-mode.
If the optional reduced-subset node set is then only those nodes
will be propagated. If reduced-subset is not specified then all
reducible/weighted nodes will be considered. The former may result
in a wtrek that is not fully reduced but the latter can take a while
for large grammars/wtreks.
The way that weights are reduced/propagated depends on reduce-mode:
:zero
If all siblings of a node have a zero weight, reduce parent edge
weights to zero.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- if mcw > 0: continue at while
- foreach pnode of parents(node):
- push(pend, pnode)
- wtrek[pnode] := mcw
:max-child:
If all siblings of a node have a weight that is less
than parent edge weight then reduce the parent edge weights to
the largest sibling weight.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- foreach pnode of parents(node):
- if pnode child weight towards node > mcw
- then:
- push(pend, pnode)
- wtrek[pnode] := mcw
:reducer:
When all siblings of a node are zero, reduce parent edge weights
by reducer-fn function and distribute the removed weights to
valid (no removed descendant) child edges of node.
Algorithm/psuedocode:
- pend := reduced-subset OR all weighted nodes in the tree
- while pend:
- node := pop(pend)
- mcw := max-child-weight(node)
- if mcw > 0: continue at while
- acc := 0
- foreach pnode of parents(node):
- tmp := wtrek[pnode]
- wtrek[pnode] := reducer-fn(wtrek[pnode])
- acc += tmp - wtrek[pnode]
- if max-child-weight(pnode) == 0:
- push(pend, pnode)
- cnodes := children-with-no-removed-descendants(node)
- foreach cnode of cnodes:
- wtrek[code] += acc / count(cnodes)
Any zero weights in the :wtrek map represent a node edge that has
been removed. If all edges of a node are 0 then this is represents
a node that has been removed and the removal must be propagated up
the tree to the next weighted node edge. If the propagation of
removed nodes (0 weights) reaches the root/start of the grammar and
cannot propagate further then an exception is thrown because this
represents an invalid weighted grammar: grammar productions could
reach the removed node from the root/start rule (a removed node does
not exist in the sense that epsilon does).
The propagation of node removals continues until there are no more
pending node to remove. A node may have more than one parent which
means the number of nodes being considered during propagation may
increase temporarily but already removed nodes will not be added
again so the process will eventually terminate."
[grammar start-wtrek & [{:keys [reduced-subset reduce-mode reducer-fn]
:or {reduce-mode :zero
reducer-fn reducer-zero}
:as opts}]]
(assert (#{:zero :max-child :reducer} reduce-mode)
(str "Invalid :reduce-mode " reduce-mode))
(when (= :reducer reduce-mode)
(assert reducer-fn ":reducer reduce-mode requires reducer-fn"))
(let [grammar-start [(:start (meta grammar))]
start-pend (set (filter #(grammar/WEIGHTED (last %))
(map pop
(or reduced-subset
(keys (weights/wtrek grammar))))))]
(loop [wtrek start-wtrek
pend start-pend]
(if (not (seq pend))
wtrek
(let [[node & pend-left] pend
kids (grammar/children-of-node grammar node)
kid-weights (vals (select-keys wtrek kids))
max-kid-w (apply max kid-weights)
all-nparents (grammar/get-ancestors
grammar node #(or (= grammar-start %)
(and (grammar/WEIGHTED (last (pop %)))
(contains? wtrek %))))
nparents (disj all-nparents grammar-start)
big-nparents (set (filter #(> (get wtrek %) max-kid-w)
nparents))]
(when (and (= 0 max-kid-w)
(contains? all-nparents grammar-start))
(throw (ex-info
(str "Node " node " removed, has root as parent")
{:type :reduce-wtrek
:cause :no-parents
:grammar grammar
:start-wtrek start-wtrek
:opts opts
:start-pend start-pend
:node node
:wtrek wtrek})))
(cond
: zero and : reducer reduce - mode only apply when all
children are zero . : max - child reduce - mode applies
(and (#{:zero :reducer} reduce-mode)
(not= 0 max-kid-w))
(recur wtrek pend-left)
(#{:zero :max-child} reduce-mode)
(let [new-pend (set/union pend-left
(set (map pop big-nparents)))
new-wtrek (reduce (fn [tk p] (assoc tk p max-kid-w))
wtrek
big-nparents)]
(recur new-wtrek new-pend))
:reducer
All children of node are zero at this point .
(let [new-wtrek1 (reduce (fn [tk p]
(let [w (get tk p)]
(assoc tk p (reducer-fn w w))))
wtrek
big-nparents)
zerod-parents (set (filter #(and (= 0 (get new-wtrek1 %))
(not= (get wtrek %)
(get new-wtrek1 %)))
big-nparents))
new-pend (set/union pend-left
(set (map pop zerod-parents)))
acc-weights (reduce
#(+ %1 (- (get wtrek %2) (get new-wtrek1 %2)))
0
big-nparents)
removed? (partial weights/removed-node? grammar new-wtrek1)
valid-kids (filter (fn [k]
(empty? (grammar/get-descendants
grammar k removed?)))
kids)
new-wtrek2 (reduce
(fn [tk kid]
(assoc tk kid
(int (Math/ceil
(/ acc-weights
(count valid-kids))))))
new-wtrek1
valid-kids)]
(recur new-wtrek2 new-pend))))))))
(defn reduce-wtrek-with-weights
  "Takes a grammar, wtrek, a weights-to-reduce map, a reduce-mode
  keyword, and a reducer-fn. A path from weights-to-reduce is selected
  based on pick-mode. For that path the reducer-fn is called with the
  weight for the path from wtrek and the weight for the path from
  weights-to-reduce. Based on those two values the reducer-fn should
  return a new value to be updated in the wtrek.

  pick-mode values:
    :weight      - randomly pick a node weighted by node weights.
    :dist        - randomly pick a node weighted by node distances
                   from the start node
    :weight-dist - randomly pick a node weighted by node weights
                   multiplied by node distances from the start node.

  The resulting wtrek will then be passed to the reduce-wtrek function
  to propogate the weight reduction according reduce-mode."
  [grammar wtrek weights-to-reduce
   & [{:keys [reduce-mode reducer-fn pick-mode pick-pred rnd-obj]
       :or {reduce-mode :zero
            reducer-fn reducer-half
            pick-mode :weight-dist
            pick-pred identity}
       :as opts}]]
  (assert (#{:weight :dist :weight-dist} pick-mode)
          (str "Invalid :pick-mode " pick-mode))
  (let [big? #(and % (> % 0))
        ;; candidate paths: strictly positive weight in BOTH maps
        bigs (filter #(and (big? (get weights-to-reduce %))
                           (big? (get wtrek %)))
                     (keys weights-to-reduce))
        distances (memoized-distance-trek grammar)
        grouped (group-by #(or (get wtrek %) 0)
                          bigs)
        ;; [path pick-weight] pairs; pick-weight depends on pick-mode
        weighted-paths (for [[w ps] grouped
                             p ps
                             :when (pick-pred p)]
                         [p (condp = pick-mode
                              :weight w
                              :dist (get distances p)
                              :weight-dist (* w (get distances p)))])
        rpath (when (seq weighted-paths)
                (util/weighted-rand-nth weighted-paths rnd-obj))]
    ;; (prn :rpath rpath :wtrek-w (get wtrek rpath) :wtr-w (get weights-to-reduce rpath))
    ;; ^ restored as a comment: the ";;" delimiter had been stripped,
    ;;   leaving a bare non-compiling form in the body.
    (if rpath
      ;; Reduce the picked path's weight, then let reduce-wtrek
      ;; propagate the change through the grammar.
      (let [new-wtrek (assoc wtrek rpath (reducer-fn
                                          (get wtrek rpath)
                                          (get weights-to-reduce rpath)))
            rsubset #{rpath}]
        (reduce-wtrek grammar new-wtrek (assoc opts :reduced-subset rsubset)))
      ;; nothing eligible to reduce: return the wtrek unchanged
      wtrek)))
(defn- prune-node*
  "Internal: Used by prune-grammar to prune rule bodies/productions
  based on :wtrek. Returns the pruned node, collapsing fully-pruned
  subtrees to {:tag :epsilon}."
  [node wtrek cur-path]
  (let [epsilon? #(= :epsilon (:tag %))
        tag (:tag node)]
    (cond
      ;; A weighted child edge whose weight is zero/absent: prune it.
      (and (grammar/CHILD-EDGE (last cur-path))
           (grammar/WEIGHTED (last (pop cur-path)))
           (contains? #{0 nil} (get wtrek cur-path)))
      {:tag :epsilon}

      ;; NOTE(review): the original cond was missing the test
      ;; expressions for the branches below (an odd clause count does
      ;; not compile); they are restored here from the node shape each
      ;; branch destructures — confirm against upstream source.
      (:parsers node)
      (let [ps (filter
                #(not (epsilon? %))
                (map-indexed
                 (fn [idx n]
                   (prune-node* n wtrek (conj cur-path tag idx)))
                 (:parsers node)))]
        (cond
          (= 0 (count ps)) {:tag :epsilon}
          (= 1 (count ps)) (first ps)
          :else (assoc node :parsers ps)))

      (:parser2 node)
      (let [p1 (prune-node* (:parser1 node) wtrek (conj cur-path tag 0))
            ;; was (:parser1 node) twice; the second child is :parser2
            p2 (prune-node* (:parser2 node) wtrek (conj cur-path tag 1))]
        (cond (and (epsilon? p1)
                   (epsilon? p2)) {:tag :epsilon}
              (epsilon? p1) p2
              (epsilon? p2) p1
              :else (merge node {:parser1 p1 :parser2 p2})))

      (:parser node)
      (let [n (prune-node* (:parser node) wtrek (conj cur-path tag 0))]
        (if (epsilon? n)
          n
          (assoc node :parser n)))

      ;; leaf node: nothing to prune
      :else node)))
(defn prune-grammar
  "Takes a grammar and returns a pruned grammar based on keys
  specified in the options map. Three different prune operations are
  performed:
    - Removes rules listed in :removed
    - Prune rule bodies/productions based on :wtrek
    - If :start is specified in the options or is on the meta of the
      grammar, then rules are removed that cannot be reached (directly
      or indirectly) from the start rule/production."
  [grammar {:keys [wtrek start removed] :as ctx}]
  (let [wtrek (or wtrek (weights/wtrek grammar 100))
        start (or start (:start (meta grammar)))
        ;; Remove rules listed in :removed
        g1 (select-keys
            grammar
            (set/difference (set (keys grammar)) (set removed)))
        ;; Prune rule bodies using wtrek
        ;; ^ restored as a comment: these words sat bare inside the let
        ;;   binding vector (stripped ";;"), breaking binding pairing.
        g2 (reduce
            (fn [g [r n]] (assoc g r (prune-node* n wtrek [r])))
            g1
            g1)
        ;; Breadth-first reachability from start (when start is known);
        ;; otherwise every remaining rule is considered used.
        used (if start
               (let [deps (util/tree-deps g2)]
                 (loop [used #{}
                        pend #{start}]
                   (if (seq pend)
                     (let [new-used (set/union used pend)
                           pend-deps (apply set/union (vals (select-keys deps pend)))
                           new-pend (set/difference pend-deps new-used)]
                       (recur new-used new-pend))
                     used)))
               (set (keys g2)))
        g3 (select-keys g2 used)]
    g3))
(defn prune-grammar->sorted-ebnf
  "Prune grammar using :wtrek, order the acyclic rules, and emit EBNF
  text with the :cycle-set rules appended after the ordered ones."
  [grammar {:keys [wtrek cycle-set] :as ctx}]
  (let [pruned (prune-grammar grammar {:wtrek wtrek})
        acyclic (apply dissoc pruned cycle-set)
        rule-order (codegen/check-and-order-rules acyclic)
        ;; ordered [rule body] pairs, cycle rules last
        rule-pairs (concat
                    (for [r rule-order] [r (get acyclic r)])
                    (select-keys pruned cycle-set))]
    (grammar/grammar->ebnf (reverse rule-pairs))))
|
00bdefee377bb9042a2ac95c274c80a548740ca88bb4cc85df4e7d91edb85403 | vbmithr/ocaml-binance | binance_rest.ml | open Core
open Fastrest
open Json_encoding
open Binance
let url = Uri.make ~scheme:"https" ~host:"api.binance.com" ()
let or_error enc =
let encoding =
conv (fun _ -> assert false)
(fun (code, msg) -> Error.createf "%d: %s" code msg)
(obj2 (req "code" int) (req "msg" string)) in
union [
case encoding
(function Ok _ -> None | Error e -> Some e)
(function e -> Error e) ;
case enc
(function Ok v -> Some v | Error _ -> None)
(function v -> Ok v) ;
]
let authf srv { key ; secret ; meta = _ } =
let ps = match srv.params with
| Form ps -> ps
| Json (_,_) -> assert false in
let ps =
("timestamp", [Int.to_string (Time_ns.(to_int_ns_since_epoch (now ()) / 1_000_000))]) ::
("recvWindow", [Int.to_string 1_000]) ::
ps in
let headers = Httpaf.Headers.of_list [
"X-MBX-APIKEY", key ;
] in
let ps_encoded = Uri.encoded_of_query ps in
let signature =
Digestif.SHA256.(hmac_string ~key:secret ps_encoded |> to_hex) in
let ps = List.rev (("signature", [signature]) :: List.rev ps) in
{ Fastrest.params = Form ps ; headers }
let authf_keyonly { Fastrest.params ; _ } { Fastrest.key ; _ } =
let headers = Httpaf.Headers.of_list ["X-MBX-APIKEY", key] in
{ Fastrest.params ; headers }
let with_path_and_query ~path ~query uri =
Uri.with_query (Uri.with_path uri path) query
module ExchangeInfo = struct
let encoding =
conv
(fun syms -> (), syms)
(fun ((), syms) -> syms)
(merge_objs unit
(obj1
(req "symbols" (list Sym.encoding))))
let get =
Fastrest.get (or_error encoding)
(Uri.with_path url "api/v1/exchangeInfo")
end
module Depth = struct
type t = {
last_update_id : int64 ;
bids : Level.t list ;
asks : Level.t list ;
} [@@deriving sexp]
let encoding =
conv
(fun { last_update_id ; bids ; asks } -> (last_update_id, bids, asks))
(fun (last_update_id, bids, asks) -> { last_update_id ; bids ; asks })
(obj3
(req "lastUpdateId" int53)
(req "bids" (list Level.encoding))
(req "asks" (list Level.encoding)))
let get ?(limit=100) symbol =
if not (List.mem ~equal:Int.equal [5; 10; 20; 50; 100; 500; 1000] limit) then
invalid_argf "Depth.get: invalid limit %d, must belong to [5; \
10; 20; 50; 100; 500; 1000]" limit () ;
Fastrest.get
(or_error encoding)
(with_path_and_query url
~path:"api/v1/depth"
~query:["symbol", [String.uppercase symbol] ;
"limit", [string_of_int limit]])
end
module User = struct
module Balance = struct
type t = {
asset : string ;
free : float ;
locked : float ;
}
let encoding =
conv
(fun { asset ; free ; locked } -> (asset, free, locked))
(fun (asset, free, locked) -> { asset ; free ; locked })
(obj3
(req "asset" string)
(req "free" safe_float)
(req "locked" safe_float))
end
module AccountInfo = struct
type t = {
makerC : int ;
takerC : int ;
buyerC : int ;
sellerC : int ;
trade : bool ;
withdraw : bool ;
deposit : bool ;
timestamp : Ptime.t ;
balances : Balance.t list ;
accountType : string ;
}
let encoding =
conv
(fun { makerC ; takerC ; buyerC ; sellerC ;
trade ; withdraw ; deposit ; timestamp ;
balances ; accountType } ->
(makerC, takerC, buyerC, sellerC, trade,
withdraw, deposit, timestamp, balances, accountType))
(fun (makerC, takerC, buyerC, sellerC, trade,
withdraw, deposit, timestamp, balances, accountType) ->
{ makerC ; takerC ; buyerC ; sellerC ;
trade ; withdraw ; deposit ; timestamp ;
balances ; accountType })
(obj10
(req "makerCommission" int)
(req "takerCommission" int)
(req "buyerCommission" int)
(req "sellerCommission" int)
(req "canTrade" bool)
(req "canWithdraw" bool)
(req "canDeposit" bool)
(req "updateTime" Ptime.encoding)
(req "balances" (list Balance.encoding))
(req "accountType" string))
let pp ppf t =
Json_repr.(pp (module Yojson) ppf (Yojson_repr.construct encoding t))
let to_string = Fmt.to_to_string pp
end
module OrderStatus = struct
type t = {
symbol : string ;
orderId : int ;
clientOrderId : string ;
price : float ;
origQty : float ;
executedQty : float ;
ordStatus : OrderStatus.t ;
timeInForce : TimeInForce.t ;
ordType : OrderType.t ;
side : Side.t ;
stopPrice : float ;
icebergQty : float ;
time : Ptime.t ;
isWorking : bool ;
}
let base_status_obj =
obj10
(req "symbol" string)
(req "orderId" int)
(req "clientOrderId" string)
(req "price" safe_float)
(req "origQty" safe_float)
(req "executedQty" safe_float)
(req "status" OrderStatus.encoding)
(req "timeInForce" TimeInForce.encoding)
(req "type" OrderType.encoding)
(req "side" Side.encoding)
let order_response_encoding =
conv
(fun { symbol ; orderId ; clientOrderId ;
price ; origQty ; executedQty ;
ordStatus ; timeInForce ; ordType ;
side ; stopPrice = _ ; icebergQty = _ ;
time ; isWorking = _ } ->
((symbol, orderId, clientOrderId, price, origQty,
executedQty, ordStatus, timeInForce, ordType, side),
time))
(fun ((symbol, orderId, clientOrderId, price, origQty,
executedQty, ordStatus, timeInForce, ordType, side),
time) -> { symbol ; orderId ; clientOrderId ;
price ; origQty ; executedQty ;
ordStatus ; timeInForce ; ordType ;
side ; stopPrice = 0.; icebergQty = 0.;
time ; isWorking = false})
(merge_objs base_status_obj
(obj1 (req "transactTime" Ptime.encoding)))
let encoding =
conv
(fun { symbol ; orderId ; clientOrderId ;
price ; origQty ; executedQty ;
ordStatus ; timeInForce ; ordType ;
side ; stopPrice ; icebergQty ;
time ; isWorking } ->
((symbol, orderId, clientOrderId, price, origQty,
executedQty, ordStatus, timeInForce, ordType, side),
(stopPrice, icebergQty, time, isWorking)))
(fun ((symbol, orderId, clientOrderId, price, origQty,
executedQty, ordStatus, timeInForce, ordType, side),
(stopPrice, icebergQty, time, isWorking)) ->
{ symbol ; orderId ; clientOrderId ;
price ; origQty ; executedQty ;
ordStatus ; timeInForce ; ordType ;
side ; stopPrice ; icebergQty ;
time ; isWorking })
(merge_objs base_status_obj
(obj4
(req "stopPrice" safe_float)
(req "icebergQty" float)
(req "time" Ptime.encoding)
(req "isWorking" bool)))
let pp ppf t =
Json_repr.(pp (module Yojson) ppf (Yojson_repr.construct encoding t))
let to_string = Fmt.to_to_string pp
end
let order
?(dry_run=false) ~symbol
~side ~kind ?timeInForce
~qty ?price ?clientOrdID
?stopPx ?icebergQty () =
let params = List.filter_opt [
Some ("symbol", [symbol]) ;
Some ("side", [Side.to_string side]) ;
Some ("type", [OrderType.to_string kind]) ;
Option.map timeInForce ~f:(fun tif -> "timeInForce", [TimeInForce.to_string tif]) ;
Some ("quantity", [Printf.sprintf "%.6f" qty]) ;
Option.map price ~f:(fun p -> "price", [Printf.sprintf "%.6f" p]) ;
Option.map clientOrdID ~f:(fun id -> "newClientOrderId", [id]) ;
Option.map stopPx ~f:(fun p -> "stopPrice", [Printf.sprintf "%.6f" p]) ;
Option.map icebergQty ~f:(fun q -> "icebergQty", [Printf.sprintf "%.6f" q]) ;
] in
let enc =
union [
case empty (function _ -> None) (function () -> None) ;
case OrderStatus.order_response_encoding
Fn.id (fun orderStatus -> Some orderStatus) ;
] in
Fastrest.post_form ~params ~auth:authf (or_error enc)
(Uri.with_path url ("api/v3/order" ^ if dry_run then "/test" else ""))
let open_orders symbol =
Fastrest.get ~auth:authf
(or_error (list OrderStatus.encoding))
Uri.(with_query (with_path url "api/v3/openOrders") ["symbol", [symbol]])
let account_info () =
Fastrest.get ~auth:authf
(or_error AccountInfo.encoding)
(Uri.with_path url "api/v3/account")
let myTrades symbol =
let q = ["symbol", [symbol]] in
Fastrest.get ~auth:authf
(or_error (list empty))
Uri.(with_query (with_path url "api/v3/myTrades") q)
module Stream = struct
let encoding =
conv Fn.id Fn.id (obj1 (req "listenKey" string))
let start () =
Fastrest.post_form ~auth:authf_keyonly
(or_error encoding)
(Uri.with_path url "api/v1/userDataStream")
let renew ~listenKey =
Fastrest.put_form
~auth:authf_keyonly
~params:["listenKey", [listenKey]]
(or_error empty)
(Uri.with_path url "api/v1/userDataStream")
let close ~listenKey =
Fastrest.delete ~auth:authf_keyonly
(or_error empty)
Uri.(with_query (with_path url "api/v1/userDataStream")
["listenKey", [listenKey]])
end
end
| null | https://raw.githubusercontent.com/vbmithr/ocaml-binance/8ee18a9f87423d592a9c9b06a816acb028800fbb/src/binance_rest.ml | ocaml | open Core
open Fastrest
open Json_encoding
open Binance
let url = Uri.make ~scheme:"https" ~host:"api.binance.com" ()
let or_error enc =
let encoding =
conv (fun _ -> assert false)
(fun (code, msg) -> Error.createf "%d: %s" code msg)
(obj2 (req "code" int) (req "msg" string)) in
union [
case encoding
(function Ok _ -> None | Error e -> Some e)
(function e -> Error e) ;
case enc
(function Ok v -> Some v | Error _ -> None)
(function v -> Ok v) ;
]
let authf srv { key ; secret ; meta = _ } =
let ps = match srv.params with
| Form ps -> ps
| Json (_,_) -> assert false in
let ps =
("timestamp", [Int.to_string (Time_ns.(to_int_ns_since_epoch (now ()) / 1_000_000))]) ::
("recvWindow", [Int.to_string 1_000]) ::
ps in
let headers = Httpaf.Headers.of_list [
"X-MBX-APIKEY", key ;
] in
let ps_encoded = Uri.encoded_of_query ps in
let signature =
Digestif.SHA256.(hmac_string ~key:secret ps_encoded |> to_hex) in
let ps = List.rev (("signature", [signature]) :: List.rev ps) in
{ Fastrest.params = Form ps ; headers }
(* Authentication for API-KEY-only endpoints (e.g. user data streams):
   forwards the request parameters untouched and only adds the
   X-MBX-APIKEY header — no signing. *)
let authf_keyonly { Fastrest.params ; _ } { Fastrest.key ; _ } =
  { Fastrest.params ;
    headers = Httpaf.Headers.of_list [ "X-MBX-APIKEY", key ] }
(* Set both the path and the query of [uri] in one call. *)
let with_path_and_query ~path ~query uri =
  let with_path = Uri.with_path uri path in
  Uri.with_query with_path query
(* GET /api/v1/exchangeInfo: only the "symbols" array is decoded; the
   rest of the object is accepted and ignored via [merge_objs unit]. *)
module ExchangeInfo = struct
  let encoding =
    conv
      (fun syms -> (), syms)
      (fun ((), syms) -> syms)
      (merge_objs unit
         (obj1
            (req "symbols" (list Sym.encoding))))

  (* unauthenticated request *)
  let get =
    Fastrest.get (or_error encoding)
      (Uri.with_path url "api/v1/exchangeInfo")
end
(* GET /api/v1/depth: order-book snapshot for one symbol. *)
module Depth = struct
  type t = {
    last_update_id : int64 ;       (* sequence id of this snapshot *)
    bids : Level.t list ;
    asks : Level.t list ;
  } [@@deriving sexp]

  let encoding =
    conv
      (fun { last_update_id ; bids ; asks } -> (last_update_id, bids, asks))
      (fun (last_update_id, bids, asks) -> { last_update_id ; bids ; asks })
      (obj3
         (req "lastUpdateId" int53)
         (req "bids" (list Level.encoding))
         (req "asks" (list Level.encoding)))

  (* [limit] must be one of the depths Binance supports; anything else
     raises Invalid_argument before any request is built. *)
  let get ?(limit=100) symbol =
    if not (List.mem ~equal:Int.equal [5; 10; 20; 50; 100; 500; 1000] limit) then
      invalid_argf "Depth.get: invalid limit %d, must belong to [5; \
                    10; 20; 50; 100; 500; 1000]" limit () ;
    Fastrest.get
      (or_error encoding)
      (with_path_and_query url
         ~path:"api/v1/depth"
         ~query:["symbol", [String.uppercase symbol] ;
                 "limit", [string_of_int limit]])
end
module User = struct
  (* One per-asset balance line from the account endpoint. *)
  module Balance = struct
    type t = {
      asset : string ;   (* asset code, e.g. "BTC" *)
      free : float ;     (* amount available for trading *)
      locked : float ;   (* amount reserved by open orders *)
    }

    let encoding =
      conv
        (fun { asset ; free ; locked } -> (asset, free, locked))
        (fun (asset, free, locked) -> { asset ; free ; locked })
        (obj3
           (req "asset" string)
           (req "free" safe_float)
           (req "locked" safe_float))
  end
  (* Account summary: commission rates, permissions and balances, as
     returned by GET /api/v3/account. *)
  module AccountInfo = struct
    type t = {
      makerC : int ;                 (* maker commission *)
      takerC : int ;                 (* taker commission *)
      buyerC : int ;                 (* buyer commission *)
      sellerC : int ;                (* seller commission *)
      trade : bool ;                 (* canTrade *)
      withdraw : bool ;              (* canWithdraw *)
      deposit : bool ;               (* canDeposit *)
      timestamp : Ptime.t ;          (* updateTime *)
      balances : Balance.t list ;
      accountType : string ;
    }

    let encoding =
      conv
        (fun { makerC ; takerC ; buyerC ; sellerC ;
               trade ; withdraw ; deposit ; timestamp ;
               balances ; accountType } ->
          (makerC, takerC, buyerC, sellerC, trade,
           withdraw, deposit, timestamp, balances, accountType))
        (fun (makerC, takerC, buyerC, sellerC, trade,
              withdraw, deposit, timestamp, balances, accountType) ->
          { makerC ; takerC ; buyerC ; sellerC ;
            trade ; withdraw ; deposit ; timestamp ;
            balances ; accountType })
        (obj10
           (req "makerCommission" int)
           (req "takerCommission" int)
           (req "buyerCommission" int)
           (req "sellerCommission" int)
           (req "canTrade" bool)
           (req "canWithdraw" bool)
           (req "canDeposit" bool)
           (req "updateTime" Ptime.encoding)
           (req "balances" (list Balance.encoding))
           (req "accountType" string))

    (* debug printers via the JSON representation *)
    let pp ppf t =
      Json_repr.(pp (module Yojson) ppf (Yojson_repr.construct encoding t))
    let to_string = Fmt.to_to_string pp
  end
  (* Order status as reported by query endpoints and by the response
     to a newly placed order. Two encodings are provided: one for the
     POST /api/v3/order response (which carries "transactTime" and
     lacks stop/iceberg/working fields) and the full one used by
     openOrders/order queries. *)
  module OrderStatus = struct
    type t = {
      symbol : string ;
      orderId : int ;
      clientOrderId : string ;
      price : float ;
      origQty : float ;
      executedQty : float ;
      ordStatus : OrderStatus.t ;
      timeInForce : TimeInForce.t ;
      ordType : OrderType.t ;
      side : Side.t ;
      stopPrice : float ;
      icebergQty : float ;
      time : Ptime.t ;
      isWorking : bool ;
    }

    (* ten fields shared by both wire shapes *)
    let base_status_obj =
      obj10
        (req "symbol" string)
        (req "orderId" int)
        (req "clientOrderId" string)
        (req "price" safe_float)
        (req "origQty" safe_float)
        (req "executedQty" safe_float)
        (req "status" OrderStatus.encoding)
        (req "timeInForce" TimeInForce.encoding)
        (req "type" OrderType.encoding)
        (req "side" Side.encoding)

    (* POST /api/v3/order response: absent fields are defaulted
       (stopPrice/icebergQty = 0., isWorking = false). *)
    let order_response_encoding =
      conv
        (fun { symbol ; orderId ; clientOrderId ;
               price ; origQty ; executedQty ;
               ordStatus ; timeInForce ; ordType ;
               side ; stopPrice = _ ; icebergQty = _ ;
               time ; isWorking = _ } ->
          ((symbol, orderId, clientOrderId, price, origQty,
            executedQty, ordStatus, timeInForce, ordType, side),
           time))
        (fun ((symbol, orderId, clientOrderId, price, origQty,
               executedQty, ordStatus, timeInForce, ordType, side),
              time) -> { symbol ; orderId ; clientOrderId ;
                         price ; origQty ; executedQty ;
                         ordStatus ; timeInForce ; ordType ;
                         side ; stopPrice = 0.; icebergQty = 0.;
                         time ; isWorking = false})
        (merge_objs base_status_obj
           (obj1 (req "transactTime" Ptime.encoding)))

    (* full order-status object (query endpoints) *)
    let encoding =
      conv
        (fun { symbol ; orderId ; clientOrderId ;
               price ; origQty ; executedQty ;
               ordStatus ; timeInForce ; ordType ;
               side ; stopPrice ; icebergQty ;
               time ; isWorking } ->
          ((symbol, orderId, clientOrderId, price, origQty,
            executedQty, ordStatus, timeInForce, ordType, side),
           (stopPrice, icebergQty, time, isWorking)))
        (fun ((symbol, orderId, clientOrderId, price, origQty,
               executedQty, ordStatus, timeInForce, ordType, side),
              (stopPrice, icebergQty, time, isWorking)) ->
          { symbol ; orderId ; clientOrderId ;
            price ; origQty ; executedQty ;
            ordStatus ; timeInForce ; ordType ;
            side ; stopPrice ; icebergQty ;
            time ; isWorking })
        (merge_objs base_status_obj
           (obj4
              (req "stopPrice" safe_float)
              (req "icebergQty" float)
              (req "time" Ptime.encoding)
              (req "isWorking" bool)))

    (* debug printers via the JSON representation *)
    let pp ppf t =
      Json_repr.(pp (module Yojson) ppf (Yojson_repr.construct encoding t))
    let to_string = Fmt.to_to_string pp
  end
  (* Place an order (POST /api/v3/order, signed). With [dry_run] the
     /test endpoint is hit instead, which validates the request but
     places nothing and answers with an empty object — hence the union
     below that accepts either an empty body (-> None) or a full order
     response (-> Some status). Optional parameters are simply omitted
     from the form when absent. *)
  let order
      ?(dry_run=false) ~symbol
      ~side ~kind ?timeInForce
      ~qty ?price ?clientOrdID
      ?stopPx ?icebergQty () =
    let params = List.filter_opt [
        Some ("symbol", [symbol]) ;
        Some ("side", [Side.to_string side]) ;
        Some ("type", [OrderType.to_string kind]) ;
        Option.map timeInForce ~f:(fun tif -> "timeInForce", [TimeInForce.to_string tif]) ;
        (* quantities/prices are formatted with 6 decimal places *)
        Some ("quantity", [Printf.sprintf "%.6f" qty]) ;
        Option.map price ~f:(fun p -> "price", [Printf.sprintf "%.6f" p]) ;
        Option.map clientOrdID ~f:(fun id -> "newClientOrderId", [id]) ;
        Option.map stopPx ~f:(fun p -> "stopPrice", [Printf.sprintf "%.6f" p]) ;
        Option.map icebergQty ~f:(fun q -> "icebergQty", [Printf.sprintf "%.6f" q]) ;
      ] in
    let enc =
      union [
        case empty (function _ -> None) (function () -> None) ;
        case OrderStatus.order_response_encoding
          Fn.id (fun orderStatus -> Some orderStatus) ;
      ] in
    Fastrest.post_form ~params ~auth:authf (or_error enc)
      (Uri.with_path url ("api/v3/order" ^ if dry_run then "/test" else ""))
let open_orders symbol =
Fastrest.get ~auth:authf
(or_error (list OrderStatus.encoding))
Uri.(with_query (with_path url "api/v3/openOrders") ["symbol", [symbol]])
let account_info () =
Fastrest.get ~auth:authf
(or_error AccountInfo.encoding)
(Uri.with_path url "api/v3/account")
let myTrades symbol =
let q = ["symbol", [symbol]] in
Fastrest.get ~auth:authf
(or_error (list empty))
Uri.(with_query (with_path url "api/v3/myTrades") q)
  (* User-data-stream lifecycle (API-key auth only, no signing). *)
  module Stream = struct
    let encoding =
      conv Fn.id Fn.id (obj1 (req "listenKey" string))

    (* open a stream; the response carries the listenKey *)
    let start () =
      Fastrest.post_form ~auth:authf_keyonly
        (or_error encoding)
        (Uri.with_path url "api/v1/userDataStream")

    (* keep-alive for an existing stream *)
    let renew ~listenKey =
      Fastrest.put_form
        ~auth:authf_keyonly
        ~params:["listenKey", [listenKey]]
        (or_error empty)
        (Uri.with_path url "api/v1/userDataStream")

    (* close the stream *)
    let close ~listenKey =
      Fastrest.delete ~auth:authf_keyonly
        (or_error empty)
        Uri.(with_query (with_path url "api/v1/userDataStream")
               ["listenKey", [listenKey]])
  end
end
| |
cae3ed6595aaaed6cfb7a6684fdf936fd8830083a0ff06b6b152979456fb6a79 | mabragor/cg-llvm | high-level-structure.lisp |
(in-package #:cg-llvm)
(cl-interpol:enable-interpol-syntax)
(enable-read-macro-tokens)
(quasiquote-2.0:enable-quasiquote-2.0)
(define-cg-llvm-rule symbol-table-entry ()
(|| target-datalayout
target-triple
inline-assembly
attribute-group
metadata-entry
(fail-parse "Not implemented")))
(define-cg-llvm-rule llvm-element ()
(|| global-variable-definition
function-declaration
function-definition
symbol-table-entry))
(define-plural-rule llvm-elements llvm-element (? whitespace))
(define-cg-llvm-rule llvm-module ()
(progm (? whitespace) `(module ,@llvm-elements) (? whitespace)))
| null | https://raw.githubusercontent.com/mabragor/cg-llvm/4a8375316d8ebf36125929c5d1a872721099d29c/src/high-level-structure.lisp | lisp |
(in-package #:cg-llvm)
(cl-interpol:enable-interpol-syntax)
(enable-read-macro-tokens)
(quasiquote-2.0:enable-quasiquote-2.0)
;; One module-level symbol-table entry. Alternatives are tried in
;; order; constructs not yet supported fail with an explicit message.
(define-cg-llvm-rule symbol-table-entry ()
  (|| target-datalayout
      target-triple
      inline-assembly
      attribute-group
      metadata-entry
      (fail-parse "Not implemented")))
;; Any top-level LLVM IR element: globals and functions are tried
;; before falling back to the symbol-table entries above.
(define-cg-llvm-rule llvm-element ()
  (|| global-variable-definition
      function-declaration
      function-definition
      symbol-table-entry))
;; Sequence of llvm-element separated by optional whitespace.
(define-plural-rule llvm-elements llvm-element (? whitespace))
;; Whole module: optional surrounding whitespace, producing
;; (module <element> ...).
(define-cg-llvm-rule llvm-module ()
  (progm (? whitespace) `(module ,@llvm-elements) (? whitespace)))
| |
79fa38701d16245dc7c29b4d226c5ad4708d6772850b96c9fbd98acb9129250d | dajac/react | semaphore_test.clj | (ns react.semaphore-test
(:use clojure.test)
(:require [react.semaphore :as semaphore]))
(deftest semaphore-test
(testing "Idle"
(let [s (semaphore/async-semaphore 1)]
(is (= (-> s .state deref :permits) 1))
(is (= (-> s .state deref :waiters count) 0))))
(testing "Acquire / Release"
(let [s (semaphore/async-semaphore 1)
a (semaphore/acquire s)
b (semaphore/acquire s)
c (semaphore/acquire s)]
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 2))
(is (= @a nil))
(is (false? (realized? b)))
(is (false? (realized? c)))
(semaphore/release s)
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 1))
(is (= @b nil))
(is (false? (realized? c)))
(semaphore/release s)
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 0))
(is (= @c nil))
(semaphore/release s)
(is (= (-> s .state deref :permits) 1))
(is (= (-> s .state deref :waiters count) 0))))
(testing "Acquire / Release with max-waiters"
(let [s (semaphore/async-semaphore 1 :max-waiters 1)
a (semaphore/acquire s)
b (semaphore/acquire s)]
(is (= (-> s .max-waiters) 1))
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 1))
(is (= @a nil))
(is (false? (realized? b)))
(is (thrown? java.util.concurrent.RejectedExecutionException
@(semaphore/acquire s))))))
| null | https://raw.githubusercontent.com/dajac/react/b45d549d0f8be0167c8c814418358ae82afe4ce8/test/react/semaphore_test.clj | clojure | (ns react.semaphore-test
(:use clojure.test)
(:require [react.semaphore :as semaphore]))
(deftest semaphore-test
(testing "Idle"
(let [s (semaphore/async-semaphore 1)]
(is (= (-> s .state deref :permits) 1))
(is (= (-> s .state deref :waiters count) 0))))
(testing "Acquire / Release"
(let [s (semaphore/async-semaphore 1)
a (semaphore/acquire s)
b (semaphore/acquire s)
c (semaphore/acquire s)]
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 2))
(is (= @a nil))
(is (false? (realized? b)))
(is (false? (realized? c)))
(semaphore/release s)
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 1))
(is (= @b nil))
(is (false? (realized? c)))
(semaphore/release s)
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 0))
(is (= @c nil))
(semaphore/release s)
(is (= (-> s .state deref :permits) 1))
(is (= (-> s .state deref :waiters count) 0))))
(testing "Acquire / Release with max-waiters"
(let [s (semaphore/async-semaphore 1 :max-waiters 1)
a (semaphore/acquire s)
b (semaphore/acquire s)]
(is (= (-> s .max-waiters) 1))
(is (= (-> s .state deref :permits) 0))
(is (= (-> s .state deref :waiters count) 1))
(is (= @a nil))
(is (false? (realized? b)))
(is (thrown? java.util.concurrent.RejectedExecutionException
@(semaphore/acquire s))))))
| |
41fd55a11b053a7726585bd762e812c8c5c74b2047a73ecd1609e78645c0d79f | may-liu/qtalk | make_muc_pic.erl | -module(make_muc_pic).
-export([make_muc_pic/2]).
-include("logger.hrl").
-include("qunar_ejabberd_extend.hrl").
-export([send_update_muc_pic/3]).
make_muc_pic(Server,Muc) ->
case catch ets:lookup(muc_users,Muc) of
[] ->
ok;
[{Muc,L}] ->
case http_make_new_pic(Server,Muc,L) of
null ->
ok;
Muc_Pic ->
send_update_muc_pic(Server,Muc,Muc_Pic)
end;
_ ->
ok
end.
http_make_new_pic(Server,Muc,Users) ->
Num = length(Users),
Pics =
lists:flatmap(fun({User,Host}) ->
case Host =:= Server of
true ->
case catch ets:lookup(vcard_version,User) of
[] ->
[];
[Vv] when is_record(Vv,vcard_version) ->
[complete_url(Vv#vcard_version.url)];
_ ->
[]
end;
_ ->
[]
end end,Users),
%Key = str:concat(Muc,str:concat(<<"_">>,integer_to_binary(Num))),
Key = Muc,
Url = "",
Header = [],
Type = "application/json",
HTTPOptions = [],
Options = [],
Body = rfc4627:encode({obj,[{"urls",Pics},{"key",Key}]}),
case catch http_client:http_post(Server,Url,Header,Type,Body,HTTPOptions,Options) of
{ok, {_Status,_Headers, Res}} ->
case rfc4627:decode(Res) of
{ok,{obj,Args},_} ->
case proplists:get_value("data",Args) of
Data when is_binary(Data) ->
proplists : ) ;
case str:str(Data,<<"qunar.com/">>) of
0 ->
<<"file/xxxx.png">>;
P ->
str:substr(Data,P + 10,size(Data))
end;
_ ->
<<"file/xxxx.png">>
end;
_ ->
<<"file/xxxx.png">>
end;
_ ->
<<"file/xxxx.png">>
end.
complete_url(Url) ->
case catch str:left(Url,4) of
<<"http">> ->
Url;
Url1 when is_binary(Url) ->
str:concat(<<"/">>,Url);
_ ->
[]
end.
send_update_muc_pic(Server,Muc,Pic) ->
Full_Muc = str:concat(Muc,str:concat(<<"@conference.">>,Server)),
Body = rfc4627:encode([{obj,[{<<"muc_name">>,Full_Muc},{<<"pic">>,Pic}]}]),
Url = "",
Header = [],
Type = "application/json",
HTTPOptions = [],
Options = [],
case catch http_client:http_post(Server,Url,Header,Type,Body,HTTPOptions,Options) of
{ok, {Status,Headers, Res}} ->
case rfc4627:decode(Res) of
{ok,{obj,Args},_} ->
?DEBUG("Args ~p ~n",[Args]),
true;
E ->
false
end;
Error ->
false
end.
| null | https://raw.githubusercontent.com/may-liu/qtalk/f5431e5a7123975e9656e7ab239e674ce33713cd/qtalk_opensource/src/make_muc_pic.erl | erlang | Key = str:concat(Muc,str:concat(<<"_">>,integer_to_binary(Num))), | -module(make_muc_pic).
-export([make_muc_pic/2]).
-include("logger.hrl").
-include("qunar_ejabberd_extend.hrl").
-export([send_update_muc_pic/3]).
make_muc_pic(Server,Muc) ->
case catch ets:lookup(muc_users,Muc) of
[] ->
ok;
[{Muc,L}] ->
case http_make_new_pic(Server,Muc,L) of
null ->
ok;
Muc_Pic ->
send_update_muc_pic(Server,Muc,Muc_Pic)
end;
_ ->
ok
end.
%% Collect local members' avatar URLs for room Muc and ask the
%% composition service to build a combined group avatar. Returns the
%% path after ".../qunar.com/" in the generated URL, or the
%% placeholder <<"file/xxxx.png">> on any failure.
%% NOTE(review): Url is empty (configuration apparently stripped) —
%% confirm the composition-service endpoint before use.
http_make_new_pic(Server,Muc,Users) ->
	%% member count is currently unused; underscore-prefixed to
	%% silence the unused-variable warning
	_Num = length(Users),
	Pics =
		lists:flatmap(fun({User,Host}) ->
			case Host =:= Server of
			true ->
				case catch ets:lookup(vcard_version,User) of
				[] ->
					[];
				[Vv] when is_record(Vv,vcard_version) ->
					[complete_url(Vv#vcard_version.url)];
				_ ->
					[]
				end;
			_ ->
				[]
			end end,Users),
	Key = Muc,
	Url = "",
	Header = [],
	Type = "application/json",
	HTTPOptions = [],
	Options = [],
	Body = rfc4627:encode({obj,[{"urls",Pics},{"key",Key}]}),
	case catch http_client:http_post(Server,Url,Header,Type,Body,HTTPOptions,Options) of
	{ok, {_Status,_Headers, Res}} ->
		case rfc4627:decode(Res) of
		{ok,{obj,Args},_} ->
			case proplists:get_value("data",Args) of
			Data when is_binary(Data) ->
				%% (a stray non-compiling line "proplists : ) ;"
				%%  stood here — removed as leftover residue)
				%% keep only the path after ".../qunar.com/"
				case str:str(Data,<<"qunar.com/">>) of
				0 ->
					<<"file/xxxx.png">>;
				P ->
					str:substr(Data,P + 10,size(Data))
				end;
			_ ->
				<<"file/xxxx.png">>
			end;
		_ ->
			<<"file/xxxx.png">>
		end;
	_ ->
		<<"file/xxxx.png">>
	end.
%% Normalize an avatar URL: absolute http(s) URLs pass through
%% unchanged, any other binary gets a leading "/" (presumably treated
%% as server-relative — TODO confirm against the composition service),
%% and non-binary input yields [].
complete_url(Url) ->
	case catch str:left(Url,4) of
	<<"http">> ->
		Url;
	%% the 4-char prefix itself is unused; matching with _ avoids the
	%% unused-variable warning the original binding (Url1) produced
	_ when is_binary(Url) ->
		str:concat(<<"/">>,Url);
	_ ->
		[]
	end.
%% Push the newly generated avatar for Muc to the vcard-update
%% service. Returns true when the service answered with a decodable
%% JSON object, false on any transport or decode failure.
%% NOTE(review): Url is empty (configuration apparently stripped) —
%% confirm the real endpoint before relying on this function.
send_update_muc_pic(Server,Muc,Pic) ->
	Full_Muc = str:concat(Muc,str:concat(<<"@conference.">>,Server)),
	Body = rfc4627:encode([{obj,[{<<"muc_name">>,Full_Muc},{<<"pic">>,Pic}]}]),
	Url = "",
	Header = [],
	Type = "application/json",
	HTTPOptions = [],
	Options = [],
	case catch http_client:http_post(Server,Url,Header,Type,Body,HTTPOptions,Options) of
	%% unused bindings underscore-prefixed to silence compiler warnings
	{ok, {_Status,_Headers, Res}} ->
		case rfc4627:decode(Res) of
		{ok,{obj,Args},_} ->
			?DEBUG("Args ~p ~n",[Args]),
			true;
		_E ->
			false
		end;
	_Error ->
		false
	end.
|
d2cbb2867e01fe93576459d1051445c48199ab5db96961adf3be0f60534c70e1 | soupi/sdl2-snake | Snake.hs | | This is an example application of the sdl2 package .
We will implement the game 'Snake'.

This module is composed of the following parts:

1. Main Loop
   Contains boilerplate code that is common to most games. Responsible for the lifecycle of the game.
2. Types and Constants
   The model of the game. Defines the relevant data types for this game.
3. Events
   Process the subset of events we care about.
4. Update
   Updates the game every frame according to the logic of the game. This is where the fun stuff happens.
5. Render
   This is where we convert the current state of the game to something we can display on screen.
-}
# LANGUAGE ViewPatterns , OverloadedStrings #
module Snake where
import Prelude hiding (head, init, tail)
import Data.Word (Word32)
import Data.List.NonEmpty (NonEmpty((:|)), head, init, tail, toList)
import Foreign.C.Types (CInt)
import Control.Monad (when, unless)
import Control.Concurrent (threadDelay)
import SDL.Vect (Point(P), V2(..), V4(..))
import SDL (($=))
import System.Random (StdGen, mkStdGen, randomR)
import qualified SDL
---------------
Main Loop --
---------------
-- | Initialize SDL, open the game window and renderer, run the game
-- loop until the player quits, then tear everything down.
run :: IO ()
run = do
  SDL.initialize [SDL.InitVideo]
  SDL.HintRenderScaleQuality $= SDL.ScaleLinear
  -- the hint is best-effort; warn if SDL did not accept it
  do renderQuality <- SDL.get SDL.HintRenderScaleQuality
     when (renderQuality /= SDL.ScaleLinear) $
       putStrLn "Warning: Linear texture filtering not enabled!"
  window <-
    SDL.createWindow
      "Snake"
      SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
  SDL.showWindow window
  renderer <-
    SDL.createRenderer
      window
      (-1)  -- (-1): let SDL pick the first matching driver
      SDL.RendererConfig
        { SDL.rendererType = SDL.AcceleratedRenderer
        , SDL.rendererTargetTexture = False
        }
  SDL.rendererDrawColor renderer $= V4 0 0 0 0 -- black background
  loop renderer initGameState
  -- cleanup after the loop exits
  SDL.destroyRenderer renderer
  SDL.destroyWindow window
  SDL.quit
-- | One frame of the game: poll events, update the state, render it,
-- throttle to 60 FPS, and recurse unless a quit event was seen.
loop :: SDL.Renderer -> GameState -> IO ()
loop renderer state = do
  -- measure ticks at the start
  start <- SDL.ticks
  -- the events/update/render pattern
  events <- fetchEvents
  let state' = update events state
  render renderer state'
  -- measure ticks at the end and regulate FPS
  end <- SDL.ticks
  regulateFPS 60 start end
  -- decide whether to continue or not
  unless (eQuit events) (loop renderer state')
-- | Will wait until ticks pass
regulateFPS :: Word32 -> Word32 -> Word32 -> IO ()
regulateFPS fps start end
| fps == 0 = pure ()
| otherwise = do
let
ticksPerFrame = 1000 `div` fps
interval = end - start
gap = ticksPerFrame - interval
delayFor
| gap < ticksPerFrame =
fromIntegral $ max 0 gap
| otherwise =
fromIntegral ticksPerFrame
threadDelay $ delayFor * 1000 -- threadDelay works in microseconds
-------------------------
-- Types and Constants --
-------------------------
data GameState
= GameState
{ sSnake :: NonEmpty (V2 CInt) -- we model the snake as a non-empty list of blocks
, sDirection :: (Maybe Direction)
, sStatus :: SnakeStatus
, sFood :: Maybe (V2 CInt)
, sMoveTimer :: Int -- we use timers to control when stuff should happen
, sFoodTimer :: Int
this is used to generate new food at psuedo random locations
}
deriving Show
data Direction
= DirUp
| DirDown
| DirLeft
| DirRight
deriving Show
data SnakeStatus
= Alive
| Dead
| CollidedWithTail
| CollidedWithWall
deriving (Show, Eq)
data MyEvents
= MyEvents
{ eQuit :: Bool
, eArrowUp :: Bool
, eArrowDown :: Bool
, eArrowLeft :: Bool
, eArrowRight :: Bool
}
deriving Show
initGameState :: GameState
initGameState = GameState
{ sSnake = V2 (blockSize * 7) (blockSize * 7) :| []
, sDirection = Just DirRight
, sStatus = Alive
, sFood = Just $ V2 (23 * blockSize) (14 * blockSize)
, sMoveTimer = 13 -- the units are frames
, sFoodTimer = 360 -- the units are frames
, sRandomGen = mkStdGen 17
}
getX :: V2 CInt -> CInt
getX (V2 locX _) = locX
getY :: V2 CInt -> CInt
getY (V2 _ locY) = locY
snakeBodyBlockSize :: V2 CInt
snakeBodyBlockSize = V2 blockSize blockSize
blockSize :: CInt
blockSize = 24
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (720, 576)
---------------------
-- Game Processing --
---------------------
------------
-- Events --
------------
-- | Will return a record with the relevant event states
fetchEvents :: IO MyEvents
fetchEvents = do
events <- SDL.pollEvents
isKeyPressed <- SDL.getKeyboardState
pure $ MyEvents
{ eQuit = elem SDL.QuitEvent $ map SDL.eventPayload events
, eArrowUp = isKeyPressed SDL.ScancodeUp
, eArrowDown = isKeyPressed SDL.ScancodeDown
, eArrowLeft = isKeyPressed SDL.ScancodeLeft
, eArrowRight = isKeyPressed SDL.ScancodeRight
}
------------
-- Update --
------------
-- | Will update the game each frame
update :: MyEvents -> GameState -> GameState
update events state
| sStatus state == Dead = state
| otherwise =
collide . updateFood . moveAndEat . changeDir events $ state
changeDir :: MyEvents -> GameState -> GameState
changeDir events state
| eArrowUp events = state { sDirection = Just DirUp }
| eArrowDown events = state { sDirection = Just DirDown }
| eArrowLeft events = state { sDirection = Just DirLeft }
| eArrowRight events = state { sDirection = Just DirRight }
| otherwise = state
moveAndEat :: GameState -> GameState
moveAndEat state
| sMoveTimer state == 0 =
state
{ sMoveTimer = sMoveTimer initGameState - min (sMoveTimer initGameState - 1) (length (sSnake state))
, sSnake =
newBlock (sDirection state) (head $ sSnake state)
:| (if ate state then toList else init) (sSnake state)
, sFood =
if ate state
then Nothing
else sFood state
, sFoodTimer =
if ate state
then 60
else sFoodTimer state - 1
}
| otherwise = state
{ sMoveTimer = sMoveTimer state - 1
, sFoodTimer = sFoodTimer state - 1
}
ate :: GameState -> Bool
ate state = Just (head $ sSnake state) == sFood state
newBlock :: Maybe Direction -> V2 CInt -> V2 CInt
newBlock dir (V2 locX locY) = case dir of
Just DirUp -> V2 locX (locY - getY snakeBodyBlockSize)
Just DirDown -> V2 locX (locY + getY snakeBodyBlockSize)
Just DirLeft -> V2 (locX - getX snakeBodyBlockSize) locY
Just DirRight -> V2 (locX + getX snakeBodyBlockSize) locY
Nothing -> V2 locX locY
updateFood :: GameState -> GameState
updateFood state
| sFoodTimer state == 0 =
let
((* blockSize) -> x, stdGen') = randomR (4, div screenWidth blockSize - 4) (sRandomGen state)
((* blockSize) -> y, stdGen'') = randomR (4, div screenHeight blockSize - 4) stdGen'
in state
{ sFoodTimer = sFoodTimer initGameState
, sFood = maybe (Just $ V2 x y) (const Nothing) $ sFood state
, sRandomGen = stdGen''
}
| otherwise =
state
collide :: GameState -> GameState
collide state
| sStatus state /= Alive =
state
{ sStatus = Dead
, sDirection = Nothing
}
| any (head (sSnake state) ==) (tail $ sSnake state) =
state
{ sStatus = CollidedWithTail
, sDirection = Nothing
}
| getX (head $ sSnake state) < 0
|| getX (head $ sSnake state) >= screenWidth
|| getY (head $ sSnake state) < 0
|| getY (head $ sSnake state) >= screenHeight
= state
{ sStatus = CollidedWithWall
, sDirection = Nothing
}
| otherwise = state
------------
-- Render --
------------
-- | Will render the game on screen
render :: SDL.Renderer -> GameState -> IO ()
render renderer state = do
SDL.rendererDrawColor renderer $= V4 0 0 0 0
SDL.clear renderer
let
drawBlock location =
SDL.fillRect renderer $
Just $ SDL.Rectangle (P location) snakeBodyBlockSize
SDL.rendererDrawColor renderer $= V4 maxBound 0 0 maxBound
mapM_ drawBlock $ sFood state
SDL.rendererDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
mapM_ drawBlock $ sSnake state
when (sStatus state == CollidedWithTail) $
putStrLn "The snake collided with it's tail :("
when (sStatus state == CollidedWithWall) $
putStrLn "The snake collided with the wall :("
SDL.present renderer
| null | https://raw.githubusercontent.com/soupi/sdl2-snake/84a4129d946f02d864592d3c9e40e272752c501b/src/Snake.hs | haskell | -------------
-------------
black background
measure ticks at the start
the pattern
measure ticks at the end and regulate FPS
decide whether to continue or not
| Will wait until ticks pass
threadDelay works in microseconds
-----------------------
Types and Constants --
-----------------------
we model the snake as a non-empty list of blocks
we use timers to control when stuff should happen
the units are frames
the units are frames
-------------------
Game Processing --
-------------------
----------
Events --
----------
| Will return a record with the relevant event states
----------
Update --
----------
| Will update the game each frame
----------
Render --
----------
| Will render the game on screen | | This is an example application of the sdl2 package .
We will implement the game ' Snake ' .
This module is composed of the following parts :
1 . Main Loop
Contains boilerplate code that is common to most games . Responsible for the lifecycle of the game .
2 . Types and Constants
The model of the game . Defines the relevant data types for this game .
3 . Events
Process the subset of events we care about .
4 . Update
Updates the game every frame according to the logic of the game . This is where the fun stuff happens .
5 . Render
This is where we convert the current state of the game to something we can display on screen .
We will implement the game 'Snake'.
This module is composed of the following parts:
1. Main Loop
Contains boilerplate code that is common to most games. Responsible for the lifecycle of the game.
2. Types and Constants
The model of the game. Defines the relevant data types for this game.
3. Events
Process the subset of events we care about.
4. Update
Updates the game every frame according to the logic of the game. This is where the fun stuff happens.
5. Render
This is where we convert the current state of the game to something we can display on screen.
-}
# LANGUAGE ViewPatterns , OverloadedStrings #
module Snake where
import Prelude hiding (head, init, tail)
import Data.Word (Word32)
import Data.List.NonEmpty (NonEmpty((:|)), head, init, tail, toList)
import Foreign.C.Types (CInt)
import Control.Monad (when, unless)
import Control.Concurrent (threadDelay)
import SDL.Vect (Point(P), V2(..), V4(..))
import SDL (($=))
import System.Random (StdGen, mkStdGen, randomR)
import qualified SDL
run :: IO ()
run = do
SDL.initialize [SDL.InitVideo]
SDL.HintRenderScaleQuality $= SDL.ScaleLinear
do renderQuality <- SDL.get SDL.HintRenderScaleQuality
when (renderQuality /= SDL.ScaleLinear) $
putStrLn "Warning: Linear texture filtering not enabled!"
window <-
SDL.createWindow
"Snake"
SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
SDL.showWindow window
renderer <-
SDL.createRenderer
window
(-1)
SDL.RendererConfig
{ SDL.rendererType = SDL.AcceleratedRenderer
, SDL.rendererTargetTexture = False
}
loop renderer initGameState
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
loop :: SDL.Renderer -> GameState -> IO ()
loop renderer state = do
start <- SDL.ticks
events <- fetchEvents
let state' = update events state
render renderer state'
end <- SDL.ticks
regulateFPS 60 start end
unless (eQuit events) (loop renderer state')
regulateFPS :: Word32 -> Word32 -> Word32 -> IO ()
regulateFPS fps start end
| fps == 0 = pure ()
| otherwise = do
let
ticksPerFrame = 1000 `div` fps
interval = end - start
gap = ticksPerFrame - interval
delayFor
| gap < ticksPerFrame =
fromIntegral $ max 0 gap
| otherwise =
fromIntegral ticksPerFrame
data GameState
= GameState
, sDirection :: (Maybe Direction)
, sStatus :: SnakeStatus
, sFood :: Maybe (V2 CInt)
, sFoodTimer :: Int
this is used to generate new food at psuedo random locations
}
deriving Show
data Direction
= DirUp
| DirDown
| DirLeft
| DirRight
deriving Show
data SnakeStatus
= Alive
| Dead
| CollidedWithTail
| CollidedWithWall
deriving (Show, Eq)
data MyEvents
= MyEvents
{ eQuit :: Bool
, eArrowUp :: Bool
, eArrowDown :: Bool
, eArrowLeft :: Bool
, eArrowRight :: Bool
}
deriving Show
initGameState :: GameState
initGameState = GameState
{ sSnake = V2 (blockSize * 7) (blockSize * 7) :| []
, sDirection = Just DirRight
, sStatus = Alive
, sFood = Just $ V2 (23 * blockSize) (14 * blockSize)
, sRandomGen = mkStdGen 17
}
getX :: V2 CInt -> CInt
getX (V2 locX _) = locX
getY :: V2 CInt -> CInt
getY (V2 _ locY) = locY
snakeBodyBlockSize :: V2 CInt
snakeBodyBlockSize = V2 blockSize blockSize
blockSize :: CInt
blockSize = 24
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (720, 576)
fetchEvents :: IO MyEvents
fetchEvents = do
events <- SDL.pollEvents
isKeyPressed <- SDL.getKeyboardState
pure $ MyEvents
{ eQuit = elem SDL.QuitEvent $ map SDL.eventPayload events
, eArrowUp = isKeyPressed SDL.ScancodeUp
, eArrowDown = isKeyPressed SDL.ScancodeDown
, eArrowLeft = isKeyPressed SDL.ScancodeLeft
, eArrowRight = isKeyPressed SDL.ScancodeRight
}
update :: MyEvents -> GameState -> GameState
update events state
| sStatus state == Dead = state
| otherwise =
collide . updateFood . moveAndEat . changeDir events $ state
changeDir :: MyEvents -> GameState -> GameState
changeDir events state
| eArrowUp events = state { sDirection = Just DirUp }
| eArrowDown events = state { sDirection = Just DirDown }
| eArrowLeft events = state { sDirection = Just DirLeft }
| eArrowRight events = state { sDirection = Just DirRight }
| otherwise = state
moveAndEat :: GameState -> GameState
moveAndEat state
| sMoveTimer state == 0 =
state
{ sMoveTimer = sMoveTimer initGameState - min (sMoveTimer initGameState - 1) (length (sSnake state))
, sSnake =
newBlock (sDirection state) (head $ sSnake state)
:| (if ate state then toList else init) (sSnake state)
, sFood =
if ate state
then Nothing
else sFood state
, sFoodTimer =
if ate state
then 60
else sFoodTimer state - 1
}
| otherwise = state
{ sMoveTimer = sMoveTimer state - 1
, sFoodTimer = sFoodTimer state - 1
}
ate :: GameState -> Bool
ate state = Just (head $ sSnake state) == sFood state
newBlock :: Maybe Direction -> V2 CInt -> V2 CInt
newBlock dir (V2 locX locY) = case dir of
Just DirUp -> V2 locX (locY - getY snakeBodyBlockSize)
Just DirDown -> V2 locX (locY + getY snakeBodyBlockSize)
Just DirLeft -> V2 (locX - getX snakeBodyBlockSize) locY
Just DirRight -> V2 (locX + getX snakeBodyBlockSize) locY
Nothing -> V2 locX locY
updateFood :: GameState -> GameState
updateFood state
| sFoodTimer state == 0 =
let
((* blockSize) -> x, stdGen') = randomR (4, div screenWidth blockSize - 4) (sRandomGen state)
((* blockSize) -> y, stdGen'') = randomR (4, div screenHeight blockSize - 4) stdGen'
in state
{ sFoodTimer = sFoodTimer initGameState
, sFood = maybe (Just $ V2 x y) (const Nothing) $ sFood state
, sRandomGen = stdGen''
}
| otherwise =
state
collide :: GameState -> GameState
collide state
| sStatus state /= Alive =
state
{ sStatus = Dead
, sDirection = Nothing
}
| any (head (sSnake state) ==) (tail $ sSnake state) =
state
{ sStatus = CollidedWithTail
, sDirection = Nothing
}
| getX (head $ sSnake state) < 0
|| getX (head $ sSnake state) >= screenWidth
|| getY (head $ sSnake state) < 0
|| getY (head $ sSnake state) >= screenHeight
= state
{ sStatus = CollidedWithWall
, sDirection = Nothing
}
| otherwise = state
render :: SDL.Renderer -> GameState -> IO ()
render renderer state = do
SDL.rendererDrawColor renderer $= V4 0 0 0 0
SDL.clear renderer
let
drawBlock location =
SDL.fillRect renderer $
Just $ SDL.Rectangle (P location) snakeBodyBlockSize
SDL.rendererDrawColor renderer $= V4 maxBound 0 0 maxBound
mapM_ drawBlock $ sFood state
SDL.rendererDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
mapM_ drawBlock $ sSnake state
when (sStatus state == CollidedWithTail) $
putStrLn "The snake collided with it's tail :("
when (sStatus state == CollidedWithWall) $
putStrLn "The snake collided with the wall :("
SDL.present renderer
|
3f4a4589e1407e7d59fb5d58de4e277b2f79bc5ca54d3be8ffd1d00a1a4fd23c | let-def/menhir | packedIntArray.mli | (**************************************************************************)
(* *)
Menhir
(* *)
, INRIA Rocquencourt
, PPS , Université Paris Diderot
(* *)
Copyright 2005 - 2008 Institut National de Recherche en Informatique
(* et en Automatique. All rights reserved. This file is distributed *)
under the terms of the GNU Library General Public License , with the
(* special exception on linking described in file LICENSE. *)
(* *)
(**************************************************************************)
A packed integer array is represented as a pair of an integer [ k ] and
a string [ s ] . The integer [ k ] is the number of bits per integer that we
use . The string [ s ] is just an array of bits , which is read in 8 - bit
chunks .
a string [s]. The integer [k] is the number of bits per integer that we
use. The string [s] is just an array of bits, which is read in 8-bit
chunks. *)
The ocaml programming language treats string literals and array literals
in slightly different ways : the former are statically allocated , while
the latter are dynamically allocated . ( This is rather arbitrary . ) In the
context of Menhir 's table - based back - end , where compact , immutable
integer arrays are needed , ocaml strings are preferable to ocaml arrays .
in slightly different ways: the former are statically allocated, while
the latter are dynamically allocated. (This is rather arbitrary.) In the
context of Menhir's table-based back-end, where compact, immutable
integer arrays are needed, ocaml strings are preferable to ocaml arrays. *)
type t =
int * string
(* [pack a] turns an array of integers into a packed integer array. *)
(* Because the sign bit is the most significant bit, the magnitude of
any negative number is the word size. In other words, [pack] does
not achieve any space savings as soon as [a] contains any negative
numbers, even if they are ``small''. *)
val pack: int array -> t
(* [get t i] returns the integer stored in the packed array [t] at index [i]. *)
(* Together, [pack] and [get] satisfy the following property: if the index [i]
is within bounds, then [get (pack a) i] equals [a.(i)]. *)
val get: t -> int -> int
[ get1 t i ] returns the integer stored in the packed array [ t ] at index [ i ] .
It assumes ( and does not check ) that the array 's bit width is [ 1 ] . The
parameter [ t ] is just a string .
It assumes (and does not check) that the array's bit width is [1]. The
parameter [t] is just a string. *)
val get1: string -> int -> int
| null | https://raw.githubusercontent.com/let-def/menhir/e8ba7bef219acd355798072c42abbd11335ecf09/src/packedIntArray.mli | ocaml | ************************************************************************
et en Automatique. All rights reserved. This file is distributed
special exception on linking described in file LICENSE.
************************************************************************
[pack a] turns an array of integers into a packed integer array.
Because the sign bit is the most significant bit, the magnitude of
any negative number is the word size. In other words, [pack] does
not achieve any space savings as soon as [a] contains any negative
numbers, even if they are ``small''.
[get t i] returns the integer stored in the packed array [t] at index [i].
Together, [pack] and [get] satisfy the following property: if the index [i]
is within bounds, then [get (pack a) i] equals [a.(i)]. | Menhir
, INRIA Rocquencourt
, PPS , Université Paris Diderot
Copyright 2005 - 2008 Institut National de Recherche en Informatique
under the terms of the GNU Library General Public License , with the
A packed integer array is represented as a pair of an integer [ k ] and
a string [ s ] . The integer [ k ] is the number of bits per integer that we
use . The string [ s ] is just an array of bits , which is read in 8 - bit
chunks .
a string [s]. The integer [k] is the number of bits per integer that we
use. The string [s] is just an array of bits, which is read in 8-bit
chunks. *)
The ocaml programming language treats string literals and array literals
in slightly different ways : the former are statically allocated , while
the latter are dynamically allocated . ( This is rather arbitrary . ) In the
context of Menhir 's table - based back - end , where compact , immutable
integer arrays are needed , ocaml strings are preferable to ocaml arrays .
in slightly different ways: the former are statically allocated, while
the latter are dynamically allocated. (This is rather arbitrary.) In the
context of Menhir's table-based back-end, where compact, immutable
integer arrays are needed, ocaml strings are preferable to ocaml arrays. *)
type t =
int * string
val pack: int array -> t
val get: t -> int -> int
[ get1 t i ] returns the integer stored in the packed array [ t ] at index [ i ] .
It assumes ( and does not check ) that the array 's bit width is [ 1 ] . The
parameter [ t ] is just a string .
It assumes (and does not check) that the array's bit width is [1]. The
parameter [t] is just a string. *)
val get1: string -> int -> int
|
8688feef3f8f79905e54bc9c1e10146f573e97a89c2dd68d939925a08c190c80 | racket/redex | poly-stlc-4.rkt | #lang racket/base
(require redex/benchmark
"util.rkt"
redex/reduction-semantics)
(provide (all-defined-out))
(define the-error "the type of cons is incorrect")
(define-rewrite bug4
(∀ a (a → ((list a) → (list a))))
==>
(∀ a (a → ((list a) → a)))
#:context (define-metafunction)
#:once-only)
(include/rewrite (lib "redex/examples/poly-stlc.rkt") poly-stlc bug4)
(include/rewrite "generators.rkt" generators bug-mod-rw)
(define small-counter-example
(term ((+ 0) (([cons @ int] 0) [nil @ int]))))
(test small-counter-example)
| null | https://raw.githubusercontent.com/racket/redex/4c2dc96d90cedeb08ec1850575079b952c5ad396/redex-benchmark/redex/benchmark/models/poly-stlc/poly-stlc-4.rkt | racket | #lang racket/base
(require redex/benchmark
"util.rkt"
redex/reduction-semantics)
(provide (all-defined-out))
(define the-error "the type of cons is incorrect")
(define-rewrite bug4
(∀ a (a → ((list a) → (list a))))
==>
(∀ a (a → ((list a) → a)))
#:context (define-metafunction)
#:once-only)
(include/rewrite (lib "redex/examples/poly-stlc.rkt") poly-stlc bug4)
(include/rewrite "generators.rkt" generators bug-mod-rw)
(define small-counter-example
(term ((+ 0) (([cons @ int] 0) [nil @ int]))))
(test small-counter-example)
| |
889efd83910d8c1c71c669562d5256baeda5823e29a74737bc18824af811cdd1 | mightybyte/zeus | Db.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImpredicativeTypes #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
# OPTIONS_GHC -fno - warn - missing - signatures #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE UndecidableInstances #
module Backend.Db where
------------------------------------------------------------------------------
import Data.Time
import Database.Beam
import Database.Beam.Migrate.Generics
import Database.Beam.Migrate.Simple
import Database.Beam.Sqlite.Connection
import Database.SQLite.Simple
------------------------------------------------------------------------------
import Common.Types.BinaryCache
import Common.Types.CacheJob
import Common.Types.CachedHash
import Common.Types.CiSettings
import Common.Types.ConnectedAccount
import Common.Types.Builder
import Common.Types.BuildJob
import Common.Types.JobStatus
import Common.Types.Repo
import Common.Types.RepoBuildInfo
------------------------------------------------------------------------------
------------------------------------------------------------------------------
data CiDb f = CiDb
{ _ciDb_connectedAccounts :: f (TableEntity ConnectedAccountT)
, _ciDb_repos :: f (TableEntity RepoT)
, _ciDb_builders :: f (TableEntity BuilderT)
, _ciDb_buildJobs :: f (TableEntity BuildJobT)
, _ciDb_ciSettings :: f (TableEntity CiSettingsT)
, _ciDb_cacheJobs :: f (TableEntity CacheJobT)
, _ciDb_binaryCaches :: f (TableEntity BinaryCacheT)
, _ciDb_cachedHashes :: f (TableEntity CachedHashT)
} deriving (Generic, Database be)
ciDbChecked : : BeamMigrateSqlBackend be = > be CiDb
--ciDbChecked = defaultMigratableDbSettings @_ @CiDb
ciDbChecked :: CheckedDatabaseSettings Sqlite CiDb
ciDbChecked = defaultMigratableDbSettings
ciDb : : DatabaseSettings be CiDb
ciDb :: DatabaseSettings Sqlite CiDb
ciDb = unCheckDatabase ciDbChecked
-- `withDbModification`
renamingFields ( snakify . ( /= ' _ ' ) . defaultFieldName )
CiDb (TableLens ciDb_connectedAccounts)
(TableLens ciDb_repos)
(TableLens ciDb_builders)
(TableLens ciDb_buildJobs)
(TableLens ciDb_ciSettings)
(TableLens ciDb_cacheJobs)
(TableLens ciDb_binaryCache)
(TableLens ciDb_cachedHash)
= dbLenses
populateDb :: Connection -> IO ()
populateDb conn = do
now <- getCurrentTime
-- let accounts =
[ ConnectedAccount default _ ( _ " mightybyte " ) ( val _ " 0000000000000000000000000000000000000000 " ) ]
let rbi = RepoBuildInfo
"dummy" "mightybyte/dummy" RepoPush "ssh://..." "https://..." "1234"
"a8cd23" "Dummy commit" "Alice Coder"
(Just "")
start = addUTCTime (-82) now
runBeamSqlite conn $ do
runInsert $ insert (_ciDb_buildJobs ciDb) $ insertExpressions
[ BuildJob default_ (val_ rbi) (val_ start) (val_ $ Just start) (val_ $ Just now) (val_ JobSucceeded)
]
| null | https://raw.githubusercontent.com/mightybyte/zeus/5566e3244f1ab0cd245e4c36be315a5abddb5810/backend/src/Backend/Db.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE GADTs #
# LANGUAGE ImpredicativeTypes #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE TypeSynonymInstances #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
ciDbChecked = defaultMigratableDbSettings @_ @CiDb
`withDbModification`
let accounts = | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
# OPTIONS_GHC -fno - warn - missing - signatures #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Backend.Db where
import Data.Time
import Database.Beam
import Database.Beam.Migrate.Generics
import Database.Beam.Migrate.Simple
import Database.Beam.Sqlite.Connection
import Database.SQLite.Simple
import Common.Types.BinaryCache
import Common.Types.CacheJob
import Common.Types.CachedHash
import Common.Types.CiSettings
import Common.Types.ConnectedAccount
import Common.Types.Builder
import Common.Types.BuildJob
import Common.Types.JobStatus
import Common.Types.Repo
import Common.Types.RepoBuildInfo
data CiDb f = CiDb
{ _ciDb_connectedAccounts :: f (TableEntity ConnectedAccountT)
, _ciDb_repos :: f (TableEntity RepoT)
, _ciDb_builders :: f (TableEntity BuilderT)
, _ciDb_buildJobs :: f (TableEntity BuildJobT)
, _ciDb_ciSettings :: f (TableEntity CiSettingsT)
, _ciDb_cacheJobs :: f (TableEntity CacheJobT)
, _ciDb_binaryCaches :: f (TableEntity BinaryCacheT)
, _ciDb_cachedHashes :: f (TableEntity CachedHashT)
} deriving (Generic, Database be)
ciDbChecked : : BeamMigrateSqlBackend be = > be CiDb
ciDbChecked :: CheckedDatabaseSettings Sqlite CiDb
ciDbChecked = defaultMigratableDbSettings
ciDb : : DatabaseSettings be CiDb
ciDb :: DatabaseSettings Sqlite CiDb
ciDb = unCheckDatabase ciDbChecked
renamingFields ( snakify . ( /= ' _ ' ) . defaultFieldName )
CiDb (TableLens ciDb_connectedAccounts)
(TableLens ciDb_repos)
(TableLens ciDb_builders)
(TableLens ciDb_buildJobs)
(TableLens ciDb_ciSettings)
(TableLens ciDb_cacheJobs)
(TableLens ciDb_binaryCache)
(TableLens ciDb_cachedHash)
= dbLenses
populateDb :: Connection -> IO ()
populateDb conn = do
now <- getCurrentTime
[ ConnectedAccount default _ ( _ " mightybyte " ) ( val _ " 0000000000000000000000000000000000000000 " ) ]
let rbi = RepoBuildInfo
"dummy" "mightybyte/dummy" RepoPush "ssh://..." "https://..." "1234"
"a8cd23" "Dummy commit" "Alice Coder"
(Just "")
start = addUTCTime (-82) now
runBeamSqlite conn $ do
runInsert $ insert (_ciDb_buildJobs ciDb) $ insertExpressions
[ BuildJob default_ (val_ rbi) (val_ start) (val_ $ Just start) (val_ $ Just now) (val_ JobSucceeded)
]
|
6753b7929075614278bf7c094291f5d16ebab4028e3935ce17753e41a67403c0 | billstclair/trubanc-lisp | square.lisp | square.lisp -- implementation of the Square block cipher
based on a public domain implementation by ( FIXME ! )
(in-package :crypto)
(declaim (type (simple-array (unsigned-byte 8) (256))
alogtable logtable))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defconst alogtable
#.(let ((table (make-array 256 :element-type '(unsigned-byte 8)
:initial-element 1)))
(do ((i 1 (1+ i)))
((>= i 256) table)
(let ((j (ash (aref table (1- i)) 1)))
(when (logbitp 8 j)
(setf j (logxor j #x1f5)))
(setf (aref table i) (logand j #xff))))))
)
(defconst logtable
#.(let ((table (make-array 256 :element-type '(unsigned-byte 8)
:initial-element 0)))
(do ((i 1 (1+ i)))
((>= i 256) (setf (aref table 1) 0) table)
(setf (aref table (aref alogtable i)) i))))
(declaim (type (simple-array (unsigned-byte 8) (4 4))
g-matrix inverse-g-matrix))
(defconst g-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
:initial-contents
(list (list 2 1 1 3)
(list 3 2 1 1)
(list 1 3 2 1)
(list 1 1 3 2))))
(defconst inverse-g-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
:initial-contents
(list (list #xe #x9 #xd #xb)
(list #xb #xe #x9 #xd)
(list #xd #xb #xe #x9)
(list #x9 #xd #xb #xe))))
(declaim (type (simple-array (unsigned-byte 8) (256))
s-encryption-table s-decryption-table))
(defconst s-encryption-table
#8@(177 206 195 149 90 173 231 2 77 68 251 145 12 135 161 80
203 103 84 221 70 143 225 78 240 253 252 235 249 196 26 110
94 245 204 141 28 86 67 254 7 97 248 117 89 255 3 34
138 209 19 238 136 0 14 52 21 128 148 227 237 181 83 35
75 71 23 167 144 53 171 216 184 223 79 87 154 146 219 27
60 200 153 4 142 224 215 125 133 187 64 44 58 69 241 66
101 32 65 24 114 37 147 112 54 5 242 11 163 121 236 8
39 49 50 182 124 176 10 115 91 123 183 129 210 13 106 38
158 88 156 131 116 179 172 48 122 105 119 15 174 33 222 208
46 151 16 164 152 168 212 104 45 98 41 109 22 73 118 199
232 193 150 55 229 202 244 233 99 18 194 166 20 188 211 40
175 47 230 36 82 198 160 9 189 140 207 93 17 95 1 197
159 61 162 155 201 59 190 81 25 31 63 92 178 239 74 205
191 186 111 100 217 243 62 180 170 220 213 6 192 126 246 102
108 132 113 56 185 29 127 157 72 139 42 218 165 51 130 57
214 120 134 250 228 43 169 30 137 96 107 234 85 76 247 226))
(defconst s-decryption-table
#8@(53 190 7 46 83 105 219 40 111 183 118 107 12 125 54 139
146 188 169 50 172 56 156 66 99 200 30 79 36 229 247 201
97 141 47 63 179 101 127 112 175 154 234 245 91 152 144 177
135 113 114 237 55 69 104 163 227 239 92 197 80 193 214 202
90 98 95 38 9 93 20 65 232 157 206 64 253 8 23 74
15 199 180 62 18 252 37 75 129 44 4 120 203 187 32 189
249 41 153 168 211 96 223 17 151 137 126 250 224 155 31 210
103 226 100 119 132 43 158 138 241 109 136 121 116 87 221 230
57 123 238 131 225 88 242 13 52 248 48 233 185 35 84 21
68 11 77 102 58 3 162 145 148 82 76 195 130 231 128 192
182 14 194 108 147 236 171 67 149 246 216 70 134 5 140 176
117 0 204 133 215 61 115 122 72 228 209 89 173 184 198 208
220 161 170 2 29 191 181 159 81 196 165 16 34 207 1 186
143 49 124 174 150 218 240 86 71 212 235 78 217 19 142 73
85 22 255 59 244 164 178 6 160 167 251 27 110 60 51 205
24 94 106 213 166 33 222 254 42 28 243 10 26 25 39 45))
(declaim (type (simple-array (unsigned-byte 32) (256))
t-encryption-table t-decryption-table))
(defconst t-encryption-table
#32@(#x97b1b126 #x69cecea7 #x73c3c3b0 #xdf95954a
#xb45a5aee #xafadad02 #x3be7e7dc #x04020206
#x9a4d4dd7 #x884444cc #x03fbfbf8 #xd7919146
#x180c0c14 #xfb87877c #xb7a1a116 #xa05050f0
#x63cbcba8 #xce6767a9 #xa85454fc #x4fdddd92
#x8c4646ca #xeb8f8f64 #x37e1e1d6 #x9c4e4ed2
#x15f0f0e5 #x0ffdfdf2 #x0dfcfcf1 #x23ebebc8
#x07f9f9fe #x7dc4c4b9 #x341a1a2e #xdc6e6eb2
#xbc5e5ee2 #x1ff5f5ea #x6dcccca1 #xef8d8d62
#x381c1c24 #xac5656fa #x864343c5 #x09fefef7
#x0e070709 #xc26161a3 #x05f8f8fd #xea75759f
#xb25959eb #x0bfffff4 #x06030305 #x44222266
#xe18a8a6b #x57d1d186 #x26131335 #x29eeeec7
#xe588886d #x00000000 #x1c0e0e12 #x6834345c
#x2a15153f #xf5808075 #xdd949449 #x33e3e3d0
#x2fededc2 #x9fb5b52a #xa65353f5 #x46232365
#x964b4bdd #x8e4747c9 #x2e171739 #xbba7a71c
#xd5909045 #x6a35355f #xa3abab08 #x45d8d89d
#x85b8b83d #x4bdfdf94 #x9e4f4fd1 #xae5757f9
#xc19a9a5b #xd1929243 #x43dbdb98 #x361b1b2d
#x783c3c44 #x65c8c8ad #xc799995e #x0804040c
#xe98e8e67 #x35e0e0d5 #x5bd7d78c #xfa7d7d87
#xff85857a #x83bbbb38 #x804040c0 #x582c2c74
#x743a3a4e #x8a4545cf #x17f1f1e6 #x844242c6
#xca6565af #x40202060 #x824141c3 #x30181828
#xe4727296 #x4a25256f #xd3939340 #xe0707090
#x6c36365a #x0a05050f #x11f2f2e3 #x160b0b1d
#xb3a3a310 #xf279798b #x2dececc1 #x10080818
#x4e272769 #x62313153 #x64323256 #x99b6b62f
#xf87c7c84 #x95b0b025 #x140a0a1e #xe6737395
#xb65b5bed #xf67b7b8d #x9bb7b72c #xf7818176
#x51d2d283 #x1a0d0d17 #xd46a6abe #x4c26266a
#xc99e9e57 #xb05858e8 #xcd9c9c51 #xf3838370
#xe874749c #x93b3b320 #xadacac01 #x60303050
#xf47a7a8e #xd26969bb #xee777799 #x1e0f0f11
#xa9aeae07 #x42212163 #x49dede97 #x55d0d085
#x5c2e2e72 #xdb97974c #x20101030 #xbda4a419
#xc598985d #xa5a8a80d #x5dd4d489 #xd06868b8
#x5a2d2d77 #xc46262a6 #x5229297b #xda6d6db7
#x2c16163a #x924949db #xec76769a #x7bc7c7bc
#x25e8e8cd #x77c1c1b6 #xd996964f #x6e373759
#x3fe5e5da #x61cacaab #x1df4f4e9 #x27e9e9ce
#xc66363a5 #x24121236 #x71c2c2b3 #xb9a6a61f
#x2814143c #x8dbcbc31 #x53d3d380 #x50282878
#xabafaf04 #x5e2f2f71 #x39e6e6df #x4824246c
#xa45252f6 #x79c6c6bf #xb5a0a015 #x1209091b
#x8fbdbd32 #xed8c8c61 #x6bcfcfa4 #xba5d5de7
#x22111133 #xbe5f5fe1 #x02010103 #x7fc5c5ba
#xcb9f9f54 #x7a3d3d47 #xb1a2a213 #xc39b9b58
#x67c9c9ae #x763b3b4d #x89bebe37 #xa25151f3
#x3219192b #x3e1f1f21 #x7e3f3f41 #xb85c5ce4
#x91b2b223 #x2befefc4 #x944a4ade #x6fcdcda2
#x8bbfbf34 #x81baba3b #xde6f6fb1 #xc86464ac
#x47d9d99e #x13f3f3e0 #x7c3e3e42 #x9db4b429
#xa1aaaa0b #x4ddcdc91 #x5fd5d58a #x0c06060a
#x75c0c0b5 #xfc7e7e82 #x19f6f6ef #xcc6666aa
#xd86c6cb4 #xfd848479 #xe2717193 #x70383848
#x87b9b93e #x3a1d1d27 #xfe7f7f81 #xcf9d9d52
#x904848d8 #xe38b8b68 #x542a2a7e #x41dada9b
#xbfa5a51a #x66333355 #xf1828273 #x7239394b
#x59d6d68f #xf0787888 #xf986867f #x01fafafb
#x3de4e4d9 #x562b2b7d #xa7a9a90e #x3c1e1e22
#xe789896e #xc06060a0 #xd66b6bbd #x21eaeacb
#xaa5555ff #x984c4cd4 #x1bf7f7ec #x31e2e2d3))
;;; Precomputed round table for the decryption direction.  Declared
;;; elsewhere in this file as (simple-array (unsigned-byte 32) (256));
;;; indexed by a single state byte, each entry packs the corresponding
;;; substituted-and-diffused 32-bit column value.  #32@ is the library's
;;; reader macro for a 32-bit typed-array literal.
(defconst t-decryption-table
  #32@(#xe368bc02 #x5585620c #x2a3f2331 #x61ab13f7
       #x98d46d72 #x21cb9a19 #x3c22a461 #x459d3dcd
       #x05fdb423 #x2bc4075f #x9b2c01c0 #x3dd9800f
       #x486c5c74 #xf97f7e85 #xf173ab1f #xb6edde0e
       #x283c6bed #x4997781a #x9f2a918d #xc9579f33
       #xa907a8aa #xa50ded7d #x7c422d8f #x764db0c9
       #x4d91e857 #xcea963cc #xb4ee96d2 #x3028e1b6
       #x0df161b9 #xbd196726 #x419bad80 #xc0a06ec7
       #x5183f241 #x92dbf034 #x6fa21efc #x8f32ce4c
       #x13e03373 #x69a7c66d #xe56d6493 #xbf1a2ffa
       #xbb1cbfb7 #x587403b5 #xe76e2c4f #x5d89b796
       #xe89c052a #x446619a3 #x342e71fb #x0ff22965
       #xfe81827a #xb11322f1 #xa30835ec #xcd510f7e
       #xff7aa614 #x5c7293f8 #x2fc29712 #xf370e3c3
       #x992f491c #xd1431568 #xc2a3261b #x88cc32b3
       #x8acf7a6f #xb0e8069f #x7a47f51e #xd2bb79da
       #xe6950821 #x4398e55c #xd0b83106 #x11e37baf
       #x7e416553 #xccaa2b10 #xd8b4e49c #x6456a7d4
       #xfb7c3659 #x724b2084 #xea9f4df6 #x6a5faadf
       #x2dc1dfce #x70486858 #xcaaff381 #x0605d891
       #x5a774b69 #x94de28a5 #x39df1042 #x813bc347
       #xfc82caa6 #x23c8d2c5 #x03f86cb2 #x080cd59a
       #xdab7ac40 #x7db909e1 #x3824342c #xcf5247a2
       #xdcb274d1 #x63a85b2b #x35d55595 #x479e7511
       #x15e5ebe2 #x4b9430c6 #x4a6f14a8 #x91239c86
       #x4c6acc39 #x5f8aff4a #x0406904d #xee99ddbb
       #x1e1152ca #xaaffc418 #xeb646998 #x07fefcff
       #x8b345e01 #x567d0ebe #xbae79bd9 #x4263c132
       #x75b5dc7b #x97264417 #x67aecb66 #x95250ccb
       #xec9a9567 #x57862ad0 #x60503799 #xb8e4d305
       #x65ad83ba #x19efae35 #xa4f6c913 #xc15b4aa9
       #x873e1bd6 #xa0f0595e #x18148a5b #xaf02703b
       #xab04e076 #xdd4950bf #xdf4a1863 #xc6a5b656
       #x853d530a #xfa871237 #x77b694a7 #x4665517f
       #xed61b109 #x1bece6e9 #xd5458525 #xf5753b52
       #x7fba413d #x27ce4288 #xb2eb4e43 #xd6bde997
       #x527b9ef3 #x62537f45 #x2c3afba0 #x7bbcd170
       #xb91ff76b #x121b171d #xfd79eec8 #x3a277cf0
       #x0c0a45d7 #x96dd6079 #x2233f6ab #xacfa1c89
       #xc8acbb5d #xa10b7d30 #xd4bea14b #xbee10b94
       #x25cd0a54 #x547e4662 #xa2f31182 #x17e6a33e
       #x263566e6 #xc3580275 #x83388b9b #x7844bdc2
       #x020348dc #x4f92a08b #x2e39b37c #x4e6984e5
       #xf0888f71 #x362d3927 #x9cd2fd3f #x01fb246e
       #x893716dd #x00000000 #xf68d57e0 #xe293986c
       #x744ef815 #x9320d45a #xad0138e7 #xd3405db4
       #x1a17c287 #xb3106a2d #x5078d62f #xf48e1f3c
       #xa70ea5a1 #x71b34c36 #x9ad725ae #x5e71db24
       #x161d8750 #xef62f9d5 #x8d318690 #x1c121a16
       #xa6f581cf #x5b8c6f07 #x37d61d49 #x6e593a92
       #x84c67764 #x86c53fb8 #xd746cdf9 #xe090d0b0
       #x29c74f83 #xe49640fd #x0e090d0b #x6da15620
       #x8ec9ea22 #xdb4c882e #xf776738e #xb515b2bc
       #x10185fc1 #x322ba96a #x6ba48eb1 #xaef95455
       #x406089ee #x6655ef08 #xe9672144 #x3e21ecbd
       #x2030be77 #xf28bc7ad #x80c0e729 #x141ecf8c
       #xbce24348 #xc4a6fe8a #x31d3c5d8 #xb716fa60
       #x5380ba9d #xd94fc0f2 #x1de93e78 #x24362e3a
       #xe16bf4de #xcb54d7ef #x09f7f1f4 #x82c3aff5
       #x0bf4b928 #x9d29d951 #xc75e9238 #xf8845aeb
       #x90d8b8e8 #xdeb13c0d #x33d08d04 #x685ce203
       #xc55ddae4 #x3bdc589e #x0a0f9d46 #x3fdac8d3
       #x598f27db #xa8fc8cc4 #x79bf99ac #x6c5a724e
       #x8ccaa2fe #x9ed1b5e3 #x1fea76a4 #x73b004ea))
(declaim (inline mul8))
;;; Multiply two bytes in the cipher's GF(2^8) representation using the
;;; ALOGTABLE/LOGTABLE pair defined earlier in this file.  Zero has no
;;; logarithm, so a zero operand short-circuits to a zero product.
(defun mul8 (a b)
  (declare (type (unsigned-byte 8) a b))
  (cond ((zerop a) 0)
        ((zerop b) 0)
        (t (aref alogtable
                 (mod (+ (aref logtable a) (aref logtable b)) 255)))))
;;; This function only runs during the key generation process, so consing
;;; is acceptable.
(defun transform (in in-offset out out-offset)
  "Apply the cipher's 4x4 byte-matrix transform to four 32-bit words.
Reads IN[IN-OFFSET .. IN-OFFSET+3], unpacks each word into a row of
big-endian bytes, multiplies the resulting matrix by G-MATRIX over
GF(2^8) (via MUL8), and repacks the product rows into
OUT[OUT-OFFSET .. OUT-OFFSET+3]."
  (declare (type (simple-array (unsigned-byte 32) (*)) in out))
  (let ((a-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)))
        (b-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
                              :initial-element 0)))
    (macrolet ((inref (index)
                 `(aref in (+ ,index in-offset)))
               (outref (index)
                 `(aref out (+ ,index out-offset))))
      ;; Unpack: row I of A-MATRIX holds the four bytes of word I,
      ;; most significant byte first.
      (dotimes (i 4)
        (dotimes (j 4)
          (setf (aref a-matrix i j)
                (logand (ash (inref i) (- (- 24 (* j 8)))) #xff))))
      ;; B = A * G, one GF(2^8) multiply-accumulate (XOR) at a time.
      (dotimes (i 4)
        (dotimes (j 4)
          (dotimes (k 4)
            (setf (aref b-matrix i j)
                  (logand
                   (logxor (mul8 (aref a-matrix i k) (aref g-matrix k j))
                           (aref b-matrix i j))
                   #xff)))))
      ;; Repack: fold each row of B back into one 32-bit output word.
      (dotimes (i 4)
        (setf (outref i) 0)
        (dotimes (j 4)
          (setf (outref i)
                (logxor (outref i)
                        (ash (aref b-matrix i j) (- 24 (* j 8))))))))))
(defun generate-round-keys (key n-rounds encrypt-roundkeys decrypt-roundkeys)
  "Expand the 16-byte KEY into N-ROUNDS+1 groups of four 32-bit words,
filling ENCRYPT-ROUNDKEYS and DECRYPT-ROUNDKEYS (each must hold at
least 4*(N-ROUNDS+1) words)."
  (declare (type (simple-array (unsigned-byte 32) (*))
                 encrypt-roundkeys decrypt-roundkeys)
           (type (simple-array (unsigned-byte 8) (16)) key))
  (let ((offset (make-array n-rounds :element-type '(unsigned-byte 8)
                            :initial-element 1))
        (tempkeys (make-array (* (1+ n-rounds) 4) :element-type '(unsigned-byte 32))))
    (declare (type (simple-array (unsigned-byte 8) (*)) offset)
             (type (simple-array (unsigned-byte 32) (*)) tempkeys))
    ;; hack for stupid C array punning: treat the flat vectors as
    ;; (n-rounds+1) x 4 matrices of 32-bit words.
    (macrolet ((mdref (array i j)
                 `(aref ,array (+ (* ,i 4) ,j))))
      ;; Round constants: OFFSET[i] is 2^i in GF(2^8) (repeated doubling).
      (do ((i 1 (1+ i)))
          ((>= i n-rounds))
        (setf (aref offset i) (mul8 2 (aref offset (1- i)))))
      ;; Row 0 of the schedule is the raw key, read as big-endian words.
      (dotimes (i 4)
        (setf (mdref tempkeys 0 i) (ub32ref/be key (* 4 i))))
      ;; Each subsequent row: word 0 XORs in the previous row's last word
      ;; rotated left by 8 plus the round constant; words 1-3 each chain
      ;; off the word just computed to their left.
      (do ((i 1 (1+ i)))
          ((>= i (1+ n-rounds)))
        (setf (mdref tempkeys i 0)
              (logxor (mdref tempkeys (1- i) 0)
                      (rol32 (mdref tempkeys (1- i) 3) 8)
                      (ash (aref offset (1- i)) 24))
              (mdref tempkeys i 1)
              (logxor (mdref tempkeys (1- i) 1) (mdref tempkeys i 0))
              (mdref tempkeys i 2)
              (logxor (mdref tempkeys (1- i) 2) (mdref tempkeys i 1))
              (mdref tempkeys i 3)
              (logxor (mdref tempkeys (1- i) 3) (mdref tempkeys i 2))))
      ;; Encryption keys: every row but the last goes through TRANSFORM
      ;; (the cipher's diffusion mapping); the final row is used as-is.
      (dotimes (i n-rounds)
        (transform tempkeys (* i 4) encrypt-roundkeys (* i 4)))
      (dotimes (i 4)
        (setf (mdref encrypt-roundkeys n-rounds i)
              (mdref tempkeys n-rounds i)))
      ;; Decryption keys: the untransformed schedule rows in reverse
      ;; order, with the final group copied from encryption row 0.
      (dotimes (i n-rounds)
        (dotimes (j 4)
          (setf (mdref decrypt-roundkeys i j)
                (mdref tempkeys (- n-rounds i) j))))
      (dotimes (i 4)
        (setf (mdref decrypt-roundkeys n-rounds i)
              (mdref encrypt-roundkeys 0 i))))))
(declaim (inline square-munge-block))
(defun square-munge-block (round-keys n-rounds t-array s-array
                           plaintext plaintext-start
                           ciphertext ciphertext-start)
  "Core Square round function, shared by both directions.
Processes one 16-byte block: reads it from PLAINTEXT at PLAINTEXT-START
and writes the result to CIPHERTEXT at CIPHERTEXT-START.  The caller
selects encryption or decryption by supplying the matching ROUND-KEYS,
T-ARRAY (word-valued round table) and S-ARRAY (byte substitution box)."
  (declare (type (simple-array (unsigned-byte 8) (*)) plaintext ciphertext)
           (type (simple-array (unsigned-byte 8) (256)) s-array)
           (type (simple-array (unsigned-byte 32) (*)) round-keys)
           (type (simple-array (unsigned-byte 32) (256)) t-array))
  (declare (type (integer 0 #.(- array-dimension-limit 16))
                 plaintext-start ciphertext-start))
  (with-words ((b0 b1 b2 b3) plaintext plaintext-start)
    (let ((a0 0) (a1 0) (a2 0) (a3 0))
      (declare (type (unsigned-byte 32) a0 a1 a2 a3))
      ;; initial key addition
      (setf b0 (logxor b0 (aref round-keys 0))
            b1 (logxor b1 (aref round-keys 1))
            b2 (logxor b2 (aref round-keys 2))
            b3 (logxor b3 (aref round-keys 3)))
      ;; full rounds: each output word combines one T-table lookup per
      ;; state word, then has its round key XORed in.
      (do ((i 0 (1+ i))
           (rk-offset 4 (+ rk-offset 4)))
          ((>= i (1- n-rounds)))
        ;; Each MOD32+ of the two disjoint shifts below acts as a 32-bit
        ;; rotation of the table entry.
        (macrolet ((mix (tmpvar bytefun)
                     `(setf ,tmpvar
                            (logxor (aref t-array (,bytefun b0))
                                    (mod32+ (mod32ash (aref t-array (,bytefun b1)) -8)
                                            (mod32ash (aref t-array (,bytefun b1)) 24))
                                    (mod32+ (mod32ash (aref t-array (,bytefun b2)) -16)
                                            (mod32ash (aref t-array (,bytefun b2)) 16))
                                    (mod32+ (mod32ash (aref t-array (,bytefun b3)) -24)
                                            (mod32ash (aref t-array (,bytefun b3)) 8))))))
          (mix a0 fourth-byte)
          (mix a1 third-byte)
          (mix a2 second-byte)
          (mix a3 first-byte)
          (setf b0 (logxor a0 (aref round-keys (+ rk-offset 0)))
                b1 (logxor a1 (aref round-keys (+ rk-offset 1)))
                b2 (logxor a2 (aref round-keys (+ rk-offset 2)))
                b3 (logxor a3 (aref round-keys (+ rk-offset 3)))))))
    ;; last round: plain S-box substitution (no T-table diffusion),
    ;; followed by the final key addition, stored straight to the output.
    (macrolet ((last-round (bytefun)
                 `(mod32+ (mod32ash (aref s-array (,bytefun b0)) 24)
                          (mod32+ (mod32ash (aref s-array (,bytefun b1)) 16)
                                  (mod32+ (mod32ash (aref s-array (,bytefun b2)) 8)
                                          (mod32ash (aref s-array (,bytefun b3)) 0)))))
               (rkref (index)
                 `(aref round-keys (+ ,index (* n-rounds 4)))))
      (let ((t0 (last-round fourth-byte))
            (t1 (last-round third-byte))
            (t2 (last-round second-byte))
            (t3 (last-round first-byte)))
        (declare (type (unsigned-byte 32) t0 t1 t2 t3))
        (flet ((apply-rk (temp round-key)
                 (declare (type (unsigned-byte 32) temp round-key))
                 (logxor temp round-key)))
          (declare (inline apply-rk))
          (store-words ciphertext ciphertext-start
                       (apply-rk t0 (rkref 0))
                       (apply-rk t1 (rkref 1))
                       (apply-rk t2 (rkref 2))
                       (apply-rk t3 (rkref 3))))))))
;;; Cipher context for Square: a 16-byte-block cipher object holding the
;;; two expanded key schedules produced by SCHEDULE-KEY.
(defclass square (cipher 16-byte-block-mixin)
  ((encryption-round-keys :accessor encryption-round-keys
                          :type (simple-array (unsigned-byte 32) (*)))
   (decryption-round-keys :accessor decryption-round-keys
                          :type (simple-array (unsigned-byte 32) (*)))
   ;; Round count; defaults to 8 below.
   (n-rounds :initarg :n-rounds :reader n-rounds))
  (:default-initargs :n-rounds 8))
;;; Encryption: run the shared round function with the encryption tables
;;; and the encryption key schedule.  CONTEXT, PLAINTEXT/CIPHERTEXT and
;;; the start indices are bound by the DEFINE-BLOCK-ENCRYPTOR macro.
(define-block-encryptor square 16
  (let ((n-rounds (n-rounds context))
        (round-keys (encryption-round-keys context)))
    (square-munge-block round-keys n-rounds t-encryption-table s-encryption-table
                        plaintext plaintext-start ciphertext ciphertext-start)))
;;; Decryption: the same round function with the decryption tables and
;;; schedule, reading from CIPHERTEXT and writing to PLAINTEXT (note the
;;; swapped buffer arguments relative to the encryptor).
(define-block-decryptor square 16
  (let ((n-rounds (n-rounds context))
        (round-keys (decryption-round-keys context)))
    (square-munge-block round-keys n-rounds t-decryption-table s-decryption-table
                        ciphertext ciphertext-start plaintext plaintext-start)))
(defmethod schedule-key ((cipher square) key)
  "Expand KEY into CIPHER's encryption and decryption round-key
schedules and return the (mutated) CIPHER."
  ;; Each schedule holds four 32-bit words per round plus one final group.
  (let* ((n-words (* 4 (1+ (n-rounds cipher))))
         (enc-keys (make-array n-words :element-type '(unsigned-byte 32)))
         (dec-keys (make-array n-words :element-type '(unsigned-byte 32))))
    (generate-round-keys key (n-rounds cipher) enc-keys dec-keys)
    (setf (encryption-round-keys cipher) enc-keys
          (decryption-round-keys cipher) dec-keys)
    cipher))
;;; Register the cipher with the library's cipher machinery: 16-byte
;;; blocks and a fixed 16-byte (128-bit) key.
(defcipher square
  (:encrypt-function square-encrypt-block)
  (:decrypt-function square-decrypt-block)
  (:block-length 16)
  (:key-length (:fixed 16)))
| null | https://raw.githubusercontent.com/billstclair/trubanc-lisp/5436d2eca5b1ed10bc47eec7080f6cb90f98ca65/systems/ironclad_0.26/square.lisp | lisp | this function only runs during the key generation process, so consing
is acceptable.
hack for stupid C array punning
initial key addition
full rounds
last round | square.lisp -- implementation of the Square block cipher
based on a public domain implementation by ( FIXME ! )
(in-package :crypto)
(declaim (type (simple-array (unsigned-byte 8) (256))
alogtable logtable))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defconst alogtable
#.(let ((table (make-array 256 :element-type '(unsigned-byte 8)
:initial-element 1)))
(do ((i 1 (1+ i)))
((>= i 256) table)
(let ((j (ash (aref table (1- i)) 1)))
(when (logbitp 8 j)
(setf j (logxor j #x1f5)))
(setf (aref table i) (logand j #xff))))))
)
(defconst logtable
#.(let ((table (make-array 256 :element-type '(unsigned-byte 8)
:initial-element 0)))
(do ((i 1 (1+ i)))
((>= i 256) (setf (aref table 1) 0) table)
(setf (aref table (aref alogtable i)) i))))
(declaim (type (simple-array (unsigned-byte 8) (4 4))
g-matrix inverse-g-matrix))
(defconst g-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
:initial-contents
(list (list 2 1 1 3)
(list 3 2 1 1)
(list 1 3 2 1)
(list 1 1 3 2))))
(defconst inverse-g-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
:initial-contents
(list (list #xe #x9 #xd #xb)
(list #xb #xe #x9 #xd)
(list #xd #xb #xe #x9)
(list #x9 #xd #xb #xe))))
(declaim (type (simple-array (unsigned-byte 8) (256))
s-encryption-table s-decryption-table))
(defconst s-encryption-table
#8@(177 206 195 149 90 173 231 2 77 68 251 145 12 135 161 80
203 103 84 221 70 143 225 78 240 253 252 235 249 196 26 110
94 245 204 141 28 86 67 254 7 97 248 117 89 255 3 34
138 209 19 238 136 0 14 52 21 128 148 227 237 181 83 35
75 71 23 167 144 53 171 216 184 223 79 87 154 146 219 27
60 200 153 4 142 224 215 125 133 187 64 44 58 69 241 66
101 32 65 24 114 37 147 112 54 5 242 11 163 121 236 8
39 49 50 182 124 176 10 115 91 123 183 129 210 13 106 38
158 88 156 131 116 179 172 48 122 105 119 15 174 33 222 208
46 151 16 164 152 168 212 104 45 98 41 109 22 73 118 199
232 193 150 55 229 202 244 233 99 18 194 166 20 188 211 40
175 47 230 36 82 198 160 9 189 140 207 93 17 95 1 197
159 61 162 155 201 59 190 81 25 31 63 92 178 239 74 205
191 186 111 100 217 243 62 180 170 220 213 6 192 126 246 102
108 132 113 56 185 29 127 157 72 139 42 218 165 51 130 57
214 120 134 250 228 43 169 30 137 96 107 234 85 76 247 226))
(defconst s-decryption-table
#8@(53 190 7 46 83 105 219 40 111 183 118 107 12 125 54 139
146 188 169 50 172 56 156 66 99 200 30 79 36 229 247 201
97 141 47 63 179 101 127 112 175 154 234 245 91 152 144 177
135 113 114 237 55 69 104 163 227 239 92 197 80 193 214 202
90 98 95 38 9 93 20 65 232 157 206 64 253 8 23 74
15 199 180 62 18 252 37 75 129 44 4 120 203 187 32 189
249 41 153 168 211 96 223 17 151 137 126 250 224 155 31 210
103 226 100 119 132 43 158 138 241 109 136 121 116 87 221 230
57 123 238 131 225 88 242 13 52 248 48 233 185 35 84 21
68 11 77 102 58 3 162 145 148 82 76 195 130 231 128 192
182 14 194 108 147 236 171 67 149 246 216 70 134 5 140 176
117 0 204 133 215 61 115 122 72 228 209 89 173 184 198 208
220 161 170 2 29 191 181 159 81 196 165 16 34 207 1 186
143 49 124 174 150 218 240 86 71 212 235 78 217 19 142 73
85 22 255 59 244 164 178 6 160 167 251 27 110 60 51 205
24 94 106 213 166 33 222 254 42 28 243 10 26 25 39 45))
(declaim (type (simple-array (unsigned-byte 32) (256))
t-encryption-table t-decryption-table))
(defconst t-encryption-table
#32@(#x97b1b126 #x69cecea7 #x73c3c3b0 #xdf95954a
#xb45a5aee #xafadad02 #x3be7e7dc #x04020206
#x9a4d4dd7 #x884444cc #x03fbfbf8 #xd7919146
#x180c0c14 #xfb87877c #xb7a1a116 #xa05050f0
#x63cbcba8 #xce6767a9 #xa85454fc #x4fdddd92
#x8c4646ca #xeb8f8f64 #x37e1e1d6 #x9c4e4ed2
#x15f0f0e5 #x0ffdfdf2 #x0dfcfcf1 #x23ebebc8
#x07f9f9fe #x7dc4c4b9 #x341a1a2e #xdc6e6eb2
#xbc5e5ee2 #x1ff5f5ea #x6dcccca1 #xef8d8d62
#x381c1c24 #xac5656fa #x864343c5 #x09fefef7
#x0e070709 #xc26161a3 #x05f8f8fd #xea75759f
#xb25959eb #x0bfffff4 #x06030305 #x44222266
#xe18a8a6b #x57d1d186 #x26131335 #x29eeeec7
#xe588886d #x00000000 #x1c0e0e12 #x6834345c
#x2a15153f #xf5808075 #xdd949449 #x33e3e3d0
#x2fededc2 #x9fb5b52a #xa65353f5 #x46232365
#x964b4bdd #x8e4747c9 #x2e171739 #xbba7a71c
#xd5909045 #x6a35355f #xa3abab08 #x45d8d89d
#x85b8b83d #x4bdfdf94 #x9e4f4fd1 #xae5757f9
#xc19a9a5b #xd1929243 #x43dbdb98 #x361b1b2d
#x783c3c44 #x65c8c8ad #xc799995e #x0804040c
#xe98e8e67 #x35e0e0d5 #x5bd7d78c #xfa7d7d87
#xff85857a #x83bbbb38 #x804040c0 #x582c2c74
#x743a3a4e #x8a4545cf #x17f1f1e6 #x844242c6
#xca6565af #x40202060 #x824141c3 #x30181828
#xe4727296 #x4a25256f #xd3939340 #xe0707090
#x6c36365a #x0a05050f #x11f2f2e3 #x160b0b1d
#xb3a3a310 #xf279798b #x2dececc1 #x10080818
#x4e272769 #x62313153 #x64323256 #x99b6b62f
#xf87c7c84 #x95b0b025 #x140a0a1e #xe6737395
#xb65b5bed #xf67b7b8d #x9bb7b72c #xf7818176
#x51d2d283 #x1a0d0d17 #xd46a6abe #x4c26266a
#xc99e9e57 #xb05858e8 #xcd9c9c51 #xf3838370
#xe874749c #x93b3b320 #xadacac01 #x60303050
#xf47a7a8e #xd26969bb #xee777799 #x1e0f0f11
#xa9aeae07 #x42212163 #x49dede97 #x55d0d085
#x5c2e2e72 #xdb97974c #x20101030 #xbda4a419
#xc598985d #xa5a8a80d #x5dd4d489 #xd06868b8
#x5a2d2d77 #xc46262a6 #x5229297b #xda6d6db7
#x2c16163a #x924949db #xec76769a #x7bc7c7bc
#x25e8e8cd #x77c1c1b6 #xd996964f #x6e373759
#x3fe5e5da #x61cacaab #x1df4f4e9 #x27e9e9ce
#xc66363a5 #x24121236 #x71c2c2b3 #xb9a6a61f
#x2814143c #x8dbcbc31 #x53d3d380 #x50282878
#xabafaf04 #x5e2f2f71 #x39e6e6df #x4824246c
#xa45252f6 #x79c6c6bf #xb5a0a015 #x1209091b
#x8fbdbd32 #xed8c8c61 #x6bcfcfa4 #xba5d5de7
#x22111133 #xbe5f5fe1 #x02010103 #x7fc5c5ba
#xcb9f9f54 #x7a3d3d47 #xb1a2a213 #xc39b9b58
#x67c9c9ae #x763b3b4d #x89bebe37 #xa25151f3
#x3219192b #x3e1f1f21 #x7e3f3f41 #xb85c5ce4
#x91b2b223 #x2befefc4 #x944a4ade #x6fcdcda2
#x8bbfbf34 #x81baba3b #xde6f6fb1 #xc86464ac
#x47d9d99e #x13f3f3e0 #x7c3e3e42 #x9db4b429
#xa1aaaa0b #x4ddcdc91 #x5fd5d58a #x0c06060a
#x75c0c0b5 #xfc7e7e82 #x19f6f6ef #xcc6666aa
#xd86c6cb4 #xfd848479 #xe2717193 #x70383848
#x87b9b93e #x3a1d1d27 #xfe7f7f81 #xcf9d9d52
#x904848d8 #xe38b8b68 #x542a2a7e #x41dada9b
#xbfa5a51a #x66333355 #xf1828273 #x7239394b
#x59d6d68f #xf0787888 #xf986867f #x01fafafb
#x3de4e4d9 #x562b2b7d #xa7a9a90e #x3c1e1e22
#xe789896e #xc06060a0 #xd66b6bbd #x21eaeacb
#xaa5555ff #x984c4cd4 #x1bf7f7ec #x31e2e2d3))
(defconst t-decryption-table
#32@(#xe368bc02 #x5585620c #x2a3f2331 #x61ab13f7
#x98d46d72 #x21cb9a19 #x3c22a461 #x459d3dcd
#x05fdb423 #x2bc4075f #x9b2c01c0 #x3dd9800f
#x486c5c74 #xf97f7e85 #xf173ab1f #xb6edde0e
#x283c6bed #x4997781a #x9f2a918d #xc9579f33
#xa907a8aa #xa50ded7d #x7c422d8f #x764db0c9
#x4d91e857 #xcea963cc #xb4ee96d2 #x3028e1b6
#x0df161b9 #xbd196726 #x419bad80 #xc0a06ec7
#x5183f241 #x92dbf034 #x6fa21efc #x8f32ce4c
#x13e03373 #x69a7c66d #xe56d6493 #xbf1a2ffa
#xbb1cbfb7 #x587403b5 #xe76e2c4f #x5d89b796
#xe89c052a #x446619a3 #x342e71fb #x0ff22965
#xfe81827a #xb11322f1 #xa30835ec #xcd510f7e
#xff7aa614 #x5c7293f8 #x2fc29712 #xf370e3c3
#x992f491c #xd1431568 #xc2a3261b #x88cc32b3
#x8acf7a6f #xb0e8069f #x7a47f51e #xd2bb79da
#xe6950821 #x4398e55c #xd0b83106 #x11e37baf
#x7e416553 #xccaa2b10 #xd8b4e49c #x6456a7d4
#xfb7c3659 #x724b2084 #xea9f4df6 #x6a5faadf
#x2dc1dfce #x70486858 #xcaaff381 #x0605d891
#x5a774b69 #x94de28a5 #x39df1042 #x813bc347
#xfc82caa6 #x23c8d2c5 #x03f86cb2 #x080cd59a
#xdab7ac40 #x7db909e1 #x3824342c #xcf5247a2
#xdcb274d1 #x63a85b2b #x35d55595 #x479e7511
#x15e5ebe2 #x4b9430c6 #x4a6f14a8 #x91239c86
#x4c6acc39 #x5f8aff4a #x0406904d #xee99ddbb
#x1e1152ca #xaaffc418 #xeb646998 #x07fefcff
#x8b345e01 #x567d0ebe #xbae79bd9 #x4263c132
#x75b5dc7b #x97264417 #x67aecb66 #x95250ccb
#xec9a9567 #x57862ad0 #x60503799 #xb8e4d305
#x65ad83ba #x19efae35 #xa4f6c913 #xc15b4aa9
#x873e1bd6 #xa0f0595e #x18148a5b #xaf02703b
#xab04e076 #xdd4950bf #xdf4a1863 #xc6a5b656
#x853d530a #xfa871237 #x77b694a7 #x4665517f
#xed61b109 #x1bece6e9 #xd5458525 #xf5753b52
#x7fba413d #x27ce4288 #xb2eb4e43 #xd6bde997
#x527b9ef3 #x62537f45 #x2c3afba0 #x7bbcd170
#xb91ff76b #x121b171d #xfd79eec8 #x3a277cf0
#x0c0a45d7 #x96dd6079 #x2233f6ab #xacfa1c89
#xc8acbb5d #xa10b7d30 #xd4bea14b #xbee10b94
#x25cd0a54 #x547e4662 #xa2f31182 #x17e6a33e
#x263566e6 #xc3580275 #x83388b9b #x7844bdc2
#x020348dc #x4f92a08b #x2e39b37c #x4e6984e5
#xf0888f71 #x362d3927 #x9cd2fd3f #x01fb246e
#x893716dd #x00000000 #xf68d57e0 #xe293986c
#x744ef815 #x9320d45a #xad0138e7 #xd3405db4
#x1a17c287 #xb3106a2d #x5078d62f #xf48e1f3c
#xa70ea5a1 #x71b34c36 #x9ad725ae #x5e71db24
#x161d8750 #xef62f9d5 #x8d318690 #x1c121a16
#xa6f581cf #x5b8c6f07 #x37d61d49 #x6e593a92
#x84c67764 #x86c53fb8 #xd746cdf9 #xe090d0b0
#x29c74f83 #xe49640fd #x0e090d0b #x6da15620
#x8ec9ea22 #xdb4c882e #xf776738e #xb515b2bc
#x10185fc1 #x322ba96a #x6ba48eb1 #xaef95455
#x406089ee #x6655ef08 #xe9672144 #x3e21ecbd
#x2030be77 #xf28bc7ad #x80c0e729 #x141ecf8c
#xbce24348 #xc4a6fe8a #x31d3c5d8 #xb716fa60
#x5380ba9d #xd94fc0f2 #x1de93e78 #x24362e3a
#xe16bf4de #xcb54d7ef #x09f7f1f4 #x82c3aff5
#x0bf4b928 #x9d29d951 #xc75e9238 #xf8845aeb
#x90d8b8e8 #xdeb13c0d #x33d08d04 #x685ce203
#xc55ddae4 #x3bdc589e #x0a0f9d46 #x3fdac8d3
#x598f27db #xa8fc8cc4 #x79bf99ac #x6c5a724e
#x8ccaa2fe #x9ed1b5e3 #x1fea76a4 #x73b004ea))
(declaim (inline mul8))
(defun mul8 (a b)
(declare (type (unsigned-byte 8) a b))
(if (or (zerop a) (zerop b))
0
(aref alogtable (mod (+ (aref logtable a) (aref logtable b)) 255))))
(defun transform (in in-offset out out-offset)
(declare (type (simple-array (unsigned-byte 32) (*)) in out))
(let ((a-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)))
(b-matrix (make-array (list 4 4) :element-type '(unsigned-byte 8)
:initial-element 0)))
(macrolet ((inref (index)
`(aref in (+ ,index in-offset)))
(outref (index)
`(aref out (+ ,index out-offset))))
(dotimes (i 4)
(dotimes (j 4)
(setf (aref a-matrix i j)
(logand (ash (inref i) (- (- 24 (* j 8)))) #xff))))
(dotimes (i 4)
(dotimes (j 4)
(dotimes (k 4)
(setf (aref b-matrix i j)
(logand
(logxor (mul8 (aref a-matrix i k) (aref g-matrix k j))
(aref b-matrix i j))
#xff)))))
(dotimes (i 4)
(setf (outref i) 0)
(dotimes (j 4)
(setf (outref i)
(logxor (outref i)
(ash (aref b-matrix i j) (- 24 (* j 8))))))))))
(defun generate-round-keys (key n-rounds encrypt-roundkeys decrypt-roundkeys)
(declare (type (simple-array (unsigned-byte 32) (*))
encrypt-roundkeys decrypt-roundkeys)
(type (simple-array (unsigned-byte 8) (16)) key))
(let ((offset (make-array n-rounds :element-type '(unsigned-byte 8)
:initial-element 1))
(tempkeys (make-array (* (1+ n-rounds) 4) :element-type '(unsigned-byte 32))))
(declare (type (simple-array (unsigned-byte 8) (*)) offset)
(type (simple-array (unsigned-byte 32) (*)) tempkeys))
(macrolet ((mdref (array i j)
`(aref ,array (+ (* ,i 4) ,j))))
(do ((i 1 (1+ i)))
((>= i n-rounds))
(setf (aref offset i) (mul8 2 (aref offset (1- i)))))
(dotimes (i 4)
(setf (mdref tempkeys 0 i) (ub32ref/be key (* 4 i))))
(do ((i 1 (1+ i)))
((>= i (1+ n-rounds)))
(setf (mdref tempkeys i 0)
(logxor (mdref tempkeys (1- i) 0)
(rol32 (mdref tempkeys (1- i) 3) 8)
(ash (aref offset (1- i)) 24))
(mdref tempkeys i 1)
(logxor (mdref tempkeys (1- i) 1) (mdref tempkeys i 0))
(mdref tempkeys i 2)
(logxor (mdref tempkeys (1- i) 2) (mdref tempkeys i 1))
(mdref tempkeys i 3)
(logxor (mdref tempkeys (1- i) 3) (mdref tempkeys i 2))))
(dotimes (i n-rounds)
(transform tempkeys (* i 4) encrypt-roundkeys (* i 4)))
(dotimes (i 4)
(setf (mdref encrypt-roundkeys n-rounds i)
(mdref tempkeys n-rounds i)))
(dotimes (i n-rounds)
(dotimes (j 4)
(setf (mdref decrypt-roundkeys i j)
(mdref tempkeys (- n-rounds i) j))))
(dotimes (i 4)
(setf (mdref decrypt-roundkeys n-rounds i)
(mdref encrypt-roundkeys 0 i))))))
(declaim (inline square-munge-block))
(defun square-munge-block (round-keys n-rounds t-array s-array
plaintext plaintext-start
ciphertext ciphertext-start)
(declare (type (simple-array (unsigned-byte 8) (*)) plaintext ciphertext)
(type (simple-array (unsigned-byte 8) (256)) s-array)
(type (simple-array (unsigned-byte 32) (*)) round-keys)
(type (simple-array (unsigned-byte 32) (256)) t-array))
(declare (type (integer 0 #.(- array-dimension-limit 16))
plaintext-start ciphertext-start))
(with-words ((b0 b1 b2 b3) plaintext plaintext-start)
(let ((a0 0) (a1 0) (a2 0) (a3 0))
(declare (type (unsigned-byte 32) a0 a1 a2 a3))
(setf b0 (logxor b0 (aref round-keys 0))
b1 (logxor b1 (aref round-keys 1))
b2 (logxor b2 (aref round-keys 2))
b3 (logxor b3 (aref round-keys 3)))
(do ((i 0 (1+ i))
(rk-offset 4 (+ rk-offset 4)))
((>= i (1- n-rounds)))
(macrolet ((mix (tmpvar bytefun)
`(setf ,tmpvar
(logxor (aref t-array (,bytefun b0))
(mod32+ (mod32ash (aref t-array (,bytefun b1)) -8)
(mod32ash (aref t-array (,bytefun b1)) 24))
(mod32+ (mod32ash (aref t-array (,bytefun b2)) -16)
(mod32ash (aref t-array (,bytefun b2)) 16))
(mod32+ (mod32ash (aref t-array (,bytefun b3)) -24)
(mod32ash (aref t-array (,bytefun b3)) 8))))))
(mix a0 fourth-byte)
(mix a1 third-byte)
(mix a2 second-byte)
(mix a3 first-byte)
(setf b0 (logxor a0 (aref round-keys (+ rk-offset 0)))
b1 (logxor a1 (aref round-keys (+ rk-offset 1)))
b2 (logxor a2 (aref round-keys (+ rk-offset 2)))
b3 (logxor a3 (aref round-keys (+ rk-offset 3)))))))
(macrolet ((last-round (bytefun)
`(mod32+ (mod32ash (aref s-array (,bytefun b0)) 24)
(mod32+ (mod32ash (aref s-array (,bytefun b1)) 16)
(mod32+ (mod32ash (aref s-array (,bytefun b2)) 8)
(mod32ash (aref s-array (,bytefun b3)) 0)))))
(rkref (index)
`(aref round-keys (+ ,index (* n-rounds 4)))))
(let ((t0 (last-round fourth-byte))
(t1 (last-round third-byte))
(t2 (last-round second-byte))
(t3 (last-round first-byte)))
(declare (type (unsigned-byte 32) t0 t1 t2 t3))
(flet ((apply-rk (temp round-key)
(declare (type (unsigned-byte 32) temp round-key))
(logxor temp round-key)))
(declare (inline apply-rk))
(store-words ciphertext ciphertext-start
(apply-rk t0 (rkref 0))
(apply-rk t1 (rkref 1))
(apply-rk t2 (rkref 2))
(apply-rk t3 (rkref 3))))))))
(defclass square (cipher 16-byte-block-mixin)
((encryption-round-keys :accessor encryption-round-keys
:type (simple-array (unsigned-byte 32) (*)))
(decryption-round-keys :accessor decryption-round-keys
:type (simple-array (unsigned-byte 32) (*)))
(n-rounds :initarg :n-rounds :reader n-rounds))
(:default-initargs :n-rounds 8))
(define-block-encryptor square 16
(let ((n-rounds (n-rounds context))
(round-keys (encryption-round-keys context)))
(square-munge-block round-keys n-rounds t-encryption-table s-encryption-table
plaintext plaintext-start ciphertext ciphertext-start)))
(define-block-decryptor square 16
(let ((n-rounds (n-rounds context))
(round-keys (decryption-round-keys context)))
(square-munge-block round-keys n-rounds t-decryption-table s-decryption-table
ciphertext ciphertext-start plaintext plaintext-start)))
(defmethod schedule-key ((cipher square) key)
(let ((encryption-schedule (make-array (* 4 (1+ (n-rounds cipher)))
:element-type '(unsigned-byte 32)))
(decryption-schedule (make-array (* 4 (1+ (n-rounds cipher)))
:element-type '(unsigned-byte 32))))
(generate-round-keys key (n-rounds cipher)
encryption-schedule decryption-schedule)
(setf (encryption-round-keys cipher) encryption-schedule
(decryption-round-keys cipher) decryption-schedule)
cipher))
(defcipher square
(:encrypt-function square-encrypt-block)
(:decrypt-function square-decrypt-block)
(:block-length 16)
(:key-length (:fixed 16)))
|
ec38c504ac1d3736693e8a19b693cd5ed794b1552b008727cafd34b12f768114 | ghollisjr/cl-ana | h5ex-t-arrayatt.lisp | Copyright by The HDF Group .
;;;; All rights reserved.
;;;;
This file is part of hdf5 - cffi .
The full hdf5 - cffi copyright notice , including terms governing
;;;; use, modification, and redistribution, is contained in the file COPYING,
;;;; which can be found at the root of the source code distribution tree.
;;;; If you do not have access to this file, you may request a copy from
;;;; .
;;; This example shows how to read and write array datatypes
to an attribute . The program first writes integers arrays
;;; of dimension ADIM0xADIM1 to an attribute with a dataspace
of DIM0 , then closes the file . Next , it reopens the
;;; file, reads back the data, and outputs it to the screen.
;;; -by-api/hdf5-examples/1_8/C/H5T/h5ex_t_arrayatt.c
(in-package :hdf5)
;; Output file is created next to this example's source file.
(defparameter *FILE* (namestring (merge-pathnames "h5ex_t_arrayatt.h5" *load-pathname*)))
(defparameter *DATASET* "DS1")
(defparameter *ATTRIBUTE* "A1")
;; The attribute's dataspace has *DIM0* elements, each element being a
;; *ADIM0* x *ADIM1* array-typed value.
(defparameter *DIM0* 4)
(defparameter *ADIM0* 3)
(defparameter *ADIM1* 5)
(defun pos (rows cols i j k)
  "Row-major flat offset of element (I, J, K) in an array whose trailing
two dimensions are ROWS x COLS."
  (+ k (* cols (+ j (* rows i)))))
;; Top-level driver: write the example attribute, then reopen the file
;; and read it back.  DIMS/ADIMS are scratch hsize-t buffers shared by
;; the write and read phases; WDATA is the foreign int buffer written.
(cffi:with-foreign-objects
    ((dims 'hsize-t 1)
     (adims 'hsize-t 2)
     (wdata :int (* *DIM0* *ADIM0* *ADIM1*)))
  (setf (cffi:mem-aref adims 'hsize-t 0) *ADIM0*
        (cffi:mem-aref adims 'hsize-t 1) *ADIM1*
        (cffi:mem-aref dims 'hsize-t 0) *DIM0*)
  ;; Initialize data. i is the element in the dataspace, j and k the
  ;; elements within the array datatype.  Value = i*j - j*k + i*k.
  (dotimes (i *DIM0*)
    (dotimes (j *ADIM0*)
      (dotimes (k *ADIM1*)
        (setf (cffi:mem-aref wdata :int (pos *ADIM0* *ADIM1* i j k))
              (+ (* i j) (- (* j k)) (* i k))))))
  ;; Create a new file; strong close degree makes closing FILE also
  ;; close any objects left open inside it.
  (let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
         (file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
                   (h5fcreate *FILE* +H5F-ACC-TRUNC+ +H5P-DEFAULT+ fapl))))
    (unwind-protect
         ;; Create array datatypes for file and memory.
         (let* ((filetype (h5tarray-create2 +H5T-STD-I64LE+ 2 adims))
                (memtype (h5tarray-create2 +H5T-NATIVE-INT+ 2 adims))
                ;; Create dataset with a null dataspace.
                (dspace (h5ex:create-null-dataspace))
                (dset (h5dcreate2 file *DATASET* +H5T-STD-I32LE+ dspace
                                  +H5P-DEFAULT+ +H5P-DEFAULT+ +H5P-DEFAULT+))
                (aspace (h5ex:create-simple-dataspace `(,*DIM0*)))
                ;; Create the attribute and write the array data to it.
                (attr (h5acreate2 dset *ATTRIBUTE* filetype aspace
                                  +H5P-DEFAULT+ +H5P-DEFAULT+)))
           (h5awrite attr memtype wdata)
           (h5ex:close-handles (list attr aspace dset dspace memtype filetype)))
      ;; Runs even if an HDF5 call above signals.
      (h5ex:close-handles (list file fapl))))
  ;; Now we begin the read section of this example. Here we assume
  ;; the attribute and array have the same name and rank, but can
  ;; have any size. Therefore we must allocate a new array to read
  ;; in data dynamically.
  (let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
         (file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
                   (h5fopen *FILE* +H5F-ACC-RDONLY+ fapl))))
    (unwind-protect
         (let* ((dset (h5dopen2 file *DATASET* +H5P-DEFAULT+))
                (attr (h5aopen dset *ATTRIBUTE* +H5P-DEFAULT+))
                (filetype (h5aget-type attr))
                (space (h5aget-space attr)))
           ;; Get dataspace and allocate memory for read buffer.
           (h5tget-array-dims2 filetype adims)
           (h5sget-simple-extent-dims space dims +NULL+)
           ;; Allocate space for integer data.
           (let ((dims[0] (cffi:mem-aref dims 'hsize-t 0))
                 (adims[0] (cffi:mem-aref adims 'hsize-t 0))
                 (adims[1] (cffi:mem-aref adims 'hsize-t 1))
                 ;; Create the memory datatype.
                 (memtype (h5tarray-create2 +H5T-NATIVE-INT+ 2 adims)))
             (cffi:with-foreign-object (rdata :int (* dims[0] adims[0]
                                                      adims[1]))
               ;; Read the data.
               (h5aread attr memtype rdata)
               ;; Output the data to the screen.
               (dotimes (i *DIM0*)
                 (format t "~a[~a]:~%" *ATTRIBUTE* i)
                 (dotimes (j *ADIM0*)
                   (format t " [")
                   (dotimes (k *ADIM1*)
                     (format t " ~3d" (cffi:mem-aref rdata :int
                                                     (pos *ADIM0* *ADIM1*
                                                          i j k))))
                   (format t "]~%"))
                 (format t "~%"))
               (h5tclose memtype)))
           ;; Close and release resources.
           (h5ex:close-handles (list space filetype attr dset)))
      (h5ex:close-handles (list file fapl)))))
| null | https://raw.githubusercontent.com/ghollisjr/cl-ana/5cb4c0b0c9c4957452ad2a769d6ff9e8d5df0b10/hdf-cffi/examples/datatypes/h5ex-t-arrayatt.lisp | lisp | All rights reserved.
use, modification, and redistribution, is contained in the file COPYING,
which can be found at the root of the source code distribution tree.
If you do not have access to this file, you may request a copy from
.
This example shows how to read and write array datatypes
of dimension ADIM0xADIM1 to an attribute with a dataspace
file, reads back the data, and outputs it to the screen.
-by-api/hdf5-examples/1_8/C/H5T/h5ex_t_arrayatt.c
elements within the array datatype.
Create a new file using the default properties.
Create array datatypes for file and memory.
Create dataset with a null dataspace.
Create the attribute and write the array data to it.
Now we begin the read section of this example. Here we assume
the attribute and array have the same name and rank, but can
have any size. Therefore we must allocate a new array to read
in data dynamically.
Get dataspace and allocate memory for read buffer.
Allocate space for integer data.
Create the memory datatype.
Read the data.
Output the data to the screen.
Close and release resources. | Copyright by The HDF Group .
This file is part of hdf5 - cffi .
The full hdf5 - cffi copyright notice , including terms governing
to an attribute . The program first writes integers arrays
of DIM0 , then closes the file . Next , it reopens the
(in-package :hdf5)
(defparameter *FILE* (namestring (merge-pathnames "h5ex_t_arrayatt.h5" *load-pathname*)))
(defparameter *DATASET* "DS1")
(defparameter *ATTRIBUTE* "A1")
(defparameter *DIM0* 4)
(defparameter *ADIM0* 3)
(defparameter *ADIM1* 5)
(defun pos (rows cols i j k)
"3D array position"
(+ (* (+ (* i rows) j) cols) k))
(cffi:with-foreign-objects
((dims 'hsize-t 1)
(adims 'hsize-t 2)
(wdata :int (* *DIM0* *ADIM0* *ADIM1*)))
(setf (cffi:mem-aref adims 'hsize-t 0) *ADIM0*
(cffi:mem-aref adims 'hsize-t 1) *ADIM1*
(cffi:mem-aref dims 'hsize-t 0) *DIM0*)
Initialize data . i is the element in the dataspace , j and k the
(dotimes (i *DIM0*)
(dotimes (j *ADIM0*)
(dotimes (k *ADIM1*)
(setf (cffi:mem-aref wdata :int (pos *ADIM0* *ADIM1* i j k))
(+ (* i j) (- (* j k)) (* i k))))))
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fcreate *FILE* +H5F-ACC-TRUNC+ +H5P-DEFAULT+ fapl))))
(unwind-protect
(let* ((filetype (h5tarray-create2 +H5T-STD-I64LE+ 2 adims))
(memtype (h5tarray-create2 +H5T-NATIVE-INT+ 2 adims))
(dspace (h5ex:create-null-dataspace))
(dset (h5dcreate2 file *DATASET* +H5T-STD-I32LE+ dspace
+H5P-DEFAULT+ +H5P-DEFAULT+ +H5P-DEFAULT+))
(aspace (h5ex:create-simple-dataspace `(,*DIM0*)))
(attr (h5acreate2 dset *ATTRIBUTE* filetype aspace
+H5P-DEFAULT+ +H5P-DEFAULT+)))
(h5awrite attr memtype wdata)
(h5ex:close-handles (list attr aspace dset dspace memtype filetype)))
(h5ex:close-handles (list file fapl))))
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fopen *FILE* +H5F-ACC-RDONLY+ fapl))))
(unwind-protect
(let* ((dset (h5dopen2 file *DATASET* +H5P-DEFAULT+))
(attr (h5aopen dset *ATTRIBUTE* +H5P-DEFAULT+))
(filetype (h5aget-type attr))
(space (h5aget-space attr)))
(h5tget-array-dims2 filetype adims)
(h5sget-simple-extent-dims space dims +NULL+)
(let ((dims[0] (cffi:mem-aref dims 'hsize-t 0))
(adims[0] (cffi:mem-aref adims 'hsize-t 0))
(adims[1] (cffi:mem-aref adims 'hsize-t 1))
(memtype (h5tarray-create2 +H5T-NATIVE-INT+ 2 adims)))
(cffi:with-foreign-object (rdata :int (* dims[0] adims[0]
adims[1]))
(h5aread attr memtype rdata)
(dotimes (i *DIM0*)
(format t "~a[~a]:~%" *ATTRIBUTE* i)
(dotimes (j *ADIM0*)
(format t " [")
(dotimes (k *ADIM1*)
(format t " ~3d" (cffi:mem-aref rdata :int
(pos *ADIM0* *ADIM1*
i j k))))
(format t "]~%"))
(format t "~%"))
(h5tclose memtype)))
(h5ex:close-handles (list space filetype attr dset)))
(h5ex:close-handles (list file fapl)))))
|
e1acdb26d1fca58ae050e6c53115e98652f15fb777651204b05d0402ff3db4fe | RyanMcG/incise | incise_layout.clj | (ns incise.transformers.impl.incise-layout
(:require (incise.transformers [layout :refer [repartial use-layout
deflayout defpartial]]
[core :refer [register]])
[stefon.core :refer [link-to-asset]]
[robert.hooke :refer [clear-hooks]]
(incise.transformers.impl [vm-layout :as vm-layout]
[base-layout :as base-layout])
[hiccup.util :refer [to-uri]]))
(defpartial header []
[:header
[:h1
[:a {:href "/"
:title "inciꞅe"}
[:img {:alt "inciꞅe"
:src ""}]]
[:a {:href "-ci.org/RyanMcG/incise-core"
:title "Build Status"}
[:img {:alt "Build Status"
:src "-ci.org/RyanMcG/incise-core.png?branch=master"}]]]
[:span.tag-line "An extensible static site generator written in Clojure."]
; This nav is duplicated in README.md
[:ul.nav
[:li [:a {:href "-core"
:title "Source code on GitHub"}
"Source"]]
[:li
[:a {:href "/"
:title "codox generated API documentation"}
"API"]]
[:li
[:a {:href "/"
:title "codox generated API documentation"}
"Extensibility"]]]])
(defpartial stylesheets [_ _ old-sheets]
(vec (conj (vec (butlast old-sheets))
(link-to-asset "incise.css.stefon"))))
(defpartial head [_ _ old-head]
(vec (conj (vec (butlast (first old-head)))
[:link {:rel "icon"
:type "image/png"
:href (to-uri "/assets/images/favicon.png")}])))
(deflayout incise []
(clear-hooks #'base-layout/head) ; Necessary to remove old favicon
(repartial base-layout/head head)
(repartial vm-layout/stylesheets stylesheets)
(repartial base-layout/header header)
(use-layout vm-layout/vm))
(register :incise-layout incise)
| null | https://raw.githubusercontent.com/RyanMcG/incise/fc34f5715cb7555ea575f665a2f4ec76bc9e012e/src/incise/transformers/impl/incise_layout.clj | clojure | This nav is duplicated in README.md
Necessary to remove old favicon | (ns incise.transformers.impl.incise-layout
(:require (incise.transformers [layout :refer [repartial use-layout
deflayout defpartial]]
[core :refer [register]])
[stefon.core :refer [link-to-asset]]
[robert.hooke :refer [clear-hooks]]
(incise.transformers.impl [vm-layout :as vm-layout]
[base-layout :as base-layout])
[hiccup.util :refer [to-uri]]))
(defpartial header []
[:header
[:h1
[:a {:href "/"
:title "inciꞅe"}
[:img {:alt "inciꞅe"
:src ""}]]
[:a {:href "-ci.org/RyanMcG/incise-core"
:title "Build Status"}
[:img {:alt "Build Status"
:src "-ci.org/RyanMcG/incise-core.png?branch=master"}]]]
[:span.tag-line "An extensible static site generator written in Clojure."]
[:ul.nav
[:li [:a {:href "-core"
:title "Source code on GitHub"}
"Source"]]
[:li
[:a {:href "/"
:title "codox generated API documentation"}
"API"]]
[:li
[:a {:href "/"
:title "codox generated API documentation"}
"Extensibility"]]]])
(defpartial stylesheets [_ _ old-sheets]
(vec (conj (vec (butlast old-sheets))
(link-to-asset "incise.css.stefon"))))
(defpartial head [_ _ old-head]
(vec (conj (vec (butlast (first old-head)))
[:link {:rel "icon"
:type "image/png"
:href (to-uri "/assets/images/favicon.png")}])))
(deflayout incise []
(repartial base-layout/head head)
(repartial vm-layout/stylesheets stylesheets)
(repartial base-layout/header header)
(use-layout vm-layout/vm))
(register :incise-layout incise)
|
ded79628f7a62a0a3e89bcb6cdc0b8829e0469c8cab92cffb87cf6d510114848 | twosigma/waiter | kubernetes_scheduler_integration_test.clj | (ns waiter.kubernetes-scheduler-integration-test
(:require [clojure.data.json :as json]
[clojure.string :as str]
[clojure.test :refer :all]
[clojure.tools.logging :as log]
[clojure.walk :as walk]
[waiter.status-codes :refer :all]
[waiter.util.client-tools :refer :all]
[waiter.util.utils :as utils]))
(deftest ^:parallel ^:integration-fast test-k8s-service-and-instance-fields
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [cookies service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)} #(make-kitchen-request waiter-url % :path "/hello"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(assert-service-on-all-routers waiter-url service-id cookies)
(let [instances (active-instances waiter-url service-id :cookies cookies)]
(testing "k8s scheduler service instance fields"
(doseq [instance instances]
(let [{:keys [k8s/app-name k8s/namespace k8s/node-name k8s/pod-name k8s/revision-timestamp k8s/user]} (walk/keywordize-keys instance)
assertion-message (str instance)]
(is app-name assertion-message)
(is namespace assertion-message)
(is node-name assertion-message)
(is pod-name assertion-message)
(is revision-timestamp assertion-message)
(is user assertion-message)))))
(testing "k8s scheduler service fields"
(doseq [router-url (-> waiter-url routers vals)]
(let [watch-state-json (get-k8s-watch-state router-url cookies)
service (get-in watch-state-json ["service-id->service" service-id])]
(if (map? service)
(let [{:keys [k8s/app-name k8s/container-resources k8s/containers k8s/namespace k8s/replicaset-annotations
k8s/replicaset-pod-annotations k8s/replicaset-uid]} (walk/keywordize-keys service)
k8s-containers (set containers)
assertion-message (str {:router-url router-url :service service})]
(is (= service-id (get service "id")) assertion-message)
(is app-name assertion-message)
(is (seq container-resources) assertion-message)
(is (seq k8s-containers) assertion-message)
(is (= (set k8s-containers) (set (map :name container-resources))) assertion-message)
(is (contains? k8s-containers "waiter-app") assertion-message)
(is namespace assertion-message)
(is replicaset-uid assertion-message)
(is (contains? replicaset-annotations :waiter/revision-timestamp) assertion-message)
(is (contains? replicaset-annotations :waiter/revision-version) assertion-message)
(is (contains? replicaset-pod-annotations :waiter/revision-timestamp) assertion-message)
(is (contains? replicaset-pod-annotations :waiter/revision-version) assertion-message))
(is false (str {:message "service unavailable in k8s watch state"
:router-url router-url
:service-id service-id
:watch-state-json watch-state-json})))))))))))
(deftest ^:parallel ^:integration-fast test-kubernetes-watch-state-update
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [cookies (all-cookies waiter-url)
router-url (-> waiter-url routers first val)
watch-state-json (get-k8s-watch-state router-url cookies)
initial-pods-snapshot-version (get-in watch-state-json ["pods-metadata" "version" "snapshot"])
initial-pods-watch-version (get-in watch-state-json ["pods-metadata" "version" "watch"])
initial-rs-snapshot-version (get-in watch-state-json ["rs-metadata" "version" "snapshot"])
initial-rs-watch-version (get-in watch-state-json ["rs-metadata" "version" "watch"])
{:keys [service-id]} (make-request-with-debug-info
{:x-waiter-name (rand-name)}
#(make-kitchen-request waiter-url % :path "/hello"))]
(with-service-cleanup
service-id
(let [watch-state-json (get-k8s-watch-state router-url cookies)
pods-snapshot-version' (get-in watch-state-json ["pods-metadata" "version" "snapshot"])
pods-watch-version' (get-in watch-state-json ["pods-metadata" "version" "watch"])
rs-snapshot-version' (get-in watch-state-json ["rs-metadata" "version" "snapshot"])
rs-watch-version' (get-in watch-state-json ["rs-metadata" "version" "watch"])]
(is (or (nil? initial-pods-watch-version)
(< initial-pods-snapshot-version initial-pods-watch-version)))
(is (<= initial-pods-snapshot-version pods-snapshot-version'))
(is (< pods-snapshot-version' pods-watch-version'))
(is (or (nil? initial-rs-watch-version)
(< initial-rs-snapshot-version initial-rs-watch-version)))
(is (<= initial-rs-snapshot-version rs-snapshot-version'))
(is (< rs-snapshot-version' rs-watch-version'))))))))
(defn- validate-kubernetes-custom-image
[waiter-url custom-image]
(let [{:keys [body service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-image custom-image
:x-waiter-cmd "echo -n $INTEGRATION_TEST_SENTINEL_VALUE > index.html && python3 -m http.server $PORT0"
:x-waiter-health-check-url "/"}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(assert-response-status response http-200-ok)
(is (= "Integration Test Sentinel Value" body))
(delete-service waiter-url service-id)))
;; test that we can provide a custom docker image that contains /tmp/index.html with "Integration Test Image" in it
(deftest ^:parallel ^:integration-fast test-kubernetes-custom-image
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [custom-image (System/getenv "INTEGRATION_TEST_CUSTOM_IMAGE")
_ (is (not (str/blank? custom-image)) "You must provide a custom image in the INTEGRATION_TEST_CUSTOM_IMAGE environment variable")]
(validate-kubernetes-custom-image waiter-url custom-image)))))
(deftest ^:parallel ^:integration-fast test-kubernetes-image-alias
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [custom-image (System/getenv "INTEGRATION_TEST_CUSTOM_IMAGE_ALIAS")
_ (is (not (str/blank? custom-image)) "You must provide a custom image in the INTEGRATION_TEST_CUSTOM_IMAGE_ALIAS environment variable")]
(validate-kubernetes-custom-image waiter-url custom-image)))))
(deftest ^:parallel ^:integration-slow ^:resource-heavy test-s3-logs
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(when-let [log-bucket-url (-> waiter-url get-kubernetes-scheduler-settings :log-bucket-url)]
(let [router-url (-> waiter-url routers first val)
headers {:x-waiter-max-instances 2
:x-waiter-min-instances 1
:x-waiter-name (rand-name)
:x-waiter-scale-down-factor 0.99
:x-waiter-scale-up-factor 0.99}
_ (log/info "making canary request...")
{:keys [cookies service-id]} (make-request-with-debug-info headers #(make-kitchen-request waiter-url %))
make-request-fn (fn [url] (make-request url "" :verbose true :cookies cookies))]
(with-service-cleanup
service-id
(assert-service-on-all-routers waiter-url service-id cookies)
(log/info "waiting for at least one active instance on target router")
(is (wait-for #(seq (active-instances router-url service-id :cookies cookies))
:interval 2 :timeout 45)
(str "no active instances found for " service-id))
;; Test that the active instances' logs are available.
(let [active-instances (active-instances router-url service-id :cookies cookies)
log-url (:log-url (first active-instances))
{:keys [body] :as logs-response} (make-request-fn log-url)
_ (assert-response-status logs-response http-200-ok)
log-files-list (walk/keywordize-keys (json/read-str body))
stdout-file-link (:url (first (filter #(= (:name %) "stdout") log-files-list)))
stderr-file-link (:url (first (filter #(= (:name %) "stderr") log-files-list)))]
(is (every? #(str/includes? body %) ["stderr" "stdout"])
(str "Live directory listing is missing entries: stderr and stdout, got response: " logs-response))
(doseq [file-link [stderr-file-link stdout-file-link]]
(if (str/starts-with? (str file-link) "http")
(assert-response-status (make-request-fn file-link) http-200-ok)
(log/warn "test-s3-logs did not verify file link:" file-link))))
get a killed instance by scaling up to 2 and back down to 1
(log/info "creating min-instances=2 override")
(let [override-path (str "/apps/" service-id "/override")
post-override-response (make-request waiter-url override-path
:body (utils/clj->json {:min-instances 2})
:cookies cookies
:method :post
:verbose true)]
(assert-response-status post-override-response http-200-ok)
;; wait for scale up
(is (wait-for #(let [healthy-instance-count (->> (active-instances router-url service-id :cookies cookies)
(filter :healthy?)
(count))]
(>= healthy-instance-count 2))
:interval 2 :timeout 300)
(str service-id " never scaled to at least 2 healthy instances"))
(log/info "deleting min-instances=2 override")
(let [delete-override-response (make-request waiter-url override-path
:cookies cookies
:method :delete
:verbose true)]
(assert-response-status delete-override-response http-200-ok))
;; wait for scale down
(log/info "waiting for at least one killed instance on target router")
(is (wait-for #(seq (killed-instances router-url service-id :cookies cookies))
:interval 2 :timeout 45)
(str "no killed instances found for " service-id)))
;; Test that the killed instance's logs were persisted to S3.
;; This portion of the test logic was modified from the active-instances tests above.
(let [killed-instances (killed-instances router-url service-id :cookies cookies)
log-url (:log-url (first killed-instances))
_ (do
(log/info "waiting s3 logs to appear")
(is (wait-for
#(let [{:keys [body]} (make-request-fn log-url)]
(str/includes? body log-bucket-url))
:interval 5 :timeout 300)
(str "Log URL never pointed to S3 bucket " log-bucket-url)))
{:keys [body] :as logs-response} (make-request-fn log-url)
_ (assert-response-status logs-response http-200-ok)
log-files-list (walk/keywordize-keys (json/read-str body))
stdout-file-link (:url (first (filter #(= (:name %) "stdout") log-files-list)))
stderr-file-link (:url (first (filter #(= (:name %) "stderr") log-files-list)))]
(is (wait-for
#(every? (partial str/includes? body) ["stderr" "stdout"])
:interval 1 :timeout 30)
(str "Killed directory listing is missing entries: stderr and stdout, got response: " logs-response))
(doseq [file-link [stderr-file-link stdout-file-link]]
(if (str/starts-with? (str file-link) "http")
(assert-response-status (make-request file-link "" :verbose true) http-200-ok)
(log/warn "test-s3-logs did not verify file link:" file-link))))))))))
(deftest ^:parallel ^:integration-fast test-s3-custom-bucket
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(when-let [log-bucket-url (-> waiter-url get-kubernetes-scheduler-settings :log-bucket-url)]
(let [bucket-subpath "/my/custom/path"
custom-bucket-url (str log-bucket-url bucket-subpath)
service-headers {:x-waiter-name (rand-name)
:x-waiter-env-WAITER_CONFIG_LOG_BUCKET_URL custom-bucket-url}
_ (log/info "making canary request...")
{:keys [headers service-id]} (make-request-with-debug-info service-headers #(make-kitchen-request waiter-url %))
{user "x-waiter-auth-user" instance-id "x-waiter-backend-id"} headers
[_ pod-name run-number] (re-find #"^[^.]+\.(.*)-(\d+)$" instance-id)
stderr-path (str/join "/" [bucket-subpath user service-id pod-name (str "r" run-number) "stderr"])]
(with-service-cleanup service-id
(comment "Kill the service"))
(is (wait-for
(fn look-for-s3-logs []
(let [stderr-response (make-request log-bucket-url stderr-path :method :get)]
(and (= (:status stderr-response) http-200-ok)
(str/includes? (:body stderr-response) service-id))))
:interval 2 :timeout 45)))))))
(defn- check-pod-namespace
[waiter-url headers expected-namespace]
(let [cookies (all-cookies waiter-url)
router-url (-> waiter-url routers first val)
testing-suffix (str (:x-waiter-run-as-user headers "nil") "-" (:x-waiter-namespace headers "nil"))
{:keys [body error service-id status]}
(make-request-with-debug-info
(merge {:x-waiter-name (str (rand-name) "-" testing-suffix)} headers)
#(make-kitchen-request waiter-url % :path "/hello"))]
(when-not (= http-200-ok status)
(throw (ex-info "Failed to create service"
{:response-body body
:response-status status}
error)))
(with-service-cleanup
service-id
(let [watch-state-json (get-k8s-watch-state router-url cookies)
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) first val)
pod-namespace (get-in pod-spec ["metadata" "namespace"])]
(is (some? pod-spec))
(is (= expected-namespace pod-namespace))))))
(deftest ^:parallel ^:integration-slow test-pod-namespace
"Expected behavior for services with namespaces:
Run-As-User Namespace Validation
Missing Missing OK
Missing * OK
Missing foo OK
Missing bar FAIL
foo Missing OK
foo * OK
foo foo OK
foo bar FAIL
* Missing OK
* * OK
* foo FAIL
* bar FAIL"
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [current-user (retrieve-username)
configured-namespace (-> waiter-url get-kubernetes-scheduler-settings :replicaset-spec-builder :default-namespace)
default-namespace (if (= "*" configured-namespace) current-user configured-namespace)
star-user-header {:x-waiter-run-as-user "*"}
current-user-header {:x-waiter-run-as-user current-user}
not-current-user "not-current-user"]
(testing "namespaces for current user (implicit)"
(check-pod-namespace waiter-url {} default-namespace)
(check-pod-namespace waiter-url {:x-waiter-namespace "*"} current-user)
(check-pod-namespace waiter-url {:x-waiter-namespace current-user} current-user)
(is (thrown? Exception #"Service namespace must either be omitted or match the run-as-user"
(check-pod-namespace waiter-url {:x-waiter-namespace not-current-user} current-user))))
(testing "namespaces for current user (explicit)"
(check-pod-namespace waiter-url current-user-header default-namespace)
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace "*") current-user)
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace current-user) current-user)
(is (thrown? Exception #"Service namespace must either be omitted or match the run-as-user"
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace not-current-user) current-user))))
(testing "namespaces for run-as-requester"
(check-pod-namespace waiter-url star-user-header default-namespace)
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace "*") current-user)
(is (thrown? Exception #"Cannot use run-as-requester with a specific namespace"
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace current-user) current-user)))
(is (thrown? Exception #"Cannot use run-as-requester with a specific namespace"
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace not-current-user) current-user))))))))
(defn- get-pod-service-account-info
[waiter-url namespace-arg]
(let [{:keys [body cookies error service-id status] :as response}
(make-request-with-debug-info
(cond->
{:x-waiter-name (rand-name)
:x-waiter-cmd (str "env SERVICE_ACCOUNT=\"$(grep -hs . /var/run/secrets/kubernetes.io/serviceaccount/namespace)\" "
(kitchen-cmd "-p $PORT0"))}
namespace-arg
(assoc :x-waiter-namespace namespace-arg))
#(make-kitchen-request waiter-url % :path "/environment"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(assert-service-on-all-routers waiter-url service-id cookies)
(let [instance (first (active-instances waiter-url service-id :cookies cookies))
instance-env (-> body str try-parse-json)]
{:pod-namespace (:k8s/namespace instance)
:service-account (get instance-env "SERVICE_ACCOUNT")}))))
(deftest ^:parallel ^:integration-fast test-kubernetes-scheduler-state
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [body service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-cmd "sleep 900"}
#(make-shell-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(is (wait-for
(fn []
(let [state (service-state waiter-url service-id)
unhealthy-instances (get-in state [:state :scheduler-state :syncer :instance-id->unhealthy-instance])]
(log/debug "state for" service-id state)
(seq unhealthy-instances))))))))))
(deftest ^:parallel ^:integration-fast test-service-account-injection
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [current-user (retrieve-username)]
(testing "No service account for default namespace, or matches user"
(let [{:keys [service-account pod-namespace]} (get-pod-service-account-info waiter-url nil)]
;; matches run-as-user when default-namespace resolves to run-as-user
;; blank when default-namespace resolves to some other user (don't leak credentials)
(if (= current-user pod-namespace)
(is (= current-user service-account))
(is (str/blank? service-account)))))
(testing "Has service account with custom namespace"
(let [{:keys [service-account pod-namespace]} (get-pod-service-account-info waiter-url current-user)]
(is (= current-user pod-namespace))
(is (= current-user service-account))))))))
(deftest ^:parallel ^:integration-fast ^:resource-heavy test-kubernetes-pod-expiry-failing-instance
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [cookies request-headers service-id] :as response}
(make-request-with-debug-info
{:x-waiter-distribution-scheme "simple"
:x-waiter-name (rand-name)}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(dotimes [_ 5]
(let [request-headers (assoc request-headers :x-kitchen-delay-ms 1000)
response (make-kitchen-request waiter-url request-headers :path "/die")]
(assert-response-status response #{http-502-bad-gateway http-503-service-unavailable})))
assert that more than one pod was created
(is (wait-for
(fn []
(let [{:keys [active-instances failed-instances]} (:instances (service-settings waiter-url service-id))
pod-ids (->> (concat active-instances failed-instances)
(map :k8s/pod-name)
(into #{}))]
(log/info pod-ids)
(< 1 (count pod-ids)))))))
(assert-service-not-on-any-routers waiter-url service-id cookies)
(let [{:keys [active-instances failed-instances]} (get (service-settings waiter-url service-id) :instances)]
(is (empty? active-instances))
(is (empty? failed-instances)))))))
(deftest ^:parallel ^:integration-slow ^:resource-heavy test-kubernetes-pod-expiry-grace-period
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(if-let [custom-image (System/getenv "INTEGRATION_TEST_BAD_IMAGE")]
(let [{:keys [container-running-grace-secs]} (get-kubernetes-scheduler-settings waiter-url)
waiter-headers (assoc (kitchen-request-headers)
:x-waiter-distribution-scheme "simple"
:x-waiter-image custom-image
:x-waiter-max-instances 1
:x-waiter-min-instances 1
:x-waiter-name (rand-name)
:x-waiter-timeout 30000
:x-waiter-queue-timeout 30000)
service-id (retrieve-service-id waiter-url waiter-headers)
timeout-secs 150]
(cond
(zero? container-running-grace-secs)
(log/info "skipping test as container-running-grace-secs is disabled"
{:container-running-grace-secs container-running-grace-secs
:waiter-url waiter-url})
(> container-running-grace-secs timeout-secs)
(log/warn "skipping test as the configuration will cause the test to run for too long"
{:container-running-grace-secs container-running-grace-secs
:waiter-url waiter-url})
:else
(with-service-cleanup
service-id
;; make request to launch service instance(s), we do not care about the response
(make-request waiter-url "/status" :headers waiter-headers)
assert that more than one pod was created
(is (wait-for
(fn []
(let [{:keys [instances]} (service-settings waiter-url service-id)
{:keys [active-instances failed-instances]} instances
pod-ids (->> (concat active-instances failed-instances)
(map :k8s/pod-name)
(into #{}))]
(log/info "active-instances" active-instances)
(log/info "failed-instances" failed-instances)
(< 1 (count pod-ids))))
:interval 15
:timeout timeout-secs)))))
(log/warn "skipping test as INTEGRATION_TEST_BAD_IMAGE is not specified")))))
(deftest ^:parallel ^:integration-fast test-kubernetes-raven-sidecar
(testing-using-waiter-url
(if-not (using-raven? waiter-url)
(log/warn "skipping the integration test as :raven-sidecar is not configured")
(let [x-waiter-name (rand-name)
raven-sidecar-flag (get-raven-sidecar-flag waiter-url)
request-headers {:x-waiter-name x-waiter-name
(keyword (str "x-waiter-env-" raven-sidecar-flag)) "true"}
_ (log/info "making canary request")
{:keys [cookies service-id] :as response} (make-request-with-debug-info
request-headers
#(make-kitchen-request waiter-url % :method :get :path "/status"))]
(with-service-cleanup
service-id
(assert-service-on-all-routers waiter-url service-id cookies)
(assert-response-status response http-200-ok)
(let [response (make-kitchen-request waiter-url request-headers :method :get :path "/request-info")]
(assert-response-status response http-200-ok)
(testing "Expected Raven/Envoy specific headers are present in both request and response"
(let [response-body (try-parse-json (:body response))
response-headers (:headers response)]
;; x-envoy-expected-rq-timeout-ms is absent when timeouts are disabled
(is (some (get response-body "headers") ["x-envoy-external-address" "x-envoy-internal"]))
(is (utils/raven-proxy-response? response)))))
(let [response (make-request-with-debug-info
request-headers
#(make-kitchen-request waiter-url % :method :get :path "/environment"))]
(assert-response-status response http-200-ok)
(let [response-body (try-parse-json (:body response))
response-headers (:headers response)]
(testing "Port value is correctly offset compared to instance value"
(let [response-header-backend-port (get response-headers "x-waiter-backend-port")
env-response-port0 (get response-body "PORT0")]
(is (not= response-header-backend-port env-response-port0))))
(testing "Reverse proxy flag environment variable is present"
(is (contains? response-body raven-sidecar-flag))
(is (= "true" (get response-body raven-sidecar-flag)))))))))))
(deftest ^:parallel ^:integration-slow test-kubernetes-event-fetching
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [router-url (-> waiter-url routers first val)]
(testing "service that yields bad pod config will result in k8s events fetching"
(let [{:keys [cookies service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-env-K8S_CONFIGMAP_NAMES (str (System/nanoTime) "-configmap")
:x-waiter-queue-timeout 5000}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
bad configmap causes service not to start promptly , 5s queue timout yields 503 response
(assert-response-status response http-503-service-unavailable)
after 30 seconds , scheduler should fetch k8s events
(is (wait-for
(fn []
(let [watch-state-json (get-k8s-watch-state router-url cookies)
rs-spec (-> watch-state-json (get-in ["service-id->service" service-id]))
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) vals first)
pod-events (get pod-spec "k8s/events")
failed-mount-events (filter #(= "FailedMount" (get % "reason")) pod-events)]
(and (contains? rs-spec "k8s/events")
(pos? (count failed-mount-events))))))))))
(testing "a healthy service will not result in k8s event fetching"
(let [request-headers {:x-waiter-name (rand-name)}
{:keys [cookies service-id] :as response}
(make-request-with-debug-info request-headers #(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
;; service is healthy and responding to requests
(let [response (make-kitchen-request waiter-url request-headers :method :get :path "/hello")]
(assert-response-status response http-200-ok))
since srevice is healthy , do not expect / events to be present in scheduler state
(is (nil? (wait-for
(fn []
(let [watch-state-json (get-k8s-watch-state router-url cookies)
rs-spec (-> watch-state-json (get-in ["service-id->service" service-id]))
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) vals first)]
(and (contains? rs-spec "k8s/events")
(contains? pod-spec "k8s/events"))))))))))))))
| null | https://raw.githubusercontent.com/twosigma/waiter/84b99f66aab5ac9faae14287d728605f47508bcc/waiter/integration/waiter/kubernetes_scheduler_integration_test.clj | clojure | Test that the active instances' logs are available.
wait for scale up
wait for scale down
Test that the killed instance's logs were persisted to S3.
This portion of the test logic was modified from the active-instances tests above.
matches run-as-user when default-namespace resolves to run-as-user
blank when default-namespace resolves to some other user (don't leak credentials)
make request to launch service instance(s), we do not care about the response
x-envoy-expected-rq-timeout-ms is absent when timeouts are disabled
service is healthy and responding to requests | (ns waiter.kubernetes-scheduler-integration-test
(:require [clojure.data.json :as json]
[clojure.string :as str]
[clojure.test :refer :all]
[clojure.tools.logging :as log]
[clojure.walk :as walk]
[waiter.status-codes :refer :all]
[waiter.util.client-tools :refer :all]
[waiter.util.utils :as utils]))
(deftest ^:parallel ^:integration-fast test-k8s-service-and-instance-fields
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [cookies service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)} #(make-kitchen-request waiter-url % :path "/hello"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(assert-service-on-all-routers waiter-url service-id cookies)
(let [instances (active-instances waiter-url service-id :cookies cookies)]
(testing "k8s scheduler service instance fields"
(doseq [instance instances]
(let [{:keys [k8s/app-name k8s/namespace k8s/node-name k8s/pod-name k8s/revision-timestamp k8s/user]} (walk/keywordize-keys instance)
assertion-message (str instance)]
(is app-name assertion-message)
(is namespace assertion-message)
(is node-name assertion-message)
(is pod-name assertion-message)
(is revision-timestamp assertion-message)
(is user assertion-message)))))
(testing "k8s scheduler service fields"
(doseq [router-url (-> waiter-url routers vals)]
(let [watch-state-json (get-k8s-watch-state router-url cookies)
service (get-in watch-state-json ["service-id->service" service-id])]
(if (map? service)
(let [{:keys [k8s/app-name k8s/container-resources k8s/containers k8s/namespace k8s/replicaset-annotations
k8s/replicaset-pod-annotations k8s/replicaset-uid]} (walk/keywordize-keys service)
k8s-containers (set containers)
assertion-message (str {:router-url router-url :service service})]
(is (= service-id (get service "id")) assertion-message)
(is app-name assertion-message)
(is (seq container-resources) assertion-message)
(is (seq k8s-containers) assertion-message)
(is (= (set k8s-containers) (set (map :name container-resources))) assertion-message)
(is (contains? k8s-containers "waiter-app") assertion-message)
(is namespace assertion-message)
(is replicaset-uid assertion-message)
(is (contains? replicaset-annotations :waiter/revision-timestamp) assertion-message)
(is (contains? replicaset-annotations :waiter/revision-version) assertion-message)
(is (contains? replicaset-pod-annotations :waiter/revision-timestamp) assertion-message)
(is (contains? replicaset-pod-annotations :waiter/revision-version) assertion-message))
(is false (str {:message "service unavailable in k8s watch state"
:router-url router-url
:service-id service-id
:watch-state-json watch-state-json})))))))))))
(deftest ^:parallel ^:integration-fast test-kubernetes-watch-state-update
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [cookies (all-cookies waiter-url)
router-url (-> waiter-url routers first val)
watch-state-json (get-k8s-watch-state router-url cookies)
initial-pods-snapshot-version (get-in watch-state-json ["pods-metadata" "version" "snapshot"])
initial-pods-watch-version (get-in watch-state-json ["pods-metadata" "version" "watch"])
initial-rs-snapshot-version (get-in watch-state-json ["rs-metadata" "version" "snapshot"])
initial-rs-watch-version (get-in watch-state-json ["rs-metadata" "version" "watch"])
{:keys [service-id]} (make-request-with-debug-info
{:x-waiter-name (rand-name)}
#(make-kitchen-request waiter-url % :path "/hello"))]
(with-service-cleanup
service-id
(let [watch-state-json (get-k8s-watch-state router-url cookies)
pods-snapshot-version' (get-in watch-state-json ["pods-metadata" "version" "snapshot"])
pods-watch-version' (get-in watch-state-json ["pods-metadata" "version" "watch"])
rs-snapshot-version' (get-in watch-state-json ["rs-metadata" "version" "snapshot"])
rs-watch-version' (get-in watch-state-json ["rs-metadata" "version" "watch"])]
(is (or (nil? initial-pods-watch-version)
(< initial-pods-snapshot-version initial-pods-watch-version)))
(is (<= initial-pods-snapshot-version pods-snapshot-version'))
(is (< pods-snapshot-version' pods-watch-version'))
(is (or (nil? initial-rs-watch-version)
(< initial-rs-snapshot-version initial-rs-watch-version)))
(is (<= initial-rs-snapshot-version rs-snapshot-version'))
(is (< rs-snapshot-version' rs-watch-version'))))))))
(defn- validate-kubernetes-custom-image
[waiter-url custom-image]
(let [{:keys [body service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-image custom-image
:x-waiter-cmd "echo -n $INTEGRATION_TEST_SENTINEL_VALUE > index.html && python3 -m http.server $PORT0"
:x-waiter-health-check-url "/"}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(assert-response-status response http-200-ok)
(is (= "Integration Test Sentinel Value" body))
(delete-service waiter-url service-id)))
test that we can provide a custom docker image that contains /tmp / index.html with " Integration Test Image " in it
(deftest ^:parallel ^:integration-fast test-kubernetes-custom-image
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [custom-image (System/getenv "INTEGRATION_TEST_CUSTOM_IMAGE")
_ (is (not (str/blank? custom-image)) "You must provide a custom image in the INTEGRATION_TEST_CUSTOM_IMAGE environment variable")]
(validate-kubernetes-custom-image waiter-url custom-image)))))
(deftest ^:parallel ^:integration-fast test-kubernetes-image-alias
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [custom-image (System/getenv "INTEGRATION_TEST_CUSTOM_IMAGE_ALIAS")
_ (is (not (str/blank? custom-image)) "You must provide a custom image in the INTEGRATION_TEST_CUSTOM_IMAGE_ALIAS environment variable")]
(validate-kubernetes-custom-image waiter-url custom-image)))))
(deftest ^:parallel ^:integration-slow ^:resource-heavy test-s3-logs
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(when-let [log-bucket-url (-> waiter-url get-kubernetes-scheduler-settings :log-bucket-url)]
(let [router-url (-> waiter-url routers first val)
headers {:x-waiter-max-instances 2
:x-waiter-min-instances 1
:x-waiter-name (rand-name)
:x-waiter-scale-down-factor 0.99
:x-waiter-scale-up-factor 0.99}
_ (log/info "making canary request...")
{:keys [cookies service-id]} (make-request-with-debug-info headers #(make-kitchen-request waiter-url %))
make-request-fn (fn [url] (make-request url "" :verbose true :cookies cookies))]
(with-service-cleanup
service-id
(assert-service-on-all-routers waiter-url service-id cookies)
(log/info "waiting for at least one active instance on target router")
(is (wait-for #(seq (active-instances router-url service-id :cookies cookies))
:interval 2 :timeout 45)
(str "no active instances found for " service-id))
(let [active-instances (active-instances router-url service-id :cookies cookies)
log-url (:log-url (first active-instances))
{:keys [body] :as logs-response} (make-request-fn log-url)
_ (assert-response-status logs-response http-200-ok)
log-files-list (walk/keywordize-keys (json/read-str body))
stdout-file-link (:url (first (filter #(= (:name %) "stdout") log-files-list)))
stderr-file-link (:url (first (filter #(= (:name %) "stderr") log-files-list)))]
(is (every? #(str/includes? body %) ["stderr" "stdout"])
(str "Live directory listing is missing entries: stderr and stdout, got response: " logs-response))
(doseq [file-link [stderr-file-link stdout-file-link]]
(if (str/starts-with? (str file-link) "http")
(assert-response-status (make-request-fn file-link) http-200-ok)
(log/warn "test-s3-logs did not verify file link:" file-link))))
get a killed instance by scaling up to 2 and back down to 1
(log/info "creating min-instances=2 override")
(let [override-path (str "/apps/" service-id "/override")
post-override-response (make-request waiter-url override-path
:body (utils/clj->json {:min-instances 2})
:cookies cookies
:method :post
:verbose true)]
(assert-response-status post-override-response http-200-ok)
(is (wait-for #(let [healthy-instance-count (->> (active-instances router-url service-id :cookies cookies)
(filter :healthy?)
(count))]
(>= healthy-instance-count 2))
:interval 2 :timeout 300)
(str service-id " never scaled to at least 2 healthy instances"))
(log/info "deleting min-instances=2 override")
(let [delete-override-response (make-request waiter-url override-path
:cookies cookies
:method :delete
:verbose true)]
(assert-response-status delete-override-response http-200-ok))
(log/info "waiting for at least one killed instance on target router")
(is (wait-for #(seq (killed-instances router-url service-id :cookies cookies))
:interval 2 :timeout 45)
(str "no killed instances found for " service-id)))
(let [killed-instances (killed-instances router-url service-id :cookies cookies)
log-url (:log-url (first killed-instances))
_ (do
(log/info "waiting s3 logs to appear")
(is (wait-for
#(let [{:keys [body]} (make-request-fn log-url)]
(str/includes? body log-bucket-url))
:interval 5 :timeout 300)
(str "Log URL never pointed to S3 bucket " log-bucket-url)))
{:keys [body] :as logs-response} (make-request-fn log-url)
_ (assert-response-status logs-response http-200-ok)
log-files-list (walk/keywordize-keys (json/read-str body))
stdout-file-link (:url (first (filter #(= (:name %) "stdout") log-files-list)))
stderr-file-link (:url (first (filter #(= (:name %) "stderr") log-files-list)))]
(is (wait-for
#(every? (partial str/includes? body) ["stderr" "stdout"])
:interval 1 :timeout 30)
(str "Killed directory listing is missing entries: stderr and stdout, got response: " logs-response))
(doseq [file-link [stderr-file-link stdout-file-link]]
(if (str/starts-with? (str file-link) "http")
(assert-response-status (make-request file-link "" :verbose true) http-200-ok)
(log/warn "test-s3-logs did not verify file link:" file-link))))))))))
(deftest ^:parallel ^:integration-fast test-s3-custom-bucket
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(when-let [log-bucket-url (-> waiter-url get-kubernetes-scheduler-settings :log-bucket-url)]
(let [bucket-subpath "/my/custom/path"
custom-bucket-url (str log-bucket-url bucket-subpath)
service-headers {:x-waiter-name (rand-name)
:x-waiter-env-WAITER_CONFIG_LOG_BUCKET_URL custom-bucket-url}
_ (log/info "making canary request...")
{:keys [headers service-id]} (make-request-with-debug-info service-headers #(make-kitchen-request waiter-url %))
{user "x-waiter-auth-user" instance-id "x-waiter-backend-id"} headers
[_ pod-name run-number] (re-find #"^[^.]+\.(.*)-(\d+)$" instance-id)
stderr-path (str/join "/" [bucket-subpath user service-id pod-name (str "r" run-number) "stderr"])]
(with-service-cleanup service-id
(comment "Kill the service"))
(is (wait-for
(fn look-for-s3-logs []
(let [stderr-response (make-request log-bucket-url stderr-path :method :get)]
(and (= (:status stderr-response) http-200-ok)
(str/includes? (:body stderr-response) service-id))))
:interval 2 :timeout 45)))))))
(defn- check-pod-namespace
[waiter-url headers expected-namespace]
(let [cookies (all-cookies waiter-url)
router-url (-> waiter-url routers first val)
testing-suffix (str (:x-waiter-run-as-user headers "nil") "-" (:x-waiter-namespace headers "nil"))
{:keys [body error service-id status]}
(make-request-with-debug-info
(merge {:x-waiter-name (str (rand-name) "-" testing-suffix)} headers)
#(make-kitchen-request waiter-url % :path "/hello"))]
(when-not (= http-200-ok status)
(throw (ex-info "Failed to create service"
{:response-body body
:response-status status}
error)))
(with-service-cleanup
service-id
(let [watch-state-json (get-k8s-watch-state router-url cookies)
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) first val)
pod-namespace (get-in pod-spec ["metadata" "namespace"])]
(is (some? pod-spec))
(is (= expected-namespace pod-namespace))))))
(deftest ^:parallel ^:integration-slow test-pod-namespace
"Expected behavior for services with namespaces:
Run-As-User Namespace Validation
Missing Missing OK
Missing * OK
Missing foo OK
Missing bar FAIL
foo Missing OK
foo * OK
foo foo OK
foo bar FAIL
* Missing OK
* * OK
* foo FAIL
* bar FAIL"
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [current-user (retrieve-username)
configured-namespace (-> waiter-url get-kubernetes-scheduler-settings :replicaset-spec-builder :default-namespace)
default-namespace (if (= "*" configured-namespace) current-user configured-namespace)
star-user-header {:x-waiter-run-as-user "*"}
current-user-header {:x-waiter-run-as-user current-user}
not-current-user "not-current-user"]
(testing "namespaces for current user (implicit)"
(check-pod-namespace waiter-url {} default-namespace)
(check-pod-namespace waiter-url {:x-waiter-namespace "*"} current-user)
(check-pod-namespace waiter-url {:x-waiter-namespace current-user} current-user)
(is (thrown? Exception #"Service namespace must either be omitted or match the run-as-user"
(check-pod-namespace waiter-url {:x-waiter-namespace not-current-user} current-user))))
(testing "namespaces for current user (explicit)"
(check-pod-namespace waiter-url current-user-header default-namespace)
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace "*") current-user)
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace current-user) current-user)
(is (thrown? Exception #"Service namespace must either be omitted or match the run-as-user"
(check-pod-namespace waiter-url (assoc current-user-header :x-waiter-namespace not-current-user) current-user))))
(testing "namespaces for run-as-requester"
(check-pod-namespace waiter-url star-user-header default-namespace)
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace "*") current-user)
(is (thrown? Exception #"Cannot use run-as-requester with a specific namespace"
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace current-user) current-user)))
(is (thrown? Exception #"Cannot use run-as-requester with a specific namespace"
(check-pod-namespace waiter-url (assoc star-user-header :x-waiter-namespace not-current-user) current-user))))))))
(defn- get-pod-service-account-info
[waiter-url namespace-arg]
(let [{:keys [body cookies error service-id status] :as response}
(make-request-with-debug-info
(cond->
{:x-waiter-name (rand-name)
:x-waiter-cmd (str "env SERVICE_ACCOUNT=\"$(grep -hs . /var/run/secrets/kubernetes.io/serviceaccount/namespace)\" "
(kitchen-cmd "-p $PORT0"))}
namespace-arg
(assoc :x-waiter-namespace namespace-arg))
#(make-kitchen-request waiter-url % :path "/environment"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(assert-service-on-all-routers waiter-url service-id cookies)
(let [instance (first (active-instances waiter-url service-id :cookies cookies))
instance-env (-> body str try-parse-json)]
{:pod-namespace (:k8s/namespace instance)
:service-account (get instance-env "SERVICE_ACCOUNT")}))))
(deftest ^:parallel ^:integration-fast test-kubernetes-scheduler-state
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [body service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-cmd "sleep 900"}
#(make-shell-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(is (wait-for
(fn []
(let [state (service-state waiter-url service-id)
unhealthy-instances (get-in state [:state :scheduler-state :syncer :instance-id->unhealthy-instance])]
(log/debug "state for" service-id state)
(seq unhealthy-instances))))))))))
(deftest ^:parallel ^:integration-fast test-service-account-injection
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [current-user (retrieve-username)]
(testing "No service account for default namespace, or matches user"
(let [{:keys [service-account pod-namespace]} (get-pod-service-account-info waiter-url nil)]
(if (= current-user pod-namespace)
(is (= current-user service-account))
(is (str/blank? service-account)))))
(testing "Has service account with custom namespace"
(let [{:keys [service-account pod-namespace]} (get-pod-service-account-info waiter-url current-user)]
(is (= current-user pod-namespace))
(is (= current-user service-account))))))))
(deftest ^:parallel ^:integration-fast ^:resource-heavy test-kubernetes-pod-expiry-failing-instance
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [{:keys [cookies request-headers service-id] :as response}
(make-request-with-debug-info
{:x-waiter-distribution-scheme "simple"
:x-waiter-name (rand-name)}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(dotimes [_ 5]
(let [request-headers (assoc request-headers :x-kitchen-delay-ms 1000)
response (make-kitchen-request waiter-url request-headers :path "/die")]
(assert-response-status response #{http-502-bad-gateway http-503-service-unavailable})))
assert that more than one pod was created
(is (wait-for
(fn []
(let [{:keys [active-instances failed-instances]} (:instances (service-settings waiter-url service-id))
pod-ids (->> (concat active-instances failed-instances)
(map :k8s/pod-name)
(into #{}))]
(log/info pod-ids)
(< 1 (count pod-ids)))))))
(assert-service-not-on-any-routers waiter-url service-id cookies)
(let [{:keys [active-instances failed-instances]} (get (service-settings waiter-url service-id) :instances)]
(is (empty? active-instances))
(is (empty? failed-instances)))))))
(deftest ^:parallel ^:integration-slow ^:resource-heavy test-kubernetes-pod-expiry-grace-period
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(if-let [custom-image (System/getenv "INTEGRATION_TEST_BAD_IMAGE")]
(let [{:keys [container-running-grace-secs]} (get-kubernetes-scheduler-settings waiter-url)
waiter-headers (assoc (kitchen-request-headers)
:x-waiter-distribution-scheme "simple"
:x-waiter-image custom-image
:x-waiter-max-instances 1
:x-waiter-min-instances 1
:x-waiter-name (rand-name)
:x-waiter-timeout 30000
:x-waiter-queue-timeout 30000)
service-id (retrieve-service-id waiter-url waiter-headers)
timeout-secs 150]
(cond
(zero? container-running-grace-secs)
(log/info "skipping test as container-running-grace-secs is disabled"
{:container-running-grace-secs container-running-grace-secs
:waiter-url waiter-url})
(> container-running-grace-secs timeout-secs)
(log/warn "skipping test as the configuration will cause the test to run for too long"
{:container-running-grace-secs container-running-grace-secs
:waiter-url waiter-url})
:else
(with-service-cleanup
service-id
(make-request waiter-url "/status" :headers waiter-headers)
assert that more than one pod was created
(is (wait-for
(fn []
(let [{:keys [instances]} (service-settings waiter-url service-id)
{:keys [active-instances failed-instances]} instances
pod-ids (->> (concat active-instances failed-instances)
(map :k8s/pod-name)
(into #{}))]
(log/info "active-instances" active-instances)
(log/info "failed-instances" failed-instances)
(< 1 (count pod-ids))))
:interval 15
:timeout timeout-secs)))))
(log/warn "skipping test as INTEGRATION_TEST_BAD_IMAGE is not specified")))))
(deftest ^:parallel ^:integration-fast test-kubernetes-raven-sidecar
(testing-using-waiter-url
(if-not (using-raven? waiter-url)
(log/warn "skipping the integration test as :raven-sidecar is not configured")
(let [x-waiter-name (rand-name)
raven-sidecar-flag (get-raven-sidecar-flag waiter-url)
request-headers {:x-waiter-name x-waiter-name
(keyword (str "x-waiter-env-" raven-sidecar-flag)) "true"}
_ (log/info "making canary request")
{:keys [cookies service-id] :as response} (make-request-with-debug-info
request-headers
#(make-kitchen-request waiter-url % :method :get :path "/status"))]
(with-service-cleanup
service-id
(assert-service-on-all-routers waiter-url service-id cookies)
(assert-response-status response http-200-ok)
(let [response (make-kitchen-request waiter-url request-headers :method :get :path "/request-info")]
(assert-response-status response http-200-ok)
(testing "Expected Raven/Envoy specific headers are present in both request and response"
(let [response-body (try-parse-json (:body response))
response-headers (:headers response)]
(is (some (get response-body "headers") ["x-envoy-external-address" "x-envoy-internal"]))
(is (utils/raven-proxy-response? response)))))
(let [response (make-request-with-debug-info
request-headers
#(make-kitchen-request waiter-url % :method :get :path "/environment"))]
(assert-response-status response http-200-ok)
(let [response-body (try-parse-json (:body response))
response-headers (:headers response)]
(testing "Port value is correctly offset compared to instance value"
(let [response-header-backend-port (get response-headers "x-waiter-backend-port")
env-response-port0 (get response-body "PORT0")]
(is (not= response-header-backend-port env-response-port0))))
(testing "Reverse proxy flag environment variable is present"
(is (contains? response-body raven-sidecar-flag))
(is (= "true" (get response-body raven-sidecar-flag)))))))))))
(deftest ^:parallel ^:integration-slow test-kubernetes-event-fetching
(testing-using-waiter-url
(when (using-k8s? waiter-url)
(let [router-url (-> waiter-url routers first val)]
(testing "service that yields bad pod config will result in k8s events fetching"
(let [{:keys [cookies service-id] :as response}
(make-request-with-debug-info
{:x-waiter-name (rand-name)
:x-waiter-env-K8S_CONFIGMAP_NAMES (str (System/nanoTime) "-configmap")
:x-waiter-queue-timeout 5000}
#(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
bad configmap causes service not to start promptly , 5s queue timout yields 503 response
(assert-response-status response http-503-service-unavailable)
after 30 seconds , scheduler should fetch k8s events
(is (wait-for
(fn []
(let [watch-state-json (get-k8s-watch-state router-url cookies)
rs-spec (-> watch-state-json (get-in ["service-id->service" service-id]))
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) vals first)
pod-events (get pod-spec "k8s/events")
failed-mount-events (filter #(= "FailedMount" (get % "reason")) pod-events)]
(and (contains? rs-spec "k8s/events")
(pos? (count failed-mount-events))))))))))
(testing "a healthy service will not result in k8s event fetching"
(let [request-headers {:x-waiter-name (rand-name)}
{:keys [cookies service-id] :as response}
(make-request-with-debug-info request-headers #(make-kitchen-request waiter-url % :method :get :path "/"))]
(with-service-cleanup
service-id
(assert-response-status response http-200-ok)
(let [response (make-kitchen-request waiter-url request-headers :method :get :path "/hello")]
(assert-response-status response http-200-ok))
since srevice is healthy , do not expect / events to be present in scheduler state
(is (nil? (wait-for
(fn []
(let [watch-state-json (get-k8s-watch-state router-url cookies)
rs-spec (-> watch-state-json (get-in ["service-id->service" service-id]))
pod-spec (-> watch-state-json (get-in ["service-id->pod-id->pod" service-id]) vals first)]
(and (contains? rs-spec "k8s/events")
(contains? pod-spec "k8s/events"))))))))))))))
|
becbc7c0b5046b06a5625a230e9c149eb49e4b8dbc725734f872d57972e47036 | hannesm/patch | patch_command.ml | (** For now this command is only used for testing,
in particular it is not installed to the user.
(If we wanted to install it, it would need
a better name.)
*)
let usage =
"Simplified patch utility for single-file patches;\n
./patch.exe <input-file> <unififed-diff-file> -o <output-file>"
let exit_command_line_error = 1
let exit_open_error = 2
let exit_several_chunks = 3
let exit_patch_failure = 4
let run ~input ~diff =
match Patch.to_diffs diff with
| [] -> input
| _::_::_ ->
prerr_endline "Error: The diff contains several chunks,\n\
which is not supported by this command.";
exit exit_several_chunks
| [diff] ->
begin match Patch.patch (Some input) diff with
| None ->
Printf.eprintf "Error during patching:\n%!";
exit exit_patch_failure
| Some output -> output
end
module IO = struct
let read input =
let rec loop buf input =
match input_char input with
| exception End_of_file -> Buffer.contents buf
| c -> Buffer.add_char buf c; loop buf input
in
loop (Buffer.create 80) input
let write output data =
String.iter (output_char output) data;
flush output;
()
end
let () =
if Array.length Sys.argv = 1 then begin
prerr_endline usage;
exit 0;
end;
let input_path, diff_path, output_path = try
let input_path = Sys.argv.(1) in
let diff_path = Sys.argv.(2) in
let dash_o = Sys.argv.(3) in
let output_path = Sys.argv.(4) in
if dash_o <> "-o" then raise Exit;
input_path,
diff_path,
output_path
with _ ->
prerr_endline "Error parsing the command-line arguments";
prerr_endline usage;
prerr_newline ();
exit exit_command_line_error
in
let get_data path =
match open_in path with
| exception _ ->
Printf.eprintf "Error: unable to open file %S for reading\n%!" path;
exit exit_open_error
| input ->
let data = IO.read input in
close_in input;
data
in
let write_data path ~data =
match open_out path with
| exception _ ->
Printf.eprintf "Error: unable to open file %S for writing\n%!" path;
exit exit_open_error
| output ->
IO.write output data;
close_out output
in
let input_data = get_data input_path in
let diff_data = get_data diff_path in
let output_data = run ~input:input_data ~diff:diff_data in
write_data output_path ~data:output_data
| null | https://raw.githubusercontent.com/hannesm/patch/29b7c70327f30fd618ac487116d3010d5b99d189/src/patch_command.ml | ocaml | * For now this command is only used for testing,
in particular it is not installed to the user.
(If we wanted to install it, it would need
a better name.)
|
let usage =
"Simplified patch utility for single-file patches;\n
./patch.exe <input-file> <unififed-diff-file> -o <output-file>"
let exit_command_line_error = 1
let exit_open_error = 2
let exit_several_chunks = 3
let exit_patch_failure = 4
let run ~input ~diff =
match Patch.to_diffs diff with
| [] -> input
| _::_::_ ->
prerr_endline "Error: The diff contains several chunks,\n\
which is not supported by this command.";
exit exit_several_chunks
| [diff] ->
begin match Patch.patch (Some input) diff with
| None ->
Printf.eprintf "Error during patching:\n%!";
exit exit_patch_failure
| Some output -> output
end
module IO = struct
let read input =
let rec loop buf input =
match input_char input with
| exception End_of_file -> Buffer.contents buf
| c -> Buffer.add_char buf c; loop buf input
in
loop (Buffer.create 80) input
let write output data =
String.iter (output_char output) data;
flush output;
()
end
let () =
if Array.length Sys.argv = 1 then begin
prerr_endline usage;
exit 0;
end;
let input_path, diff_path, output_path = try
let input_path = Sys.argv.(1) in
let diff_path = Sys.argv.(2) in
let dash_o = Sys.argv.(3) in
let output_path = Sys.argv.(4) in
if dash_o <> "-o" then raise Exit;
input_path,
diff_path,
output_path
with _ ->
prerr_endline "Error parsing the command-line arguments";
prerr_endline usage;
prerr_newline ();
exit exit_command_line_error
in
let get_data path =
match open_in path with
| exception _ ->
Printf.eprintf "Error: unable to open file %S for reading\n%!" path;
exit exit_open_error
| input ->
let data = IO.read input in
close_in input;
data
in
let write_data path ~data =
match open_out path with
| exception _ ->
Printf.eprintf "Error: unable to open file %S for writing\n%!" path;
exit exit_open_error
| output ->
IO.write output data;
close_out output
in
let input_data = get_data input_path in
let diff_data = get_data diff_path in
let output_data = run ~input:input_data ~diff:diff_data in
write_data output_path ~data:output_data
|
7cc782d1d7e0e6b12e1711598d8b26728ce4993e6a82fcc938fd6ce8079a5dce | gigasquid/speech-acts-classifier | core_test.clj | (ns speech-acts-classifier.core-test
(:require [clojure.test :refer :all]
[speech-acts-classifier.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/gigasquid/speech-acts-classifier/4052f3b20a5fbbcacb298005fcaedf6f7df9b731/test/speech_acts_classifier/core_test.clj | clojure | (ns speech-acts-classifier.core-test
(:require [clojure.test :refer :all]
[speech-acts-classifier.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
2c7f69d23891e54a5d8e6108d7311f436cd987c1d88c8c16e7221d87963bf848 | yuriy-chumak/ol | xpm.scm | (define-library (file xpm)
(import
(otus lisp)
(only (lang intern) string->symbol)
(only (lang sexp) list->number)
(file parser))
(export
xpm-parser
xpm-parse-file)
parsed xpm is :
; vector of vectors of colors
(begin
(define (block-comment)
(either
(let-parse* (
(skip (get-imm #\*))
(skip (get-imm #\/)))
'comment)
(let-parse* (
(skip byte)
(skip (block-comment)))
'comment)))
(define whitespace
(either
(byte-if (lambda (c) (has? '(#\tab #\newline #\space #\return) c)))
(let-parse* (
(skip (get-imm #\/))
(skip (get-imm #\*))
(skip (block-comment)))
'comment)))
(define maybe-whitespaces (greedy* whitespace))
(define rest-of-line
(let-parse*
((chars (greedy* (byte-if (lambda (x) (not (eq? x 10))))))
< - note that this wo n't match if line ends to eof
chars))
(define (digit? x) (<= #\0 x #\9))
(define xpm-parser
(let-parse* (
; header:
(/ maybe-whitespaces)
(/ (get-word "static char *" #t))
(/ rest-of-line)
; parameters:
(/ maybe-whitespaces)
(/ (imm #\"))
(width (greedy+ (byte-if digit?)))
(/ (imm #\space))
(height (greedy+ (byte-if digit?)))
(/ (imm #\space))
(colors (greedy+ (byte-if digit?)))
(/ (imm #\space))
supported only 1
(/ (imm #\")) (/ (imm #\,))
; color table
(color-table (times (list->number colors 10)
(let-parse* (
(/ maybe-whitespaces)
(/ (imm #\"))
(key byte)
(/ (imm #\tab))
(ctype byte)
(/ (imm #\space))
(color (greedy+ (byte-if (lambda (b) (not (eq? b #\"))))))
(/ (imm #\")) (/ (imm #\,)))
[key ctype color])))
; bitmap
(bitmap (times (list->number height 10)
(let-parse* (
(/ maybe-whitespaces)
(/ (imm #\"))
(row (times (list->number width 10) byte))
(/ (imm #\"))
(/ rest-of-line))
row)))
;; (out get-byte)
(/ maybe-whitespaces)
)
{
'width (list->number width 10)
'height (list->number height 10)
'colors (list->number colors 10)
'bpp (list->number (list bpp) 10)
'color-table color-table
'bitmap bitmap
}))
(define (xpm-parse-file filename)
(let ((file (open-input-file filename)))
(if file
(let ((o (parse xpm-parser (port->bytestream file) filename "xpm parse error" #false)))
(close-port file)
o))))
))
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/83dd03d311339763682eab02cbe0c1321daa25bc/libraries/file/xpm.scm | scheme | vector of vectors of colors
header:
parameters:
color table
bitmap
(out get-byte) | (define-library (file xpm)
(import
(otus lisp)
(only (lang intern) string->symbol)
(only (lang sexp) list->number)
(file parser))
(export
xpm-parser
xpm-parse-file)
parsed xpm is :
(begin
(define (block-comment)
(either
(let-parse* (
(skip (get-imm #\*))
(skip (get-imm #\/)))
'comment)
(let-parse* (
(skip byte)
(skip (block-comment)))
'comment)))
(define whitespace
(either
(byte-if (lambda (c) (has? '(#\tab #\newline #\space #\return) c)))
(let-parse* (
(skip (get-imm #\/))
(skip (get-imm #\*))
(skip (block-comment)))
'comment)))
(define maybe-whitespaces (greedy* whitespace))
(define rest-of-line
(let-parse*
((chars (greedy* (byte-if (lambda (x) (not (eq? x 10))))))
< - note that this wo n't match if line ends to eof
chars))
(define (digit? x) (<= #\0 x #\9))
(define xpm-parser
(let-parse* (
(/ maybe-whitespaces)
(/ (get-word "static char *" #t))
(/ rest-of-line)
(/ maybe-whitespaces)
(/ (imm #\"))
(width (greedy+ (byte-if digit?)))
(/ (imm #\space))
(height (greedy+ (byte-if digit?)))
(/ (imm #\space))
(colors (greedy+ (byte-if digit?)))
(/ (imm #\space))
supported only 1
(/ (imm #\")) (/ (imm #\,))
(color-table (times (list->number colors 10)
(let-parse* (
(/ maybe-whitespaces)
(/ (imm #\"))
(key byte)
(/ (imm #\tab))
(ctype byte)
(/ (imm #\space))
(color (greedy+ (byte-if (lambda (b) (not (eq? b #\"))))))
(/ (imm #\")) (/ (imm #\,)))
[key ctype color])))
(bitmap (times (list->number height 10)
(let-parse* (
(/ maybe-whitespaces)
(/ (imm #\"))
(row (times (list->number width 10) byte))
(/ (imm #\"))
(/ rest-of-line))
row)))
(/ maybe-whitespaces)
)
{
'width (list->number width 10)
'height (list->number height 10)
'colors (list->number colors 10)
'bpp (list->number (list bpp) 10)
'color-table color-table
'bitmap bitmap
}))
(define (xpm-parse-file filename)
(let ((file (open-input-file filename)))
(if file
(let ((o (parse xpm-parser (port->bytestream file) filename "xpm parse error" #false)))
(close-port file)
o))))
))
|
510eca2840740f9f4874f3db1af4413860e80980374ee80dc992c9929ef9e8f5 | scheme/edwin48 | weak-pair.scm | ;;; -*- Mode: Scheme; scheme48-package: weak-pair -*-
(define-record-type* weak-pair
(make-weak-pair (car) (cdr))
())
(define (weak-cons car cdr)
(make-weak-pair (make-weak-pointer car) cdr))
(define (weak-pair/car? pair)
(weak-pointer-ref (weak-pair-car pair)))
(define (weak-car pair)
(if (weak-pair/car? pair)
(weak-pointer-ref (weak-pair-car pair))
#f))
(define (weak-set-car! pair obj)
(if (weak-pair? pair)
(set-weak-pair-car! pair (make-weak-pointer obj))))
(define (weak-cdr pair) (weak-pair-cdr pair))
(define (weak-set-cdr! pair obj)
(set-weak-pair-cdr! pair obj))
(define (weak-memq x weaks)
(if (null? weaks)
#f
(if (eq? x (weak-pointer-ref (weak-car weaks)))
weaks
(weak-memq x (weak-cdr weaks)))))
(define (weak-assq item alist)
(let loop ((alist alist))
(and (not (null? alist))
(if (eq? (weak-car (car alist)) item)
(car alist)
(loop (cdr alist))))))
(define weak-pair/false "weak-pair/false")
(define (weak-list->list items)
(let loop ((items* items) (result '()))
(if (weak-pair? items*)
(loop (cdr items*)
(let ((item (car items*)))
(if (not item)
result
(cons (if (eq? item weak-pair/false) #f item)
result))))
(begin
(if (not (null? items*))
(error:not-weak-list items 'WEAK-LIST->LIST))
(reverse! result)))))
(define (list->weak-list items)
(let loop ((items* (reverse items)) (result '()))
(if (pair? items*)
(loop (cdr items*)
(weak-cons (car items*) result))
(begin
(if (not (null? items*))
(error:not-list items 'LIST->WEAK-LIST))
result))))
| null | https://raw.githubusercontent.com/scheme/edwin48/fbe3c7ca14f1418eafddebd35f78ad12e42ea851/edwin48/scsh/weak-pair.scm | scheme | -*- Mode: Scheme; scheme48-package: weak-pair -*- |
(define-record-type* weak-pair
(make-weak-pair (car) (cdr))
())
(define (weak-cons car cdr)
(make-weak-pair (make-weak-pointer car) cdr))
(define (weak-pair/car? pair)
(weak-pointer-ref (weak-pair-car pair)))
(define (weak-car pair)
(if (weak-pair/car? pair)
(weak-pointer-ref (weak-pair-car pair))
#f))
(define (weak-set-car! pair obj)
(if (weak-pair? pair)
(set-weak-pair-car! pair (make-weak-pointer obj))))
(define (weak-cdr pair) (weak-pair-cdr pair))
(define (weak-set-cdr! pair obj)
(set-weak-pair-cdr! pair obj))
(define (weak-memq x weaks)
(if (null? weaks)
#f
(if (eq? x (weak-pointer-ref (weak-car weaks)))
weaks
(weak-memq x (weak-cdr weaks)))))
(define (weak-assq item alist)
(let loop ((alist alist))
(and (not (null? alist))
(if (eq? (weak-car (car alist)) item)
(car alist)
(loop (cdr alist))))))
(define weak-pair/false "weak-pair/false")
(define (weak-list->list items)
(let loop ((items* items) (result '()))
(if (weak-pair? items*)
(loop (cdr items*)
(let ((item (car items*)))
(if (not item)
result
(cons (if (eq? item weak-pair/false) #f item)
result))))
(begin
(if (not (null? items*))
(error:not-weak-list items 'WEAK-LIST->LIST))
(reverse! result)))))
(define (list->weak-list items)
(let loop ((items* (reverse items)) (result '()))
(if (pair? items*)
(loop (cdr items*)
(weak-cons (car items*) result))
(begin
(if (not (null? items*))
(error:not-list items 'LIST->WEAK-LIST))
result))))
|
5a7c18ecf7546a56c20ee10a889a4bf687e9bcf2f4bc9fd2097377b60563b946 | bobzhang/fan | btyp.ml |
type 'a t =
{u: int}
(* local variables: *)
(* compile-command: "ocamlc.opt -c btyp.ml" *)
(* end: *)
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/unitest/btyp.ml | ocaml | local variables:
compile-command: "ocamlc.opt -c btyp.ml"
end: |
type 'a t =
{u: int}
|
9e47e7752ff83c0b1611061634e102b8d6e7566a302038dcf0e0a720b51d8ac5 | Kappa-Dev/KappaTools | jsonUtil.ml | (******************************************************************************)
(* _ __ * The Kappa Language *)
| |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF
(* | ' / *********************************************************************)
(* | . \ * This file is distributed under the terms of the *)
(* |_|\_\ * GNU Lesser General Public License Version 3 *)
(******************************************************************************)
let initial_buffer_size = 0x1000
let write_to_channel f d x =
let b = Buffer.create initial_buffer_size in
let () = f b x in
Buffer.output_buffer d b
let string_of_write f ?(len = 1024) x =
let ob = Buffer.create len in
let () = f ob x in
Buffer.contents ob
let read_of_string f x =
let lex_st = Yojson.Basic.init_lexer () in
let lex_buf = Lexing.from_string x in
f lex_st lex_buf
let read_between_spaces f lex_st lex_buf =
let () = Yojson.Basic.read_space lex_st lex_buf in
let x = f lex_st lex_buf in
let () = Yojson.Basic.read_space lex_st lex_buf in
x
let read_next_item f st b =
let () = Yojson.Basic.read_comma st b in
read_between_spaces f st b
let build_msg s = "Not a correct "^s
let of_string (s:string) = `String s
let to_string ?error_msg:(error_msg=build_msg "string") =
function
| `String (s:string) -> s
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_int (s:int) = `Int s
let to_int ?error_msg:(error_msg=build_msg "int") =
function
| `Int (s:int) -> s
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_bool (s:bool) = `String (if s then "true" else "false")
let to_bool ?error_msg:(error_msg=build_msg "boolean") =
function
| `String "true" -> true
| `String "false" -> false
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_unit () = `Null
let to_unit ?error_msg:(error_msg=build_msg "unit") =
function
| `Null -> ()
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_option to_json = function
| None -> `Null
| Some x -> match to_json x with
| `Null -> failwith "ambiguous JsonUtil.of_option"
| x -> x
let to_option = Yojson.Basic.Util.to_option
let write_option f ob = function
| None -> Yojson.Basic.write_null ob ()
| Some x -> f ob x
let read_option f p lb =
if Yojson.Basic.read_null_if_possible p lb then None else Some (f p lb)
let of_list to_json l =
`List (List.rev_map to_json (List.rev l))
let to_list ?error_msg:(error_msg=build_msg "list") of_json = function
| `List l as x ->
begin
try List.rev_map of_json (List.rev l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| `Null -> []
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_comma ob = Buffer.add_char ob ','
let rec iter2 f_elt x = function
| [] -> ()
| y :: l -> write_comma x; f_elt x y; iter2 f_elt x l
let write_list f ob l =
let () = Buffer.add_char ob '[' in
let () = match l with
| [] -> ()
| y :: l -> f ob y; iter2 f ob l in
Buffer.add_char ob ']'
let of_array to_json a =
`List (Array.fold_right (fun x acc -> to_json x::acc) a [])
let to_array ?error_msg:(error_msg=build_msg "array") of_json = function
| `List l -> Tools.array_map_of_list of_json l
| `Null -> [||]
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_array f ob l =
let () = Buffer.add_char ob '[' in
let () = if Array.length l > 0 then f ob l.(0) in
let () = Tools.iteri
(fun i -> let () = write_comma ob in f ob l.(succ i))
(pred (Array.length l)) in
Buffer.add_char ob ']'
let rec iter_seq ob = function
| [] -> ()
| f::q ->
let () = write_comma ob in
let () = f ob in
iter_seq ob q
let write_sequence ob l =
let () = Buffer.add_char ob '[' in
let () = match l with
| [] -> ()
| f::q -> let () = f ob in iter_seq ob q in
Buffer.add_char ob ']'
let read_variant read_id read st b =
let () = Yojson.Basic.read_lbr st b in
let cst = read_between_spaces read_id st b in
let out = read st b cst in
let () = Yojson.Basic.read_rbr st b in
out
let smart_assoc l =
`Assoc (List.rev (List.fold_left (fun acc -> function
| _,(`Null | `Assoc [] | `List []) -> acc
| x -> x::acc) [] l))
let of_assoc to_json l =
`Assoc (List.rev_map to_json (List.rev l))
let to_assoc
?error_msg:(error_msg=build_msg "association")
of_json json =
match json
with
| `Assoc l as x ->
begin
try
List.rev_map of_json (List.rev l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| `Null -> []
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_field na f ob x =
let () = Yojson.Basic.write_string ob na in
let () = Buffer.add_char ob ':' in
f ob x
let of_pair ?(lab1="first") ?(lab2="second") to_json1 to_json2 (a,b) =
`Assoc [ lab1, to_json1 a; lab2, to_json2 b ]
let to_triple
?lab1:(lab1="first") ?lab2:(lab2="second") ?lab3:(lab3="third")
?error_msg:(error_msg=build_msg "triple")
of_json1 of_json2 of_json3 =
function
| `Assoc l as x when List.length l = 3 ->
begin
try
of_json1 (List.assoc lab1 l),
of_json2 (List.assoc lab2 l),
of_json3 (List.assoc lab3 l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_triple ?(lab1="first") ?(lab2="second") ?(lab3="third")
to_json1 to_json2 to_json3 (a,b,c) =
`Assoc [ lab1, to_json1 a; lab2, to_json2 b ; lab3, to_json3 c]
let to_pair ?lab1:(lab1="first") ?lab2:(lab2="second")
?error_msg:(error_msg=build_msg "pair") of_json1 of_json2 =
function
| `Assoc l as x when List.length l = 2 ->
begin
try
of_json1 (List.assoc lab1 l),
of_json2 (List.assoc lab2 l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_compact_pair f g ob (x,y) =
let () = Buffer.add_char ob '[' in
let () = f ob x in
let () = write_comma ob in
let () = g ob y in
Buffer.add_char ob ']'
let read_compact_pair f g st b =
let () = Yojson.Basic.read_lbr st b in
let x = read_between_spaces f st b in
let () = Yojson.Basic.read_comma st b in
let y = read_between_spaces g st b in
let () = Yojson.Basic.read_rbr st b in
(x,y)
let compact_to_pair f g = function
| `List [ x; y ] -> (f x, g y)
| x -> raise (Yojson.Basic.Util.Type_error ("Not a compact pair",x))
let of_map
?lab_key:(lab_key="key")
?lab_value:(lab_value="value")
~fold key_to_json value_to_json map =
`List
(List.rev
(fold
(fun (key:'key) (value:'value) (list:Yojson.Basic.t list) ->
(`Assoc [
lab_key,key_to_json key;
lab_value,value_to_json value
])::list
)
map
[])
)
let to_map
?lab_key:(lab_key="key")
?lab_value:(lab_value="value")
?error_msg:(error_msg=build_msg "map")
~add ~empty json_to_key json_to_value =
function
| `List l ->
List.fold_left
(fun map x ->
match x
with `Assoc l as x when List.length l = 2 ->
begin
try
add
(json_to_key (List.assoc lab_key l))
(json_to_value (List.assoc lab_value l))
map
with Not_found ->
raise
(Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise
(Yojson.Basic.Util.Type_error (error_msg,x)))
empty
l
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_unix_label =
function
| UnixLabels.E2BIG -> `Assoc ["E2BIG",`Null]
| UnixLabels.EACCES -> `Assoc ["EACCES",`Null]
| UnixLabels.EAGAIN -> `Assoc ["EAGAIN",`Null]
| UnixLabels.EBADF -> `Assoc ["EBADF",`Null]
| UnixLabels.EBUSY -> `Assoc ["EBUSY",`Null]
| UnixLabels.ECHILD -> `Assoc ["ECHILD",`Null]
| UnixLabels.EDEADLK -> `Assoc ["EDEADLK",`Null]
| UnixLabels.EDOM -> `Assoc ["EDOM",`Null]
| UnixLabels.EEXIST -> `Assoc ["EEXIST",`Null]
| UnixLabels.EFAULT -> `Assoc ["EFAULT",`Null]
| UnixLabels.EFBIG -> `Assoc ["EFBIG",`Null]
| UnixLabels.EINTR -> `Assoc ["EINTR",`Null]
| UnixLabels.EINVAL -> `Assoc ["EINVAL",`Null]
| UnixLabels.EIO -> `Assoc ["EIO",`Null]
| UnixLabels.EISDIR -> `Assoc ["EISDIR",`Null]
| UnixLabels.EMFILE -> `Assoc ["EMFILE",`Null]
| UnixLabels.EMLINK -> `Assoc ["EMLINK",`Null]
| UnixLabels.ENAMETOOLONG -> `Assoc ["ENAMETOOLONG",`Null]
| UnixLabels.ENFILE -> `Assoc ["ENFILE",`Null]
| UnixLabels.ENODEV -> `Assoc ["ENODEV",`Null]
| UnixLabels.ENOENT -> `Assoc ["ENOENT",`Null]
| UnixLabels.ENOEXEC -> `Assoc ["ENOEXEC",`Null]
| UnixLabels.ENOLCK -> `Assoc ["ENOLCK",`Null]
| UnixLabels.ENOMEM -> `Assoc ["ENOMEM",`Null]
| UnixLabels.ENOSPC -> `Assoc ["ENOSPC",`Null]
| UnixLabels.ENOSYS -> `Assoc ["ENOSYS",`Null]
| UnixLabels.ENOTDIR -> `Assoc ["ENOTDIR",`Null]
| UnixLabels.ENOTEMPTY -> `Assoc ["ENOTEMPTY",`Null]
| UnixLabels.ENOTTY -> `Assoc ["ENOTTY",`Null]
| UnixLabels.ENXIO -> `Assoc ["ENXIO",`Null]
| UnixLabels.EPERM -> `Assoc ["EPERM",`Null]
| UnixLabels.EPIPE -> `Assoc ["EPIPE",`Null]
| UnixLabels.ERANGE -> `Assoc ["ERANGE",`Null]
| UnixLabels.EROFS -> `Assoc ["EROFS",`Null]
| UnixLabels.ESPIPE -> `Assoc ["ESPIPE",`Null]
| UnixLabels.ESRCH -> `Assoc ["ESRCH",`Null]
| UnixLabels.EXDEV -> `Assoc ["EXDEV",`Null]
| UnixLabels.EWOULDBLOCK -> `Assoc ["EWOULDBLOCK",`Null]
| UnixLabels.EINPROGRESS -> `Assoc ["EINPROGRESS",`Null]
| UnixLabels.EALREADY -> `Assoc ["EALREADY",`Null]
| UnixLabels.ENOTSOCK -> `Assoc ["ENOTSOCK",`Null]
| UnixLabels.EDESTADDRREQ -> `Assoc ["EDESTADDRREQ",`Null]
| UnixLabels.EMSGSIZE -> `Assoc ["EMSGSIZE",`Null]
| UnixLabels.EPROTOTYPE -> `Assoc ["EPROTOTYPE",`Null]
| UnixLabels.ENOPROTOOPT -> `Assoc ["ENOPROTOOPT",`Null]
| UnixLabels.EPROTONOSUPPORT -> `Assoc ["EPROTONOSUPPORT",`Null]
| UnixLabels.ESOCKTNOSUPPORT -> `Assoc ["ESOCKTNOSUPPORT",`Null]
| UnixLabels.EOPNOTSUPP -> `Assoc ["EOPNOTSUPP",`Null]
| UnixLabels.EPFNOSUPPORT -> `Assoc ["EPFNOSUPPORT",`Null]
| UnixLabels.EAFNOSUPPORT -> `Assoc ["EAFNOSUPPORT",`Null]
| UnixLabels.EADDRINUSE -> `Assoc ["EADDRINUSE",`Null]
| UnixLabels.EADDRNOTAVAIL -> `Assoc ["EADDRNOTAVAIL",`Null]
| UnixLabels.ENETDOWN -> `Assoc ["ENETDOWN",`Null]
| UnixLabels.ENETUNREACH -> `Assoc ["ENETUNREACH",`Null]
| UnixLabels.ENETRESET -> `Assoc ["ENETRESET",`Null]
| UnixLabels.ECONNABORTED -> `Assoc ["ECONNABORTED",`Null]
| UnixLabels.ECONNRESET -> `Assoc ["ECONNRESET",`Null]
| UnixLabels.ENOBUFS -> `Assoc ["ENOBUFS",`Null]
| UnixLabels.EISCONN -> `Assoc ["EISCONN",`Null]
| UnixLabels.ENOTCONN -> `Assoc ["ENOTCONN",`Null]
| UnixLabels.ESHUTDOWN -> `Assoc ["ESHUTDOWN",`Null]
| UnixLabels.ETOOMANYREFS -> `Assoc ["ETOOMANYREFS",`Null]
| UnixLabels.ETIMEDOUT -> `Assoc ["ETIMEDOUT",`Null]
| UnixLabels.ECONNREFUSED -> `Assoc ["ECONNREFUSED",`Null]
| UnixLabels.EHOSTDOWN -> `Assoc ["EHOSTDOWN",`Null]
| UnixLabels.EHOSTUNREACH -> `Assoc ["EHOSTUNREACH",`Null]
| UnixLabels.ELOOP -> `Assoc ["ELOOP",`Null]
| UnixLabels.EOVERFLOW -> `Assoc ["EOVERFLOW",`Null]
| UnixLabels.EUNKNOWNERR int -> `Assoc ["EUNKNOWNERR", of_int int]
let (to_unix_label : Yojson.Basic.t -> UnixLabels.error) =
function
| `Assoc ["E2BIG",`Null] -> UnixLabels.E2BIG
| `Assoc ["EACCES",`Null] -> UnixLabels.EACCES
| `Assoc ["EAGAIN",`Null] -> UnixLabels.EAGAIN
| `Assoc ["EBADF",`Null] -> UnixLabels.EBADF
| `Assoc ["EBUSY",`Null] -> UnixLabels.EBUSY
| `Assoc ["ECHILD",`Null] -> UnixLabels.ECHILD
| `Assoc ["EDEADLK",`Null] -> UnixLabels.EDEADLK
| `Assoc ["EDOM",`Null] -> UnixLabels.EDOM
| `Assoc ["EEXIST",`Null] -> UnixLabels.EEXIST
| `Assoc ["EFAULT",`Null] -> UnixLabels.EFAULT
| `Assoc ["EFBIG",`Null] -> UnixLabels.EFBIG
| `Assoc ["EINTR",`Null] -> UnixLabels.EINTR
| `Assoc ["EINVAL",`Null] -> UnixLabels.EINVAL
| `Assoc ["EIO",`Null] -> UnixLabels.EIO
| `Assoc ["EISDIR",`Null] -> UnixLabels.EISDIR
| `Assoc ["EMFILE",`Null] -> UnixLabels.EMFILE
| `Assoc ["EMLINK",`Null] -> UnixLabels.EMLINK
| `Assoc ["ENAMETOOLONG",`Null] -> UnixLabels.ENAMETOOLONG
| `Assoc ["ENFILE",`Null] -> UnixLabels.ENFILE
| `Assoc ["ENODEV",`Null] -> UnixLabels.ENODEV
| `Assoc ["ENOENT",`Null] -> UnixLabels.ENOENT
| `Assoc ["ENOEXEC",`Null] -> UnixLabels.ENOEXEC
| `Assoc ["ENOLCK",`Null] -> UnixLabels.ENOLCK
| `Assoc ["ENOMEM",`Null] -> UnixLabels.ENOMEM
| `Assoc ["ENOSPC",`Null] -> UnixLabels.ENOSPC
| `Assoc ["ENOSYS",`Null] -> UnixLabels.ENOSYS
| `Assoc ["ENOTDIR",`Null] -> UnixLabels.ENOTDIR
| `Assoc ["ENOTEMPTY",`Null] -> UnixLabels.ENOTEMPTY
| `Assoc ["ENOTTY",`Null] -> UnixLabels.ENOTTY
| `Assoc ["ENXIO",`Null] -> UnixLabels.ENXIO
| `Assoc ["EPERM",`Null] -> UnixLabels.EPERM
| `Assoc ["EPIPE",`Null] -> UnixLabels.EPIPE
| `Assoc ["ERANGE",`Null] -> UnixLabels.ERANGE
| `Assoc ["EROFS",`Null] -> UnixLabels.EROFS
| `Assoc ["ESPIPE",`Null] -> UnixLabels.ESPIPE
| `Assoc ["ESRCH",`Null] -> UnixLabels.ESRCH
| `Assoc ["EXDEV",`Null] -> UnixLabels.EXDEV
| `Assoc ["EWOULDBLOCK",`Null] -> UnixLabels.EWOULDBLOCK
| `Assoc ["EINPROGRESS",`Null] -> UnixLabels.EINPROGRESS
| `Assoc ["EALREADY",`Null] -> UnixLabels.EALREADY
| `Assoc ["ENOTSOCK",`Null] -> UnixLabels.ENOTSOCK
| `Assoc ["EDESTADDRREQ",`Null] -> UnixLabels.EDESTADDRREQ
| `Assoc ["EMSGSIZE",`Null] -> UnixLabels.EMSGSIZE
| `Assoc ["EPROTOTYPE",`Null] -> UnixLabels.EPROTOTYPE
| `Assoc ["ENOPROTOOPT",`Null] -> UnixLabels.ENOPROTOOPT
| `Assoc ["EPROTONOSUPPORT",`Null] -> UnixLabels.EPROTONOSUPPORT
| `Assoc ["ESOCKTNOSUPPORT",`Null] -> UnixLabels.ESOCKTNOSUPPORT
| `Assoc ["EOPNOTSUPP",`Null] -> UnixLabels.EOPNOTSUPP
| `Assoc ["EPFNOSUPPORT",`Null] -> UnixLabels.EPFNOSUPPORT
| `Assoc ["EAFNOSUPPORT",`Null] -> UnixLabels.EAFNOSUPPORT
| `Assoc ["EADDRINUSE",`Null] -> UnixLabels.EADDRINUSE
| `Assoc ["EADDRNOTAVAIL",`Null] -> UnixLabels.EADDRNOTAVAIL
| `Assoc ["ENETDOWN",`Null] -> UnixLabels.ENETDOWN
| `Assoc ["ENETUNREACH",`Null] -> UnixLabels.ENETUNREACH
| `Assoc ["ENETRESET",`Null] -> UnixLabels.ENETRESET
| `Assoc ["ECONNABORTED",`Null] -> UnixLabels.ECONNABORTED
| `Assoc ["ECONNRESET",`Null] -> UnixLabels.ECONNRESET
| `Assoc ["ENOBUFS",`Null] -> UnixLabels.ENOBUFS
| `Assoc ["EISCONN",`Null] -> UnixLabels.EISCONN
| `Assoc ["ENOTCONN",`Null] -> UnixLabels.ENOTCONN
| `Assoc ["ESHUTDOWN",`Null] -> UnixLabels.ESHUTDOWN
| `Assoc ["ETOOMANYREFS",`Null] -> UnixLabels.ETOOMANYREFS
| `Assoc ["ETIMEDOUT",`Null] -> UnixLabels.ETIMEDOUT
| `Assoc ["ECONNREFUSED",`Null] -> UnixLabels.ECONNREFUSED
| `Assoc ["EHOSTDOWN",`Null] -> UnixLabels.EHOSTDOWN
| `Assoc ["EHOSTUNREACH",`Null] -> UnixLabels.EHOSTUNREACH
| `Assoc ["ELOOP",`Null] -> UnixLabels.ELOOP
| `Assoc ["EOVERFLOW",`Null] -> UnixLabels.EOVERFLOW
| `Assoc ["EUNKNOWNERR",int] -> UnixLabels.EUNKNOWNERR (to_int int)
| x ->
raise (Yojson.Basic.Util.Type_error (build_msg "unix labels error",x))
let of_unix_error =
function
| Unix.E2BIG -> `Assoc ["E2BIG",`Null]
| Unix.EACCES -> `Assoc ["EACCES",`Null]
| Unix.EAGAIN -> `Assoc ["EAGAIN",`Null]
| Unix.EBADF -> `Assoc ["EBADF",`Null]
| Unix.EBUSY -> `Assoc ["EBUSY",`Null]
| Unix.ECHILD -> `Assoc ["ECHILD",`Null]
| Unix.EDEADLK -> `Assoc ["EDEADLK",`Null]
| Unix.EDOM -> `Assoc ["EDOM",`Null]
| Unix.EEXIST -> `Assoc ["EEXIST",`Null]
| Unix.EFAULT -> `Assoc ["EFAULT",`Null]
| Unix.EFBIG -> `Assoc ["EFBIG",`Null]
| Unix.EINTR -> `Assoc ["EINTR",`Null]
| Unix.EINVAL -> `Assoc ["EINVAL",`Null]
| Unix.EIO -> `Assoc ["EIO",`Null]
| Unix.EISDIR -> `Assoc ["EISDIR",`Null]
| Unix.EMFILE -> `Assoc ["EMFILE",`Null]
| Unix.EMLINK -> `Assoc ["EMLINK",`Null]
| Unix.ENAMETOOLONG -> `Assoc ["ENAMETOOLONG",`Null]
| Unix.ENFILE -> `Assoc ["ENFILE",`Null]
| Unix.ENODEV -> `Assoc ["ENODEV",`Null]
| Unix.ENOENT -> `Assoc ["ENOENT",`Null]
| Unix.ENOEXEC -> `Assoc ["ENOEXEC",`Null]
| Unix.ENOLCK -> `Assoc ["ENOLCK",`Null]
| Unix.ENOMEM -> `Assoc ["ENOMEM",`Null]
| Unix.ENOSPC -> `Assoc ["ENOSPC",`Null]
| Unix.ENOSYS -> `Assoc ["ENOSYS",`Null]
| Unix.ENOTDIR -> `Assoc ["ENOTDIR",`Null]
| Unix.ENOTEMPTY -> `Assoc ["ENOTEMPTY",`Null]
| Unix.ENOTTY -> `Assoc ["ENOTTY",`Null]
| Unix.ENXIO -> `Assoc ["ENXIO",`Null]
| Unix.EPERM -> `Assoc ["EPERM",`Null]
| Unix.EPIPE -> `Assoc ["EPIPE",`Null]
| Unix.ERANGE -> `Assoc ["ERANGE",`Null]
| Unix.EROFS -> `Assoc ["EROFS",`Null]
| Unix.ESPIPE -> `Assoc ["ESPIPE",`Null]
| Unix.ESRCH -> `Assoc ["ESRCH",`Null]
| Unix.EXDEV -> `Assoc ["EXDEV",`Null]
| Unix.EWOULDBLOCK -> `Assoc ["EWOULDBLOCK",`Null]
| Unix.EINPROGRESS -> `Assoc ["EINPROGRESS",`Null]
| Unix.EALREADY -> `Assoc ["EALREADY",`Null]
| Unix.ENOTSOCK -> `Assoc ["ENOTSOCK",`Null]
| Unix.EDESTADDRREQ -> `Assoc ["EDESTADDRREQ",`Null]
| Unix.EMSGSIZE -> `Assoc ["EMSGSIZE",`Null]
| Unix.EPROTOTYPE -> `Assoc ["EPROTOTYPE",`Null]
| Unix.ENOPROTOOPT -> `Assoc ["ENOPROTOOPT",`Null]
| Unix.EPROTONOSUPPORT -> `Assoc ["EPROTONOSUPPORT",`Null]
| Unix.ESOCKTNOSUPPORT -> `Assoc ["ESOCKTNOSUPPORT",`Null]
| Unix.EOPNOTSUPP -> `Assoc ["EOPNOTSUPP",`Null]
| Unix.EPFNOSUPPORT -> `Assoc ["EPFNOSUPPORT",`Null]
| Unix.EAFNOSUPPORT -> `Assoc ["EAFNOSUPPORT",`Null]
| Unix.EADDRINUSE -> `Assoc ["EADDRINUSE",`Null]
| Unix.EADDRNOTAVAIL -> `Assoc ["EADDRNOTAVAIL",`Null]
| Unix.ENETDOWN -> `Assoc ["ENETDOWN",`Null]
| Unix.ENETUNREACH -> `Assoc ["ENETUNREACH",`Null]
| Unix.ENETRESET -> `Assoc ["ENETRESET",`Null]
| Unix.ECONNABORTED -> `Assoc ["ECONNABORTED",`Null]
| Unix.ECONNRESET -> `Assoc ["ECONNRESET",`Null]
| Unix.ENOBUFS -> `Assoc ["ENOBUFS",`Null]
| Unix.EISCONN -> `Assoc ["EISCONN",`Null]
| Unix.ENOTCONN -> `Assoc ["ENOTCONN",`Null]
| Unix.ESHUTDOWN -> `Assoc ["ESHUTDOWN",`Null]
| Unix.ETOOMANYREFS -> `Assoc ["ETOOMANYREFS",`Null]
| Unix.ETIMEDOUT -> `Assoc ["ETIMEDOUT",`Null]
| Unix.ECONNREFUSED -> `Assoc ["ECONNREFUSED",`Null]
| Unix.EHOSTDOWN -> `Assoc ["EHOSTDOWN",`Null]
| Unix.EHOSTUNREACH -> `Assoc ["EHOSTUNREACH",`Null]
| Unix.ELOOP -> `Assoc ["ELOOP",`Null]
| Unix.EOVERFLOW -> `Assoc ["EOVERFLOW",`Null]
| Unix.EUNKNOWNERR int -> `Assoc ["EUNKNOWNERR", of_int int]
let (to_unix_error : Yojson.Basic.t -> Unix.error) =
function
| `Assoc ["E2BIG",`Null] -> Unix.E2BIG
| `Assoc ["EACCES",`Null] -> Unix.EACCES
| `Assoc ["EAGAIN",`Null] -> Unix.EAGAIN
| `Assoc ["EBADF",`Null] -> Unix.EBADF
| `Assoc ["EBUSY",`Null] -> Unix.EBUSY
| `Assoc ["ECHILD",`Null] -> Unix.ECHILD
| `Assoc ["EDEADLK",`Null] -> Unix.EDEADLK
| `Assoc ["EDOM",`Null] -> Unix.EDOM
| `Assoc ["EEXIST",`Null] -> Unix.EEXIST
| `Assoc ["EFAULT",`Null] -> Unix.EFAULT
| `Assoc ["EFBIG",`Null] -> Unix.EFBIG
| `Assoc ["EINTR",`Null] -> Unix.EINTR
| `Assoc ["EINVAL",`Null] -> Unix.EINVAL
| `Assoc ["EIO",`Null] -> Unix.EIO
| `Assoc ["EISDIR",`Null] -> Unix.EISDIR
| `Assoc ["EMFILE",`Null] -> Unix.EMFILE
| `Assoc ["EMLINK",`Null] -> Unix.EMLINK
| `Assoc ["ENAMETOOLONG",`Null] -> Unix.ENAMETOOLONG
| `Assoc ["ENFILE",`Null] -> Unix.ENFILE
| `Assoc ["ENODEV",`Null] -> Unix.ENODEV
| `Assoc ["ENOENT",`Null] -> Unix.ENOENT
| `Assoc ["ENOEXEC",`Null] -> Unix.ENOEXEC
| `Assoc ["ENOLCK",`Null] -> Unix.ENOLCK
| `Assoc ["ENOMEM",`Null] -> Unix.ENOMEM
| `Assoc ["ENOSPC",`Null] -> Unix.ENOSPC
| `Assoc ["ENOSYS",`Null] -> Unix.ENOSYS
| `Assoc ["ENOTDIR",`Null] -> Unix.ENOTDIR
| `Assoc ["ENOTEMPTY",`Null] -> Unix.ENOTEMPTY
| `Assoc ["ENOTTY",`Null] -> Unix.ENOTTY
| `Assoc ["ENXIO",`Null] -> Unix.ENXIO
| `Assoc ["EPERM",`Null] -> Unix.EPERM
| `Assoc ["EPIPE",`Null] -> Unix.EPIPE
| `Assoc ["ERANGE",`Null] -> Unix.ERANGE
| `Assoc ["EROFS",`Null] -> Unix.EROFS
| `Assoc ["ESPIPE",`Null] -> Unix.ESPIPE
| `Assoc ["ESRCH",`Null] -> Unix.ESRCH
| `Assoc ["EXDEV",`Null] -> Unix.EXDEV
| `Assoc ["EWOULDBLOCK",`Null] -> Unix.EWOULDBLOCK
| `Assoc ["EINPROGRESS",`Null] -> Unix.EINPROGRESS
| `Assoc ["EALREADY",`Null] -> Unix.EALREADY
| `Assoc ["ENOTSOCK",`Null] -> Unix.ENOTSOCK
| `Assoc ["EDESTADDRREQ",`Null] -> Unix.EDESTADDRREQ
| `Assoc ["EMSGSIZE",`Null] -> Unix.EMSGSIZE
| `Assoc ["EPROTOTYPE",`Null] -> Unix.EPROTOTYPE
| `Assoc ["ENOPROTOOPT",`Null] -> Unix.ENOPROTOOPT
| `Assoc ["EPROTONOSUPPORT",`Null] -> Unix.EPROTONOSUPPORT
| `Assoc ["ESOCKTNOSUPPORT",`Null] -> Unix.ESOCKTNOSUPPORT
| `Assoc ["EOPNOTSUPP",`Null] -> Unix.EOPNOTSUPP
| `Assoc ["EPFNOSUPPORT",`Null] -> Unix.EPFNOSUPPORT
| `Assoc ["EAFNOSUPPORT",`Null] -> Unix.EAFNOSUPPORT
| `Assoc ["EADDRINUSE",`Null] -> Unix.EADDRINUSE
| `Assoc ["EADDRNOTAVAIL",`Null] -> Unix.EADDRNOTAVAIL
| `Assoc ["ENETDOWN",`Null] -> Unix.ENETDOWN
| `Assoc ["ENETUNREACH",`Null] -> Unix.ENETUNREACH
| `Assoc ["ENETRESET",`Null] -> Unix.ENETRESET
| `Assoc ["ECONNABORTED",`Null] -> Unix.ECONNABORTED
| `Assoc ["ECONNRESET",`Null] -> Unix.ECONNRESET
| `Assoc ["ENOBUFS",`Null] -> Unix.ENOBUFS
| `Assoc ["EISCONN",`Null] -> Unix.EISCONN
| `Assoc ["ENOTCONN",`Null] -> Unix.ENOTCONN
| `Assoc ["ESHUTDOWN",`Null] -> Unix.ESHUTDOWN
| `Assoc ["ETOOMANYREFS",`Null] -> Unix.ETOOMANYREFS
| `Assoc ["ETIMEDOUT",`Null] -> Unix.ETIMEDOUT
| `Assoc ["ECONNREFUSED",`Null] -> Unix.ECONNREFUSED
| `Assoc ["EHOSTDOWN",`Null] -> Unix.EHOSTDOWN
| `Assoc ["EHOSTUNREACH",`Null] -> Unix.EHOSTUNREACH
| `Assoc ["ELOOP",`Null] -> Unix.ELOOP
| `Assoc ["EOVERFLOW",`Null] -> Unix.EOVERFLOW
| `Assoc ["EUNKNOWNERR",int] -> Unix.EUNKNOWNERR (to_int int)
| x ->
raise (Yojson.Basic.Util.Type_error (build_msg "unix error",x))
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/777835b82f449d3d379713df76ff25fd5926b762/core/dataStructures/jsonUtil.ml | ocaml | ****************************************************************************
_ __ * The Kappa Language
| ' / ********************************************************************
| . \ * This file is distributed under the terms of the
|_|\_\ * GNU Lesser General Public License Version 3
**************************************************************************** | | |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF
let initial_buffer_size = 0x1000
let write_to_channel f d x =
let b = Buffer.create initial_buffer_size in
let () = f b x in
Buffer.output_buffer d b
let string_of_write f ?(len = 1024) x =
let ob = Buffer.create len in
let () = f ob x in
Buffer.contents ob
let read_of_string f x =
let lex_st = Yojson.Basic.init_lexer () in
let lex_buf = Lexing.from_string x in
f lex_st lex_buf
let read_between_spaces f lex_st lex_buf =
let () = Yojson.Basic.read_space lex_st lex_buf in
let x = f lex_st lex_buf in
let () = Yojson.Basic.read_space lex_st lex_buf in
x
let read_next_item f st b =
let () = Yojson.Basic.read_comma st b in
read_between_spaces f st b
let build_msg s = "Not a correct "^s
let of_string (s:string) = `String s
let to_string ?error_msg:(error_msg=build_msg "string") =
function
| `String (s:string) -> s
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_int (s:int) = `Int s
let to_int ?error_msg:(error_msg=build_msg "int") =
function
| `Int (s:int) -> s
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_bool (s:bool) = `String (if s then "true" else "false")
let to_bool ?error_msg:(error_msg=build_msg "boolean") =
function
| `String "true" -> true
| `String "false" -> false
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_unit () = `Null
let to_unit ?error_msg:(error_msg=build_msg "unit") =
function
| `Null -> ()
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_option to_json = function
| None -> `Null
| Some x -> match to_json x with
| `Null -> failwith "ambiguous JsonUtil.of_option"
| x -> x
let to_option = Yojson.Basic.Util.to_option
let write_option f ob = function
| None -> Yojson.Basic.write_null ob ()
| Some x -> f ob x
let read_option f p lb =
if Yojson.Basic.read_null_if_possible p lb then None else Some (f p lb)
let of_list to_json l =
`List (List.rev_map to_json (List.rev l))
let to_list ?error_msg:(error_msg=build_msg "list") of_json = function
| `List l as x ->
begin
try List.rev_map of_json (List.rev l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| `Null -> []
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_comma ob = Buffer.add_char ob ','
let rec iter2 f_elt x = function
| [] -> ()
| y :: l -> write_comma x; f_elt x y; iter2 f_elt x l
let write_list f ob l =
let () = Buffer.add_char ob '[' in
let () = match l with
| [] -> ()
| y :: l -> f ob y; iter2 f ob l in
Buffer.add_char ob ']'
let of_array to_json a =
`List (Array.fold_right (fun x acc -> to_json x::acc) a [])
let to_array ?error_msg:(error_msg=build_msg "array") of_json = function
| `List l -> Tools.array_map_of_list of_json l
| `Null -> [||]
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_array f ob l =
let () = Buffer.add_char ob '[' in
let () = if Array.length l > 0 then f ob l.(0) in
let () = Tools.iteri
(fun i -> let () = write_comma ob in f ob l.(succ i))
(pred (Array.length l)) in
Buffer.add_char ob ']'
let rec iter_seq ob = function
| [] -> ()
| f::q ->
let () = write_comma ob in
let () = f ob in
iter_seq ob q
let write_sequence ob l =
let () = Buffer.add_char ob '[' in
let () = match l with
| [] -> ()
| f::q -> let () = f ob in iter_seq ob q in
Buffer.add_char ob ']'
let read_variant read_id read st b =
let () = Yojson.Basic.read_lbr st b in
let cst = read_between_spaces read_id st b in
let out = read st b cst in
let () = Yojson.Basic.read_rbr st b in
out
let smart_assoc l =
`Assoc (List.rev (List.fold_left (fun acc -> function
| _,(`Null | `Assoc [] | `List []) -> acc
| x -> x::acc) [] l))
let of_assoc to_json l =
`Assoc (List.rev_map to_json (List.rev l))
let to_assoc
?error_msg:(error_msg=build_msg "association")
of_json json =
match json
with
| `Assoc l as x ->
begin
try
List.rev_map of_json (List.rev l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| `Null -> []
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_field na f ob x =
let () = Yojson.Basic.write_string ob na in
let () = Buffer.add_char ob ':' in
f ob x
let of_pair ?(lab1="first") ?(lab2="second") to_json1 to_json2 (a,b) =
`Assoc [ lab1, to_json1 a; lab2, to_json2 b ]
let to_triple
?lab1:(lab1="first") ?lab2:(lab2="second") ?lab3:(lab3="third")
?error_msg:(error_msg=build_msg "triple")
of_json1 of_json2 of_json3 =
function
| `Assoc l as x when List.length l = 3 ->
begin
try
of_json1 (List.assoc lab1 l),
of_json2 (List.assoc lab2 l),
of_json3 (List.assoc lab3 l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_triple ?(lab1="first") ?(lab2="second") ?(lab3="third")
to_json1 to_json2 to_json3 (a,b,c) =
`Assoc [ lab1, to_json1 a; lab2, to_json2 b ; lab3, to_json3 c]
let to_pair ?lab1:(lab1="first") ?lab2:(lab2="second")
?error_msg:(error_msg=build_msg "pair") of_json1 of_json2 =
function
| `Assoc l as x when List.length l = 2 ->
begin
try
of_json1 (List.assoc lab1 l),
of_json2 (List.assoc lab2 l)
with Not_found ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise (Yojson.Basic.Util.Type_error (error_msg,x))
let write_compact_pair f g ob (x,y) =
let () = Buffer.add_char ob '[' in
let () = f ob x in
let () = write_comma ob in
let () = g ob y in
Buffer.add_char ob ']'
let read_compact_pair f g st b =
let () = Yojson.Basic.read_lbr st b in
let x = read_between_spaces f st b in
let () = Yojson.Basic.read_comma st b in
let y = read_between_spaces g st b in
let () = Yojson.Basic.read_rbr st b in
(x,y)
let compact_to_pair f g = function
| `List [ x; y ] -> (f x, g y)
| x -> raise (Yojson.Basic.Util.Type_error ("Not a compact pair",x))
let of_map
?lab_key:(lab_key="key")
?lab_value:(lab_value="value")
~fold key_to_json value_to_json map =
`List
(List.rev
(fold
(fun (key:'key) (value:'value) (list:Yojson.Basic.t list) ->
(`Assoc [
lab_key,key_to_json key;
lab_value,value_to_json value
])::list
)
map
[])
)
let to_map
?lab_key:(lab_key="key")
?lab_value:(lab_value="value")
?error_msg:(error_msg=build_msg "map")
~add ~empty json_to_key json_to_value =
function
| `List l ->
List.fold_left
(fun map x ->
match x
with `Assoc l as x when List.length l = 2 ->
begin
try
add
(json_to_key (List.assoc lab_key l))
(json_to_value (List.assoc lab_value l))
map
with Not_found ->
raise
(Yojson.Basic.Util.Type_error (error_msg,x))
end
| x ->
raise
(Yojson.Basic.Util.Type_error (error_msg,x)))
empty
l
| x -> raise (Yojson.Basic.Util.Type_error (error_msg,x))
let of_unix_label =
function
| UnixLabels.E2BIG -> `Assoc ["E2BIG",`Null]
| UnixLabels.EACCES -> `Assoc ["EACCES",`Null]
| UnixLabels.EAGAIN -> `Assoc ["EAGAIN",`Null]
| UnixLabels.EBADF -> `Assoc ["EBADF",`Null]
| UnixLabels.EBUSY -> `Assoc ["EBUSY",`Null]
| UnixLabels.ECHILD -> `Assoc ["ECHILD",`Null]
| UnixLabels.EDEADLK -> `Assoc ["EDEADLK",`Null]
| UnixLabels.EDOM -> `Assoc ["EDOM",`Null]
| UnixLabels.EEXIST -> `Assoc ["EEXIST",`Null]
| UnixLabels.EFAULT -> `Assoc ["EFAULT",`Null]
| UnixLabels.EFBIG -> `Assoc ["EFBIG",`Null]
| UnixLabels.EINTR -> `Assoc ["EINTR",`Null]
| UnixLabels.EINVAL -> `Assoc ["EINVAL",`Null]
| UnixLabels.EIO -> `Assoc ["EIO",`Null]
| UnixLabels.EISDIR -> `Assoc ["EISDIR",`Null]
| UnixLabels.EMFILE -> `Assoc ["EMFILE",`Null]
| UnixLabels.EMLINK -> `Assoc ["EMLINK",`Null]
| UnixLabels.ENAMETOOLONG -> `Assoc ["ENAMETOOLONG",`Null]
| UnixLabels.ENFILE -> `Assoc ["ENFILE",`Null]
| UnixLabels.ENODEV -> `Assoc ["ENODEV",`Null]
| UnixLabels.ENOENT -> `Assoc ["ENOENT",`Null]
| UnixLabels.ENOEXEC -> `Assoc ["ENOEXEC",`Null]
| UnixLabels.ENOLCK -> `Assoc ["ENOLCK",`Null]
| UnixLabels.ENOMEM -> `Assoc ["ENOMEM",`Null]
| UnixLabels.ENOSPC -> `Assoc ["ENOSPC",`Null]
| UnixLabels.ENOSYS -> `Assoc ["ENOSYS",`Null]
| UnixLabels.ENOTDIR -> `Assoc ["ENOTDIR",`Null]
| UnixLabels.ENOTEMPTY -> `Assoc ["ENOTEMPTY",`Null]
| UnixLabels.ENOTTY -> `Assoc ["ENOTTY",`Null]
| UnixLabels.ENXIO -> `Assoc ["ENXIO",`Null]
| UnixLabels.EPERM -> `Assoc ["EPERM",`Null]
| UnixLabels.EPIPE -> `Assoc ["EPIPE",`Null]
| UnixLabels.ERANGE -> `Assoc ["ERANGE",`Null]
| UnixLabels.EROFS -> `Assoc ["EROFS",`Null]
| UnixLabels.ESPIPE -> `Assoc ["ESPIPE",`Null]
| UnixLabels.ESRCH -> `Assoc ["ESRCH",`Null]
| UnixLabels.EXDEV -> `Assoc ["EXDEV",`Null]
| UnixLabels.EWOULDBLOCK -> `Assoc ["EWOULDBLOCK",`Null]
| UnixLabels.EINPROGRESS -> `Assoc ["EINPROGRESS",`Null]
| UnixLabels.EALREADY -> `Assoc ["EALREADY",`Null]
| UnixLabels.ENOTSOCK -> `Assoc ["ENOTSOCK",`Null]
| UnixLabels.EDESTADDRREQ -> `Assoc ["EDESTADDRREQ",`Null]
| UnixLabels.EMSGSIZE -> `Assoc ["EMSGSIZE",`Null]
| UnixLabels.EPROTOTYPE -> `Assoc ["EPROTOTYPE",`Null]
| UnixLabels.ENOPROTOOPT -> `Assoc ["ENOPROTOOPT",`Null]
| UnixLabels.EPROTONOSUPPORT -> `Assoc ["EPROTONOSUPPORT",`Null]
| UnixLabels.ESOCKTNOSUPPORT -> `Assoc ["ESOCKTNOSUPPORT",`Null]
| UnixLabels.EOPNOTSUPP -> `Assoc ["EOPNOTSUPP",`Null]
| UnixLabels.EPFNOSUPPORT -> `Assoc ["EPFNOSUPPORT",`Null]
| UnixLabels.EAFNOSUPPORT -> `Assoc ["EAFNOSUPPORT",`Null]
| UnixLabels.EADDRINUSE -> `Assoc ["EADDRINUSE",`Null]
| UnixLabels.EADDRNOTAVAIL -> `Assoc ["EADDRNOTAVAIL",`Null]
| UnixLabels.ENETDOWN -> `Assoc ["ENETDOWN",`Null]
| UnixLabels.ENETUNREACH -> `Assoc ["ENETUNREACH",`Null]
| UnixLabels.ENETRESET -> `Assoc ["ENETRESET",`Null]
| UnixLabels.ECONNABORTED -> `Assoc ["ECONNABORTED",`Null]
| UnixLabels.ECONNRESET -> `Assoc ["ECONNRESET",`Null]
| UnixLabels.ENOBUFS -> `Assoc ["ENOBUFS",`Null]
| UnixLabels.EISCONN -> `Assoc ["EISCONN",`Null]
| UnixLabels.ENOTCONN -> `Assoc ["ENOTCONN",`Null]
| UnixLabels.ESHUTDOWN -> `Assoc ["ESHUTDOWN",`Null]
| UnixLabels.ETOOMANYREFS -> `Assoc ["ETOOMANYREFS",`Null]
| UnixLabels.ETIMEDOUT -> `Assoc ["ETIMEDOUT",`Null]
| UnixLabels.ECONNREFUSED -> `Assoc ["ECONNREFUSED",`Null]
| UnixLabels.EHOSTDOWN -> `Assoc ["EHOSTDOWN",`Null]
| UnixLabels.EHOSTUNREACH -> `Assoc ["EHOSTUNREACH",`Null]
| UnixLabels.ELOOP -> `Assoc ["ELOOP",`Null]
| UnixLabels.EOVERFLOW -> `Assoc ["EOVERFLOW",`Null]
| UnixLabels.EUNKNOWNERR int -> `Assoc ["EUNKNOWNERR", of_int int]
(* Decode the JSON produced by [of_unix_label] back into a [UnixLabels.error].
   ["EUNKNOWNERR"] carries an integer payload; every other constructor is
   encoded as a one-field object whose value is [`Null]. *)
let (to_unix_label : Yojson.Basic.t -> UnixLabels.error) = function
  | `Assoc ["EUNKNOWNERR", code] -> UnixLabels.EUNKNOWNERR (to_int code)
  | `Assoc [tag, `Null] as x ->
    (match tag with
     | "E2BIG" -> UnixLabels.E2BIG
     | "EACCES" -> UnixLabels.EACCES
     | "EAGAIN" -> UnixLabels.EAGAIN
     | "EBADF" -> UnixLabels.EBADF
     | "EBUSY" -> UnixLabels.EBUSY
     | "ECHILD" -> UnixLabels.ECHILD
     | "EDEADLK" -> UnixLabels.EDEADLK
     | "EDOM" -> UnixLabels.EDOM
     | "EEXIST" -> UnixLabels.EEXIST
     | "EFAULT" -> UnixLabels.EFAULT
     | "EFBIG" -> UnixLabels.EFBIG
     | "EINTR" -> UnixLabels.EINTR
     | "EINVAL" -> UnixLabels.EINVAL
     | "EIO" -> UnixLabels.EIO
     | "EISDIR" -> UnixLabels.EISDIR
     | "EMFILE" -> UnixLabels.EMFILE
     | "EMLINK" -> UnixLabels.EMLINK
     | "ENAMETOOLONG" -> UnixLabels.ENAMETOOLONG
     | "ENFILE" -> UnixLabels.ENFILE
     | "ENODEV" -> UnixLabels.ENODEV
     | "ENOENT" -> UnixLabels.ENOENT
     | "ENOEXEC" -> UnixLabels.ENOEXEC
     | "ENOLCK" -> UnixLabels.ENOLCK
     | "ENOMEM" -> UnixLabels.ENOMEM
     | "ENOSPC" -> UnixLabels.ENOSPC
     | "ENOSYS" -> UnixLabels.ENOSYS
     | "ENOTDIR" -> UnixLabels.ENOTDIR
     | "ENOTEMPTY" -> UnixLabels.ENOTEMPTY
     | "ENOTTY" -> UnixLabels.ENOTTY
     | "ENXIO" -> UnixLabels.ENXIO
     | "EPERM" -> UnixLabels.EPERM
     | "EPIPE" -> UnixLabels.EPIPE
     | "ERANGE" -> UnixLabels.ERANGE
     | "EROFS" -> UnixLabels.EROFS
     | "ESPIPE" -> UnixLabels.ESPIPE
     | "ESRCH" -> UnixLabels.ESRCH
     | "EXDEV" -> UnixLabels.EXDEV
     | "EWOULDBLOCK" -> UnixLabels.EWOULDBLOCK
     | "EINPROGRESS" -> UnixLabels.EINPROGRESS
     | "EALREADY" -> UnixLabels.EALREADY
     | "ENOTSOCK" -> UnixLabels.ENOTSOCK
     | "EDESTADDRREQ" -> UnixLabels.EDESTADDRREQ
     | "EMSGSIZE" -> UnixLabels.EMSGSIZE
     | "EPROTOTYPE" -> UnixLabels.EPROTOTYPE
     | "ENOPROTOOPT" -> UnixLabels.ENOPROTOOPT
     | "EPROTONOSUPPORT" -> UnixLabels.EPROTONOSUPPORT
     | "ESOCKTNOSUPPORT" -> UnixLabels.ESOCKTNOSUPPORT
     | "EOPNOTSUPP" -> UnixLabels.EOPNOTSUPP
     | "EPFNOSUPPORT" -> UnixLabels.EPFNOSUPPORT
     | "EAFNOSUPPORT" -> UnixLabels.EAFNOSUPPORT
     | "EADDRINUSE" -> UnixLabels.EADDRINUSE
     | "EADDRNOTAVAIL" -> UnixLabels.EADDRNOTAVAIL
     | "ENETDOWN" -> UnixLabels.ENETDOWN
     | "ENETUNREACH" -> UnixLabels.ENETUNREACH
     | "ENETRESET" -> UnixLabels.ENETRESET
     | "ECONNABORTED" -> UnixLabels.ECONNABORTED
     | "ECONNRESET" -> UnixLabels.ECONNRESET
     | "ENOBUFS" -> UnixLabels.ENOBUFS
     | "EISCONN" -> UnixLabels.EISCONN
     | "ENOTCONN" -> UnixLabels.ENOTCONN
     | "ESHUTDOWN" -> UnixLabels.ESHUTDOWN
     | "ETOOMANYREFS" -> UnixLabels.ETOOMANYREFS
     | "ETIMEDOUT" -> UnixLabels.ETIMEDOUT
     | "ECONNREFUSED" -> UnixLabels.ECONNREFUSED
     | "EHOSTDOWN" -> UnixLabels.EHOSTDOWN
     | "EHOSTUNREACH" -> UnixLabels.EHOSTUNREACH
     | "ELOOP" -> UnixLabels.ELOOP
     | "EOVERFLOW" -> UnixLabels.EOVERFLOW
     | _ -> raise (Yojson.Basic.Util.Type_error (build_msg "unix labels error", x)))
  | x ->
    raise (Yojson.Basic.Util.Type_error (build_msg "unix labels error", x))
(* Encode a [Unix.error] as a one-field JSON object whose key names the
   errno constructor; only [EUNKNOWNERR] carries an (integer) payload. *)
let of_unix_error =
  let tag name = `Assoc [name, `Null] in
  function
  | Unix.E2BIG -> tag "E2BIG"
  | Unix.EACCES -> tag "EACCES"
  | Unix.EAGAIN -> tag "EAGAIN"
  | Unix.EBADF -> tag "EBADF"
  | Unix.EBUSY -> tag "EBUSY"
  | Unix.ECHILD -> tag "ECHILD"
  | Unix.EDEADLK -> tag "EDEADLK"
  | Unix.EDOM -> tag "EDOM"
  | Unix.EEXIST -> tag "EEXIST"
  | Unix.EFAULT -> tag "EFAULT"
  | Unix.EFBIG -> tag "EFBIG"
  | Unix.EINTR -> tag "EINTR"
  | Unix.EINVAL -> tag "EINVAL"
  | Unix.EIO -> tag "EIO"
  | Unix.EISDIR -> tag "EISDIR"
  | Unix.EMFILE -> tag "EMFILE"
  | Unix.EMLINK -> tag "EMLINK"
  | Unix.ENAMETOOLONG -> tag "ENAMETOOLONG"
  | Unix.ENFILE -> tag "ENFILE"
  | Unix.ENODEV -> tag "ENODEV"
  | Unix.ENOENT -> tag "ENOENT"
  | Unix.ENOEXEC -> tag "ENOEXEC"
  | Unix.ENOLCK -> tag "ENOLCK"
  | Unix.ENOMEM -> tag "ENOMEM"
  | Unix.ENOSPC -> tag "ENOSPC"
  | Unix.ENOSYS -> tag "ENOSYS"
  | Unix.ENOTDIR -> tag "ENOTDIR"
  | Unix.ENOTEMPTY -> tag "ENOTEMPTY"
  | Unix.ENOTTY -> tag "ENOTTY"
  | Unix.ENXIO -> tag "ENXIO"
  | Unix.EPERM -> tag "EPERM"
  | Unix.EPIPE -> tag "EPIPE"
  | Unix.ERANGE -> tag "ERANGE"
  | Unix.EROFS -> tag "EROFS"
  | Unix.ESPIPE -> tag "ESPIPE"
  | Unix.ESRCH -> tag "ESRCH"
  | Unix.EXDEV -> tag "EXDEV"
  | Unix.EWOULDBLOCK -> tag "EWOULDBLOCK"
  | Unix.EINPROGRESS -> tag "EINPROGRESS"
  | Unix.EALREADY -> tag "EALREADY"
  | Unix.ENOTSOCK -> tag "ENOTSOCK"
  | Unix.EDESTADDRREQ -> tag "EDESTADDRREQ"
  | Unix.EMSGSIZE -> tag "EMSGSIZE"
  | Unix.EPROTOTYPE -> tag "EPROTOTYPE"
  | Unix.ENOPROTOOPT -> tag "ENOPROTOOPT"
  | Unix.EPROTONOSUPPORT -> tag "EPROTONOSUPPORT"
  | Unix.ESOCKTNOSUPPORT -> tag "ESOCKTNOSUPPORT"
  | Unix.EOPNOTSUPP -> tag "EOPNOTSUPP"
  | Unix.EPFNOSUPPORT -> tag "EPFNOSUPPORT"
  | Unix.EAFNOSUPPORT -> tag "EAFNOSUPPORT"
  | Unix.EADDRINUSE -> tag "EADDRINUSE"
  | Unix.EADDRNOTAVAIL -> tag "EADDRNOTAVAIL"
  | Unix.ENETDOWN -> tag "ENETDOWN"
  | Unix.ENETUNREACH -> tag "ENETUNREACH"
  | Unix.ENETRESET -> tag "ENETRESET"
  | Unix.ECONNABORTED -> tag "ECONNABORTED"
  | Unix.ECONNRESET -> tag "ECONNRESET"
  | Unix.ENOBUFS -> tag "ENOBUFS"
  | Unix.EISCONN -> tag "EISCONN"
  | Unix.ENOTCONN -> tag "ENOTCONN"
  | Unix.ESHUTDOWN -> tag "ESHUTDOWN"
  | Unix.ETOOMANYREFS -> tag "ETOOMANYREFS"
  | Unix.ETIMEDOUT -> tag "ETIMEDOUT"
  | Unix.ECONNREFUSED -> tag "ECONNREFUSED"
  | Unix.EHOSTDOWN -> tag "EHOSTDOWN"
  | Unix.EHOSTUNREACH -> tag "EHOSTUNREACH"
  | Unix.ELOOP -> tag "ELOOP"
  | Unix.EOVERFLOW -> tag "EOVERFLOW"
  | Unix.EUNKNOWNERR code -> `Assoc ["EUNKNOWNERR", of_int code]
(* Decode the JSON produced by [of_unix_error] back into a [Unix.error].
   ["EUNKNOWNERR"] carries an integer payload; every other constructor is
   encoded as a one-field object whose value is [`Null]. *)
let (to_unix_error : Yojson.Basic.t -> Unix.error) = function
  | `Assoc ["EUNKNOWNERR", code] -> Unix.EUNKNOWNERR (to_int code)
  | `Assoc [tag, `Null] as x ->
    (match tag with
     | "E2BIG" -> Unix.E2BIG
     | "EACCES" -> Unix.EACCES
     | "EAGAIN" -> Unix.EAGAIN
     | "EBADF" -> Unix.EBADF
     | "EBUSY" -> Unix.EBUSY
     | "ECHILD" -> Unix.ECHILD
     | "EDEADLK" -> Unix.EDEADLK
     | "EDOM" -> Unix.EDOM
     | "EEXIST" -> Unix.EEXIST
     | "EFAULT" -> Unix.EFAULT
     | "EFBIG" -> Unix.EFBIG
     | "EINTR" -> Unix.EINTR
     | "EINVAL" -> Unix.EINVAL
     | "EIO" -> Unix.EIO
     | "EISDIR" -> Unix.EISDIR
     | "EMFILE" -> Unix.EMFILE
     | "EMLINK" -> Unix.EMLINK
     | "ENAMETOOLONG" -> Unix.ENAMETOOLONG
     | "ENFILE" -> Unix.ENFILE
     | "ENODEV" -> Unix.ENODEV
     | "ENOENT" -> Unix.ENOENT
     | "ENOEXEC" -> Unix.ENOEXEC
     | "ENOLCK" -> Unix.ENOLCK
     | "ENOMEM" -> Unix.ENOMEM
     | "ENOSPC" -> Unix.ENOSPC
     | "ENOSYS" -> Unix.ENOSYS
     | "ENOTDIR" -> Unix.ENOTDIR
     | "ENOTEMPTY" -> Unix.ENOTEMPTY
     | "ENOTTY" -> Unix.ENOTTY
     | "ENXIO" -> Unix.ENXIO
     | "EPERM" -> Unix.EPERM
     | "EPIPE" -> Unix.EPIPE
     | "ERANGE" -> Unix.ERANGE
     | "EROFS" -> Unix.EROFS
     | "ESPIPE" -> Unix.ESPIPE
     | "ESRCH" -> Unix.ESRCH
     | "EXDEV" -> Unix.EXDEV
     | "EWOULDBLOCK" -> Unix.EWOULDBLOCK
     | "EINPROGRESS" -> Unix.EINPROGRESS
     | "EALREADY" -> Unix.EALREADY
     | "ENOTSOCK" -> Unix.ENOTSOCK
     | "EDESTADDRREQ" -> Unix.EDESTADDRREQ
     | "EMSGSIZE" -> Unix.EMSGSIZE
     | "EPROTOTYPE" -> Unix.EPROTOTYPE
     | "ENOPROTOOPT" -> Unix.ENOPROTOOPT
     | "EPROTONOSUPPORT" -> Unix.EPROTONOSUPPORT
     | "ESOCKTNOSUPPORT" -> Unix.ESOCKTNOSUPPORT
     | "EOPNOTSUPP" -> Unix.EOPNOTSUPP
     | "EPFNOSUPPORT" -> Unix.EPFNOSUPPORT
     | "EAFNOSUPPORT" -> Unix.EAFNOSUPPORT
     | "EADDRINUSE" -> Unix.EADDRINUSE
     | "EADDRNOTAVAIL" -> Unix.EADDRNOTAVAIL
     | "ENETDOWN" -> Unix.ENETDOWN
     | "ENETUNREACH" -> Unix.ENETUNREACH
     | "ENETRESET" -> Unix.ENETRESET
     | "ECONNABORTED" -> Unix.ECONNABORTED
     | "ECONNRESET" -> Unix.ECONNRESET
     | "ENOBUFS" -> Unix.ENOBUFS
     | "EISCONN" -> Unix.EISCONN
     | "ENOTCONN" -> Unix.ENOTCONN
     | "ESHUTDOWN" -> Unix.ESHUTDOWN
     | "ETOOMANYREFS" -> Unix.ETOOMANYREFS
     | "ETIMEDOUT" -> Unix.ETIMEDOUT
     | "ECONNREFUSED" -> Unix.ECONNREFUSED
     | "EHOSTDOWN" -> Unix.EHOSTDOWN
     | "EHOSTUNREACH" -> Unix.EHOSTUNREACH
     | "ELOOP" -> Unix.ELOOP
     | "EOVERFLOW" -> Unix.EOVERFLOW
     | _ -> raise (Yojson.Basic.Util.Type_error (build_msg "unix error", x)))
  | x ->
    raise (Yojson.Basic.Util.Type_error (build_msg "unix error", x))
|
a749feaf43ad78bf98f527a3f8f466b83564c236b8ae1a9f21344637a3e34e1f | GianlucaGuarini/fortytwo | MultiselectExample.hs | module MultiselectExample where
import FortyTwo (multiselect, multiselectWithDefault)

-- | Run two interactive multi-select prompts; the first answer is
-- discarded and the answers to the second (pre-selected) prompt are
-- returned.
main :: IO [String]
main = do
  _ <- multiselect "Which kind of sports do you like?" ["Soccer", "Tennis", "Golf"]
  multiselectWithDefault
    "What are your \nfavourite books?"
    ["1984", "Multi\nline\nentry", "Moby Dick", "The Hitchhiker's Guide\n to the Galaxy"]
    ["1984", "The Hitchhiker's Guide to the Galaxy"]
| null | https://raw.githubusercontent.com/GianlucaGuarini/fortytwo/6d8d801f85e1dd993ee4cae6490a638872a14b47/demo/MultiselectExample.hs | haskell | module MultiselectExample where
import FortyTwo (multiselect, multiselectWithDefault)
main :: IO [String]
main = do
multiselect "Which kind of sports do you like?" ["Soccer", "Tennis", "Golf"]
multiselectWithDefault
"What are your \nfavourite books?"
["1984", "Multi\nline\nentry", "Moby Dick", "The Hitchhiker's Guide\n to the Galaxy"]
["1984", "The Hitchhiker's Guide to the Galaxy"]
| |
f163f1e5aa508941cd771deedcc97139bc52a3a524320c95620d6fa663658be0 | realworldocaml/book | response_file.ml | open Import
(* Whether (and how) a program accepts its arguments via a response file. *)
type t =
  | Not_supported
  | Zero_terminated_strings of string

(* This mutable table is safe under the assumption that a program path
   always points to a binary with the same version.  While the assumption
   seems likely to hold, it would be better to avoid the need for it to
   simplify reasoning. *)
let registry = Table.create (module Path) 128

(* Look up the registered convention for [prog]; unregistered programs
   default to [Not_supported]. *)
let get ~prog =
  match Table.find registry prog with
  | Some t -> t
  | None -> Not_supported

let set ~prog t = Table.set registry prog t
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/dune_/src/dune_engine/response_file.ml | ocaml | This mutable table is safe under the assumption that a program path always
points to the binary with the same version. While the assumption seems likely
to hold, it would be better to avoid the need for it to simplify
reasoning. | open Import
type t =
| Not_supported
| Zero_terminated_strings of string
let registry = Table.create (module Path) 128
let get ~prog = Option.value (Table.find registry prog) ~default:Not_supported
let set ~prog t = Table.set registry prog t
|
14d94cdcb171f91922d5835894e207e11d095d9ded07bbec19832f3d344856b3 | ucsd-progsys/dsolve | vec_all.ml | type 'a t =
Empty
| Node of 'a t * int * 'a * 'a t * int * int
let empty = Empty
(* Tree height in O(1), read from the cached field. *)
let height = function
  | Empty -> 0
  | Node (_, _, _, _, _, h) -> h
(* Number of elements in O(1), from the cached subtree counts. *)
let length = function
  | Empty -> 0
  | Node (_, cl, _, _, cr, _) -> cl + cr + 1
(* Smart constructor: rebuilds a node, recomputing the cached subtree
   sizes and the height from the children. *)
let makenode l d r =
  let info = function
    | Empty -> (0, 0)
    | Node (_, cl, _, _, cr, h) -> (h, cl + cr + 1)
  in
  let (hl, cl) = info l in
  let (hr, cr) = info r in
  Node (l, cl, d, r, cr, 1 + max hl hr)
(* [create d n] builds a balanced vector of [n] copies of [d].
   The halves differ in size by at most one, so the balance check below
   always passes; it is deferred to runtime on purpose (dsolve benchmark). *)
let rec create d n =
  if n = 0 then Empty else
  let ml = n / 2 in
  let mr = n - ml - 1 in
  let l = create d ml in
  let r = create d mr in (* defer this particular property to runtime *)
  (* [||] replaces the deprecated [or] keyword *)
  if height l >= height r + 3 || height l <= height r - 3 then
    assert false
  else
    makenode l d r
(* [bal l d r] rebuilds a node from subtrees whose heights differ by at
   most 3, restoring balance with at most one single or double rotation.
   The [Empty] arms (restored here: the pattern had been garbled to bare
   [invalid_arg " Vec.bal "] text) are unreachable when that precondition
   holds. *)
let bal l d r =
  let hl = match l with Empty -> 0 | Node (_, _, _, _, _, h) -> h in
  let hr = match r with Empty -> 0 | Node (_, _, _, _, _, h) -> h in
  if hl > hr + 2 then begin
    (* left-heavy: single or double right rotation *)
    match l with
    | Empty -> invalid_arg "Vec.bal"
    | Node (ll, _, ld, lr, _, _) ->
      if height ll >= height lr then
        makenode ll ld (makenode lr d r)
      else begin
        match lr with
        | Empty -> invalid_arg "Vec.bal"
        | Node (lrl, _, lrd, lrr, _, _) ->
          makenode (makenode ll ld lrl) lrd (makenode lrr d r)
      end
  end else if hr > hl + 2 then begin
    (* right-heavy: single or double left rotation *)
    match r with
    | Empty -> invalid_arg "Vec.bal"
    | Node (rl, _, rd, rr, _, _) ->
      if height rr >= height rl then
        makenode (makenode l d rl) rd rr
      else begin
        match rl with
        | Empty -> invalid_arg "Vec.bal"
        | Node (rll, _, rld, rlr, _, _) ->
          makenode (makenode l d rll) rld (makenode rlr rd rr)
      end
  end
  else makenode l d r
(* [recbal l d r] joins subtrees of arbitrarily different heights,
   descending the taller side and rebalancing on the way back up.
   The [Empty] arms (restored; they had been garbled) are unreachable
   because the matched side has height > 2. *)
let rec recbal l d r =
  let hl = match l with Empty -> 0 | Node (_, _, _, _, _, h) -> h in
  let hr = match r with Empty -> 0 | Node (_, _, _, _, _, h) -> h in
  if hl > hr + 2 then begin
    match l with
    | Empty -> invalid_arg "Vec.bal"
    | Node (ll, _, ld, lr, _, _) ->
      if height ll >= height lr then
        bal ll ld (recbal lr d r)
      else begin
        match lr with
        | Empty -> invalid_arg "Vec.bal"
        | Node (lrl, _, lrd, lrr, _, _) ->
          let nr = recbal lrr d r in
          if height nr <= height lr - 3 then
            makenode ll ld (bal lrl lrd nr)
          else
            makenode (makenode ll ld lrl) lrd nr
      end
  end else if hr > hl + 2 then begin
    match r with
    | Empty -> invalid_arg "Vec.bal"
    | Node (rl, _, rd, rr, _, _) ->
      if height rr >= height rl then
        bal (recbal l d rl) rd rr
      else begin
        match rl with
        | Empty -> invalid_arg "Vec.bal"
        | Node (rll, _, rld, rlr, _, _) ->
          let nl = recbal l d rll in
          if height nl <= height rl - 3 then
            makenode (bal nl rld rlr) rd rr
          else
            makenode nl rld (makenode rlr rd rr)
      end
  end
  else makenode l d r
(* [true] iff the vector holds no elements. *)
let is_empty = function
  | Empty -> true
  | Node _ -> false
(* One-element vector; [makenode] yields Node (Empty, 0, d, Empty, 0, 1). *)
let singleton d = makenode Empty d Empty
(* [get i t] returns the element at 0-based in-order index [i].
   Raises [Vec_index_out_of_bounds] when [i] is invalid.
   (Restores the [Empty ->] pattern that had been garbled away.) *)
let rec get i t =
  match t with
  | Empty -> raise Vec_index_out_of_bounds
  | Node (l, cl, d, r, _, _) ->
    if i < cl then get i l
    else if i > cl then get (i - cl - 1) r
    else d
(* [set i d t] returns a vector equal to [t] with index [i] replaced by [d].
   Raises [Vec_index_out_of_bounds] when [i] is invalid.
   (Restores the [Empty ->] pattern that had been garbled away.) *)
let rec set i d t =
  match t with
  | Empty -> raise Vec_index_out_of_bounds
  | Node (l, cl, dd, r, _, _) ->
    if i < cl then makenode (set i d l) dd r
    else if i > cl then makenode l dd (set (i - cl - 1) d r)
    else makenode l d r
(* Append [d] at the right end, rebalancing along the right spine. *)
let rec append d = function
  | Empty -> singleton d
  | Node (l, _, dd, r, _, _) -> bal l dd (append d r)
(* [setappend d0 d i v]: when [i] is in bounds, overwrite slot [i] with
   [d]; otherwise pad the vector with copies of [d0] up to index [i - 1]
   and append [d] at index [i]. *)
let setappend d0 d i v =
  let n = length v in
  if i < n then set i d v
  else
    let rec pad k v =
      if k = n - 1 then v else append d0 (pad (k - 1) v)
    in
    append d (pad (i - 1) v)
(* First (leftmost, i.e. index-0) element of the vector.
   Raises [Vec_index_out_of_bounds] on an empty vector.
   (Restores the [Empty ->] pattern that had been garbled away.) *)
let rec leftmost t =
  match t with
  | Empty -> raise Vec_index_out_of_bounds
  | Node (l, _, d, _, _, _) ->
    (match l with
     | Empty -> d
     | Node _ -> leftmost l)
(* Vector with its first element removed; the [Empty] case is a
   caller-contract violation and trips the assertion. *)
let rec remove_leftmost t =
  match t with
  | Empty ->
    assert (1 = 0);
    assert false (*invalid_arg "Vec.remove_min_elt"*)
  | Node (Empty, _, _, r, _, _) -> r
  | Node ((Node _ as l), _, d, r, _, _) -> bal (remove_leftmost l) d r
(* Join two subtrees whose heights differ by at most 2. *)
let merge t1 t2 =
  match t1, t2 with
  | Empty, t -> t
  | t, Empty -> t
  | Node _, Node _ -> bal t1 (leftmost t2) (remove_leftmost t2)
(* Join two subtrees of arbitrary heights, via [recbal]. *)
let concat t1 t2 =
  match t1, t2 with
  | Empty, t -> t
  | t, Empty -> t
  | Node _, Node _ -> recbal t1 (leftmost t2) (remove_leftmost t2)
(* [pop i t] returns element [i] paired with the vector with it removed.
   Raises [Vec_index_out_of_bounds] when [i] is invalid.
   (Restores the [Empty ->] pattern that had been garbled away.) *)
let rec pop i t =
  match t with
  | Empty -> raise Vec_index_out_of_bounds
  | Node (l, cl, d, r, _, _) ->
    if i < cl then
      let (e, v) = pop i l in
      (e, bal v d r)
    else if i > cl then
      let (e, v) = pop (i - cl - 1) r in
      (e, bal l d v)
    else (d, merge l r)
(* [remove i t] returns the vector with element [i] deleted.
   Raises [Vec_index_out_of_bounds] when [i] is invalid.
   (Restores the [Empty ->] pattern that had been garbled away.) *)
let rec remove i t =
  match t with
  | Empty -> raise Vec_index_out_of_bounds
  | Node (l, cl, d, r, _, _) ->
    if i < cl then bal (remove i l) d r
    else if i > cl then bal l d (remove (i - cl - 1) r)
    else merge l r
(* [insert i d t] inserts [d] before position [i] (existing elements at
   [i..] shift right).  Raises [Vec_index_out_of_bounds] when [i] is
   invalid.  (Restores the [else] before the raise, which had been
   garbled away.) *)
let rec insert i d t =
  match t with
  | Empty ->
    if i = 0 then Node (Empty, 0, d, Empty, 0, 1)
    else raise Vec_index_out_of_bounds
  | Node (l, cl, dd, r, _, _) ->
    if i < cl then bal (insert i d l) dd r
    else if i > cl then bal l dd (insert (i - cl - 1) d r)
    else bal l d (insert 0 dd r)
(* [sub i j t] is the sub-vector of indices [i, j) (clamped to the valid
   range; an empty interval yields [Empty]).  Each recursive case keeps
   only the part of the interval that overlaps the left child, the root
   element (local index [cl]), or the right child. *)
let rec sub i j t =
  match t with
    Empty -> Empty
  | Node (l, cl, dd, r, cr, _) ->
      if i >= j then Empty
      else if i <= 0 && j >= cl + cr + 1 then t  (* interval covers all of [t] *)
      else begin
        if j <= cl then sub i j l
        else if j = cl + 1 then append dd (sub i cl l)
        else if i = cl then insert 0 dd (sub 0 (j - cl - 1) r)
        else if i > cl then sub (i - cl - 1) (j - cl - 1) r
        else begin
          (* interval straddles the root: take both sides and re-join *)
          let ll = sub i cl l in
          let rr = sub 0 (j - cl - 1) r in
          recbal ll dd rr
        end
      end
(* [iteri t f] applies [f index element] to every element in index order;
   [k] is the index offset of the subtree being visited. *)
let rec iteri t f =
  let rec offsetiteri t' k =
    match t' with
      Empty -> ()
      | Node(l, cl, d, r, _, _) ->
        offsetiteri l k;
        f (k + cl) d;
        offsetiteri r (k + cl + 1)
  in offsetiteri t 0
(* [rangeiteri i j t f] applies [f index element] to the elements whose
   indices lie in [i, j), in increasing order.  [k] is the absolute index
   offset; [i'] and [j'] are the interval expressed in local indices. *)
let rangeiteri i j t f =
  let rec offsetrangeiteri k i' j' t' =
    match t' with
      Empty -> ()
      | Node(l, cl, d, r, cr, _) ->
        if i' < j' then begin
          if i' < cl && j' > 0 then offsetrangeiteri k i' j' l else ();
          if i' <= cl && j' > cl then f (k + cl) d else ();
          if j' > cl + 1 && i' <= cl + cr + 1 then offsetrangeiteri (k + cl + 1) (i' - cl - 1) (j' - cl - 1) r else ()
        end else ()
  in offsetrangeiteri 0 i j t
(* [revrangeiteri i j t f] is [rangeiteri] in decreasing index order:
   right child first, then the root element, then the left child. *)
let revrangeiteri i j t f =
  let rec offsetrevrangeiteri k i j t' =
    match t' with
      Empty -> ()
      | Node(l, cl, d, r, cr, _) ->
        if i < j then begin
          if j > cl + 1 && i <= cl + cr + 1
          then offsetrevrangeiteri (k + cl + 1) (i - cl - 1) (j - cl - 1) r else ();
          if i <= cl && j > cl then f (k + cl) d else ();
          if i < cl && j > 0 then offsetrevrangeiteri k i j l else ()
        end else ()
  in offsetrevrangeiteri 0 i j t
(* [mapi t f] rebuilds the vector with each element replaced by
   [f index element]; the cached counts and heights are preserved
   because the shape does not change. *)
let mapi t f =
  let rec offsetmapi k t' =
    match t' with
      Empty -> Empty
      | Node(l, cl, d, r, cr, h) ->
        Node(offsetmapi k l, cl, f (k + cl) d, offsetmapi (k + cl + 1) r, cr, h)
  in offsetmapi 0 t
(* [foldi t f accu] folds [f index element acc] over the elements in
   increasing index order (left child, root, right child). *)
let foldi t f accu =
  let rec offsetfoldi k t' accu =
    match t' with
      Empty -> accu
      | Node(l, cl, d, r, _, _) ->
        offsetfoldi (k + cl + 1) r (f (k + cl) d (offsetfoldi k l accu))
  in offsetfoldi 0 t accu
(* [rangefoldi i j t f accu] folds [f] over the elements whose indices
   lie in [i, j), in increasing order; [k] is the absolute index offset
   of the current subtree. *)
let rangefoldi i j t f accu =
  let rec offsetrangefoldi k i j t' accu =
    match t' with
      Empty -> accu
      | Node (l, cl, d, r, cr, _) ->
        if i >= j then accu
        else begin
          let al = if i < cl && j > 0 then offsetrangefoldi k i j l accu else accu in
          let ad = if i <= cl && j > cl then f (cl + k) d al else al in
          if j > cl + 1 && i <= cl + cr + 1
          then offsetrangefoldi (k + cl + 1) (i - cl - 1) (j - cl - 1) r ad
          else ad
        end
  in offsetrangefoldi 0 i j t accu
(* [revfoldi t f accu] folds [f index element acc] over the elements in
   decreasing index order (right child, root, left child). *)
let revfoldi t f accu =
  let rec offsetrevfoldi k t' accu =
    match t' with
      Empty -> accu
      | Node(l, cl, d, r, _, _) ->
        offsetrevfoldi k l (f (k + cl) d (offsetrevfoldi (k + cl + 1) r accu))
  in offsetrevfoldi 0 t accu
(* [revrangefoldi i j t f accu] is [rangefoldi] in decreasing index
   order: the right child is folded first, then the root element, then
   the left child. *)
let revrangefoldi i j t f accu =
  let rec offsetrevrangefoldi k i j t' accu =
    match t' with
      Empty -> accu
      | Node (l, cl, d, r, cr, _) ->
        if i >= j then accu
        else begin
          let ar = if j > cl + 1 && i <= cl + cr + 1
          then offsetrevrangefoldi (k + cl + 1) (i - cl - 1) (j - cl - 1) r accu
          else accu
          in
          let ad = if i <= cl && j > cl then f (cl + k) d ar else ar in
          if i < cl && j > 0 then offsetrevrangefoldi k i j l ad else ad
        end
  in offsetrevrangefoldi 0 i j t accu
(* [to_array t] copies the vector into a fresh array in index order.
   The root element [d] is only used to seed [Array.make]; every slot is
   overwritten by [fill], which writes each node's element at its
   absolute index. *)
let rec to_array t =
  match t with
    Empty -> [||]
  | Node (l, cl, d, r, cr, _) ->
    begin
      let n = (cl + cr + 1) in
      let a = Array.make n d in
      let rec fill k t' =
        match t' with
          Empty -> a
        | Node (l, cl, d, r, _, _) -> begin
            ignore (fill k l);
            Array.set a (k + cl) d;
            fill (k + cl + 1) r
          end
      in fill 0 t
    end
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/proceedings-demos/ml/vec_all.ml | ocaml | defer this particular property to runtime
invalid_arg "Vec.remove_min_elt" | type 'a t =
Empty
| Node of 'a t * int * 'a * 'a t * int * int
let empty = Empty
let height t =
match t with
Empty -> 0
| Node(_, _, _, _, _, h) -> h
let length t =
match t with
Empty -> 0
| Node (_, cl, _, _, cr, _) -> 1 + cl + cr
let makenode l d r =
let (hl, cl) = match l with
Empty -> (0,0)
| Node(_,lcl,_,_,lcr,h) -> (h, lcl + lcr + 1) in
let (hr, cr) = match r with
Empty -> (0,0)
| Node(_,rcl,_,_,rcr,h) -> (h, rcl + rcr + 1) in
Node(l, cl, d, r, cr, (if hl >= hr then hl + 1 else hr + 1))
let rec create d n =
if n = 0 then Empty else
let ml = n / 2 in
let mr = n - ml - 1 in
let l = create d ml in
if height l >= height r + 3 or height l <= height r - 3 then
assert false
else
makenode l d r
let bal l d r =
let hl =
match l with
Empty -> 0
| Node(_,_,_,_,_,h) -> h in
let hr =
match r with
Empty -> 0
| Node(_,_,_,_,_,h) -> h in
if hl > hr + 2 then begin
match l with
invalid_arg " Vec.bal "
| Node(ll, lll, ld, lr, llr, h) ->
if height ll >= height lr then
makenode ll ld (makenode lr d r)
else begin
match lr with
invalid_arg " Vec.bal "
| Node(lrl, llrl, lrd, lrr, llrr, h) ->
makenode (makenode ll ld lrl) lrd (makenode lrr d r)
end
end else if hr > hl + 2 then begin
match r with
invalid_arg " Vec.bal "
| Node(rl, lrl, rd, rr, lrr, h) ->
if height rr >= height rl then
makenode (makenode l d rl) rd rr
else begin
match rl with
invalid_arg " Vec.bal "
| Node(rll, lrll, rld, rlr, lrlr, h) ->
makenode (makenode l d rll) rld (makenode rlr rd rr)
end
end
else makenode l d r
let rec recbal l d r =
let hl = match l with Empty -> 0 | Node(_,_,_,_,_,h) -> h in
let hr = match r with Empty -> 0 | Node(_,_,_,_,_,h) -> h in
if hl > hr + 2 then begin
match l with
invalid_arg " Vec.bal "
| Node(ll, _, ld, lr, _, h) ->
if height ll >= height lr then
bal ll ld (recbal lr d r)
else begin
match lr with
invalid_arg " Vec.bal "
| Node(lrl, _, lrd, lrr, _, h) ->
let nr = recbal lrr d r in
if height nr <= height lr - 3 then
makenode ll ld (bal lrl lrd nr)
else
makenode (makenode ll ld lrl) lrd nr
end
end else if hr > hl + 2 then begin
match r with
invalid_arg " Vec.bal "
| Node(rl, _, rd, rr, _, h) ->
if height rr >= height rl then
bal (recbal l d rl) rd rr
else begin
match rl with
invalid_arg " Vec.bal "
| Node(rll, _, rld, rlr, _, h) ->
let nl = recbal l d rll in
if height nl <= height rl - 3 then
makenode (bal nl rld rlr) rd rr
else
makenode nl rld (makenode rlr rd rr)
end
end
else makenode l d r
let is_empty t =
match t with
| Empty -> true
| Node(_, _, _, _, _, _) -> false
let singleton d = Node (Empty, 0, d, Empty, 0, 1)
let rec get i t =
match t with
raise Vec_index_out_of_bounds
| Node (l, cl, d, r, cr, _) ->
if i < cl then get i l
else if i > cl then get (i - cl - 1) r
else d
let rec set i d t =
match t with
raise Vec_index_out_of_bounds
| Node (l, cl, dd, r, cr, _) ->
if i < cl then makenode (set i d l) dd r
else if i > cl then makenode l dd (set (i - cl - 1) d r)
else makenode l d r
let rec append d t =
match t with
Empty -> Node (Empty, 0, d, Empty, 0, 1)
| Node (l, ll, dd, r, lr, h) ->
bal l dd (append d r)
let setappend d0 d i v =
let l = length v in
if l > i then set i d v
else begin
let rec app_rec n v =
if n = (l-1) then v else append d0 (app_rec (n-1) v) in
append d (app_rec (i-1) v)
end
let rec leftmost t =
match t with
raise Vec_index_out_of_bounds
| Node(l, ll', d, r, lr', h) ->
match l with
| Empty -> d
| Node(ll, lll, d', lr, llr, h') -> leftmost l
let rec remove_leftmost t =
match t with
| Node(l, ll, d, r, lr, h) ->
match l with
| Empty -> r
| Node(ll, lll, ld, lr, llr, h') -> bal (remove_leftmost l) d r
let merge t1 t2 =
match t1 with
| Empty -> t2
| Node(_, _, _, _, _, _) ->
match t2 with
| Empty -> t1
| Node(_, _, _, _, _, _) ->
let d = leftmost t2 in
bal t1 d (remove_leftmost t2)
let concat t1 t2 =
match t1 with
| Empty -> t2
| Node(_, _, _, _, _, _) ->
match t2 with
| Empty -> t1
| Node(_, _, _, _, _, _) ->
let d = leftmost t2 in
recbal t1 d (remove_leftmost t2)
let rec pop i t =
match t with
raise Vec_index_out_of_bounds
| Node(l, cl, d, r, cr, h) ->
if i < cl then
let (e, v) = pop i l in
(e, bal v d r)
else if i > cl then
let (e, v) = pop (i - cl - 1) r in
(e, bal l d v)
else (d, merge l r)
let rec remove i t =
match t with
raise Vec_index_out_of_bounds
| Node(l, cl, d, r, cr, h) ->
if i < cl then
bal (remove i l) d r
else if i > cl then
bal l d (remove (i - cl - 1) r)
else merge l r
let rec insert i d t =
match t with
Empty -> begin
if i = 0
then Node (Empty, 0, d, Empty, 0, 1)
raise Vec_index_out_of_bounds
end
| Node(l, cl, dd, r, cr, h) ->
if i < cl then bal (insert i d l) dd r
else if i > cl then bal l dd (insert (i - cl - 1) d r)
else bal l d (insert 0 dd r)
let rec sub i j t =
match t with
Empty -> Empty
| Node (l, cl, dd, r, cr, _) ->
if i >= j then Empty
else if i <= 0 && j >= cl + cr + 1 then t
else begin
if j <= cl then sub i j l
else if j = cl + 1 then append dd (sub i cl l)
else if i = cl then insert 0 dd (sub 0 (j - cl - 1) r)
else if i > cl then sub (i - cl - 1) (j - cl - 1) r
else begin
let ll = sub i cl l in
let rr = sub 0 (j - cl - 1) r in
recbal ll dd rr
end
end
let rec iteri t f =
let rec offsetiteri t' k =
match t' with
Empty -> ()
| Node(l, cl, d, r, _, _) ->
offsetiteri l k;
f (k + cl) d;
offsetiteri r (k + cl + 1)
in offsetiteri t 0
let rangeiteri i j t f =
let rec offsetrangeiteri k i' j' t' =
match t' with
Empty -> ()
| Node(l, cl, d, r, cr, _) ->
if i' < j' then begin
if i' < cl && j' > 0 then offsetrangeiteri k i' j' l else ();
if i' <= cl && j' > cl then f (k + cl) d else ();
if j' > cl + 1 && i' <= cl + cr + 1 then offsetrangeiteri (k + cl + 1) (i' - cl - 1) (j' - cl - 1) r else ()
end else ()
in offsetrangeiteri 0 i j t
let revrangeiteri i j t f =
let rec offsetrevrangeiteri k i j t' =
match t' with
Empty -> ()
| Node(l, cl, d, r, cr, _) ->
if i < j then begin
if j > cl + 1 && i <= cl + cr + 1
then offsetrevrangeiteri (k + cl + 1) (i - cl - 1) (j - cl - 1) r else ();
if i <= cl && j > cl then f (k + cl) d else ();
if i < cl && j > 0 then offsetrevrangeiteri k i j l else ()
end else ()
in offsetrevrangeiteri 0 i j t
let mapi t f =
let rec offsetmapi k t' =
match t' with
Empty -> Empty
| Node(l, cl, d, r, cr, h) ->
Node(offsetmapi k l, cl, f (k + cl) d, offsetmapi (k + cl + 1) r, cr, h)
in offsetmapi 0 t
let foldi t f accu =
let rec offsetfoldi k t' accu =
match t' with
Empty -> accu
| Node(l, cl, d, r, _, _) ->
offsetfoldi (k + cl + 1) r (f (k + cl) d (offsetfoldi k l accu))
in offsetfoldi 0 t accu
let rangefoldi i j t f accu =
let rec offsetrangefoldi k i j t' accu =
match t' with
Empty -> accu
| Node (l, cl, d, r, cr, _) ->
if i >= j then accu
else begin
let al = if i < cl && j > 0 then offsetrangefoldi k i j l accu else accu in
let ad = if i <= cl && j > cl then f (cl + k) d al else al in
if j > cl + 1 && i <= cl + cr + 1
then offsetrangefoldi (k + cl + 1) (i - cl - 1) (j - cl - 1) r ad
else ad
end
in offsetrangefoldi 0 i j t accu
let revfoldi t f accu =
let rec offsetrevfoldi k t' accu =
match t' with
Empty -> accu
| Node(l, cl, d, r, _, _) ->
offsetrevfoldi k l (f (k + cl) d (offsetrevfoldi (k + cl + 1) r accu))
in offsetrevfoldi 0 t accu
let revrangefoldi i j t f accu =
let rec offsetrevrangefoldi k i j t' accu =
match t' with
Empty -> accu
| Node (l, cl, d, r, cr, _) ->
if i >= j then accu
else begin
let ar = if j > cl + 1 && i <= cl + cr + 1
then offsetrevrangefoldi (k + cl + 1) (i - cl - 1) (j - cl - 1) r accu
else accu
in
let ad = if i <= cl && j > cl then f (cl + k) d ar else ar in
if i < cl && j > 0 then offsetrevrangefoldi k i j l ad else ad
end
in offsetrevrangefoldi 0 i j t accu
let rec to_array t =
match t with
Empty -> [||]
| Node (l, cl, d, r, cr, _) ->
begin
let n = (cl + cr + 1) in
let a = Array.make n d in
let rec fill k t' =
match t' with
Empty -> a
| Node (l, cl, d, r, _, _) -> begin
ignore (fill k l);
Array.set a (k + cl) d;
fill (k + cl + 1) r
end
in fill 0 t
end
|
86a845bccd3af81c2ed55d520a878bc01bbe8406cc603e9c7d17baca8c78c4de | josefs/Gradualizer | opaque_fail.erl | -module(opaque_fail).
%% Gradualizer regression fixture (should_fail suite): the first clause
%% returns the opaque user_types:my_opaque() value where the spec demands
%% integer(), so the type checker is expected to reject this module.
-export([use_external/2]).
-spec use_external(user_types:my_opaque(), integer() | undefined) -> integer().
use_external(I, undefined) -> I;
use_external(_, I) -> I.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/208f5816b0157f282212fc036ba7560f0822f9fc/test/should_fail/opaque_fail.erl | erlang | -module(opaque_fail).
-export([use_external/2]).
-spec use_external(user_types:my_opaque(), integer() | undefined) -> integer().
use_external(I, undefined) -> I;
use_external(_, I) -> I.
| |
7acf2427819fa7f87a98180c803a1580ab4bde99bfb59eb8a9a234d789aef48f | reanimate/reanimate | doc_oscillateS.hs | #!/usr/bin/env stack
-- stack runghc --package reanimate
module Main (main) where

import Reanimate
import Reanimate.Builtin.Documentation

-- | Render the documentation progress scene with the 'oscillateS'
-- signal applied to its animation.
main :: IO ()
main = reanimate (docEnv (signalA oscillateS drawProgress))
| null | https://raw.githubusercontent.com/reanimate/reanimate/5ea023980ff7f488934d40593cc5069f5fd038b0/examples/doc_oscillateS.hs | haskell | stack runghc --package reanimate | #!/usr/bin/env stack
module Main(main) where
import Reanimate
import Reanimate.Builtin.Documentation
main :: IO ()
main = reanimate $ docEnv $ signalA oscillateS drawProgress
|
e818a4369b127df606de53ab1309c37ee29a548df8702dec48725ae56d969157 | nuprl/gradual-typing-performance | array-struct-build-array.rkt | #lang racket/base
(provide build-array)
;; -----------------------------------------------------------------------------
(require
"data-array.rkt"
(only-in "array-utils-check-array-shape.rkt"
check-array-shape)
(only-in "array-struct-array-default-strict.rkt"
array-default-strict!)
(only-in "array-struct-unsafe-build-array.rkt"
unsafe-build-array))
;; =============================================================================
;; (build-array ds proc) : validate the shape vector ds, build a lazy
;; array whose element at index vector js is (proc js) — js is handed to
;; proc as an immutable vector — then apply the default strictness
;; policy and return the array.
(define (build-array ds proc)
  (define shape
    (check-array-shape
     ds
     (lambda () (raise-argument-error 'build-array "(Vectorof Integer)" 0 ds proc))))
  (define arr
    (unsafe-build-array shape
                        (lambda (js) (proc (vector->immutable-vector js)))))
  (array-default-strict! arr)
  arr)
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/micro/synth/untyped/array-struct-build-array.rkt | racket | -----------------------------------------------------------------------------
============================================================================= | #lang racket/base
(provide build-array)
(require
"data-array.rkt"
(only-in "array-utils-check-array-shape.rkt"
check-array-shape)
(only-in "array-struct-array-default-strict.rkt"
array-default-strict!)
(only-in "array-struct-unsafe-build-array.rkt"
unsafe-build-array))
(define (build-array ds proc)
(let ([ds (check-array-shape
ds (lambda () (raise-argument-error 'build-array "(Vectorof Integer)" 0 ds proc)))])
(define arr
(unsafe-build-array ds (lambda (js)
(proc (vector->immutable-vector js)))))
(array-default-strict! arr)
arr))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.