_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
7735e79c65a2f53baada667d7d6c2529b5dfbd47c46ccd6c75054a203136a2b8 | tezos/tezos-mirror | skip_list_costs.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
module S = Saturation_repr
(* Inferred from model model_next in file skip_list_benchmarks.ml *)
fun size - > ( 19.813850951 * ( log2 ( 1 + size ) ) )
let model_next ~length =
let open S.Syntax in
let length = S.safe_z length in
S.safe_int 20 * log2 (S.safe_int 1 + length)
(* Inferred from model proto/alpha/skip_list/hash_cell in file
skip_list_benchmarks.ml *)
fun size - > ( 242.202299543 + ( 56.9693504823 * size ) )
let model_hash_cell backpointers_count =
let open S.Syntax in
S.safe_int 250 + (S.safe_int 57 * backpointers_count)
let model_hash_cell_computed_backpointers_count ~index =
model_hash_cell (S.Syntax.log2 (S.safe_z index))
let model_hash_cell ~backpointers_count =
model_hash_cell (S.safe_int backpointers_count)
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/c50423992947f9d3bf33f91ec8000f3a0a70bc2d/src/proto_016_PtMumbai/lib_protocol/skip_list_costs.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Inferred from model model_next in file skip_list_benchmarks.ml
Inferred from model proto/alpha/skip_list/hash_cell in file
skip_list_benchmarks.ml | Copyright ( c ) 2022 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
module S = Saturation_repr
fun size - > ( 19.813850951 * ( log2 ( 1 + size ) ) )
let model_next ~length =
let open S.Syntax in
let length = S.safe_z length in
S.safe_int 20 * log2 (S.safe_int 1 + length)
fun size - > ( 242.202299543 + ( 56.9693504823 * size ) )
let model_hash_cell backpointers_count =
let open S.Syntax in
S.safe_int 250 + (S.safe_int 57 * backpointers_count)
let model_hash_cell_computed_backpointers_count ~index =
model_hash_cell (S.Syntax.log2 (S.safe_z index))
let model_hash_cell ~backpointers_count =
model_hash_cell (S.safe_int backpointers_count)
|
7222690e2e2445cc8ed9b133f5465260d8436831a0ab53e2d0acd3c725274a0d | Mathnerd314/stroscot | Wait.hs | # LANGUAGE DeriveFunctor #
-- | A bit like 'Fence', but not thread safe and optimised for avoiding taking the fence
module General.Wait(
Wait(Now,Later), runWait, quickly, fromLater,
firstJustWaitUnordered, firstLeftWaitUnordered
) where
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.IORef.Extra
import Data.List.Extra
import Data.Primitive.Array
import GHC.Exts(RealWorld)
import Control.Monad.Fail
import Prelude
runWait :: Monad m => Wait m a -> m (Wait m a)
runWait (Lift x) = runWait =<< x
runWait x = pure x
fromLater :: Monad m => Wait m a -> (a -> m ()) -> m ()
fromLater (Lift x) f = do x <- x; fromLater x f
fromLater (Now x) f = f x
fromLater (Later x) f = x f
quickly :: Functor m => m a -> Wait m a
quickly = Lift . fmap Now
data Wait m a = Now a
| Lift (m (Wait m a))
| Later ((a -> m ()) -> m ())
deriving Functor
instance (Monad m, Applicative m) => Applicative (Wait m) where
pure = Now
Now x <*> y = x <$> y
Lift x <*> y = Lift $ (<*> y) <$> x
Later x <*> Now y = Later $ \c -> x $ \x -> c $ x y
-- Note: We pull the Lift from the right BEFORE the Later, to enable parallelism
Later x <*> Lift y = Lift $ do y <- y; pure $ Later x <*> y
Later x <*> Later y = Later $ \c -> x $ \x -> y $ \y -> c $ x y
instance (Monad m, Applicative m) => Monad (Wait m) where
return = pure
(>>) = (*>)
Now x >>= f = f x
Lift x >>= f = Lift $ do x <- x; pure $ x >>= f
Later x >>= f = Later $ \c -> x $ \x -> do
x <- runWait $ f x
case x of
Now x -> c x
_ -> fromLater x c
instance (MonadIO m, Applicative m) => MonadIO (Wait m) where
liftIO = Lift . liftIO . fmap Now
instance MonadFail m => MonadFail (Wait m) where
fail = Lift . Control.Monad.Fail.fail
firstJustWaitUnordered :: MonadIO m => (a -> Wait m (Maybe b)) -> [a] -> Wait m (Maybe b)
firstJustWaitUnordered f = go [] . map f
where
-- keep a list of those things we might visit later, and ask for each we see in turn
go :: MonadIO m => [(Maybe a -> m ()) -> m ()] -> [Wait m (Maybe a)] -> Wait m (Maybe a)
go later (x:xs) = case x of
Now (Just a) -> Now $ Just a
Now Nothing -> go later xs
Later l -> go (l:later) xs
Lift x -> Lift $ do
x <- x
pure $ go later (x:xs)
go [] [] = Now Nothing
go [l] [] = Later l
go ls [] = Later $ \callback -> do
ref <- liftIO $ newIORef $ length ls
forM_ ls $ \l -> l $ \r -> do
old <- liftIO $ readIORef ref
when (old > 0) $ case r of
Just a -> do
liftIO $ writeIORef' ref 0
callback $ Just a
Nothing -> do
liftIO $ writeIORef' ref $ old-1
when (old == 1) $ callback Nothing
firstLeftWaitUnordered :: MonadIO m => (a -> Wait m (Either e b)) -> [a] -> Wait m (Either e [b])
firstLeftWaitUnordered f xs = do
let n = length xs
mut <- liftIO $ newArray n undefined
res <- go mut [] $ zipFrom 0 $ map f xs
case res of
Just e -> pure $ Left e
Nothing -> liftIO $ Right <$> mapM (readArray mut) [0..n-1]
where
-- keep a list of those things we might visit later, and ask for each we see in turn
go :: MonadIO m => MutableArray RealWorld b -> [(Int, (Either e b -> m ()) -> m ())] -> [(Int, Wait m (Either e b))] -> Wait m (Maybe e)
go mut later ((i,x):xs) = case x of
Now (Left e) -> Now $ Just e
Now (Right b) -> do
liftIO $ writeArray mut i b
go mut later xs
Later l -> go mut ((i,l):later) xs
Lift x -> Lift $ do
x <- x
pure $ go mut later ((i,x):xs)
go _ [] [] = Now Nothing
go mut ls [] = Later $ \callback -> do
ref <- liftIO $ newIORef $ length ls
forM_ ls $ \(i,l) -> l $ \r -> do
old <- liftIO $ readIORef ref
when (old > 0) $ case r of
Left a -> do
liftIO $ writeIORef' ref 0
callback $ Just a
Right v -> do
liftIO $ writeArray mut i v
liftIO $ writeIORef' ref $ old-1
when (old == 1) $ callback Nothing
| null | https://raw.githubusercontent.com/Mathnerd314/stroscot/08b4cb638369a71e1e52fce5710a11e235e9e120/src/shake/Wait.hs | haskell | | A bit like 'Fence', but not thread safe and optimised for avoiding taking the fence
Note: We pull the Lift from the right BEFORE the Later, to enable parallelism
keep a list of those things we might visit later, and ask for each we see in turn
keep a list of those things we might visit later, and ask for each we see in turn | # LANGUAGE DeriveFunctor #
module General.Wait(
Wait(Now,Later), runWait, quickly, fromLater,
firstJustWaitUnordered, firstLeftWaitUnordered
) where
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.IORef.Extra
import Data.List.Extra
import Data.Primitive.Array
import GHC.Exts(RealWorld)
import Control.Monad.Fail
import Prelude
runWait :: Monad m => Wait m a -> m (Wait m a)
runWait (Lift x) = runWait =<< x
runWait x = pure x
fromLater :: Monad m => Wait m a -> (a -> m ()) -> m ()
fromLater (Lift x) f = do x <- x; fromLater x f
fromLater (Now x) f = f x
fromLater (Later x) f = x f
quickly :: Functor m => m a -> Wait m a
quickly = Lift . fmap Now
data Wait m a = Now a
| Lift (m (Wait m a))
| Later ((a -> m ()) -> m ())
deriving Functor
instance (Monad m, Applicative m) => Applicative (Wait m) where
pure = Now
Now x <*> y = x <$> y
Lift x <*> y = Lift $ (<*> y) <$> x
Later x <*> Now y = Later $ \c -> x $ \x -> c $ x y
Later x <*> Lift y = Lift $ do y <- y; pure $ Later x <*> y
Later x <*> Later y = Later $ \c -> x $ \x -> y $ \y -> c $ x y
instance (Monad m, Applicative m) => Monad (Wait m) where
return = pure
(>>) = (*>)
Now x >>= f = f x
Lift x >>= f = Lift $ do x <- x; pure $ x >>= f
Later x >>= f = Later $ \c -> x $ \x -> do
x <- runWait $ f x
case x of
Now x -> c x
_ -> fromLater x c
instance (MonadIO m, Applicative m) => MonadIO (Wait m) where
liftIO = Lift . liftIO . fmap Now
instance MonadFail m => MonadFail (Wait m) where
fail = Lift . Control.Monad.Fail.fail
firstJustWaitUnordered :: MonadIO m => (a -> Wait m (Maybe b)) -> [a] -> Wait m (Maybe b)
firstJustWaitUnordered f = go [] . map f
where
go :: MonadIO m => [(Maybe a -> m ()) -> m ()] -> [Wait m (Maybe a)] -> Wait m (Maybe a)
go later (x:xs) = case x of
Now (Just a) -> Now $ Just a
Now Nothing -> go later xs
Later l -> go (l:later) xs
Lift x -> Lift $ do
x <- x
pure $ go later (x:xs)
go [] [] = Now Nothing
go [l] [] = Later l
go ls [] = Later $ \callback -> do
ref <- liftIO $ newIORef $ length ls
forM_ ls $ \l -> l $ \r -> do
old <- liftIO $ readIORef ref
when (old > 0) $ case r of
Just a -> do
liftIO $ writeIORef' ref 0
callback $ Just a
Nothing -> do
liftIO $ writeIORef' ref $ old-1
when (old == 1) $ callback Nothing
firstLeftWaitUnordered :: MonadIO m => (a -> Wait m (Either e b)) -> [a] -> Wait m (Either e [b])
firstLeftWaitUnordered f xs = do
let n = length xs
mut <- liftIO $ newArray n undefined
res <- go mut [] $ zipFrom 0 $ map f xs
case res of
Just e -> pure $ Left e
Nothing -> liftIO $ Right <$> mapM (readArray mut) [0..n-1]
where
go :: MonadIO m => MutableArray RealWorld b -> [(Int, (Either e b -> m ()) -> m ())] -> [(Int, Wait m (Either e b))] -> Wait m (Maybe e)
go mut later ((i,x):xs) = case x of
Now (Left e) -> Now $ Just e
Now (Right b) -> do
liftIO $ writeArray mut i b
go mut later xs
Later l -> go mut ((i,l):later) xs
Lift x -> Lift $ do
x <- x
pure $ go mut later ((i,x):xs)
go _ [] [] = Now Nothing
go mut ls [] = Later $ \callback -> do
ref <- liftIO $ newIORef $ length ls
forM_ ls $ \(i,l) -> l $ \r -> do
old <- liftIO $ readIORef ref
when (old > 0) $ case r of
Left a -> do
liftIO $ writeIORef' ref 0
callback $ Just a
Right v -> do
liftIO $ writeArray mut i v
liftIO $ writeIORef' ref $ old-1
when (old == 1) $ callback Nothing
|
b7078f047bd1430582691ea07a0bc59a8ea42e93853d52f966ddb4c4af96a49e | symbiont-io/detsys-testkit | Description.hs | module Description where
import Data.Char (isSpace)
import Text.Printf
data Item f = Item
{ comment :: String,
formula :: f
}
deriving (Show)
instance Functor Item where
fmap f i = i {formula = f (formula i)}
data Description f = Description
{ items :: [Item f],
name :: String
}
deriving (Show)
type Result = Description Int
parseItems :: [String] -> Maybe [Item String]
parseItems [] = Just []
parseItems [_] = Nothing
parseItems (c : f : rest) = (Item c f :) <$> parseItems rest
parse :: String -> Maybe (Description String)
parse input = case filter ((/= ';') . head) . filter (not . null) . map (dropWhile isSpace) $ lines input of
[] -> Nothing
n : remainder -> do
is <- parseItems remainder
pure $ Description is n
outOf :: Int -> Int -> String
outOf r t = printf "%.2f%%" (100 * fromIntegral r / fromIntegral t :: Double)
pprint :: Int -> Result -> String
pprint nrRuns desc =
unlines
[ name desc,
"",
"there are a total of " ++ show nrRuns ++ " runs in this test:"
]
++ unlines
[ " " ++ comment i ++ ": " ++ show f ++ " (" ++ f `outOf` nrRuns ++ ")"
| i <- items desc,
let f = formula i
]
| null | https://raw.githubusercontent.com/symbiont-io/detsys-testkit/29a3a0140730420e4c5cc8db23df6fdb03f9302c/src/stats/src/Description.hs | haskell | module Description where
import Data.Char (isSpace)
import Text.Printf
data Item f = Item
{ comment :: String,
formula :: f
}
deriving (Show)
instance Functor Item where
fmap f i = i {formula = f (formula i)}
data Description f = Description
{ items :: [Item f],
name :: String
}
deriving (Show)
type Result = Description Int
parseItems :: [String] -> Maybe [Item String]
parseItems [] = Just []
parseItems [_] = Nothing
parseItems (c : f : rest) = (Item c f :) <$> parseItems rest
parse :: String -> Maybe (Description String)
parse input = case filter ((/= ';') . head) . filter (not . null) . map (dropWhile isSpace) $ lines input of
[] -> Nothing
n : remainder -> do
is <- parseItems remainder
pure $ Description is n
outOf :: Int -> Int -> String
outOf r t = printf "%.2f%%" (100 * fromIntegral r / fromIntegral t :: Double)
pprint :: Int -> Result -> String
pprint nrRuns desc =
unlines
[ name desc,
"",
"there are a total of " ++ show nrRuns ++ " runs in this test:"
]
++ unlines
[ " " ++ comment i ++ ": " ++ show f ++ " (" ++ f `outOf` nrRuns ++ ")"
| i <- items desc,
let f = formula i
]
| |
cdd77393bb4725ee877d3fef74e032c33baa11552e3cd3cdde92757f3c3e809f | bet365/soap | europepmc_client.erl | %%
%% %CopyrightBegin%
%%
Copyright Hillside Technology Ltd. 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% This filw as generated using soap:wsdl2erlang, but included in the repository
%% because it is used by the tests.
%% generated by soap from: e:/e_soap/soap/examples and testcases/europepmc/europepmc.wsdl
%% for service "WSCitationImplService" and port "WSCitationImplPort"
%% using options: [{service,"WSCitationImplService"},{port,"WSCitationImplPort"},{generate,both},{namespaces,[{"-i.org/profiles/basic/1.1/xsd","P0"},{"/",undefined}]},{http_server,soap_server_cowboy},{server_name,"europepmc_server"},{http_client,soap_client_ibrowse},{client_name,"europepmc_client"},{attachments,true}]
-module(europepmc_client).
-include("europepmc.hrl").
-export([interface/0]).
%% The functions that are described by the WSDL
-export([searchPublications/4]).
-export([profilePublications/4]).
-export([getCitations/4]).
-export([getReferences/4]).
-export([getTextMinedTerms/4]).
-export([getDatabaseLinks/4]).
-export([getSupplementaryFiles/4]).
-export([getFulltextXML/4]).
-export([getBookXML/4]).
-export([listSearchFields/4]).
-export([getLabsLinks/4]).
-spec searchPublications(Soap_body::searchPublications(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(searchPublicationsResponse()).
searchPublications(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec profilePublications(Soap_body::profilePublications(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(profilePublicationsResponse()).
profilePublications(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getCitations(Soap_body::getCitations(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getCitationsResponse()).
getCitations(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getReferences(Soap_body::getReferences(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getReferencesResponse()).
getReferences(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getTextMinedTerms(Soap_body::getTextMinedTerms(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getTextMinedTermsResponse()).
getTextMinedTerms(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getDatabaseLinks(Soap_body::getDatabaseLinks(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getDatabaseLinksResponse()).
getDatabaseLinks(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getSupplementaryFiles(Soap_body::getSupplementaryFiles(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getSupplementaryFilesResponse()).
getSupplementaryFiles(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getFulltextXML(Soap_body::getFulltextXML(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getFulltextXMLResponse()).
getFulltextXML(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getBookXML(Soap_body::getBookXML(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getBookXMLResponse()).
getBookXML(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec listSearchFields(Soap_body::listSearchFields(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(listSearchFieldsResponse()).
listSearchFields(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getLabsLinks(Soap_body::getLabsLinks(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getLabsLinksResponse()).
getLabsLinks(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
%%% --------------------------------------------------------------------
Internal functions
%%% --------------------------------------------------------------------
interface() ->
?INTERFACE.
| null | https://raw.githubusercontent.com/bet365/soap/856b5c418d8d40a6b5bcbbe3fd390c6a0b8d4f18/test/europepmc_client.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
This filw as generated using soap:wsdl2erlang, but included in the repository
because it is used by the tests.
generated by soap from: e:/e_soap/soap/examples and testcases/europepmc/europepmc.wsdl
for service "WSCitationImplService" and port "WSCitationImplPort"
using options: [{service,"WSCitationImplService"},{port,"WSCitationImplPort"},{generate,both},{namespaces,[{"-i.org/profiles/basic/1.1/xsd","P0"},{"/",undefined}]},{http_server,soap_server_cowboy},{server_name,"europepmc_server"},{http_client,soap_client_ibrowse},{client_name,"europepmc_client"},{attachments,true}]
The functions that are described by the WSDL
--------------------------------------------------------------------
-------------------------------------------------------------------- | Copyright Hillside Technology Ltd. 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(europepmc_client).
-include("europepmc.hrl").
-export([interface/0]).
-export([searchPublications/4]).
-export([profilePublications/4]).
-export([getCitations/4]).
-export([getReferences/4]).
-export([getTextMinedTerms/4]).
-export([getDatabaseLinks/4]).
-export([getSupplementaryFiles/4]).
-export([getFulltextXML/4]).
-export([getBookXML/4]).
-export([listSearchFields/4]).
-export([getLabsLinks/4]).
-spec searchPublications(Soap_body::searchPublications(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(searchPublicationsResponse()).
searchPublications(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec profilePublications(Soap_body::profilePublications(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(profilePublicationsResponse()).
profilePublications(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getCitations(Soap_body::getCitations(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getCitationsResponse()).
getCitations(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getReferences(Soap_body::getReferences(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getReferencesResponse()).
getReferences(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getTextMinedTerms(Soap_body::getTextMinedTerms(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getTextMinedTermsResponse()).
getTextMinedTerms(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getDatabaseLinks(Soap_body::getDatabaseLinks(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getDatabaseLinksResponse()).
getDatabaseLinks(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getSupplementaryFiles(Soap_body::getSupplementaryFiles(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getSupplementaryFilesResponse()).
getSupplementaryFiles(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getFulltextXML(Soap_body::getFulltextXML(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getFulltextXMLResponse()).
getFulltextXML(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getBookXML(Soap_body::getBookXML(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getBookXMLResponse()).
getBookXML(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec listSearchFields(Soap_body::listSearchFields(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(listSearchFieldsResponse()).
listSearchFields(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
-spec getLabsLinks(Soap_body::getLabsLinks(),
Soap_headers::[soap:soap_header()],
Options::[any()], Attachments::[soap:soap_attachment()]) -> soap:soap_response(getLabsLinksResponse()).
getLabsLinks(Soap_body, Soap_headers, Options, Attachments) ->
soap_client_util:call(Soap_body, Soap_headers, Options, "\"\"", interface(), Attachments).
Internal functions
interface() ->
?INTERFACE.
|
4eb274c740c77d0be3473f6e4429c7d2778e05f3302eec130064812a648a00f2 | Clozure/ccl | ffi-solarisx8632.lisp | ;;;
;;; Copyright 2009 Clozure Associates
;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;; you may not use this file except in compliance with the License.
;;; You may obtain a copy of the License at
;;;
;;; -2.0
;;;
;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;; See the License for the specific language governing permissions and
;;; limitations under the License.
(in-package "CCL")
(defun x86-solaris32::record-type-returns-structure-as-first-arg (rtype)
(x8632::record-type-returns-structure-as-first-arg rtype))
(defun x86-solaris32::expand-ff-call (callform args &key (arg-coerce #'null-coerce-foreign-arg) (result-coerce #'null-coerce-foreign-result))
(x8632::expand-ff-call callform args :arg-coerce arg-coerce :result-coerce result-coerce))
(defun x86-solaris32::generate-callback-bindings (stack-ptr fp-args-ptr argvars argspecs result-spec struct-result-name)
(x8632::generate-callback-bindings stack-ptr fp-args-ptr argvars argspecs result-spec struct-result-name))
(defun x86-solaris32::generate-callback-return-value (stack-ptr fp-args-ptr result return-type struct-return-arg)
(x8632::generate-callback-return-value stack-ptr fp-args-ptr result return-type struct-return-arg))
| null | https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/lib/ffi-solarisx8632.lisp | lisp |
Copyright 2009 Clozure Associates
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | distributed under the License is distributed on an " AS IS " BASIS ,
(in-package "CCL")
(defun x86-solaris32::record-type-returns-structure-as-first-arg (rtype)
(x8632::record-type-returns-structure-as-first-arg rtype))
(defun x86-solaris32::expand-ff-call (callform args &key (arg-coerce #'null-coerce-foreign-arg) (result-coerce #'null-coerce-foreign-result))
(x8632::expand-ff-call callform args :arg-coerce arg-coerce :result-coerce result-coerce))
(defun x86-solaris32::generate-callback-bindings (stack-ptr fp-args-ptr argvars argspecs result-spec struct-result-name)
(x8632::generate-callback-bindings stack-ptr fp-args-ptr argvars argspecs result-spec struct-result-name))
(defun x86-solaris32::generate-callback-return-value (stack-ptr fp-args-ptr result return-type struct-return-arg)
(x8632::generate-callback-return-value stack-ptr fp-args-ptr result return-type struct-return-arg))
|
0c7d462964a6e94f6c7ff9dabc507e9ad39b30d404bf6a85d05ee1a78bd0dc45 | ThoughtWorksInc/stonecutter | handler.clj | (ns stonecutter.test.handler
(:require [midje.sweet :refer :all]
[ring.mock.request :as mock]
[stonecutter.handler :as h]))
(fact "can be split requests between html site and api"
(let [site-handler (fn [r] :site)
api-handler (fn [r] :api)
handler (h/splitter site-handler api-handler)]
(-> (mock/request :get "/blah") handler) => :site
(-> (mock/request :get "/api/blah") handler) => :api))
| null | https://raw.githubusercontent.com/ThoughtWorksInc/stonecutter/37ed22dd276ac652176c4d880e0f1b0c1e27abfe/test/stonecutter/test/handler.clj | clojure | (ns stonecutter.test.handler
(:require [midje.sweet :refer :all]
[ring.mock.request :as mock]
[stonecutter.handler :as h]))
(fact "can be split requests between html site and api"
(let [site-handler (fn [r] :site)
api-handler (fn [r] :api)
handler (h/splitter site-handler api-handler)]
(-> (mock/request :get "/blah") handler) => :site
(-> (mock/request :get "/api/blah") handler) => :api))
| |
9aa451238175a0327516cba398e433ea431a745e0844c5ce67a1fe923e01ea87 | polymeris/cljs-aws | route_53.cljs | (ns cljs-aws.route-53
(:require [cljs-aws.base.requests])
(:require-macros [cljs-aws.base.service :refer [defservice]]))
(defservice "Route53" "route53-2013-04-01.min.json")
| null | https://raw.githubusercontent.com/polymeris/cljs-aws/3326e7c4db4dfc36dcb80770610c14c8a7fd0d66/src/cljs_aws/route_53.cljs | clojure | (ns cljs-aws.route-53
(:require [cljs-aws.base.requests])
(:require-macros [cljs-aws.base.service :refer [defservice]]))
(defservice "Route53" "route53-2013-04-01.min.json")
| |
cbb2ca705c9447c03e84aebdf1fff25baf154942d942c041d36b29c7a5549a3e | ftovagliari/ocamleditor | error.ml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
open Printf
let empty = {Oe.er_warnings = []; er_errors = []}
let create messages =
let errors, warnings = List.partition (fun x -> x.Oe.er_level = Oe.Error) messages in
let errors = List.rev errors in
let warnings = List.rev warnings in
{Oe.er_warnings = warnings; er_errors = errors}
let parse_string buf =
let lexbuf = Lexing.from_string (String.trim buf) in
try
let messages = Err_parser.compiler_output Err_lexer.token lexbuf in
create messages
with Parsing.Parse_error ->
eprintf "Error: %S\n%!" buf;
empty
let parse chan =
let lexbuf = Lexing.from_channel chan in
let messages = Err_parser.compiler_output Err_lexer.token lexbuf in
create messages
| null | https://raw.githubusercontent.com/ftovagliari/ocamleditor/79669e14163420170e3e2ebb8da54ebe4e5a3dce/src/error.ml | ocaml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
open Printf
let empty = {Oe.er_warnings = []; er_errors = []}
let create messages =
let errors, warnings = List.partition (fun x -> x.Oe.er_level = Oe.Error) messages in
let errors = List.rev errors in
let warnings = List.rev warnings in
{Oe.er_warnings = warnings; er_errors = errors}
let parse_string buf =
let lexbuf = Lexing.from_string (String.trim buf) in
try
let messages = Err_parser.compiler_output Err_lexer.token lexbuf in
create messages
with Parsing.Parse_error ->
eprintf "Error: %S\n%!" buf;
empty
let parse chan =
let lexbuf = Lexing.from_channel chan in
let messages = Err_parser.compiler_output Err_lexer.token lexbuf in
create messages
| |
00c8f082abe441836c3ce731703927670a793eb9035484871e7ab56387ddbe72 | tov/dssl2 | lt.rkt | #lang dssl2
assert 3 < 4
assert not 3 < 3
assert not 4 < 3
| null | https://raw.githubusercontent.com/tov/dssl2/105d18069465781bd9b87466f8336d5ce9e9a0f3/test/dssl2/lt.rkt | racket | #lang dssl2
assert 3 < 4
assert not 3 < 3
assert not 4 < 3
| |
eb3d679ecc1b5c1523de548ed760eaadb4b74b3ae1b0769aa8ea167370c68b62 | babashka/nbb | example.cljs | (ns example
{:clj-kondo/config '{:lint-as {promesa.core/let clojure.core/let}}}
(:require
["fs" :as fs]
["ssh2" :refer [Client]]
["ssh2-promise$default" :as SSH2Promise]
[promesa.core :as p]))
(def ssh-config
#js {:host "yourhost"
:port 22
:username "username"
:privateKey (fs/readFileSync "/home/user/.ssh/id_rsa")})
;; callback based:
(defn ssh-exec [command]
(let [client (Client.)
conn (.connect client ssh-config)]
(-> conn
(.on "error"
(fn [err]
(js/console.log err)))
(.on "ready"
(fn [] (println "stream ready")
(.exec conn command
(fn [err stream]
(if err
(js/console.log err)
(-> stream
(.on "close"
(fn [code _signal]
(println (str "STRM:: close :: " code))
(.end conn)))
(.on "data" (fn [data] (println (str "STDOUT:: " data)))))))))))))
;; example call:
#_(ssh-exec "ls")
;; promise based:
(defn ssh-exec-promise [command]
(p/let [conn (new SSH2Promise ssh-config)
data (.exec conn command)]
(println "DATA:" data)
(.close conn)))
;; example call:
#_(ssh-exec-promise "ls")
| null | https://raw.githubusercontent.com/babashka/nbb/9c7ee2bfe0b4b2bf85d0204746b7e92668c826d9/examples/ssh2/example.cljs | clojure | callback based:
example call:
promise based:
example call: | (ns example
{:clj-kondo/config '{:lint-as {promesa.core/let clojure.core/let}}}
(:require
["fs" :as fs]
["ssh2" :refer [Client]]
["ssh2-promise$default" :as SSH2Promise]
[promesa.core :as p]))
(def ssh-config
#js {:host "yourhost"
:port 22
:username "username"
:privateKey (fs/readFileSync "/home/user/.ssh/id_rsa")})
(defn ssh-exec [command]
(let [client (Client.)
conn (.connect client ssh-config)]
(-> conn
(.on "error"
(fn [err]
(js/console.log err)))
(.on "ready"
(fn [] (println "stream ready")
(.exec conn command
(fn [err stream]
(if err
(js/console.log err)
(-> stream
(.on "close"
(fn [code _signal]
(println (str "STRM:: close :: " code))
(.end conn)))
(.on "data" (fn [data] (println (str "STDOUT:: " data)))))))))))))
#_(ssh-exec "ls")
(defn ssh-exec-promise [command]
(p/let [conn (new SSH2Promise ssh-config)
data (.exec conn command)]
(println "DATA:" data)
(.close conn)))
#_(ssh-exec-promise "ls")
|
16ddee16d81e80b2ffeaf9e9505e39ee2c5f9738c0f9d54ed1d5bb7d46f1f017 | namin/biohacker | dds.lisp | ;;; -*- Mode: Lisp; Syntax: Common-lisp; -*-
Modified : forbus on Thurs Apr 18 8:58:35 1996
Based on the LTMS version
;;;; Dependency-directed search facility
Last Edited 4/27/94 , by KDF
Copyright ( c ) 1993 , , Northwestern University ,
and , the Xerox Corporation .
;;; All rights reserved.
;;; See the file legal.txt for a paragraph stating scope of permission
;;; and disclaimer of warranty. The above copyright notice and that
;;; paragraph must be included in any separate copy of this file.
(in-package :COMMON-LISP-USER)
(defmacro assuming (asn &body body)
`(unwind-protect (let ((already? (in? ,asn)))
(unless already?
(assume! ,asn :try))
,@body
(unless already?
(retract! ,asn :try)))))
(defvar *debug-dds* nil)
(defmacro debug-dds (str &rest args)
`(if *debug-dds* (format t ,str ,@ args)))
(defun assert-choices! (choice-sets)
(loop for choices in choice-sets do
(assert! `(:OR ,@choices) :CHOICE))
choice-sets)
(defun prune (choice-sets)
;;(sort (mapcar #'(lambda (choices) (remove-if #'false? choices)) choice-sets) #'< :key #'length)
(sort choice-sets #'< :key #'length))
(defun DD-init ()
(eval (contradiction 'DD-Search-NoGood *jtre*)))
(defun DD-Search (choice-sets end &aux answer marker choices)
(when (null choice-sets)
(debug-dds "~% DDS: Found solution.")
(eval end)
(return-from DD-Search nil))
(setq marker (list 'DDS (car choice-sets)))
(setq choice-sets (prune choice-sets))
(setq choices (car choice-sets))
(dolist (choice choices)
(debug-dds "~% DDS: Considering ~A..." choice)
(cond ;;((false? choice) ;skip if known loser
( debug - dds " ~% DDS : ~A already known nogood . " choice ) )
((in? choice) ;continue if known
(debug-dds "~% DDS: ~A true by implication." choice)
(DD-Search (cdr choice-sets) end)
(return nil))
(t (debug-dds "~% DDS: Assuming ~A." choice)
(with-Contradiction-Handler (jtre-jtms *jtre*)
#'(lambda (jtms clauses &aux asns)
(debug-dds "~% DDS: Entering handler for ~A with ~A~A."
choice clauses
;;(mapcar #'(lambda (c) (violated-clause? c))
;; clauses)
clauses
)
(dolist (cl clauses)
(setq asns (assumptions-of-node cl))
(debug-dds "~% DDS: Assumptions are: ~A"
(mapcar #'view-node asns))
(dolist (asn asns)
(when (or (equal choice (view-node asn))
(and (listp choice) (eq (car choice) :NOT)
(equal (cadr choice) (view-node asn))))
(throw marker
(cons :LOSERS ;; Assign labels before any retraction
;; Failure to do so can result in incorrect nogoods.
(mapcar #'view-node
(delete asn asns))))))))
(unwind-protect
(setq answer (catch marker
(Assuming choice
(run-rules *jtre*)
(DD-Search (cdr choice-sets) end))))
(when (and (listp answer)
(eq (car answer) :LOSERS))
(debug-dds "~% DDS: ~A inconsistent with ~A."
choice (cdr answer))
(retract! choice :try)
( assert ! ' DD - Search - Nogood
` ( Combo , choice
;; ,@ (cdr answer)))
)))))))
;;;; A familiar example
(defun Test-DD-search (&optional (debugging? t))
(in-JTRE (create-jtre "DDS Test" :DEBUGGING debugging?))
(eval (contradiction 'Contra *jtre*))
(eval '(rule ((:IN A) (:IN C))
(rassert! Contra (Combo A C))))
(eval '(rule ((:IN B) (:IN E))
(rassert! Contra (Combo B E))))
(dd-init)
(DD-Search '((A B) (C D) (E F))
'(show-DD-test-solution)))
(defun show-DD-test-solution (&aux result)
(dolist (var '(F E D C B A))
(when (in? var *jtre*) (push var result)))
(format t "~% Consistent solution: (~A)." result))
| null | https://raw.githubusercontent.com/namin/biohacker/6b5da4c51c9caa6b5e1a68b046af171708d1af64/BPS/jtms/dds.lisp | lisp | -*- Mode: Lisp; Syntax: Common-lisp; -*-
Dependency-directed search facility
All rights reserved.
See the file legal.txt for a paragraph stating scope of permission
and disclaimer of warranty. The above copyright notice and that
paragraph must be included in any separate copy of this file.
(sort (mapcar #'(lambda (choices) (remove-if #'false? choices)) choice-sets) #'< :key #'length)
((false? choice) ;skip if known loser
continue if known
(mapcar #'(lambda (c) (violated-clause? c))
clauses)
Assign labels before any retraction
Failure to do so can result in incorrect nogoods.
,@ (cdr answer)))
A familiar example | Modified : forbus on Thurs Apr 18 8:58:35 1996
Based on the LTMS version
Last Edited 4/27/94 , by KDF
Copyright ( c ) 1993 , , Northwestern University ,
and , the Xerox Corporation .
(in-package :COMMON-LISP-USER)
(defmacro assuming (asn &body body)
`(unwind-protect (let ((already? (in? ,asn)))
(unless already?
(assume! ,asn :try))
,@body
(unless already?
(retract! ,asn :try)))))
(defvar *debug-dds* nil)
(defmacro debug-dds (str &rest args)
`(if *debug-dds* (format t ,str ,@ args)))
(defun assert-choices! (choice-sets)
(loop for choices in choice-sets do
(assert! `(:OR ,@choices) :CHOICE))
choice-sets)
(defun prune (choice-sets)
(sort choice-sets #'< :key #'length))
(defun DD-init ()
(eval (contradiction 'DD-Search-NoGood *jtre*)))
(defun DD-Search (choice-sets end &aux answer marker choices)
(when (null choice-sets)
(debug-dds "~% DDS: Found solution.")
(eval end)
(return-from DD-Search nil))
(setq marker (list 'DDS (car choice-sets)))
(setq choice-sets (prune choice-sets))
(setq choices (car choice-sets))
(dolist (choice choices)
(debug-dds "~% DDS: Considering ~A..." choice)
( debug - dds " ~% DDS : ~A already known nogood . " choice ) )
(debug-dds "~% DDS: ~A true by implication." choice)
(DD-Search (cdr choice-sets) end)
(return nil))
(t (debug-dds "~% DDS: Assuming ~A." choice)
(with-Contradiction-Handler (jtre-jtms *jtre*)
#'(lambda (jtms clauses &aux asns)
(debug-dds "~% DDS: Entering handler for ~A with ~A~A."
choice clauses
clauses
)
(dolist (cl clauses)
(setq asns (assumptions-of-node cl))
(debug-dds "~% DDS: Assumptions are: ~A"
(mapcar #'view-node asns))
(dolist (asn asns)
(when (or (equal choice (view-node asn))
(and (listp choice) (eq (car choice) :NOT)
(equal (cadr choice) (view-node asn))))
(throw marker
(mapcar #'view-node
(delete asn asns))))))))
(unwind-protect
(setq answer (catch marker
(Assuming choice
(run-rules *jtre*)
(DD-Search (cdr choice-sets) end))))
(when (and (listp answer)
(eq (car answer) :LOSERS))
(debug-dds "~% DDS: ~A inconsistent with ~A."
choice (cdr answer))
(retract! choice :try)
( assert ! ' DD - Search - Nogood
` ( Combo , choice
)))))))
(defun Test-DD-search (&optional (debugging? t))
(in-JTRE (create-jtre "DDS Test" :DEBUGGING debugging?))
(eval (contradiction 'Contra *jtre*))
(eval '(rule ((:IN A) (:IN C))
(rassert! Contra (Combo A C))))
(eval '(rule ((:IN B) (:IN E))
(rassert! Contra (Combo B E))))
(dd-init)
(DD-Search '((A B) (C D) (E F))
'(show-DD-test-solution)))
(defun show-DD-test-solution (&aux result)
(dolist (var '(F E D C B A))
(when (in? var *jtre*) (push var result)))
(format t "~% Consistent solution: (~A)." result))
|
7ad66a9be2bf09d58089e5f522772a62b6c6b32883aa6c09557a669b258309ac | imandra-ai/ocaml-opentelemetry | logs_service_types.ml | [@@@ocaml.warning "-27-30-39"]
type export_logs_service_request = {
resource_logs : Logs_types.resource_logs list;
}
type export_logs_partial_success = {
rejected_log_records : int64;
error_message : string;
}
type export_logs_service_response = {
partial_success : export_logs_partial_success option;
}
let rec default_export_logs_service_request
?resource_logs:((resource_logs:Logs_types.resource_logs list) = [])
() : export_logs_service_request = {
resource_logs;
}
let rec default_export_logs_partial_success
?rejected_log_records:((rejected_log_records:int64) = 0L)
?error_message:((error_message:string) = "")
() : export_logs_partial_success = {
rejected_log_records;
error_message;
}
let rec default_export_logs_service_response
?partial_success:((partial_success:export_logs_partial_success option) = None)
() : export_logs_service_response = {
partial_success;
}
| null | https://raw.githubusercontent.com/imandra-ai/ocaml-opentelemetry/3dc7d63c7d2c345ba13e50b67b56aff878b6d0bb/src/logs_service_types.ml | ocaml | [@@@ocaml.warning "-27-30-39"]
type export_logs_service_request = {
resource_logs : Logs_types.resource_logs list;
}
type export_logs_partial_success = {
rejected_log_records : int64;
error_message : string;
}
type export_logs_service_response = {
partial_success : export_logs_partial_success option;
}
let rec default_export_logs_service_request
?resource_logs:((resource_logs:Logs_types.resource_logs list) = [])
() : export_logs_service_request = {
resource_logs;
}
let rec default_export_logs_partial_success
?rejected_log_records:((rejected_log_records:int64) = 0L)
?error_message:((error_message:string) = "")
() : export_logs_partial_success = {
rejected_log_records;
error_message;
}
let rec default_export_logs_service_response
?partial_success:((partial_success:export_logs_partial_success option) = None)
() : export_logs_service_response = {
partial_success;
}
| |
ae80981cefa48e456a11db71a8f4f72ac601b762ad8558fca5c54c06a0e1fd21 | RunOrg/RunOrg | serverAdmin.mli | (* © 2013 RunOrg *)
(** Attempt to log in with persona. Returns a token and the e-mail used for login,
or [None] if authentication failed (assertion is incorrect, or user is not a
server administrator) *)
val auth_persona : string -> (# O.ctx, ([`ServerAdmin] Token.I.id * string) option) Run.t
* Attempt to log in using test mode . Returns a token and the e - mail of one of the
server administrators , or [ None ] if test mode is disabled .
server administrators, or [None] if test mode is disabled. *)
val auth_test : unit -> (# O.ctx, ([`ServerAdmin] Token.I.id * string) option) Run.t
(** The list of all adminstrators. [fromConfig] determines whether an administrator
is present in the list because it is in the configuration file. *)
val all : [`ServerAdmin] Token.I.id -> (# O.ctx, < email : string ; fromConfig : bool > list) Run.t
| null | https://raw.githubusercontent.com/RunOrg/RunOrg/b53ee2357f4bcb919ac48577426d632dffc25062/server/serverAdmin.mli | ocaml | © 2013 RunOrg
* Attempt to log in with persona. Returns a token and the e-mail used for login,
or [None] if authentication failed (assertion is incorrect, or user is not a
server administrator)
* The list of all adminstrators. [fromConfig] determines whether an administrator
is present in the list because it is in the configuration file. |
val auth_persona : string -> (# O.ctx, ([`ServerAdmin] Token.I.id * string) option) Run.t
* Attempt to log in using test mode . Returns a token and the e - mail of one of the
server administrators , or [ None ] if test mode is disabled .
server administrators, or [None] if test mode is disabled. *)
val auth_test : unit -> (# O.ctx, ([`ServerAdmin] Token.I.id * string) option) Run.t
val all : [`ServerAdmin] Token.I.id -> (# O.ctx, < email : string ; fromConfig : bool > list) Run.t
|
8370f42f59400646b8e34c56ebc6ff93f5e86acce33609c716986e1836b97b8c | cbaggers/cepl | ffi.lisp | (in-package :cepl)
#+darwin
(eval-when (:compile-toplevel :load-toplevel :execute)
(labels ((to-path (ns)
(uiop:ensure-directory-pathname
(uiop:parse-unix-namestring ns)))
(brew-prefix ()
"Returns brew's prefix path or nil"
(multiple-value-bind (res _ err)
(uiop:run-program "brew --prefix" :output :string
:ignore-error-status t)
(declare (ignore _))
(when (= err 0)
(to-path (string-trim '(#\newline) res))))))
(let ((ports-paths (mapcar #'to-path '("/opt/local/lib/" "/opt/local/"))))
(loop :for path :in (cons (brew-prefix) ports-paths) :do
(when (and path (uiop:directory-exists-p path))
(pushnew path cffi:*foreign-library-directories* :test #'equal))))))
| null | https://raw.githubusercontent.com/cbaggers/cepl/d1a10b6c8f4cedc07493bf06aef3a56c7b6f8d5b/core/ffi.lisp | lisp | (in-package :cepl)
#+darwin
(eval-when (:compile-toplevel :load-toplevel :execute)
(labels ((to-path (ns)
(uiop:ensure-directory-pathname
(uiop:parse-unix-namestring ns)))
(brew-prefix ()
"Returns brew's prefix path or nil"
(multiple-value-bind (res _ err)
(uiop:run-program "brew --prefix" :output :string
:ignore-error-status t)
(declare (ignore _))
(when (= err 0)
(to-path (string-trim '(#\newline) res))))))
(let ((ports-paths (mapcar #'to-path '("/opt/local/lib/" "/opt/local/"))))
(loop :for path :in (cons (brew-prefix) ports-paths) :do
(when (and path (uiop:directory-exists-p path))
(pushnew path cffi:*foreign-library-directories* :test #'equal))))))
| |
c4f458e87702cdb43fb6f498f0b9412f8663a3d8fba0bbebf3ef8d24bfc5ab97 | padsproj/oforest | universal.ml |
Use Makefile in examples directory
:
./desugar.sh universal / universal.ml
Compile :
make universal
Use Makefile in examples directory
Desugar:
./desugar.sh universal/universal.ml
Compile:
make universal
*)
module CostMon = Forest.CostNameMon
[%%skin {|
uniSkin = dir([<>])
|}]
[%%forest {|
universal = directory { asc is [ f :: file | f <- matches RE ".*", $get_kind f_att = AsciiK$];
bin is [ b :: file | b <- matches RE ".*", $get_kind b_att = BinaryK$];
sym is [ l :: link | l <- matches RE ".*", $get_kind l_att = SymK$];
dir is [ d :: universal | d <- matches RE ".*", $get_kind d_att = DirectoryK$]
}
universal_inc = universal @ uniSkin
|}]
let get_dir path =
if Filename.is_relative path
then Filename.concat (Sys.getcwd ()) path
else path
let rec trawl_univI cur nMax =
let rec trawl_internal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let dirName = match r_md.info with
| None -> "Error: No directory?"
| Some info -> info.full_path
in
let _ = Printf.printf "Dir: %s\n" dirName in
let _ = Forest.print_md_errors r_md in
let _ = Printf.printf "File contents:\n" in
let _ = List.iter (fun s -> Printf.printf "%s\n" s) r.asc in
List.fold_left (fun acc d ->
trawl_internal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
trawl_internal cur 0
The next two functions are there to show how storing works .
* They will go through up to = depth levels and add a line / remove a line
* respectively from each ascii file they find . The second should only be used after the first .
* They will go through up to nMax = depth levels and add a line/remove a line
* respectively from each ascii file they find. The second should only be used after the first.
*)
let rec add_lineI cur nMax =
let rec add_lineInternal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let r = {r with asc = (List.map (fun s ->Bytes.cat s "\nAdded Line!") r.asc)} in
manifest cur (r,r_md) >>= fun mani ->
let _ = if List.length mani.errors > 0 then Forest.print_mani_errors mani else store mani in
List.fold_left (fun acc d ->
add_lineInternal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
add_lineInternal cur 0
let rec remove_lineI cur nMax =
let rec remove_lineInternal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let r = {r with asc = (List.map (fun s ->
let pos = Str.search_forward (Str.regexp_string "\nAdded Line!") s 0 in
Bytes.sub s 0 pos
) r.asc)}
in
manifest cur (r,r_md) >>= fun mani ->
let _ = if List.length mani.errors > 0 then Forest.print_mani_errors mani else store mani in
List.fold_left (fun acc d ->
remove_lineInternal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
remove_lineInternal cur 0
let trawl_univ (r : universal_rep) nMax =
let rec trawl_internal (r : universal_rep) n =
if n >= nMax
then ()
else
let _ = List.iter (fun s -> Printf.printf "%s\n" s) r.asc in
List.iter (fun d -> trawl_internal d (n+1)) r.dir
in
trawl_internal r 0
let incremental directory nMax =
universal_inc_new directory >>= fun cur ->
trawl_univI cur nMax >>| fun () ->
Printf.printf "\n"
let unIncremental directory nMax =
universal_new directory >>= load >>| fun (r,md) ->
let _ = trawl_univ r nMax in
Printf.printf "\n"
let main () =
if Array.length Sys.argv < 2
then begin
Printf.printf "Usage: %s <directory> [<mode>] [<depth>]\n" Sys.argv.(0);
Printf.printf "Mode is inc or classic. inc is default\n";
Printf.printf "Depth is how deep into the folder we should traverse. 2 is default.\n";
exit 1;
end
else
let inc =
if Array.length Sys.argv >= 3 then Sys.argv.(2) <> "classic" else true
in
let depth =
if Array.length Sys.argv >= 4 then int_of_string (Sys.argv.(3)) else 2
in
let dir = get_dir Sys.argv.(1) in
run (
if inc
then incremental dir depth
else unIncremental dir depth)
let _ = main ()
| null | https://raw.githubusercontent.com/padsproj/oforest/e10abf1c35f6d49d4bdf13d51cab44487629b3a3/examples/universal/universal.ml | ocaml |
Use Makefile in examples directory
:
./desugar.sh universal / universal.ml
Compile :
make universal
Use Makefile in examples directory
Desugar:
./desugar.sh universal/universal.ml
Compile:
make universal
*)
module CostMon = Forest.CostNameMon
[%%skin {|
uniSkin = dir([<>])
|}]
[%%forest {|
universal = directory { asc is [ f :: file | f <- matches RE ".*", $get_kind f_att = AsciiK$];
bin is [ b :: file | b <- matches RE ".*", $get_kind b_att = BinaryK$];
sym is [ l :: link | l <- matches RE ".*", $get_kind l_att = SymK$];
dir is [ d :: universal | d <- matches RE ".*", $get_kind d_att = DirectoryK$]
}
universal_inc = universal @ uniSkin
|}]
let get_dir path =
if Filename.is_relative path
then Filename.concat (Sys.getcwd ()) path
else path
let rec trawl_univI cur nMax =
let rec trawl_internal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let dirName = match r_md.info with
| None -> "Error: No directory?"
| Some info -> info.full_path
in
let _ = Printf.printf "Dir: %s\n" dirName in
let _ = Forest.print_md_errors r_md in
let _ = Printf.printf "File contents:\n" in
let _ = List.iter (fun s -> Printf.printf "%s\n" s) r.asc in
List.fold_left (fun acc d ->
trawl_internal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
trawl_internal cur 0
The next two functions are there to show how storing works .
* They will go through up to = depth levels and add a line / remove a line
* respectively from each ascii file they find . The second should only be used after the first .
* They will go through up to nMax = depth levels and add a line/remove a line
* respectively from each ascii file they find. The second should only be used after the first.
*)
let rec add_lineI cur nMax =
let rec add_lineInternal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let r = {r with asc = (List.map (fun s ->Bytes.cat s "\nAdded Line!") r.asc)} in
manifest cur (r,r_md) >>= fun mani ->
let _ = if List.length mani.errors > 0 then Forest.print_mani_errors mani else store mani in
List.fold_left (fun acc d ->
add_lineInternal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
add_lineInternal cur 0
let rec remove_lineI cur nMax =
let rec remove_lineInternal cur n =
if n >= nMax
then return ()
else begin
load cur >>= fun ((r,r_md) : (universal_inc_rep * universal_inc_md)) ->
let r = {r with asc = (List.map (fun s ->
let pos = Str.search_forward (Str.regexp_string "\nAdded Line!") s 0 in
Bytes.sub s 0 pos
) r.asc)}
in
manifest cur (r,r_md) >>= fun mani ->
let _ = if List.length mani.errors > 0 then Forest.print_mani_errors mani else store mani in
List.fold_left (fun acc d ->
remove_lineInternal d (n+1) >>= fun _ ->
acc
) (return ()) r.dir
end
in
remove_lineInternal cur 0
let trawl_univ (r : universal_rep) nMax =
let rec trawl_internal (r : universal_rep) n =
if n >= nMax
then ()
else
let _ = List.iter (fun s -> Printf.printf "%s\n" s) r.asc in
List.iter (fun d -> trawl_internal d (n+1)) r.dir
in
trawl_internal r 0
let incremental directory nMax =
universal_inc_new directory >>= fun cur ->
trawl_univI cur nMax >>| fun () ->
Printf.printf "\n"
let unIncremental directory nMax =
universal_new directory >>= load >>| fun (r,md) ->
let _ = trawl_univ r nMax in
Printf.printf "\n"
let main () =
if Array.length Sys.argv < 2
then begin
Printf.printf "Usage: %s <directory> [<mode>] [<depth>]\n" Sys.argv.(0);
Printf.printf "Mode is inc or classic. inc is default\n";
Printf.printf "Depth is how deep into the folder we should traverse. 2 is default.\n";
exit 1;
end
else
let inc =
if Array.length Sys.argv >= 3 then Sys.argv.(2) <> "classic" else true
in
let depth =
if Array.length Sys.argv >= 4 then int_of_string (Sys.argv.(3)) else 2
in
let dir = get_dir Sys.argv.(1) in
run (
if inc
then incremental dir depth
else unIncremental dir depth)
let _ = main ()
| |
73513597ddf00ec24ddaf51a4afd15bf62c31d2b5821fbd54997ce76bb463c98 | jashmenn/smoker | urls.clj |
(ns smoker.test.urls
(:import [smoker.udf RobotsURL]
[smoker.udf NormalizeURL]
[org.apache.hadoop.io Text])
(:require [clojure.contrib.str-utils2 :as su]
[smoker.udf.RobotsURL :as robo]
[smoker.udf.NormalizeURL :as norm])
(:use [clojure.test]))
(def robots-tests
[["" ""]
["" ""]])
(deftest test-robots
(doseq [[question answer] robots-tests]
(is (= (Text. answer) (robo/evaluate (Text. question)))))
(is (nil? (robo/evaluate (Text. "asdf")))))
(def normal-tests
[["" "/"]
[":80" "/"]
["/#bla" "/"]])
(deftest test-normalize
(doseq [[question answer] normal-tests]
(is (= (Text. answer) (norm/evaluate (Text. question)))))
(is (nil? (norm/evaluate (Text. "asdf")))))
(comment
(test-robots)
(test-normalize)
)
| null | https://raw.githubusercontent.com/jashmenn/smoker/79048c99a69d8fb8d1a57796a6881bf2e732fe0e/test/smoker/test/urls.clj | clojure |
(ns smoker.test.urls
(:import [smoker.udf RobotsURL]
[smoker.udf NormalizeURL]
[org.apache.hadoop.io Text])
(:require [clojure.contrib.str-utils2 :as su]
[smoker.udf.RobotsURL :as robo]
[smoker.udf.NormalizeURL :as norm])
(:use [clojure.test]))
(def robots-tests
[["" ""]
["" ""]])
(deftest test-robots
(doseq [[question answer] robots-tests]
(is (= (Text. answer) (robo/evaluate (Text. question)))))
(is (nil? (robo/evaluate (Text. "asdf")))))
(def normal-tests
[["" "/"]
[":80" "/"]
["/#bla" "/"]])
(deftest test-normalize
(doseq [[question answer] normal-tests]
(is (= (Text. answer) (norm/evaluate (Text. question)))))
(is (nil? (norm/evaluate (Text. "asdf")))))
(comment
(test-robots)
(test-normalize)
)
| |
a37b43ae3e6ebf9967b0b880c97a8b315671077e9f5636b8ecf0675f69923189 | onyx-platform/onyx | db.cljc | (ns onyx.state.protocol.db)
(defprotocol State
(put-extent! [this window-id group extent v])
(put-state-entry! [this window-id group time entry])
(get-state-entries [this window-id group start end])
(get-state-entries-times [this window-id group])
(delete-state-entries! [this window-id group start end])
(get-extent [this window-id group extent])
(delete-extent! [this window-id group extent])
(put-trigger! [this trigger-id group v])
(get-group-id [this group-key])
(group-id [this group-key])
(group-key [this group-id])
(get-trigger [this trigger-id group])
(groups [this])
(group-extents [this window-id group]
[this window-id group end-exclusive])
(trigger-keys [this trigger-id])
(drop! [this])
(close! [this])
(export [this encoder])
(restore! [this decoder mapping])
(export-reader [this]))
(defmulti create-db
(fn [peer-config db-name serializers]
(or (:onyx.peer/state-store-impl peer-config) :memory)))
(defmulti open-db-reader
(fn [peer-config definition serializers]
(or (:onyx.peer/state-store-impl peer-config) :memory)))
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/src/onyx/state/protocol/db.cljc | clojure | (ns onyx.state.protocol.db)
(defprotocol State
(put-extent! [this window-id group extent v])
(put-state-entry! [this window-id group time entry])
(get-state-entries [this window-id group start end])
(get-state-entries-times [this window-id group])
(delete-state-entries! [this window-id group start end])
(get-extent [this window-id group extent])
(delete-extent! [this window-id group extent])
(put-trigger! [this trigger-id group v])
(get-group-id [this group-key])
(group-id [this group-key])
(group-key [this group-id])
(get-trigger [this trigger-id group])
(groups [this])
(group-extents [this window-id group]
[this window-id group end-exclusive])
(trigger-keys [this trigger-id])
(drop! [this])
(close! [this])
(export [this encoder])
(restore! [this decoder mapping])
(export-reader [this]))
(defmulti create-db
(fn [peer-config db-name serializers]
(or (:onyx.peer/state-store-impl peer-config) :memory)))
(defmulti open-db-reader
(fn [peer-config definition serializers]
(or (:onyx.peer/state-store-impl peer-config) :memory)))
| |
7cf8a50a83b21fbe54365d81ec203b565852dbd98f03395bc04d35c6c6588099 | kind2-mc/kind2 | hset.ml | (**************************************************************************)
(* *)
Copyright ( C )
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
open Hashcons
s Sets of integers implemented as , following and paper { \em Fast Mergeable Integer Maps }
( { \tt\small /\~{}cdo/papers.html\#ml98maps } ) .
trees provide faster operations than standard library 's
module [ Set ] , and especially very fast [ union ] , [ subset ] , [ inter ]
and [ diff ] operations .
Okasaki and Andrew Gill's paper {\em Fast Mergeable Integer Maps}
({\tt\small /\~{}cdo/papers.html\#ml98maps}).
Patricia trees provide faster operations than standard library's
module [Set], and especially very fast [union], [subset], [inter]
and [diff] operations. *)
s The idea behind trees is to build a { \em trie } on the
binary digits of the elements , and to compact the representation
by branching only one the relevant bits ( i.e. the ones for which
there is at least on element in each subtree ) . We implement here
} trees : bits are processed from
least - significant to most - significant . The trie is implemented by
the following type [ t ] . [ Empty ] stands for the empty trie , and
[ Leaf k ] for the singleton [ k ] . ( Note that [ k ] is the actual
element . ) [ Branch ( m , p , l , r ) ] represents a branching , where [ p ] is
the prefix ( from the root of the trie ) and [ m ] is the branching
bit ( a power of 2 ) . [ l ] and [ r ] contain the subsets for which the
branching bit is respectively 0 and 1 . Invariant : the trees [ l ]
and [ r ] are not empty .
binary digits of the elements, and to compact the representation
by branching only one the relevant bits (i.e. the ones for which
there is at least on element in each subtree). We implement here
{\em little-endian} Patricia trees: bits are processed from
least-significant to most-significant. The trie is implemented by
the following type [t]. [Empty] stands for the empty trie, and
[Leaf k] for the singleton [k]. (Note that [k] is the actual
element.) [Branch (m,p,l,r)] represents a branching, where [p] is
the prefix (from the root of the trie) and [m] is the branching
bit (a power of 2). [l] and [r] contain the subsets for which the
branching bit is respectively 0 and 1. Invariant: the trees [l]
and [r] are not empty. *)
(*i*)
(* Elements are hash-consed values: the [tag] field gives a unique
   integer per value, used below as the Patricia-tree key. *)
type ('a, 'b) elt = ('a, 'b) hash_consed
(*i*)
(* Little-endian Patricia tree over element tags.
   [Branch (p, m, l, r)]: [p] is the common prefix, [m] the branching
   bit (a power of two); [l] holds elements whose tag has bit [m]
   clear, [r] those with it set.
   Invariant: [l] and [r] are never [Empty]. *)
type ('a, 'b) t =
  | Empty
  | Leaf of ('a, 'b) hash_consed
  | Branch of int * int * ('a, 'b) t * ('a, 'b) t
s Example : the representation of the set $ \{1,4,5\}$ is
$ $ \mathtt{Branch~(0,~1,~Leaf~4,~Branch~(1,~4,~Leaf~1,~Leaf~5))}$$
The first branching bit is the bit 0 ( and the corresponding prefix
is [ 0b0 ] , not of use here ) , with $ \{4\}$ on the left and $ \{1,5\}$ on the
right . Then the right subtree branches on bit 2 ( and so has a branching
value of $ 2 ^ 2 = 4 $ ) , with prefix [ 0b01 = 1 ] .
$$\mathtt{Branch~(0,~1,~Leaf~4,~Branch~(1,~4,~Leaf~1,~Leaf~5))}$$
The first branching bit is the bit 0 (and the corresponding prefix
is [0b0], not of use here), with $\{4\}$ on the left and $\{1,5\}$ on the
right. Then the right subtree branches on bit 2 (and so has a branching
value of $2^2 = 4$), with prefix [0b01 = 1]. *)
(*s Empty set and singletons. *)
(* The empty set. *)
let empty = Empty

(* O(1) emptiness test. *)
let is_empty = function Empty -> true | _ -> false

(* [singleton k]: the one-element set containing [k]. *)
let singleton k = Leaf k
(*s Testing the occurrence of a value is similar to the search in a
binary search tree, where the branching bit is used to select the
appropriate subtree. *)
(* [zero_bit k m]: is bit [m] (a power of two) clear in [k]? *)
let zero_bit k m = (k land m) == 0

(* Membership test: descend the trie following the branching bits of
   [k.tag]; at a leaf, tags decide equality (hash-consing makes the
   tag comparison sufficient). *)
let rec mem k = function
  | Empty -> false
  | Leaf j -> k.tag == j.tag
  | Branch (_, m, l, r) -> mem k (if zero_bit k.tag m then l else r)
s The following operation [ join ] will be used in both insertion and
union . Given two non - empty trees [ t0 ] and [ t1 ] with longest common
prefixes [ p0 ] and [ p1 ] respectively , which are supposed to
disagree , it creates the union of [ t0 ] and [ t1 ] . For this , it
computes the first bit [ m ] where [ p0 ] and [ p1 ] disagree and create
a branching node on that bit . Depending on the value of that bit
in [ p0 ] , [ t0 ] will be the left subtree and [ t1 ] the right one , or
the converse . Computing the first branching bit of [ p0 ] and [ p1 ]
uses a nice property of twos - complement representation of integers .
union. Given two non-empty trees [t0] and [t1] with longest common
prefixes [p0] and [p1] respectively, which are supposed to
disagree, it creates the union of [t0] and [t1]. For this, it
computes the first bit [m] where [p0] and [p1] disagree and create
a branching node on that bit. Depending on the value of that bit
in [p0], [t0] will be the left subtree and [t1] the right one, or
the converse. Computing the first branching bit of [p0] and [p1]
uses a nice property of twos-complement representation of integers. *)
(* [lowest_bit x]: the least-significant set bit of [x], via the
   two's-complement identity [x land (-x)]. *)
let lowest_bit x = x land (-x)

(* First (lowest) bit on which the two prefixes disagree. *)
let branching_bit p0 p1 = lowest_bit (p0 lxor p1)

(* Keep only the bits of [p] strictly below branching bit [m]. *)
let mask p m = p land (m-1)

(* [join (p0,t0,p1,t1)]: union of two non-empty trees whose prefixes
   [p0] and [p1] are known to disagree.  A fresh branch is created on
   the first differing bit; the value of that bit in [p0] decides on
   which side each tree goes. *)
let join (p0,t0,p1,t1) =
  let m = branching_bit p0 p1 in
  if zero_bit p0 m then
    Branch (mask p0 m, m, t0, t1)
  else
    Branch (mask p0 m, m, t1, t0)
s Then the insertion of value [ k ] in set [ t ] is easily implemented
using [ join ] . Insertion in a singleton is just the identity or a
call to [ join ] , depending on the value of [ k ] . When inserting in
a branching tree , we first check if the value to insert [ k ]
matches the prefix [ p ] : if not , [ join ] will take care of creating
the above branching ; if so , we just insert [ k ] in the appropriate
subtree , depending of the branching bit .
using [join]. Insertion in a singleton is just the identity or a
call to [join], depending on the value of [k]. When inserting in
a branching tree, we first check if the value to insert [k]
matches the prefix [p]: if not, [join] will take care of creating
the above branching; if so, we just insert [k] in the appropriate
subtree, depending of the branching bit. *)
(* Does [k] agree with prefix [p] on all bits below branching bit [m]? *)
let match_prefix k p m = (mask k m) == p

(* Insertion of [k].  While [k.tag] matches the prefix we descend into
   the relevant subtree; as soon as it does not, [join] builds the new
   branching point.  Inserting an already-present element returns the
   (shared) original node. *)
let add k t =
  let rec ins = function
    | Empty -> Leaf k
    | Leaf j as t ->
      if j.tag == k.tag then t else join (k.tag, Leaf k, j.tag, t)
    | Branch (p,m,t0,t1) as t ->
      if match_prefix k.tag p m then
        if zero_bit k.tag m then
          Branch (p, m, ins t0, t1)
        else
          Branch (p, m, t0, ins t1)
      else
        join (k.tag, Leaf k, p, t)
  in
  ins t
(*s The code to remove an element is basically similar to the code of
insertion. But since we have to maintain the invariant that both
subtrees of a [Branch] node are non-empty, we use here the
``smart constructor'' [branch] instead of [Branch]. *)
(* Smart constructor preserving the invariant that both subtrees of a
   [Branch] are non-empty: an empty side collapses to the other. *)
let branch = function
  | (_,_,Empty,t) -> t
  | (_,_,t,Empty) -> t
  | (p,m,t0,t1) -> Branch (p,m,t0,t1)

(* Removal of [k]; mirrors [add] but rebuilds with [branch] so that an
   emptied subtree collapses.  Removing an absent element returns the
   tree unchanged (and shared). *)
let remove k t =
  let rec rmv = function
    | Empty -> Empty
    | Leaf j as t -> if k.tag == j.tag then Empty else t
    | Branch (p,m,t0,t1) as t ->
      if match_prefix k.tag p m then
        if zero_bit k.tag m then
          branch (p, m, rmv t0, t1)
        else
          branch (p, m, t0, rmv t1)
      else
        t
  in
  rmv t
s One nice property of trees is to support a fast union
operation ( and also fast subset , difference and intersection
operations ) . When merging two branching trees we examine the
following four cases : ( 1 ) the trees have exactly the same
prefix ; ( 2/3 ) one prefix contains the other one ; and ( 4 ) the
prefixes disagree . In cases ( 1 ) , ( 2 ) and ( 3 ) the recursion is
immediate ; in case ( 4 ) the function [ join ] creates the appropriate
branching .
operation (and also fast subset, difference and intersection
operations). When merging two branching trees we examine the
following four cases: (1) the trees have exactly the same
prefix; (2/3) one prefix contains the other one; and (4) the
prefixes disagree. In cases (1), (2) and (3) the recursion is
immediate; in case (4) the function [join] creates the appropriate
branching. *)
(* Union of two trees.  For two branches there are four cases: equal
   prefixes (recurse pairwise), one prefix containing the other (two
   symmetric cases, merge into the matching subtree), or disagreeing
   prefixes (let [join] build the new branching point). *)
let rec merge = function
  | Empty, t -> t
  | t, Empty -> t
  | Leaf k, t -> add k t
  | t, Leaf k -> add k t
  | (Branch (p,m,s0,s1) as s), (Branch (q,n,t0,t1) as t) ->
    if m == n && match_prefix q p m then
      (* The trees have the same prefix. Merge the subtrees. *)
      Branch (p, m, merge (s0,t0), merge (s1,t1))
    else if m < n && match_prefix q p m then
      (* [q] contains [p]. Merge [t] with a subtree of [s]. *)
      if zero_bit q m then
        Branch (p, m, merge (s0,t), s1)
      else
        Branch (p, m, s0, merge (s1,t))
    else if m > n && match_prefix p q n then
      (* [p] contains [q]. Merge [s] with a subtree of [t]. *)
      if zero_bit p n then
        Branch (q, n, merge (s,t0), t1)
      else
        Branch (q, n, t0, merge (s,t1))
    else
      (* The prefixes disagree. *)
      join (p, s, q, t)

(* Union, as a curried wrapper over [merge]. *)
let union s t = merge (s,t)
s When checking if [ s1 ] is a subset of [ s2 ] only two of the above
four cases are relevant : when the prefixes are the same and when the
prefix of [ s1 ] contains the one of [ s2 ] , and then the recursion is
obvious . In the other two cases , the result is [ false ] .
four cases are relevant: when the prefixes are the same and when the
prefix of [s1] contains the one of [s2], and then the recursion is
obvious. In the other two cases, the result is [false]. *)
(* [subset s1 s2]: is every element of [s1] also in [s2]?  Only two of
   the branch cases can succeed: equal prefixes, or [s1]'s prefix
   strictly inside [s2]'s ([m1 > m2]), in which case both halves of
   [s1] must fit in the single matching half of [s2]. *)
let rec subset s1 s2 = match (s1,s2) with
  | Empty, _ -> true
  | _, Empty -> false
  | Leaf k1, _ -> mem k1 s2
  | Branch _, Leaf _ -> false
  | Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
    if m1 == m2 && p1 == p2 then
      subset l1 l2 && subset r1 r2
    else if m1 > m2 && match_prefix p1 p2 m2 then
      if zero_bit p1 m2 then
        subset l1 l2 && subset r1 l2
      else
        subset l1 r2 && subset r1 r2
    else
      false
s To compute the intersection and the difference of two sets , we
still examine the same four cases as in [ merge ] . The recursion is
then obvious .
still examine the same four cases as in [merge]. The recursion is
then obvious. *)
(* Intersection; same four-case analysis as [merge].  When one prefix
   contains the other, only the matching subtree can contribute. *)
let rec inter s1 s2 = match (s1,s2) with
  | Empty, _ -> Empty
  | _, Empty -> Empty
  | Leaf k1, _ -> if mem k1 s2 then s1 else Empty
  | _, Leaf k2 -> if mem k2 s1 then s2 else Empty
  | Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
    if m1 == m2 && p1 == p2 then
      merge (inter l1 l2, inter r1 r2)
    else if m1 < m2 && match_prefix p2 p1 m1 then
      inter (if zero_bit p2 m1 then l1 else r1) s2
    else if m1 > m2 && match_prefix p1 p2 m2 then
      inter s1 (if zero_bit p1 m2 then l2 else r2)
    else
      Empty

(* Difference [s1 \ s2]; again the four [merge] cases.  When [s2]'s
   prefix contains [s1]'s, only the matching subtree of [s2] can remove
   anything from [s1]. *)
let rec diff s1 s2 = match (s1,s2) with
  | Empty, _ -> Empty
  | _, Empty -> s1
  | Leaf k1, _ -> if mem k1 s2 then Empty else s1
  | _, Leaf k2 -> remove k2 s1
  | Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
    if m1 == m2 && p1 == p2 then
      merge (diff l1 l2, diff r1 r2)
    else if m1 < m2 && match_prefix p2 p1 m1 then
      if zero_bit p2 m1 then
        merge (diff l1 s2, r1)
      else
        merge (l1, diff r1 s2)
    else if m1 > m2 && match_prefix p1 p2 m2 then
      if zero_bit p1 m2 then diff s1 l2 else diff s1 r2
    else
      s1
(*s All the following operations ([cardinal], [iter], [fold], [for_all],
[exists], [filter], [partition], [choose], [elements]) are
implemented as for any other kind of binary trees. *)
(* Number of elements (full O(n) traversal; the size is not cached). *)
let rec cardinal = function
  | Empty -> 0
  | Leaf _ -> 1
  | Branch (_,_,t0,t1) -> cardinal t0 + cardinal t1

(* Apply [f] to every element; visit order follows the trie structure,
   not the element ordering. *)
let rec iter f = function
  | Empty -> ()
  | Leaf k -> f k
  | Branch (_,_,t0,t1) -> iter f t0; iter f t1

(* Fold over all elements; for a branch, subtree [t1] is folded into
   the accumulator before [t0]. *)
let rec fold f s accu = match s with
  | Empty -> accu
  | Leaf k -> f k accu
  | Branch (_,_,t0,t1) -> fold f t0 (fold f t1 accu)

(* Universal quantification ([true] on the empty set). *)
let rec for_all p = function
  | Empty -> true
  | Leaf k -> p k
  | Branch (_,_,t0,t1) -> for_all p t0 && for_all p t1

(* Existential quantification ([false] on the empty set). *)
let rec exists p = function
  | Empty -> false
  | Leaf k -> p k
  | Branch (_,_,t0,t1) -> exists p t0 || exists p t1

(* Keep the elements satisfying [pr]; [branch] restores the
   non-empty-subtree invariant where a side becomes empty. *)
let rec filter pr = function
  | Empty -> Empty
  | Leaf k as t -> if pr k then t else Empty
  | Branch (p,m,t0,t1) -> branch (p, m, filter pr t0, filter pr t1)

(* Split into (elements satisfying [p], elements that do not). *)
let partition p s =
  let rec part (t,f as acc) = function
    | Empty -> acc
    | Leaf k -> if p k then (add k t, f) else (t, add k f)
    | Branch (_,_,t0,t1) -> part (part acc t0) t1
  in
  part (Empty, Empty) s

(* An arbitrary element; raises [Not_found] on the empty set. *)
let rec choose = function
  | Empty -> raise Not_found
  | Leaf k -> k
  | Branch (_, _,t0,_) -> choose t0 (* we know that [t0] is non-empty *)

(* All elements as a list, in unspecified order. *)
let elements s =
  let rec elements_aux acc = function
    | Empty -> acc
    | Leaf k -> k :: acc
    | Branch (_,_,l,r) -> elements_aux (elements_aux acc l) r
  in
  elements_aux [] s
(*s There is no way to give an efficient implementation of [min_elt]
and [max_elt], as with binary search trees. The following
implementation is a traversal of all elements, barely more
efficient than [fold min t (choose t)] (resp. [fold max t (choose
t)]). Note that we use the fact that there is no constructor
[Empty] under [Branch] and therefore always a minimal
(resp. maximal) element there. *)
(* Minimal element w.r.t. the polymorphic ordering on hash-consed
   nodes.  Patricia trees keep no element ordering, so this is a full
   traversal.  Relies on the invariant that [Branch] subtrees are
   non-empty (so [Not_found] can only escape at the root). *)
let rec min_elt = function
  | Empty -> raise Not_found
  | Leaf k -> k
  | Branch (_,_,s,t) -> min (min_elt s) (min_elt t)

(* Maximal element; same remarks as [min_elt]. *)
let rec max_elt = function
  | Empty -> raise Not_found
  | Leaf k -> k
  | Branch (_,_,s,t) -> max (max_elt s) (max_elt t)
s Another nice property of trees is to be independent of the
order of insertion . As a consequence , two trees have the
same elements if and only if they are structurally equal .
order of insertion. As a consequence, two Patricia trees have the
same elements if and only if they are structurally equal. *)
(* Patricia trees are canonical: the shape does not depend on the
   insertion order, so structural equality coincides with set
   equality, and [Stdlib.compare] gives a consistent total order. *)
let equal = (=)

let compare = Stdlib.compare

(*i*)
(* [make l]: the singleton set {l}, built through [add]. *)
let make l = add l empty
(*i*)
(*s Additional functions w.r.t to [Set.S]. *)
(* [intersect s1 s2]: do the two sets share at least one element?
   Same case analysis as [inter], but short-circuits with [||] instead
   of building the intersection. *)
let rec intersect s1 s2 = match (s1,s2) with
  | Empty, _ -> false
  | _, Empty -> false
  | Leaf k1, _ -> mem k1 s2
  | _, Leaf k2 -> mem k2 s1
  | Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
    if m1 == m2 && p1 == p2 then
      intersect l1 l2 || intersect r1 r2
    else if m1 < m2 && match_prefix p2 p1 m1 then
      intersect (if zero_bit p2 m1 then l1 else r1) s2
    else if m1 > m2 && match_prefix p1 p2 m2 then
      intersect s1 (if zero_bit p1 m2 then l2 else r2)
    else
      false

(* TODO: implement split to be compatible with the signature Set.S *)
(* [split] is unimplemented and always raises [Invalid_argument]. *)
let split _ = invalid_arg "split"
Local Variables :
compile - command : " make -C .. -k "
tuareg - interactive - program : " ./kind2.top -I ./_build -I / SExpr "
indent - tabs - mode : nil
End :
Local Variables:
compile-command: "make -C .. -k"
tuareg-interactive-program: "./kind2.top -I ./_build -I ./_build/SExpr"
indent-tabs-mode: nil
End:
*)
| null | https://raw.githubusercontent.com/kind2-mc/kind2/37bde34036c5f7548a1b9ae6320f016ef3dc62d6/src/dead_code/hset.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
i
i
s Empty set and singletons.
s Testing the occurrence of a value is similar to the search in a
binary search tree, where the branching bit is used to select the
appropriate subtree.
s The code to remove an element is basically similar to the code of
insertion. But since we have to maintain the invariant that both
subtrees of a [Branch] node are non-empty, we use here the
``smart constructor'' [branch] instead of [Branch].
The trees have the same prefix. Merge the subtrees.
[q] contains [p]. Merge [t] with a subtree of [s].
[p] contains [q]. Merge [s] with a subtree of [t].
The prefixes disagree.
s All the following operations ([cardinal], [iter], [fold], [for_all],
[exists], [filter], [partition], [choose], [elements]) are
implemented as for any other kind of binary trees.
we know that [t0] is non-empty
s There is no way to give an efficient implementation of [min_elt]
and [max_elt], as with binary search trees. The following
implementation is a traversal of all elements, barely more
efficient than [fold min t (choose t)] (resp. [fold max t (choose
t)]). Note that we use the fact that there is no constructor
[Empty] under [Branch] and therefore always a minimal
(resp. maximal) element there.
i
i
s Additional functions w.r.t to [Set.S].
TODO: implement split to be compatible with the signature Set.S | Copyright ( C )
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
open Hashcons
s Sets of integers implemented as , following and paper { \em Fast Mergeable Integer Maps }
( { \tt\small /\~{}cdo/papers.html\#ml98maps } ) .
trees provide faster operations than standard library 's
module [ Set ] , and especially very fast [ union ] , [ subset ] , [ inter ]
and [ diff ] operations .
Okasaki and Andrew Gill's paper {\em Fast Mergeable Integer Maps}
({\tt\small /\~{}cdo/papers.html\#ml98maps}).
Patricia trees provide faster operations than standard library's
module [Set], and especially very fast [union], [subset], [inter]
and [diff] operations. *)
s The idea behind trees is to build a { \em trie } on the
binary digits of the elements , and to compact the representation
by branching only one the relevant bits ( i.e. the ones for which
there is at least on element in each subtree ) . We implement here
} trees : bits are processed from
least - significant to most - significant . The trie is implemented by
the following type [ t ] . [ Empty ] stands for the empty trie , and
[ Leaf k ] for the singleton [ k ] . ( Note that [ k ] is the actual
element . ) [ Branch ( m , p , l , r ) ] represents a branching , where [ p ] is
the prefix ( from the root of the trie ) and [ m ] is the branching
bit ( a power of 2 ) . [ l ] and [ r ] contain the subsets for which the
branching bit is respectively 0 and 1 . Invariant : the trees [ l ]
and [ r ] are not empty .
binary digits of the elements, and to compact the representation
by branching only one the relevant bits (i.e. the ones for which
there is at least on element in each subtree). We implement here
{\em little-endian} Patricia trees: bits are processed from
least-significant to most-significant. The trie is implemented by
the following type [t]. [Empty] stands for the empty trie, and
[Leaf k] for the singleton [k]. (Note that [k] is the actual
element.) [Branch (m,p,l,r)] represents a branching, where [p] is
the prefix (from the root of the trie) and [m] is the branching
bit (a power of 2). [l] and [r] contain the subsets for which the
branching bit is respectively 0 and 1. Invariant: the trees [l]
and [r] are not empty. *)
type ('a, 'b) elt = ('a, 'b) hash_consed
type ('a, 'b) t =
| Empty
| Leaf of ('a, 'b) hash_consed
| Branch of int * int * ('a, 'b) t * ('a, 'b) t
s Example : the representation of the set $ \{1,4,5\}$ is
$ $ \mathtt{Branch~(0,~1,~Leaf~4,~Branch~(1,~4,~Leaf~1,~Leaf~5))}$$
The first branching bit is the bit 0 ( and the corresponding prefix
is [ 0b0 ] , not of use here ) , with $ \{4\}$ on the left and $ \{1,5\}$ on the
right . Then the right subtree branches on bit 2 ( and so has a branching
value of $ 2 ^ 2 = 4 $ ) , with prefix [ 0b01 = 1 ] .
$$\mathtt{Branch~(0,~1,~Leaf~4,~Branch~(1,~4,~Leaf~1,~Leaf~5))}$$
The first branching bit is the bit 0 (and the corresponding prefix
is [0b0], not of use here), with $\{4\}$ on the left and $\{1,5\}$ on the
right. Then the right subtree branches on bit 2 (and so has a branching
value of $2^2 = 4$), with prefix [0b01 = 1]. *)
let empty = Empty
let is_empty = function Empty -> true | _ -> false
let singleton k = Leaf k
let zero_bit k m = (k land m) == 0
let rec mem k = function
| Empty -> false
| Leaf j -> k.tag == j.tag
| Branch (_, m, l, r) -> mem k (if zero_bit k.tag m then l else r)
s The following operation [ join ] will be used in both insertion and
union . Given two non - empty trees [ t0 ] and [ t1 ] with longest common
prefixes [ p0 ] and [ p1 ] respectively , which are supposed to
disagree , it creates the union of [ t0 ] and [ t1 ] . For this , it
computes the first bit [ m ] where [ p0 ] and [ p1 ] disagree and create
a branching node on that bit . Depending on the value of that bit
in [ p0 ] , [ t0 ] will be the left subtree and [ t1 ] the right one , or
the converse . Computing the first branching bit of [ p0 ] and [ p1 ]
uses a nice property of twos - complement representation of integers .
union. Given two non-empty trees [t0] and [t1] with longest common
prefixes [p0] and [p1] respectively, which are supposed to
disagree, it creates the union of [t0] and [t1]. For this, it
computes the first bit [m] where [p0] and [p1] disagree and create
a branching node on that bit. Depending on the value of that bit
in [p0], [t0] will be the left subtree and [t1] the right one, or
the converse. Computing the first branching bit of [p0] and [p1]
uses a nice property of twos-complement representation of integers. *)
let lowest_bit x = x land (-x)
let branching_bit p0 p1 = lowest_bit (p0 lxor p1)
let mask p m = p land (m-1)
let join (p0,t0,p1,t1) =
let m = branching_bit p0 p1 in
if zero_bit p0 m then
Branch (mask p0 m, m, t0, t1)
else
Branch (mask p0 m, m, t1, t0)
s Then the insertion of value [ k ] in set [ t ] is easily implemented
using [ join ] . Insertion in a singleton is just the identity or a
call to [ join ] , depending on the value of [ k ] . When inserting in
a branching tree , we first check if the value to insert [ k ]
matches the prefix [ p ] : if not , [ join ] will take care of creating
the above branching ; if so , we just insert [ k ] in the appropriate
subtree , depending of the branching bit .
using [join]. Insertion in a singleton is just the identity or a
call to [join], depending on the value of [k]. When inserting in
a branching tree, we first check if the value to insert [k]
matches the prefix [p]: if not, [join] will take care of creating
the above branching; if so, we just insert [k] in the appropriate
subtree, depending of the branching bit. *)
let match_prefix k p m = (mask k m) == p
let add k t =
let rec ins = function
| Empty -> Leaf k
| Leaf j as t ->
if j.tag == k.tag then t else join (k.tag, Leaf k, j.tag, t)
| Branch (p,m,t0,t1) as t ->
if match_prefix k.tag p m then
if zero_bit k.tag m then
Branch (p, m, ins t0, t1)
else
Branch (p, m, t0, ins t1)
else
join (k.tag, Leaf k, p, t)
in
ins t
let branch = function
| (_,_,Empty,t) -> t
| (_,_,t,Empty) -> t
| (p,m,t0,t1) -> Branch (p,m,t0,t1)
let remove k t =
let rec rmv = function
| Empty -> Empty
| Leaf j as t -> if k.tag == j.tag then Empty else t
| Branch (p,m,t0,t1) as t ->
if match_prefix k.tag p m then
if zero_bit k.tag m then
branch (p, m, rmv t0, t1)
else
branch (p, m, t0, rmv t1)
else
t
in
rmv t
s One nice property of trees is to support a fast union
operation ( and also fast subset , difference and intersection
operations ) . When merging two branching trees we examine the
following four cases : ( 1 ) the trees have exactly the same
prefix ; ( 2/3 ) one prefix contains the other one ; and ( 4 ) the
prefixes disagree . In cases ( 1 ) , ( 2 ) and ( 3 ) the recursion is
immediate ; in case ( 4 ) the function [ join ] creates the appropriate
branching .
operation (and also fast subset, difference and intersection
operations). When merging two branching trees we examine the
following four cases: (1) the trees have exactly the same
prefix; (2/3) one prefix contains the other one; and (4) the
prefixes disagree. In cases (1), (2) and (3) the recursion is
immediate; in case (4) the function [join] creates the appropriate
branching. *)
let rec merge = function
| Empty, t -> t
| t, Empty -> t
| Leaf k, t -> add k t
| t, Leaf k -> add k t
| (Branch (p,m,s0,s1) as s), (Branch (q,n,t0,t1) as t) ->
if m == n && match_prefix q p m then
Branch (p, m, merge (s0,t0), merge (s1,t1))
else if m < n && match_prefix q p m then
if zero_bit q m then
Branch (p, m, merge (s0,t), s1)
else
Branch (p, m, s0, merge (s1,t))
else if m > n && match_prefix p q n then
if zero_bit p n then
Branch (q, n, merge (s,t0), t1)
else
Branch (q, n, t0, merge (s,t1))
else
join (p, s, q, t)
let union s t = merge (s,t)
s When checking if [ s1 ] is a subset of [ s2 ] only two of the above
four cases are relevant : when the prefixes are the same and when the
prefix of [ s1 ] contains the one of [ s2 ] , and then the recursion is
obvious . In the other two cases , the result is [ false ] .
four cases are relevant: when the prefixes are the same and when the
prefix of [s1] contains the one of [s2], and then the recursion is
obvious. In the other two cases, the result is [false]. *)
let rec subset s1 s2 = match (s1,s2) with
| Empty, _ -> true
| _, Empty -> false
| Leaf k1, _ -> mem k1 s2
| Branch _, Leaf _ -> false
| Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
if m1 == m2 && p1 == p2 then
subset l1 l2 && subset r1 r2
else if m1 > m2 && match_prefix p1 p2 m2 then
if zero_bit p1 m2 then
subset l1 l2 && subset r1 l2
else
subset l1 r2 && subset r1 r2
else
false
s To compute the intersection and the difference of two sets , we
still examine the same four cases as in [ merge ] . The recursion is
then obvious .
still examine the same four cases as in [merge]. The recursion is
then obvious. *)
let rec inter s1 s2 = match (s1,s2) with
| Empty, _ -> Empty
| _, Empty -> Empty
| Leaf k1, _ -> if mem k1 s2 then s1 else Empty
| _, Leaf k2 -> if mem k2 s1 then s2 else Empty
| Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
if m1 == m2 && p1 == p2 then
merge (inter l1 l2, inter r1 r2)
else if m1 < m2 && match_prefix p2 p1 m1 then
inter (if zero_bit p2 m1 then l1 else r1) s2
else if m1 > m2 && match_prefix p1 p2 m2 then
inter s1 (if zero_bit p1 m2 then l2 else r2)
else
Empty
let rec diff s1 s2 = match (s1,s2) with
| Empty, _ -> Empty
| _, Empty -> s1
| Leaf k1, _ -> if mem k1 s2 then Empty else s1
| _, Leaf k2 -> remove k2 s1
| Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
if m1 == m2 && p1 == p2 then
merge (diff l1 l2, diff r1 r2)
else if m1 < m2 && match_prefix p2 p1 m1 then
if zero_bit p2 m1 then
merge (diff l1 s2, r1)
else
merge (l1, diff r1 s2)
else if m1 > m2 && match_prefix p1 p2 m2 then
if zero_bit p1 m2 then diff s1 l2 else diff s1 r2
else
s1
let rec cardinal = function
| Empty -> 0
| Leaf _ -> 1
| Branch (_,_,t0,t1) -> cardinal t0 + cardinal t1
let rec iter f = function
| Empty -> ()
| Leaf k -> f k
| Branch (_,_,t0,t1) -> iter f t0; iter f t1
let rec fold f s accu = match s with
| Empty -> accu
| Leaf k -> f k accu
| Branch (_,_,t0,t1) -> fold f t0 (fold f t1 accu)
let rec for_all p = function
| Empty -> true
| Leaf k -> p k
| Branch (_,_,t0,t1) -> for_all p t0 && for_all p t1
let rec exists p = function
| Empty -> false
| Leaf k -> p k
| Branch (_,_,t0,t1) -> exists p t0 || exists p t1
let rec filter pr = function
| Empty -> Empty
| Leaf k as t -> if pr k then t else Empty
| Branch (p,m,t0,t1) -> branch (p, m, filter pr t0, filter pr t1)
let partition p s =
let rec part (t,f as acc) = function
| Empty -> acc
| Leaf k -> if p k then (add k t, f) else (t, add k f)
| Branch (_,_,t0,t1) -> part (part acc t0) t1
in
part (Empty, Empty) s
let rec choose = function
| Empty -> raise Not_found
| Leaf k -> k
let elements s =
let rec elements_aux acc = function
| Empty -> acc
| Leaf k -> k :: acc
| Branch (_,_,l,r) -> elements_aux (elements_aux acc l) r
in
elements_aux [] s
let rec min_elt = function
| Empty -> raise Not_found
| Leaf k -> k
| Branch (_,_,s,t) -> min (min_elt s) (min_elt t)
let rec max_elt = function
| Empty -> raise Not_found
| Leaf k -> k
| Branch (_,_,s,t) -> max (max_elt s) (max_elt t)
s Another nice property of trees is to be independent of the
order of insertion . As a consequence , two trees have the
same elements if and only if they are structurally equal .
order of insertion. As a consequence, two Patricia trees have the
same elements if and only if they are structurally equal. *)
let equal = (=)
let compare = Stdlib.compare
let make l = add l empty
let rec intersect s1 s2 = match (s1,s2) with
| Empty, _ -> false
| _, Empty -> false
| Leaf k1, _ -> mem k1 s2
| _, Leaf k2 -> mem k2 s1
| Branch (p1,m1,l1,r1), Branch (p2,m2,l2,r2) ->
if m1 == m2 && p1 == p2 then
intersect l1 l2 || intersect r1 r2
else if m1 < m2 && match_prefix p2 p1 m1 then
intersect (if zero_bit p2 m1 then l1 else r1) s2
else if m1 > m2 && match_prefix p1 p2 m2 then
intersect s1 (if zero_bit p1 m2 then l2 else r2)
else
false
let split _ = invalid_arg "split"
Local Variables :
compile - command : " make -C .. -k "
tuareg - interactive - program : " ./kind2.top -I ./_build -I / SExpr "
indent - tabs - mode : nil
End :
Local Variables:
compile-command: "make -C .. -k"
tuareg-interactive-program: "./kind2.top -I ./_build -I ./_build/SExpr"
indent-tabs-mode: nil
End:
*)
|
6bf750e6c4edc1e2cb1a267cfe64cb3d0d513e9108f4f23514cd4ef29a777e32 | OCamlPro/ocp-build | ocpReuse.ml | (**************************************************************************)
(* *)
(* Typerex Libraries *)
(* *)
Copyright 2011 - 2017 OCamlPro SAS
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
Reentrant buffers :
If you call a function that needs a buffer , you might want to use this
module to reuse such buffers , instead of reallocating them everytime .
This module is not thread - safe . Reentrance is only provided for a function
that uses a buffer , and might call another function using a similar
buffer .
Buffer sizes should be between and 1 MB .
If you call a function that needs a buffer, you might want to use this
module to reuse such buffers, instead of reallocating them everytime.
This module is not thread-safe. Reentrance is only provided for a function
that uses a buffer, and might call another function using a similar
buffer.
Buffer sizes should be between 4kB and 1MB.
*)
open OcpCompat
(* One free list (FIFO) per size class: slot [i] holds released
   buffers of size [1024 * 2^i], i.e. the 10 classes 1kB .. 512kB.
   NOTE(review): the module header advertises 4kB-1MB, but the code
   accepts 1kB (power 0) up to 512kB - confirm which is intended. *)
let sizes = Array.init 10 (fun _ -> Queue.create ())

(* Reject a size that is not of the form [1024 * 2^i], with a message
   naming the offending value. *)
let invalid_size size =
  Printf.kprintf failwith
    "ReentrantBuffer.get: size %d is not a power of two" size
(* [get_power size]: the size-class index [i] such that
   [size = 1024 * 2^i].  Fails via [invalid_size] when [size] is not a
   multiple of 1024, not 1024 times a power of two, or not positive.
   Note: the result is not checked against the 10 available classes;
   an oversized power surfaces later as [Invalid_argument] when
   indexing the [sizes] array. *)
let get_power size =
  (* Bug fix: [size = 0] previously made [find_power] recurse forever
     (0 halves to 0 and never fails the divisibility check), so reject
     non-positive sizes up front. *)
  if size <= 0 then invalid_size size;
  let rec find_power pos size =
    if size = 1 then pos else
      let size2 = size lsr 1 in
      if size2 lsl 1 <> size then invalid_size size;
      find_power (pos+1) size2
  in
  if (size lsr 10) lsl 10 <> size then invalid_size size;
  find_power 0 (size lsr 10)

(* Sanity checks executed once at module initialisation. *)
let _ =
  assert (get_power 1024 = 0);
  assert (get_power 2048 = 1);
  ()
(* [get size]: a buffer of exactly [size] bytes (which must be a valid
   size class, see [get_power]); reuses a previously [free]d buffer
   when one is available, otherwise allocates a fresh one.
   Reused buffers are NOT cleared: they still hold whatever their
   previous user wrote. *)
let get size =
  let pos = get_power size in
  try
    Queue.take sizes.(pos)
  with Queue.Empty ->
    Bytes.create size

(* [free s]: return [s] to the free list of its size class so a later
   [get] can reuse it.  The caller must not touch [s] afterwards.
   NOTE(review): nothing bounds the queues, and freeing the same
   buffer twice would hand it out to two callers - presumably
   acceptable for the single-threaded usage the header describes,
   but worth confirming. *)
let free s =
  let size = Bytes.length s in
  let pos = get_power size in
  Queue.add s sizes.(pos)
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/libs/ocplib-lang/ocpReuse.ml | ocaml | ************************************************************************
Typerex Libraries
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************ | Copyright 2011 - 2017 OCamlPro SAS
the GNU Lesser General Public License version 2.1 , with the
Reentrant buffers :
If you call a function that needs a buffer , you might want to use this
module to reuse such buffers , instead of reallocating them everytime .
This module is not thread - safe . Reentrance is only provided for a function
that uses a buffer , and might call another function using a similar
buffer .
Buffer sizes should be between and 1 MB .
If you call a function that needs a buffer, you might want to use this
module to reuse such buffers, instead of reallocating them everytime.
This module is not thread-safe. Reentrance is only provided for a function
that uses a buffer, and might call another function using a similar
buffer.
Buffer sizes should be between 4kB and 1MB.
*)
open OcpCompat
let sizes = Array.init 10 (fun _ -> Queue.create ())
let invalid_size size =
Printf.kprintf failwith
"ReentrantBuffer.get: size %d is not a power of two" size
let get_power size =
let rec find_power pos size =
if size = 1 then pos else
let size2 = size lsr 1 in
if size2 lsl 1 <> size then invalid_size size;
find_power (pos+1) size2
in
if (size lsr 10) lsl 10 <> size then invalid_size size;
find_power 0 (size lsr 10)
let _ =
assert (get_power 1024 = 0);
assert (get_power 2048 = 1);
()
let get size =
let pos = get_power size in
try
Queue.take sizes.(pos)
with Queue.Empty ->
Bytes.create size
let free s =
let size = Bytes.length s in
let pos = get_power size in
Queue.add s sizes.(pos)
|
7fc03aa6d0127250afbfae02b23840bf726a9e4f7668745023cdddad6c4cd0c7 | polyfy/polylith | engine.clj | (ns polylith.clj.core.shell.candidate.engine
(:require [clojure.string :as str]
[polylith.clj.core.shell.candidate.shared :as shared]
[polylith.clj.core.shell.candidate.specification :as specification])
(:refer-clojure :exclude [next]))
;; Mutable shell-session state. `ws` holds the currently loaded workspace
;; (nil when the shell runs outside a workspace root, see spec-candidates)
;; and `groups` holds the tab-completion group state, including arguments
;; the user has already supplied (stored under [group-id param :args],
;; see set-group-arg!).
(def ws (atom nil))
(def groups (atom nil))

;; Rich comment block for interactive REPL development: loads a canned
;; workspace and seeds the completion state.
(comment
  (def workspace (-> "components/test-helper/resources/workspace.edn"
                     slurp read-string))
  (require '[dev.jocke :as dev])
  (reset! ws workspace)
  (reset! groups specification/groups)
  #__)
(defn clean-words
  "Render `value` as a string with double quotes turned into single
  quotes (used for tap> debug output)."
  [value]
  (-> value
      str
      (str/replace "\"" "'")))
(defn tap
  "Send a debug record for `fn-name`/`word`/`candidate` to the tap>
  listeners."
  [fn-name word candidate]
  (let [payload {:fn fn-name
                 :word word
                 :candidate candidate}]
    (tap> payload)))
(defn spec-candidates
  "Specification candidates for the loaded workspace, falling back to
  the outside-workspace-root candidates when no workspace is loaded."
  []
  (if-let [workspace @ws]
    (specification/candidates workspace)
    specification/candidates-outside-ws-root))
(defn first-candidate
  "Root of the completion automaton: the top-level specification
  candidates."
  []
  {:candidates (spec-candidates)
   :type :candidates})
(defn filter-exact-match
  "Keep only the candidates whose :parsed-value equals `word` exactly."
  [word candidates]
  (filter (fn [candidate]
            (= word (:parsed-value candidate)))
          candidates))
(defn starts-with
  "True when the candidate has a :parsed-value that begins with `word`;
  false when :parsed-value is missing (nil)."
  [{:keys [parsed-value]} word]
  (if (nil? parsed-value)
    false
    (str/starts-with? parsed-value word)))
(defn filter-candidates
  "Narrow `candidates` to those matching `word`.
  When `potential-exact-match?` holds and exactly one candidate equals
  `word`, return just that one.  Otherwise keep the prefix matches;
  among those that carry an :order, only the group with the lowest
  order value is kept."
  [word candidates potential-exact-match?]
  (let [exact   (filter-exact-match word candidates)
        matches (filterv #(starts-with % word) candidates)
        ordered (sort-by :order (filter :order matches))
        top     (:order (first ordered))]
    (cond
      (and potential-exact-match? (= 1 (count exact)))
      exact

      (empty? ordered)
      matches

      :else
      (vec (take-while #(= top (:order %)) ordered)))))
(defn empty-if-nil
  "Return `candidates` unchanged, or an empty set when it is falsy."
  [candidates]
  (if candidates
    candidates
    #{}))
(defn set-group-arg!
  "Record `word` as an argument for the candidate's group parameter in
  the `groups` atom, then return the candidate unchanged.
  A :flag parameter stores the word itself as key with value true; a
  named parameter appends `word` to that parameter's :args vector (a nil
  word just ensures the :args vector exists)."
  [{:keys [group] :as candidate} word]
  (let [{:keys [id param]} group]
    (when param
      (let [[param word] (if (= :flag param)
                           [word true]
                           [param word])]
        ;; swap! makes the read-modify-write atomic; the original
        ;; (get-in @groups ...) followed by (reset! groups (assoc-in
        ;; @groups ...)) dereferenced the atom twice and could lose a
        ;; concurrent update.
        (swap! groups update-in [id param :args]
               (fn [args]
                 (if word
                   (if args (conj args word) [word])
                   (or args []))))))
    candidate))
(defn next
  "Handle a :next separator.  When the candidate still carries a :word,
  there is nothing further to offer; when it belongs to a group, offer
  the group's parameters that have not yet received :args; otherwise
  just mark the candidate as a :candidates node."
  [{:keys [word group] :as candidate}]
  (if word
    (assoc candidate :type :candidates :candidates [])
    (if-let [id (:id group)]
      (let [remaining (filterv (fn [entry]
                                 (not (:args (second entry))))
                               (id (deref groups)))]
        (assoc candidate
               :type :candidates
               :candidates (mapv second remaining)))
      (assoc candidate :type :candidates))))
(defn group-candidates
  "All parameters of the candidate's group that have not yet received
  any :args."
  [candidate]
  (let [id      (-> candidate :group :id)
        entries ((deref groups) id)]
    (mapv second
          (remove (fn [entry] (:args (second entry)))
                  entries))))
;; Resolve one completion step for `candidate` against the typed `word`.
;; The next candidate set depends on the candidate's :type:
;;   :candidates — use the stored :candidates as-is,
;;   :remaining  — recompute the group's unconsumed parameters,
;;   :fn         — call the candidate's :function var with the current
;;                 groups/workspace state.
;; When filtering narrows the set to exactly one matching entry, descend
;; into its :child (when present) or record the word as a group argument
;; via set-group-arg!.  Otherwise return the candidate with the filtered
;; set and the word attached.
(defn select [{:keys [type function candidates] :as candidate}
              word potential-exact-match?]
  (let [next-candidates (case type
                          :candidates candidates
                          :remaining (group-candidates candidate)
                          :fn ((var-get function) candidate @groups @ws))
        ;; A leading "" :parsed-value marks a free-form argument slot;
        ;; such slots accept any word and are not prefix-filtered.
        arg-value? (= "" (-> next-candidates first :parsed-value))
        filtered-candidates (if arg-value?
                              next-candidates
                              (filter-candidates word next-candidates potential-exact-match?))
        exact-match? (and (= 1 (count filtered-candidates))
                          (or arg-value?
                              (= word (-> filtered-candidates first :parsed-value))))]
    (if exact-match?
      (let [{:keys [child] :as match} (first filtered-candidates)]
        (if child
          child
          ;; leaf match: remember the word as this group's argument
          (set-group-arg! match word)))
      (do
        ;; No unique match: still record the word (side effect) before
        ;; reporting the remaining possibilities.
        (when potential-exact-match? (set-group-arg! candidate word))
        (assoc candidate
               :candidates filtered-candidates
               :word word)))))
(defn select-candidates
  "Reduce step of the completion automaton: feed one
  [word potential-exact-match?] pair into `candidate`.  A :next word
  advances past a separator; anything else is matched via select."
  [candidate [word potential-exact-match?]]
  (tap "select-candidates" word candidate)
  (case word
    :next (next candidate)
    (select candidate word potential-exact-match?)))
;; TODO: use :select and not :type; when :select is set it is
;; implicitly a :fn.
;; TODO: rename :type to :select-as, or alternatively introduce :select.
(defn reset-groups
  "Re-initialize the groups atom from the current specification
  candidates."
  []
  (reset! groups (shared/groups (spec-candidates))))
(defn potential-exact-match
  "Pair `word` with a flag telling whether it may be an exact match:
  true unless the word is the :next separator or the following word is
  the :end marker (i.e. the word is still being typed)."
  [word next-word]
  (let [exact? (and (not= :next word)
                    (not= :end next-word))]
    [word exact?]))
(defn with-potential-exact-match
  "Annotate every word with its exact-match flag by pairing it with the
  following word (:end marks the end of the input)."
  [words]
  (let [lookahead (conj (vec (rest words)) :end)]
    (mapv potential-exact-match words lookahead)))
(defn select-candidate
  "Run the completion automaton over the full word list, starting from
  a fresh groups state."
  [words]
  (reset-groups)
  (let [steps (with-potential-exact-match words)]
    (reduce select-candidates (first-candidate) steps)))
(defn candidates
  "Shell-completion entry point: compute the candidate set for the given
  command-line `words`.  Debug information is emitted via tap>."
  [line words]
  (tap> {:fn "----- candidates -----"
         :line line
         :first-candidate (first-candidate)
         :words-str (clean-words words)
         :words words
         :groups @groups
         :workspace @ws})
  (let [result (empty-if-nil (:candidates (select-candidate words)))]
    (tap> {:candidates result
           :groups @groups})
    result))
(comment
(reset! ws nil)
(reset! ws (-> "components/test-helper/resources/workspace.edn"
slurp read-string))
(reset-groups)
(deref groups)
(with-potential-exact-match ["deps" :next "brick" "deployer" :next ""])
(select-candidate ["ws" :next "get" "components" "shell" "interface" ""])
(select-candidate ["ws" :next "get" "components" "shell" "interface"])
(select-candidate ["ws" :next "get" "components" "shell" ""])
(with-potential-exact-match ["deps" :next "brick" "deployer" :next "project" ""])
(select-candidate ["ws" :next "out" "components" :next ""])
["ws" :next "out" ".." ""]
["ws" :next "ws-file" ".." "usermanager-example" "ws.edn" :next ""]
  ;; (with-potential-exact-match ["" :next "brick" "deployer" :next ""])
  ;; => [["" true] [:next false] ["brick" true] ["deployer" true] [:next false] ["" false]]
(def step1 (select-candidates (first-candidate) ["deps" true]))
(def step2 (select-candidates step1 [:next false]))
(def step3 (select-candidates step2 ["brick" true]))
(def step4 (select-candidates step3 ["deployer" true]))
(def step5 (select-candidates step4 [:next false]))
(def step6 (select-candidates step5 ["project" true]))
(def step7 (select-candidates step6 ["" false])))
| null | https://raw.githubusercontent.com/polyfy/polylith/0ffbd88effe0b665ca3b50ce211db7f875daa502/components/shell/src/polylith/clj/core/shell/candidate/engine.clj | clojure | (ns polylith.clj.core.shell.candidate.engine
(:require [clojure.string :as str]
[polylith.clj.core.shell.candidate.shared :as shared]
[polylith.clj.core.shell.candidate.specification :as specification])
(:refer-clojure :exclude [next]))
(def ws (atom nil))
(def groups (atom nil))
(comment
(def workspace (-> "components/test-helper/resources/workspace.edn"
slurp read-string))
(require '[dev.jocke :as dev])
(reset! ws workspace)
(reset! groups specification/groups)
#__)
(defn clean-words [value]
(str/replace (str value) "\"" "'"))
(defn tap [fn-name word candidate]
(tap> {:fn fn-name
:word word
:candidate candidate}))
(defn spec-candidates []
(if @ws
(specification/candidates @ws)
specification/candidates-outside-ws-root))
(defn first-candidate []
{:type :candidates
:candidates (spec-candidates)})
(defn filter-exact-match [word candidates]
(filter #(= word (:parsed-value %)) candidates))
(defn starts-with [{:keys [parsed-value]} word]
(and (not (nil? parsed-value))
(str/starts-with? parsed-value word)))
(defn filter-candidates [word candidates potential-exact-match?]
(let [potentials (filter-exact-match word candidates)
filtered (filterv #(starts-with % word) candidates)
ordered (sort-by :order (filter #(:order %) filtered))
order (-> ordered first :order)]
(if (and potential-exact-match?
(= 1 (count potentials)))
potentials
(if (empty? ordered)
filtered
(vec (take-while #(= order (:order %))
ordered))))))
(defn empty-if-nil [candidates]
(or candidates #{}))
(defn set-group-arg! [{:keys [group] :as candidate} word]
(let [{:keys [id param]} group]
(when param
(let [[param word] (if (= :flag param)
[word true]
[param word])
args (get-in @groups [id param :args])]
(reset! groups
(assoc-in @groups [id param :args]
(if word
(if args
(conj args word)
[word])
(if args
args
[]))))))
candidate))
(defn next [{:keys [word group] :as candidate}]
(if word
(assoc candidate :type :candidates
:candidates [])
(if-let [id (:id group)]
(assoc candidate :type :candidates
:candidates (mapv second
(filterv #(-> % second :args not)
(-> groups deref id))))
(assoc candidate :type :candidates))))
(defn group-candidates [candidate]
(mapv second
(filterv #(-> % second :args not)
((deref groups) (-> candidate :group :id)))))
(defn select [{:keys [type function candidates] :as candidate}
word potential-exact-match?]
(let [next-candidates (case type
:candidates candidates
:remaining (group-candidates candidate)
:fn ((var-get function) candidate @groups @ws))
arg-value? (= "" (-> next-candidates first :parsed-value))
filtered-candidates (if arg-value?
next-candidates
(filter-candidates word next-candidates potential-exact-match?))
exact-match? (and (= 1 (count filtered-candidates))
(or arg-value?
(= word (-> filtered-candidates first :parsed-value))))]
(if exact-match?
(let [{:keys [child] :as match} (first filtered-candidates)]
(if child
child
(set-group-arg! match word)))
(do
(when potential-exact-match? (set-group-arg! candidate word))
(assoc candidate
:candidates filtered-candidates
:word word)))))
(defn select-candidates [candidate [word potential-exact-match?]]
(tap "select-candidates" word candidate)
(if (= :next word)
(next candidate)
(select candidate word potential-exact-match?)))
todo : : select och ej : type ,
då om : select är satt så är det implicit en : fn .
todo : : type till : select - as , alternative ha : select .
(defn reset-groups []
(reset! groups
(shared/groups (spec-candidates))))
(defn potential-exact-match [word next-word]
[word (and (not= :next word)
(not= :end next-word))])
(defn with-potential-exact-match [words]
(mapv potential-exact-match words (conj (vec (drop 1 words)) :end)))
(defn select-candidate [words]
(reset-groups)
(reduce select-candidates
(first-candidate)
(with-potential-exact-match words)))
(defn candidates [line words]
(tap> {:fn "----- candidates -----"
:line line
:first-candidate (first-candidate)
:words-str (clean-words words)
:words words
:groups @groups
:workspace @ws})
(let [result (-> (select-candidate words)
:candidates
empty-if-nil)]
(tap> {:candidates result
:groups @groups})
result))
(comment
(reset! ws nil)
(reset! ws (-> "components/test-helper/resources/workspace.edn"
slurp read-string))
(reset-groups)
(deref groups)
(with-potential-exact-match ["deps" :next "brick" "deployer" :next ""])
(select-candidate ["ws" :next "get" "components" "shell" "interface" ""])
(select-candidate ["ws" :next "get" "components" "shell" "interface"])
(select-candidate ["ws" :next "get" "components" "shell" ""])
(with-potential-exact-match ["deps" :next "brick" "deployer" :next "project" ""])
(select-candidate ["ws" :next "out" "components" :next ""])
["ws" :next "out" ".." ""]
["ws" :next "ws-file" ".." "usermanager-example" "ws.edn" :next ""]
( with - potential - exact - match [ " " : next " brick " " deployer " : next " " ] )
[ [ " " true ] [: next false ] [ " brick " true ] [ " deployer " true ] [: next false ] [ " " false ] ]
(def step1 (select-candidates (first-candidate) ["deps" true]))
(def step2 (select-candidates step1 [:next false]))
(def step3 (select-candidates step2 ["brick" true]))
(def step4 (select-candidates step3 ["deployer" true]))
(def step5 (select-candidates step4 [:next false]))
(def step6 (select-candidates step5 ["project" true]))
(def step7 (select-candidates step6 ["" false])))
| |
7e57dee3c1f2e78efbf46aab491128185df8660405b7135b58c9b143b5cbc1cd | INRIA/zelus | defaultsolver.sundials.ml | include Solvers.Sundials_cvode | null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/lib/std/solvers/defaultsolver.sundials.ml | ocaml | include Solvers.Sundials_cvode | |
cf37f20129221fc532de0670fe2c2a44424fbf4fc4a0de5de6e1d0306e37047f | silky/quipper | Simplification.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
{-# LANGUAGE BangPatterns #-}
# LANGUAGE MonadComprehensions #
-- | This module contains the core of the classical circuit
-- optimization algorithm.
module QuipperLib.ClassicalOptim.Simplification where
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.Set.Monad as S {- set-monad-0.1.0.0 -}
import qualified Data.IntSet as IS
import qualified Data.IntMap.Strict as IM {- containers-0.5.2.1 -}
import qualified Control.DeepSeq as Seq
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
import qualified Libraries.Auxiliary as Q
import QuipperLib.ClassicalOptim.Circuit
import QuipperLib.ClassicalOptim.AlgExp
-- ----------------------------------------------------------------------
-- * Auxiliary definitions
-- | Internal definition of a trace, for debugging purposes. This is a
no - op , but can be replaced to turn on debugging .
trace :: String -> b -> b
trace a b = b
| Change a wire ID in a gate . The first two arguments are the old
-- and the new wire ID.
moveWire :: Wire -> Wire -> Gate -> Gate
moveWire from to NoOp = NoOp
moveWire from to (Init b w) = if (w == from) then error "moveWire" else (Init b w)
moveWire from to (Cnot w ctls) = Cnot w' ctls'
where
w' = if (from == w) then to else w
ctls' = map moveCtls ctls
moveCtls (w,b) = if (from == w) then (to,b) else (w,b)
-- | Flip the control on the given wire (from positive to negative or
-- vice versa).
flipCtl :: Wire -> Gate -> Gate
flipCtl _ NoOp = NoOp
flipCtl _ (Init b w) = Init b w
flipCtl w (Cnot w' ctls) = Cnot w' $ map (\(x,b) -> if (x == w) then (x,not b) else (x,b)) ctls
-- | Change a wire ID in a gate and flip the potential control.
moveWireFlip :: Wire -> Wire -> Gate -> Gate
moveWireFlip from to NoOp = NoOp
moveWireFlip from to (Init b w) = if (w == from) then error "moveWire" else (Init b w)
moveWireFlip from to (Cnot w ctls) = Cnot w' ctls'
where
w' = if (from == w) then to else w
ctls' = map moveCtls ctls
moveCtls (w,b) = if (from == w) then (to,b) else if (to == w) then (w,not b) else (w,b)
-- ----------------------------------------------------------------------
-- * Small, simple optimizations
-- | Suppress gates acting on garbage wires, i.e., wires that are not in the input set.
suppress_garbage :: [Gate] -> IS.IntSet -> [Gate]
suppress_garbage ((Cnot w ctls):gs) used =
if (IS.member w used) then g:gs1 else gs2
where
g = Cnot w ctls
gs1 = suppress_garbage gs $ IS.union (IS.insert w used) $ IS.fromList $ L.map fst ctls
gs2 = suppress_garbage gs used
suppress_garbage (g:gs) used = g:(suppress_garbage gs used)
suppress_garbage [] _ = []
-- | Like 'suppress_garbage', but packaged in a manner that is friendly for composition.
suppressGarbageGates :: ([Gate],[Wire]) -> ([Gate],[Wire])
suppressGarbageGates (gs,out) = (reverse $ suppress_garbage (reverse gs) $ IS.fromList out, out)
-- ----------------------------------------------------------------------
-- * Compression of wire numbering
-- $ As the optimization process goes on, many /init/ gates will end
-- up being discarded. The function 'compressWires' compacts the wire
-- numbering scheme to make a smaller circuit.
-- | Get the set of all wires used by the circuit.
getAllWires :: [Gate] -> IS.IntSet
getAllWires gs = L.foldl' IS.union IS.empty $ L.map aux gs
where
aux (Cnot w ctls) = IS.insert w $ L.foldl' (flip IS.insert) IS.empty $ L.map fst ctls
aux (Init _ w) = IS.singleton w
aux NoOp = IS.empty
-- | Get the set of wires initialized by the circuit.
getInitWires :: [Gate] -> IS.IntSet
getInitWires gs = L.foldl' IS.union IS.empty $ map aux gs
where
aux (Cnot _ _) = IS.empty
aux (Init _ w) = IS.singleton w
aux NoOp = IS.empty
-- | Get the set of input wires, i.e., the ones that are used but not initialized.
getInputWires :: [Gate] -> IS.IntSet
getInputWires gs = IS.difference (getAllWires gs) (getInitWires gs)
-- | Compress the wire numbering.
compressWires :: [Wire] -> ([Gate],[Wire]) -> ([Gate],[Wire])
compressWires inputwires (gs,output) = (gs',out')
where
iws = getInitWires gs
begin = if inputwires == []
then 0
else 1 + (head $ reverse $ L.sort inputwires)
end = begin + (IS.size iws)
listmap = zip ([0..begin-1] ++ (IS.toAscList iws)) [0 .. end]
remap = M.fromList $ trace (show listmap) listmap
out' = map (remap M.!) output
gs' = map (rewire remap) gs
rewire m (Cnot w ctls) = Cnot (m M.! w) $ map (\(x,b) -> (m M.! x, b)) ctls
rewire m (Init b w) = Init b (m M.! w)
rewire m NoOp = NoOp
-- ----------------------------------------------------------------------
-- * A useful data structure
-- $ When considering a particular point in a circuit (i.e., in a list
-- of gates), to decide whether a given wire is used or controlled
-- before or after, we keep a data-structure 'UsedWire'.
| The type of gate ID 's .
type GateId = Int
| A set of gate ID 's .
type GateIdSet = IS.IntSet
| A map from wires to pairs of ' 's . The left member gives the
ID of the first gate using the wire , and the right member gives the
-- ID of the last gate using the wire.
type UsedWire = IM.IntMap GateIdSet
| Get the minimum of a set of gate ID 's .
gateIdFindMin :: GateIdSet -> Maybe GateId
gateIdFindMin g = if (IS.null g) then Nothing else Just (IS.findMin g)
| Get the maximum of a set of gate ID 's .
gateIdFindMax :: GateIdSet -> Maybe GateId
gateIdFindMax g = if (IS.null g) then Nothing else Just (IS.findMax g)
-- | Get the pair corresponding to the given wire.
pairUsedWire :: UsedWire -> Wire -> GateIdSet
pairUsedWire m w = IM.findWithDefault IS.empty w m
| Get the first gate using the wire in the future .
firstUsedWire :: UsedWire -> Wire -> Maybe GateId
firstUsedWire = curry $ gateIdFindMin . (uncurry pairUsedWire)
-- | Get the last gate using the wire in the past. Return 0 if none.
lastUsedWire :: UsedWire -> Wire -> GateId
lastUsedWire w w'=
case (curry $ gateIdFindMax . (uncurry pairUsedWire)) w w' of
Just w -> w
Nothing -> 0
-- | 'nextUsedGate' /ws/ /g/ /g/' /w/: Look for the next gate in /ws/
corresponding to wire /w/ , starting from /g/. Return /g/ ' if none .
nextUsedGate :: UsedWire -> GateId -> GateId -> Wire -> GateId
nextUsedGate ws g g' w =
case (do gs <- IM.lookup w ws; IS.lookupGT g gs) of
Just g -> g
Nothing -> g'
-- | For each wire, find the set of gates placing a control on it.
circuitControlWires :: GateId -> [Gate] -> UsedWire
circuitControlWires id gs = aux id IM.empty gs
where
aux _ m [] = m
aux g m (Init _ _:gs) = aux (g+1) m gs
aux g m ((Cnot _ ctls):gs) = aux (g+1) m' gs
where
wires = map fst ctls
m' = L.foldl (\m'' w -> IM.alter (f g) w m'') m wires
f g Nothing = Just $ IS.singleton g
f g (Just s) = Just $ IS.insert g s
aux g m (NoOp:_) = error "circuitControlWires cannot deal with NoOp"
-- | For each wire, find the set of gates acting on it with NOT.
circuitNotWires :: GateId -> [Gate] -> UsedWire
circuitNotWires id gs = aux id IM.empty gs
where
aux _ m [] = m
aux g m (Init _ _:gs) = aux (g+1) m gs
aux g m ((Cnot w _):gs) = aux (g+1) m' gs
where
m' = IM.alter (f g) w m
f g Nothing = Just $ IS.singleton g
f g (Just s) = Just $ IS.insert g s
aux g m (_:gs) = aux (g+1) m gs
-- ----------------------------------------------------------------------
-- * Algebraic optimization method
-- $ To each wire in a circuit, we attach a set of formulas. At each
-- iteration, the wire that gets modified is updated with its new
-- value, using all the possible values, possibly together with a
-- fresh variable. At each iteration, we also strip away the
-- expressions that get too large. Here, the size of an algebraic
expression is measured by the ' ' function .
-- | Calculate the size of an algebraic expression.
exp_length :: Exp -> Int
exp_length e = L.foldl' (+) 0 $ L.map (\x -> let y = IS.size x in seq y y) $ S.toList e
-- | Given a list of sets of expressions, form the conjunction of
every possible choice of one expression from each set . For example .
--
-- > exp_list_and [{a,b}, {c,d}, {e,f}] =
> [ , a∧c∧f , a∧d∧e , a∧d∧f , , b∧c∧f , b∧d∧e , b∧d∧f ] .
exp_list_and :: [S.Set Exp] -> S.Set Exp
exp_list_and [] = S.singleton exp_true
exp_list_and [l] = l
exp_list_and (h:k:t) = exp_list_and ([exp_and x y | x <- h, y <- k]:t)
-- | Evaluate a control with respect to a state.
expEvalCtl :: (IM.IntMap (S.Set (Exp,Int))) -> (Wire,Bool) -> S.Set Exp
expEvalCtl m (w,True) = S.map fst (m IM.! w)
expEvalCtl m (w,False) = S.map exp_not $ S.map fst $ (IM.!) m w
-- | Evaluate a gate with respect to a state.
expEvalGate :: (IM.IntMap (S.Set (Exp,Int))) -> Gate -> IM.IntMap (S.Set (Exp,Int))
expEvalGate m (Init False w) = IM.insert w (S.singleton (exp_false,0)) m
expEvalGate m (Init True w) = IM.insert w (S.singleton (exp_true,1)) m
expEvalGate m NoOp = m
expEvalGate m (Cnot w ctls) = IM.insert w cnot m
where
ands = exp_list_and $ L.map (expEvalCtl m) ctls
cnot = S.map (\x -> (x,exp_length x)) [exp_xor x y |
x <- S.map fst $ (IM.!) m w,
y <- ands ]
-- ----------------------------------------------------------------------
-- ** State of the optimization automaton
-- | The state of the automaton. This contains in particular the
-- current state, the past and future gates, and a fresh variable.
data ExpState = ExpState {
gates_to_skip :: IM.IntMap Gate, -- ^ For use with 'stepSwapCirc'.
allWiresInCirc :: IS.IntSet, -- ^ All the wires in the circuit.
^ ID of the first gate in the future ( starts at 1 ) .
usedControlWires :: UsedWire, -- ^ Location of the controls.
usedNotWires :: UsedWire, -- ^ Location of the NOT gates.
future :: [Gate], -- ^ Gates left to explore.
past :: [Gate], -- ^ Gates already explored.
expMap :: IM.IntMap (S.Set (Exp,Int)), -- ^ Algebraic state of the wires. Also contains the size of the expression, so we don't have to recompute it each time.
freshVar :: Integer, -- ^ The next fresh wire.
outWires :: [Wire], -- ^ The output wires.
sizeCirc :: Int -- ^ Size of the circuit.
}
instance Seq.NFData Gate where
rnf (Init a b) = a `seq` b `seq` ()
rnf (Cnot w ctls) = ctls `Seq.deepseq` w `Seq.deepseq` ()
rnf NoOp = ()
instance Seq . NFData ExpState where
rnf e = { -allWiresInCirc e ` Seq.deepseq ` gateId e ` Seq.deepseq ` usedControlWires e ` Seq.deepseq ` usedNotWires e ` Seq.deepseq ` future e ` Seq.deepseq ` past e ` Seq.deepseq ` expMap e ` Seq.deepseq ` freshVar e ` Seq.deepseq ` outWires e
instance Seq.NFData ExpState where
rnf e = {-allWiresInCirc e `Seq.deepseq` gateId e `Seq.deepseq` usedControlWires e `Seq.deepseq` usedNotWires e `Seq.deepseq` future e `Seq.deepseq` past e `Seq.deepseq` expMap e `Seq.deepseq` freshVar e `Seq.deepseq` outWires e-} () `Seq.deepseq` ()
-}
-- | The initial state for a given set of parameters.
initExpState :: IS.IntSet -> [Wire] -> [Gate] -> ExpState
initExpState ws_in ws_out gs = ExpState {
gates_to_skip = IM.empty,
allWiresInCirc = getAllWires gs,
gateId = 1,
usedControlWires = circuitControlWires 1 gs,
usedNotWires = circuitNotWires 1 gs,
future = gs,
past = [],
expMap = IM.fromList $ L.map (\x -> (x, S.singleton (exp_var x, 1))) $ IS.toAscList ws_in,
freshVar = fromIntegral $ (+) 1 $ IS.findMax ws_in,
outWires = ws_out,
sizeCirc = length gs
}
-- ----------------------------------------------------------------------
-- ** The state monad
| The state monad corresponding to ' ExpState ' .
data EvalCirc a = EvalCirc (ExpState -> (ExpState, a))
instance Monad EvalCirc where
return x = EvalCirc (\y -> (y,x))
(>>=) (EvalCirc c) f = EvalCirc (\s -> let (s',x) = c s in
let (EvalCirc c') = f x in
c' s')
instance Applicative EvalCirc where
pure = return
(<*>) = ap
instance Functor EvalCirc where
fmap = liftM
-- ----------------------------------------------------------------------
-- ** Low-level access functions
| Construct an @'ExpState'@ out of an @'EvalCirc'@.
runEvalCirc :: IS.IntSet -> [Wire] -> [Gate] -> EvalCirc a -> ExpState
runEvalCirc ws_in ws_out gs (EvalCirc e) = fst $ e $ initExpState ws_in ws_out gs
-- | Retrieve the state.
getExpState :: EvalCirc ExpState
getExpState = EvalCirc (\s -> (s,s))
-- | Set the state.
setExpState :: ExpState -> EvalCirc ()
setExpState s = EvalCirc (\_ -> (s,()))
-- ----------------------------------------------------------------------
-- ** Higher-level access functions
-- | Create a fresh variable
newFreshVar :: EvalCirc Integer
newFreshVar = do
s <- getExpState
let v = freshVar s
setExpState (s { freshVar = v + 1 })
return v
-- | Pull a new gate to be analyzed out of the future.
pullNewGate :: EvalCirc (Maybe Gate)
pullNewGate = do
s <- getExpState
case (future s) of
(h:t) -> do setExpState (s { future = t } )
return (Just h)
[] -> return Nothing
-- | Modify the future gates.
changeFuture :: [Gate] -> EvalCirc ()
changeFuture gs = do
s <- getExpState
setExpState (s { future = gs } )
return ()
| Update the future using the given parameter function . Return two sets
of ' gateId 's that got modified : the first set concerns the controls ,
the second set the NOT gates .
updateFuture :: (Gate -> Gate) -> EvalCirc (IS.IntSet,IS.IntSet)
updateFuture f = do
s <- getExpState
let ((_,!gsModifCtls,!gsModifNots),new_future) =
L.mapAccumL (\(gid,gs,gs') g -> let g' = f g in
((
gid+1
,
if (ctlsOfGate g == ctlsOfGate g')
then gs
else IS.insert gid gs
,
if (wireOfGate g == wireOfGate g')
then gs'
else IS.insert gid gs'
),
g'))
(1 + (gateId s), IS.empty,IS.empty) (future s)
changeFuture new_future
return (gsModifCtls,gsModifNots)
-- | Store a gate in the past.
storeOldGate :: Gate -> EvalCirc ()
storeOldGate g = do
s <- getExpState
let p = past s
seq g $ seq p $ setExpState (s { past = g:p } )
return ()
-- | Increase the '@gateId@' (i.e., go forward).
incrGateId :: EvalCirc ()
incrGateId = do
s <- getExpState
setExpState (s { gateId = 1 + (gateId s) } )
return ()
-- | Get the set of all wires.
getAllWiresInCirc :: EvalCirc IS.IntSet
getAllWiresInCirc = do
s <- getExpState
return (allWiresInCirc s)
-- | Set the set of all wires.
setAllWiresInCirc :: IS.IntSet -> EvalCirc ()
setAllWiresInCirc ws = do
s <- getExpState
ws `seq` setExpState (s {allWiresInCirc = ws})
return ()
-- | Remove a gate from the set of all wires.
removeFromAllWiresInCirc :: Int -> EvalCirc ()
removeFromAllWiresInCirc w = do
ws <- getAllWiresInCirc
setAllWiresInCirc $ IS.delete w ws
return ()
-- | Get the algebraic representation of the set of wires.
getExpMap :: EvalCirc (IM.IntMap (S.Set (Exp,Int)))
getExpMap = do
s <- getExpState
s `seq` return (expMap s)
-- | Set the algebraic representation of the state of wires.
setExpMap :: (IM.IntMap (S.Set (Exp,Int))) -> EvalCirc ()
setExpMap m = do
s <- getExpState
m `seq` setExpState (s { expMap = m } )
return ()
-- | Update the database recording the controlled wires.
updateUsedControlWires :: (UsedWire -> UsedWire) -> EvalCirc ()
updateUsedControlWires f = do
s <- getExpState
let c = f $ usedControlWires s
c `seq` setExpState (s { usedControlWires = c } )
return ()
-- | Update the database recording the NOT gates.
updateUsedNotWires :: (UsedWire -> UsedWire) -> EvalCirc ()
updateUsedNotWires f = do
s <- getExpState
let c = f $ usedNotWires s
c `seq` setExpState (s { usedNotWires = c } )
return ()
-- | Update the list of output wires.
updateOutWires :: ([Wire] -> [Wire]) -> EvalCirc ()
updateOutWires f = do
s <- getExpState
let c = f $ outWires s
c `seq` setExpState (s { outWires = c } )
return ()
-- | Add a gate ID to the list of gates to skip.
addToSkipGates :: GateId -> Gate -> EvalCirc ()
addToSkipGates id g = do
s <- getExpState
let c = IM.insert id g (gates_to_skip s)
c `seq` setExpState (s {gates_to_skip = c} )
return ()
-- | Send a gate to the end of the future.
sendEndOfTime :: Gate -> EvalCirc ()
sendEndOfTime g = do
s <- getExpState
changeFuture ((future s) ++ [g])
return ()
-- | Place a gate at the given gate ID in the future.
shiftGate :: Gate -> GateId -> EvalCirc ()
shiftGate g x = do
s <- getExpState
let (!head, !tail) = splitAt x (future s)
let z = head ++ [g] ++ tail
z `Seq.deepseq` changeFuture z
return ()
-- ----------------------------------------------------------------------
-- ** Auxiliary functions
-- | @pairEqualExp m1 m2 ws@: returns a list of pairs of wires @(x,y)@
-- such that @m2 x = m1 x = m1 y@.
pairEqualExp :: (IM.IntMap [Exp]) -> (IM.IntMap [Exp]) -> [Wire] -> [(Wire,Wire)]
pairEqualExp m1 m2 ws =
L.map fst $ L.filter aux $ L.zip pair_ws (L.map value pair_ws)
where
all_pairs l = [(x,y) | x <- l, y <- l]
pair_ws = all_pairs ws
value (x,y) = (m2 IM.! x, m1 IM.! x, m1 IM.! y)
aux ((_,_),(a,b,c)) = a == b && b == c
-- | From a set of expressions (annotated with sizes), prune the ones
-- whose size is larger than /n/.
pruneListExp :: Int -> S.Set (Exp,Int) -> S.Set (Exp,Int)
pruneListExp n l = S.filter (\x -> snd x <= n) l
-- ----------------------------------------------------------------------
-- ** The algebraic optimization automaton
| Perform a set of filters acting on one gate at a time , looking
-- for:
--
-- * gates having no effect;
--
-- * orphan NOT-gates (i.e. NOT gates negating an out-wire) ;
--
-- * simple copy-cats (both positive and negative) ;
--
-- * hidden copy-cats.
--
-- Return 'False' when the end of the circuit is reached, 'True' otherwise.
stepEvalCirc :: EvalCirc Bool
stepEvalCirc = do
m_before <- getExpMap
trace ("the state of the system is " ++ (show $ m_before)) $ return ()
s <- getExpState
if ((gateId s) `mod` 1000 == 0) then trace ("Timestamp... " ++ (show (gateId s))) (return ()) else return ()
s <- getExpState
trace ("outside wires " ++ (show $ outWires s)) $ return ()
maybe_g <- pullNewGate
trace ("pulled new gate " ++ (show maybe_g)) $ return ()
s <- getExpState
case maybe_g of
Nothing -> return False
Just g -> do -- analyze the gate
m_before <- getExpMap
let m_after = expEvalGate m_before g
case g of
NoOp -> error "stepEvalCirc cannot deal with NoOp"
Init b w | not ((IM.member w $ usedNotWires s) || (IM.member w $ usedControlWires s) || L.elem w (outWires s))-> do
trace "got an orphan init, removing it" $ return ()
storeOldGate NoOp -- store a placeholder for the gate
incrGateId
removeFromAllWiresInCirc w
-- we could also clean expMap from the reference to w but I think it makes no gain
return True
Init _ _ -> do
trace "got a regular init" $ return ()
storeOldGate g
setExpMap m_after
incrGateId
return True
Cnot w _ | not $ S.null $ S.intersection (m_before IM.! w) (m_after IM.! w) -> do
trace "got a cnot where no change happened..." $ return ()
trace (show m_before) $ return ()
trace (show m_after) $ return ()
storeOldGate NoOp
incrGateId
return True
Cnot w [] | not (L.elem w $ outWires s) -> do
trace "got a not-gate that can be removed..." $ return ()
s <- getExpState
-- update future
changeFuture $ L.map (flipCtl w) $ future s
s <- getExpState
trace (show $ future s) $ return ()
storeOldGate NoOp
incrGateId
return True
Cnot w ctls | otherwise -> do
trace "got a general cnot" $ return ()
trace ("state after the gate is " ++ (show m_after)) $ return ()
allWs <- getAllWiresInCirc
s <- getExpState
let my_elem x = not (L.elem x $ outWires s)
let all_ws = IS.toAscList $ IS.filter future_ctl $
not ( L.elem x $ outWires s ) ) $
IS.filter (\x -> not $ S.null $
S.intersection (m_after IM.! x)
(m_after IM.! w)) $
IS.fromList $ L.map fst ctls
where
future_ctl x =
(lastUsedWire (usedNotWires s) x) <= gateId s
&&
(lastUsedWire (usedNotWires s) w) <= gateId s
let all_ws_neg = IS.toAscList $ IS.filter future_ctl $
IS.filter (\x -> not (L.elem x $ outWires s)) $
IS.filter (\x -> not $ S.null $
S.intersection (m_after IM.! x)
(S.map (\(e,i) -> (exp_not e, i)) (m_after IM.! w))) $
IS.filter (w /=) $ IS.fromList $ L.map fst ctls
where
future_ctl x =
(lastUsedWire (usedNotWires s) x) <= gateId s
&&
(lastUsedWire (usedNotWires s) w) <= gateId s
trace ("List of outside wires: " ++ (show $ outWires s)) (return ())
trace ("List of available wires: " ++ (show all_ws)) (return ())
trace ("List of available wires with neg: " ++ (show all_ws_neg)) (return ())
case all_ws of
[] -> do
case all_ws_neg of
[] -> do
-- There is no "simple" copy-cat...
-- Let's try to find a hidden one.
s <- getExpState
-- This helper function take a wire and look in
-- the past for the closest cnot acting on it
let getOlderCnot w = case (do set <- IM.lookup w (usedNotWires s); IS.lookupLT (gateId s) set) of
Nothing -> Nothing -- there is no previous not
Just g' -> -- there is one not... let's check that it is a cnot
case ((past s) !! ((gateId s) - g' - 1)) of
Cnot _ [ctl] -> Just (g',ctl)
_ -> Nothing
-- Helper acting on controls: only return
-- something if it is a single control.
let getOlderCnot_actOnCtls w1 [(w,b)] = do -- monad Maybe
other_ctl <- getOlderCnot w1
other_ctl `seq` return ((w,b),other_ctl)
getOlderCnot_actOnCtls _ _ = Nothing
let retrieveHiddenCnot w1 ctls = do -- monad Maybe
if ( L.elem w $ outWires s ) then Nothing
-- else return ()
((w2,b2),(g',(w3,b3))) <- getOlderCnot_actOnCtls w1 ctls
-- make sure w2 and w3 are distinct
if (w2 == w3) then Nothing else return ()
let m = m_after
-- check for the property w1 == w2 oplus w3
if (S.null $ S.intersection
[exp_xor x y | (x,_) <- m IM.! w2, (y,_) <- m IM.! w3]
[x | (x,_) <- m IM.! w1])
then Nothing
We have two CNOT candidates for hidden copy - cat .
else if ((not (L.elem w2 $ outWires s))
&&
(lastUsedWire (usedNotWires s) w2) <= gateId s
&&
(lastUsedWire (usedControlWires s) w2) <= gateId s)
then Just ((w2,b2),(w3,b3))
else if ((not (L.elem w3 $ outWires s))
&&
(lastUsedWire (usedNotWires s) w3) <= g'
&&
(lastUsedWire (usedControlWires s) w3) <= g')
then Just ((w3,b3),(w2,b2))
else Nothing
case retrieveHiddenCnot w ctls of
Just ((w2,b2),(w3,b3)) -> -- we have a hidden cnot candidate. Great.
-- w2 is the wire that is not used with NOT in future
do
trace "found one hidden copy-cat" $ return ()
updateOutWires $ map (\x -> if x == w then w2 else x)
(gsModifCtls,gsModifNots) <- updateFuture $ moveWire w w2
trace ("moving " ++ (show w) ++ " to " ++ (show w2)) $ return ()
trace (show gsModifCtls) $ return ()
trace (show gsModifNots) $ return ()
s <- getExpState
trace ("before: usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls) w2 $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedControlWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w2 c
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (gateId s) gs
Nothing -> Just $ IS.singleton (gateId s)) w3 c
updateUsedNotWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifNots
Nothing -> Just gsModifNots) w2 $
IM.update (\gs -> Just $ IS.difference gs gsModifNots) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w $
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (gateId s) gs
Nothing -> Just $ IS.singleton (gateId s)) w2 c
s <- getExpState
trace ("after: usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
Update ExpMap
setExpMap $ IM.insert w (m_before IM.! w) $
IM.insert w2 (m_after IM.! w) m_after
storeOldGate $ Cnot w2 [(w3,True)]
incrGateId
return True
_ -> -- No hidden Cnot, let's proceed...
do
let mw = m_after IM.! w
f <- if ((S.foldl' (\a (_,i) -> min a i) 3 mw) <= 1)
then return id
else do
v <- newFreshVar
return (S.insert (exp_var $ fromIntegral v, 1))
setExpMap $ IM.adjust (\a -> pruneListExp 3 a) w $
IM.adjust f w m_after
storeOldGate g
incrGateId
return True
-----------------
-- Case of simple copy-cats
(w':_) -> do
s <- getExpState
updateOutWires $ map (\x -> if x == w then w' else x)
s <- getExpState
trace (show $ future s) $ return ()
(gsModifCtls,_) <- updateFuture $ moveWireFlip w w'
update expMap : now , w is null and w ' is not(old w )
expMap <- getExpMap
setExpMap $ IM.insert w (m_before IM.! w) $
IM.insert w' (S.map (\(e,i) -> (exp_not e,i)) (expMap IM.! w')) expMap
trace ("moving " ++ (show w) ++ " to " ++ (show w')) $ return ()
trace (show gsModifCtls) $ return ()
s <- getExpState
trace (show $ future s) $ return ()
s <- getExpState
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls) w' $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w c
storeOldGate (Cnot w' []) -- Set a flip on the w' wire
incrGateId
return True
(w':_) -> do
s <- getExpState
updateOutWires $ map (\x -> if x == w then w' else x)
s <- getExpState
trace (show $ future s) $ return ()
trace ("usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
(gsModifCtls,_) <- updateFuture $ moveWire w w'
trace ("moving " ++ (show w) ++ " to " ++ (show w')) $ return ()
trace (show gsModifCtls) $ return ()
s <- getExpState
trace (show $ future s) $ return ()
s <- getExpState
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls
) w' $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w c
storeOldGate NoOp -- replace g with NoOp so that gateId stays accurate
incrGateId
return True
-- | Shuffle the circuit by sending the CNOT gates as far as
-- possible (i.e., until they hit a control, or to the end).
-- Return 'False' when the end of the circuit is reached, 'True' otherwise.
stepSwapCirc :: EvalCirc Bool
stepSwapCirc = do
  s <- getExpState
  case (IM.lookup (gateId s) (gates_to_skip s)) of
    Just g -> do
      -- A gate was previously re-scheduled at this position by
      -- 'addToSkipGates': emit it verbatim and move on.
      storeOldGate g
      incrGateId
      return True
    Nothing -> do
      maybe_g <- pullNewGate
      trace ("pulled new gate " ++ (show maybe_g)) $ return ()
      s <- getExpState
      if ((gateId s) `mod` 1000 == 0) then trace ("Timestamp (swap)... " ++ (show (gateId s))) {-(s `Seq.deepseq` (setExpState s))-} (return ()) else return ()
      case maybe_g of
        Nothing -> return False
        -- NOTE(review): the pattern on the next line was lost in the
        -- extracted source (only the stripped comment "got a CNOT"
        -- remained). It is reconstructed from the uses below: g is the
        -- pulled gate, w1 its NOT target (see the usedNotWires updates),
        -- w2 its single control (see the usedControlWires updates) --
        -- TODO confirm against upstream Quipper.
        Just g@(Cnot w1 [(w2, _)]) -> do -- got a CNOT
          trace ("got a cnot to analyze " ++ (show $ gateId s) ++ " " ++ (show $ gates_to_skip s)) $ return ()
          -- The CNOT commutes forward until some gate either NOTs its
          -- control wire w2 or places a control on its target wire w1.
          -- (Note: 'id' here shadows Prelude.id, as in the original.)
          let id = min (nextUsedGate (usedNotWires s) (gateId s) (1 + sizeCirc s) w2) $
                       (nextUsedGate (usedControlWires s) (gateId s) (1 + sizeCirc s) w1)
          trace ("found id = " ++ (show id)) $ return ()
          if ( id > 1 + gateId s ) -- && (id <= (sizeCirc s) )
            then do ------------- there is something to move!
              trace ("can be shifted to " ++ (show (id - 1))) $ return ()
              addToSkipGates (id - 1) g
              -- shiftGate g (id - 1 - (gateId s))
              s <- getExpState
              trace (show $ future s) $ return ()
              -- Remove references to (gateId s).
              updateUsedControlWires $ \c ->
                IM.update (\gs -> Just $ IS.delete (gateId s) gs) w2 c
              updateUsedNotWires $ \c ->
                IM.update (\gs -> Just $ IS.delete (gateId s) gs) w1 c
              -- Shift the ones between (gateId s) and id.
              updateUsedNotWires $
                IM.map $ IS.map $ \x -> if (x <= gateId s) || (x >= id) then x
                                        else x - 1
              updateUsedControlWires $
                IM.map $ IS.map $ \x -> if (x <= gateId s) || (x >= id) then x
                                        else x - 1
              s <- getExpState
              let z = IM.mapKeys (\x -> if (x <= gateId s) || (x >= id) then x
                                        else x - 1) (gates_to_skip s) in
                z `seq` setExpState (s { gates_to_skip = z} )
              -- Set g in position (id - 1)
              updateUsedControlWires $ \c ->
                IM.alter (\maybe_gs -> case maybe_gs of
                            Just gs -> Just $ IS.insert (id - 1) gs
                            Nothing -> Just $ IS.singleton (id - 1)) w2 c
              updateUsedNotWires $ \c ->
                IM.alter (\maybe_gs -> case maybe_gs of
                            Just gs -> Just $ IS.insert (id - 1) gs
                            Nothing -> Just $ IS.singleton (id - 1)) w1 c
              -- Make sure we skip (id - 1) later on.
              -- NOTE(review): reconstructed branch ending. The indices of
              -- the intermediate gates were shifted down by one, so the
              -- current gate id now denotes the next future gate: the gate
              -- id must NOT be incremented here, and the moved gate is not
              -- stored (it will be emitted when gateId reaches id - 1) --
              -- TODO confirm against upstream Quipper.
              return True
            else do ------------- nothing to move...
              trace "cannot be shifted" $ return ()
              storeOldGate g
              incrGateId
              return True
        Just g -> do
          trace ("got a random " ++ (show g)) $ return ()
          storeOldGate g
          incrGateId
          return True
-- | A more elementary version of @'stepSwapCirc'@: shuffle the
-- circuit by sending to the end all the NOT gates that can be sent
-- there. Return 'False' when the end of the circuit is reached,
-- 'True' otherwise.
stepSwapCirc_simple :: EvalCirc Bool
stepSwapCirc_simple = do
  maybe_g <- pullNewGate
  trace ("pulled new gate " ++ (show maybe_g)) $ return ()
  s <- getExpState
  case maybe_g of
    Nothing -> return False
    -- End of the circuit reached: store the last gate and stop.
    Just g | (gateId s) == (length $ past s) + (length $ future s) -> do
      storeOldGate g
      return False
    -- NOTE(review): the tail of this guard and its "-> do" were lost in
    -- the extracted source (only the stripped comment "got a CNOT"
    -- remained after a dangling "&&"). The third conjunct is
    -- reconstructed as "the target w1 is not used as a control later",
    -- which is what makes sending the CNOT to the end sound -- TODO
    -- confirm against upstream Quipper.
    Just g@(Cnot w1 [(w2,b2)]) |
        (lastUsedWire (usedNotWires s) w2) <= gateId s &&
        (lastUsedWire (usedNotWires s) w1) <= gateId s &&
        (lastUsedWire (usedControlWires s) w1) <= gateId s -> do -- got a CNOT
      trace "got a cnot that can be sent to the end" $ return ()
      sendEndOfTime g
      -- do not store gate, but increase gateId
      incrGateId
      return True
    Just g -> do
      storeOldGate g
      incrGateId
      return True
-- ----------------------------------------------------------------------
-- ** Some wrappers
-- | Run the monad until 'False' occurs.
runWhile :: Monad m => (a -> Bool) -> m a -> m ()
runWhile p action = loop
  where
    -- Re-run the action as long as the predicate holds on its result.
    loop = action >>= \r -> if p r then loop else return ()
-- | Strip the 'NoOp' gates from a list of gates.
stripNoOp :: [Gate] -> [Gate]
-- Keep, in order, every gate that is not a 'NoOp'.
stripNoOp gs = [g | g <- gs, g /= NoOp]
-- | Wrapper around 'stepEvalCirc'.
alg_simplify :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_simplify (gs, out) = (stripNoOp finalGates, finalOut)
  where
    -- Run the algebraic-optimization automaton over the whole circuit.
    finalState = runEvalCirc inWires out gs $
                   trace "Starting new circuit!" (runWhile id stepEvalCirc)
    -- The resulting circuit is the (reversed) past plus any leftover future.
    finalGates = (reverse $ past finalState) ++ (future finalState)
    finalOut   = outWires finalState
    inWires    = getAllWires gs
-- | Wrapper around 'stepSwapCirc'.
alg_swap :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_swap (gs, out) = (stripNoOp finalGates, finalOut)
  where
    -- Run the CNOT-shuffling automaton over the whole circuit.
    finalState = runEvalCirc inWires out gs $
                   trace "Starting new circuit!" (runWhile id stepSwapCirc)
    -- The resulting circuit is the (reversed) past plus any leftover future.
    finalGates = (reverse $ past finalState) ++ (future finalState)
    finalOut   = outWires finalState
    inWires    = getAllWires gs
-- | Wrapper around 'stepSwapCirc_simple'.
alg_swap_simple :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_swap_simple (gs, out) = (stripNoOp finalGates, finalOut)
  where
    -- Run the simple send-to-the-end shuffling automaton.
    finalState = runEvalCirc inWires out gs $
                   trace "Starting new circuit!" (runWhile id stepSwapCirc_simple)
    -- The resulting circuit is the (reversed) past plus any leftover future.
    finalGates = (reverse $ past finalState) ++ (future finalState)
    finalOut   = outWires finalState
    inWires    = getAllWires gs
-- ----------------------------------------------------------------------
-- * Multi-pass optimization
-- | Auxiliary function. Simultaneously compute the maximum of the
-- lengths of two lists, and their point-wise equality.
is_equal_list :: Eq a => [a] -> [a] -> Int -> (Int,Bool)
is_equal_list xs ys acc =
  case (xs, ys) of
    -- Both lists exhausted together: equal, length is the accumulator.
    ([], []) -> (acc, True)
    -- Matching heads: keep walking, counting the common prefix.
    (x:xt, y:yt) | x == y -> is_equal_list xt yt (acc + 1)
    -- First mismatch (in value or in length): account for the longer tail.
    _ -> (acc + max (length xs) (length ys), False)
-- | Get the list of initialized wires from a circuit.
get_list_init :: [Gate] -> [Wire]
-- Collect, in order, the wire of every 'Init' gate; other gates are
-- skipped by the pattern in the comprehension.
get_list_init gs = [w | Init _ w <- gs]
-- | Do several passes of @'alg_simplify'@ until it reaches a fixed point.
simplRec' :: ([Gate],[Wire]) -> ([Gate],[Wire])
simplRec' (gs, out) = trace (show (gs, out)) $
  case is_equal_list gs gs' 0 of
    -- Fixed point reached: one pass changed nothing.
    (_, True)  -> (gs, out)
    -- Something changed: drop garbage gates and iterate.
    (n, False) -> trace (show n) simplRec' (suppressGarbageGates (gs', out'))
  where
    (gs', out') = alg_simplify (gs, out)
-- | Do several passes of @'alg_swap'@ followed with @'simplRec''@
-- until it reaches a fixed point.
simplRec :: ([Gate],[Wire]) -> ([Gate],[Wire])
simplRec (gs, out)
  -- Fixed point: the swap+simplify pass left the gate list unchanged.
  | same      = (gs', out')
  | otherwise = trace "Swapping!" $ simplRec $ (gs', out')
  where
    (gs', out') = simplRec' $ alg_swap (gs, out)
    (_, same)   = is_equal_list gs gs' 0
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/QuipperLib/ClassicalOptim/Simplification.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
# LANGUAGE BangPatterns #
| This module contains the core of the classical circuit
optimization algorithm.
set-monad-0.1.0.0
containers-0.5.2.1
----------------------------------------------------------------------
* Auxiliary definitions
| Internal definition of a trace, for debugging purposes. This is a
and the new wire ID.
| Flip the control on the given wire (from positive to negative or
vice versa).
| Change a wire ID in a gate and flip the potential control.
----------------------------------------------------------------------
* Small, simple optimizations
| Suppress gates acting on garbage wires, i.e., wires that are not in the input set.
| Like 'suppress_garbage', but packaged in a manner that is friendly for composition.
----------------------------------------------------------------------
* Compression of wire numbering
$ As the optimization process goes on, many /init/ gates will end
up being discarded. The function 'compressWires' compacts the wire
numbering scheme to make a smaller circuit.
| Get the set of all wires used by the circuit.
| Get the set of wires initialized by the circuit.
| Get the set of input wires, i.e., the ones that are used but not initialized.
| Compress the wire numbering.
----------------------------------------------------------------------
* A useful data structure
$ When considering a particular point in a circuit (i.e., in a list
of gates), to decide whether a given wire is used or controlled
before or after, we keep a data-structure 'UsedWire'.
ID of the last gate using the wire.
| Get the pair corresponding to the given wire.
| Get the last gate using the wire in the past. Return 0 if none.
| 'nextUsedGate' /ws/ /g/ /g/' /w/: Look for the next gate in /ws/
| For each wire, find the set of gates placing a control on it.
| For each wire, find the set of gates acting on it with NOT.
----------------------------------------------------------------------
* Algebraic optimization method
$ To each wire in a circuit, we attach a set of formulas. At each
iteration, the wire that gets modified is updated with its new
value, using all the possible values, possibly together with a
fresh variable. At each iteration, we also strip away the
expressions that get too large. Here, the size of an algebraic
| Calculate the size of an algebraic expression.
| Given a list of sets of expressions, form the conjunction of
> exp_list_and [{a,b}, {c,d}, {e,f}] =
| Evaluate a control with respect to a state.
| Evaluate a gate with respect to a state.
----------------------------------------------------------------------
** State of the optimization automaton
| The state of the automaton. This contains in particular the
current state, the past and future gates, and a fresh variable.
^ For use with 'stepSwapCirc'.
^ All the wires in the circuit.
^ Location of the controls.
^ Location of the NOT gates.
^ Gates left to explore.
^ Gates already explored.
^ Algebraic state of the wires. Also contains the size of the expression, so we don't have to recompute it each time.
^ The next fresh wire.
^ The output wires.
^ Size of the circuit.
allWiresInCirc e `Seq.deepseq` gateId e `Seq.deepseq` usedControlWires e `Seq.deepseq` usedNotWires e `Seq.deepseq` future e `Seq.deepseq` past e `Seq.deepseq` expMap e `Seq.deepseq` freshVar e `Seq.deepseq` outWires e
| The initial state for a given set of parameters.
----------------------------------------------------------------------
** The state monad
----------------------------------------------------------------------
** Low-level access functions
| Retrieve the state.
| Set the state.
----------------------------------------------------------------------
** Higher-level access functions
| Create a fresh variable
| Pull a new gate to be analyzed out of the future.
| Modify the future gates.
| Store a gate in the past.
| Increase the '@gateId@' (i.e., go forward).
| Get the set of all wires.
| Set the set of all wires.
| Remove a gate from the set of all wires.
| Get the algebraic representation of the set of wires.
| Set the algebraic representation of the state of wires.
| Update the database recording the controlled wires.
| Update the database recording the NOT gates.
| Update the list of output wires.
| Add a gate ID to the list of gates to skip.
| Send a gate to the end of the future.
| Place a gate at the given gate ID in the future.
----------------------------------------------------------------------
** Auxiliary functions
| @pairEqualExp m1 m2 ws@: returns a list of pairs of wires @(x,y)@
such that @m2 x = m1 x = m1 y@.
| From a set of expressions (annotated with sizes), prune the ones
whose size is larger than /n/.
----------------------------------------------------------------------
** The algebraic optimization automaton
for:
* gates having no effect;
* orphan NOT-gates (i.e. NOT gates negating an out-wire) ;
* simple copy-cats (both positive and negative) ;
* hidden copy-cats.
Return 'False' when the end of the circuit is reached, 'True' otherwise.
analyze the gate
store a placeholder for the gate
we could also clean expMap from the reference to w but I think it makes no gain
update future
There is no "simple" copy-cat...
Let's try to find a hidden one.
This helper function take a wire and look in
the past for the closest cnot acting on it
there is no previous not
there is one not... let's check that it is a cnot
Helper acting on controls: only return
something if it is a single control.
monad Maybe
monad Maybe
else return ()
make sure w2 and w3 are distinct
check for the property w1 == w2 oplus w3
we have a hidden cnot candidate. Great.
w2 is the wire that is not used with NOT in future
No hidden Cnot, let's proceed...
---------------
Case of simple copy-cats
Set a flip on the w' wire
replace g with NoOp so that gateId stays accurate
| Shuffle the circuit by sending the CNOT gates as far as
possible (i.e., until they hit a control, or to the end).
Return 'False' when the end of the circuit is reached, 'True' otherwise.
(s `Seq.deepseq` (setExpState s))
&& (id <= (sizeCirc s) )
----------- there is something to move!
shiftGate g (id - 1 - (gateId s))
Set g in position (id - 1)
Make sure we skip (id - 1) later on.
----------- nothing to move...
| A more elementary version of @'stepSwapCirc'@: shuffle the
circuit by sending to the end all the NOT gates that can be sent
there. Return 'False' when the end of the circuit is reached,
'True' otherwise.
do not store gate, but increase gateId
----------------------------------------------------------------------
** Some wrappers
| Run the monad until 'False' occurs.
| Strip the 'NoOp' gates from a list of gates.
| Wrapper around 'stepEvalCirc'.
| Wrapper around 'stepSwapCirc'.
| Wrapper around 'stepSwapCirc_simple'.
----------------------------------------------------------------------
* Multi-pass optimization
| Auxiliary function. Simultaneously compute the maximum of the
| Get the list of initialized wires from a circuit.
| Do several passes of @'alg_simplify'@ until it reaches a fixed point.
until it reaches a fixed point. | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
# LANGUAGE MonadComprehensions #
module QuipperLib.ClassicalOptim.Simplification where
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.IntSet as IS
import qualified Control.DeepSeq as Seq
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
import qualified Libraries.Auxiliary as Q
import QuipperLib.ClassicalOptim.Circuit
import QuipperLib.ClassicalOptim.AlgExp
no - op , but can be replaced to turn on debugging .
trace :: String -> b -> b
trace a b = b
| Change a wire ID in a gate . The first two arguments are the old
moveWire :: Wire -> Wire -> Gate -> Gate
moveWire from to NoOp = NoOp
moveWire from to (Init b w) = if (w == from) then error "moveWire" else (Init b w)
moveWire from to (Cnot w ctls) = Cnot w' ctls'
where
w' = if (from == w) then to else w
ctls' = map moveCtls ctls
moveCtls (w,b) = if (from == w) then (to,b) else (w,b)
flipCtl :: Wire -> Gate -> Gate
flipCtl _ NoOp = NoOp
flipCtl _ (Init b w) = Init b w
flipCtl w (Cnot w' ctls) = Cnot w' $ map (\(x,b) -> if (x == w) then (x,not b) else (x,b)) ctls
moveWireFlip :: Wire -> Wire -> Gate -> Gate
moveWireFlip from to NoOp = NoOp
moveWireFlip from to (Init b w) = if (w == from) then error "moveWire" else (Init b w)
moveWireFlip from to (Cnot w ctls) = Cnot w' ctls'
where
w' = if (from == w) then to else w
ctls' = map moveCtls ctls
moveCtls (w,b) = if (from == w) then (to,b) else if (to == w) then (w,not b) else (w,b)
suppress_garbage :: [Gate] -> IS.IntSet -> [Gate]
suppress_garbage ((Cnot w ctls):gs) used =
if (IS.member w used) then g:gs1 else gs2
where
g = Cnot w ctls
gs1 = suppress_garbage gs $ IS.union (IS.insert w used) $ IS.fromList $ L.map fst ctls
gs2 = suppress_garbage gs used
suppress_garbage (g:gs) used = g:(suppress_garbage gs used)
suppress_garbage [] _ = []
suppressGarbageGates :: ([Gate],[Wire]) -> ([Gate],[Wire])
suppressGarbageGates (gs,out) = (reverse $ suppress_garbage (reverse gs) $ IS.fromList out, out)
getAllWires :: [Gate] -> IS.IntSet
getAllWires gs = L.foldl' IS.union IS.empty $ L.map aux gs
where
aux (Cnot w ctls) = IS.insert w $ L.foldl' (flip IS.insert) IS.empty $ L.map fst ctls
aux (Init _ w) = IS.singleton w
aux NoOp = IS.empty
getInitWires :: [Gate] -> IS.IntSet
getInitWires gs = L.foldl' IS.union IS.empty $ map aux gs
where
aux (Cnot _ _) = IS.empty
aux (Init _ w) = IS.singleton w
aux NoOp = IS.empty
getInputWires :: [Gate] -> IS.IntSet
getInputWires gs = IS.difference (getAllWires gs) (getInitWires gs)
compressWires :: [Wire] -> ([Gate],[Wire]) -> ([Gate],[Wire])
compressWires inputwires (gs,output) = (gs',out')
where
iws = getInitWires gs
begin = if inputwires == []
then 0
else 1 + (head $ reverse $ L.sort inputwires)
end = begin + (IS.size iws)
listmap = zip ([0..begin-1] ++ (IS.toAscList iws)) [0 .. end]
remap = M.fromList $ trace (show listmap) listmap
out' = map (remap M.!) output
gs' = map (rewire remap) gs
rewire m (Cnot w ctls) = Cnot (m M.! w) $ map (\(x,b) -> (m M.! x, b)) ctls
rewire m (Init b w) = Init b (m M.! w)
rewire m NoOp = NoOp
| The type of gate ID 's .
type GateId = Int
| A set of gate ID 's .
type GateIdSet = IS.IntSet
| A map from wires to pairs of ' 's . The left member gives the
ID of the first gate using the wire , and the right member gives the
type UsedWire = IM.IntMap GateIdSet
| Get the minimum of a set of gate ID 's .
gateIdFindMin :: GateIdSet -> Maybe GateId
gateIdFindMin g = if (IS.null g) then Nothing else Just (IS.findMin g)
| Get the maximum of a set of gate ID 's .
gateIdFindMax :: GateIdSet -> Maybe GateId
gateIdFindMax g = if (IS.null g) then Nothing else Just (IS.findMax g)
pairUsedWire :: UsedWire -> Wire -> GateIdSet
pairUsedWire m w = IM.findWithDefault IS.empty w m
| Get the first gate using the wire in the future .
firstUsedWire :: UsedWire -> Wire -> Maybe GateId
firstUsedWire = curry $ gateIdFindMin . (uncurry pairUsedWire)
lastUsedWire :: UsedWire -> Wire -> GateId
lastUsedWire w w'=
case (curry $ gateIdFindMax . (uncurry pairUsedWire)) w w' of
Just w -> w
Nothing -> 0
corresponding to wire /w/ , starting from /g/. Return /g/ ' if none .
nextUsedGate :: UsedWire -> GateId -> GateId -> Wire -> GateId
nextUsedGate ws g g' w =
case (do gs <- IM.lookup w ws; IS.lookupGT g gs) of
Just g -> g
Nothing -> g'
circuitControlWires :: GateId -> [Gate] -> UsedWire
circuitControlWires id gs = aux id IM.empty gs
where
aux _ m [] = m
aux g m (Init _ _:gs) = aux (g+1) m gs
aux g m ((Cnot _ ctls):gs) = aux (g+1) m' gs
where
wires = map fst ctls
m' = L.foldl (\m'' w -> IM.alter (f g) w m'') m wires
f g Nothing = Just $ IS.singleton g
f g (Just s) = Just $ IS.insert g s
aux g m (NoOp:_) = error "circuitControlWires cannot deal with NoOp"
circuitNotWires :: GateId -> [Gate] -> UsedWire
circuitNotWires id gs = aux id IM.empty gs
where
aux _ m [] = m
aux g m (Init _ _:gs) = aux (g+1) m gs
aux g m ((Cnot w _):gs) = aux (g+1) m' gs
where
m' = IM.alter (f g) w m
f g Nothing = Just $ IS.singleton g
f g (Just s) = Just $ IS.insert g s
aux g m (_:gs) = aux (g+1) m gs
expression is measured by the ' ' function .
exp_length :: Exp -> Int
exp_length e = L.foldl' (+) 0 $ L.map (\x -> let y = IS.size x in seq y y) $ S.toList e
every possible choice of one expression from each set . For example .
> [ , a∧c∧f , a∧d∧e , a∧d∧f , , b∧c∧f , b∧d∧e , b∧d∧f ] .
exp_list_and :: [S.Set Exp] -> S.Set Exp
exp_list_and [] = S.singleton exp_true
exp_list_and [l] = l
exp_list_and (h:k:t) = exp_list_and ([exp_and x y | x <- h, y <- k]:t)
expEvalCtl :: (IM.IntMap (S.Set (Exp,Int))) -> (Wire,Bool) -> S.Set Exp
expEvalCtl m (w,True) = S.map fst (m IM.! w)
expEvalCtl m (w,False) = S.map exp_not $ S.map fst $ (IM.!) m w
expEvalGate :: (IM.IntMap (S.Set (Exp,Int))) -> Gate -> IM.IntMap (S.Set (Exp,Int))
expEvalGate m (Init False w) = IM.insert w (S.singleton (exp_false,0)) m
expEvalGate m (Init True w) = IM.insert w (S.singleton (exp_true,1)) m
expEvalGate m NoOp = m
expEvalGate m (Cnot w ctls) = IM.insert w cnot m
where
ands = exp_list_and $ L.map (expEvalCtl m) ctls
cnot = S.map (\x -> (x,exp_length x)) [exp_xor x y |
x <- S.map fst $ (IM.!) m w,
y <- ands ]
data ExpState = ExpState {
^ ID of the first gate in the future ( starts at 1 ) .
}
instance Seq.NFData Gate where
rnf (Init a b) = a `seq` b `seq` ()
rnf (Cnot w ctls) = ctls `Seq.deepseq` w `Seq.deepseq` ()
rnf NoOp = ()
instance Seq . NFData ExpState where
rnf e = { -allWiresInCirc e ` Seq.deepseq ` gateId e ` Seq.deepseq ` usedControlWires e ` Seq.deepseq ` usedNotWires e ` Seq.deepseq ` future e ` Seq.deepseq ` past e ` Seq.deepseq ` expMap e ` Seq.deepseq ` freshVar e ` Seq.deepseq ` outWires e
instance Seq.NFData ExpState where
-}
initExpState :: IS.IntSet -> [Wire] -> [Gate] -> ExpState
initExpState ws_in ws_out gs = ExpState {
gates_to_skip = IM.empty,
allWiresInCirc = getAllWires gs,
gateId = 1,
usedControlWires = circuitControlWires 1 gs,
usedNotWires = circuitNotWires 1 gs,
future = gs,
past = [],
expMap = IM.fromList $ L.map (\x -> (x, S.singleton (exp_var x, 1))) $ IS.toAscList ws_in,
freshVar = fromIntegral $ (+) 1 $ IS.findMax ws_in,
outWires = ws_out,
sizeCirc = length gs
}
| The state monad corresponding to ' ExpState ' .
data EvalCirc a = EvalCirc (ExpState -> (ExpState, a))
instance Monad EvalCirc where
return x = EvalCirc (\y -> (y,x))
(>>=) (EvalCirc c) f = EvalCirc (\s -> let (s',x) = c s in
let (EvalCirc c') = f x in
c' s')
instance Applicative EvalCirc where
pure = return
(<*>) = ap
instance Functor EvalCirc where
fmap = liftM
| Construct an @'ExpState'@ out of an @'EvalCirc'@.
runEvalCirc :: IS.IntSet -> [Wire] -> [Gate] -> EvalCirc a -> ExpState
runEvalCirc ws_in ws_out gs (EvalCirc e) = fst $ e $ initExpState ws_in ws_out gs
getExpState :: EvalCirc ExpState
getExpState = EvalCirc (\s -> (s,s))
setExpState :: ExpState -> EvalCirc ()
setExpState s = EvalCirc (\_ -> (s,()))
newFreshVar :: EvalCirc Integer
newFreshVar = do
s <- getExpState
let v = freshVar s
setExpState (s { freshVar = v + 1 })
return v
pullNewGate :: EvalCirc (Maybe Gate)
pullNewGate = do
s <- getExpState
case (future s) of
(h:t) -> do setExpState (s { future = t } )
return (Just h)
[] -> return Nothing
changeFuture :: [Gate] -> EvalCirc ()
changeFuture gs = do
s <- getExpState
setExpState (s { future = gs } )
return ()
| Update the future using the given parameter function . Return two sets
of ' gateId 's that got modified : the first set concerns the controls ,
the second set the NOT gates .
updateFuture :: (Gate -> Gate) -> EvalCirc (IS.IntSet,IS.IntSet)
updateFuture f = do
s <- getExpState
let ((_,!gsModifCtls,!gsModifNots),new_future) =
L.mapAccumL (\(gid,gs,gs') g -> let g' = f g in
((
gid+1
,
if (ctlsOfGate g == ctlsOfGate g')
then gs
else IS.insert gid gs
,
if (wireOfGate g == wireOfGate g')
then gs'
else IS.insert gid gs'
),
g'))
(1 + (gateId s), IS.empty,IS.empty) (future s)
changeFuture new_future
return (gsModifCtls,gsModifNots)
storeOldGate :: Gate -> EvalCirc ()
storeOldGate g = do
s <- getExpState
let p = past s
seq g $ seq p $ setExpState (s { past = g:p } )
return ()
incrGateId :: EvalCirc ()
incrGateId = do
s <- getExpState
setExpState (s { gateId = 1 + (gateId s) } )
return ()
getAllWiresInCirc :: EvalCirc IS.IntSet
getAllWiresInCirc = do
s <- getExpState
return (allWiresInCirc s)
setAllWiresInCirc :: IS.IntSet -> EvalCirc ()
setAllWiresInCirc ws = do
s <- getExpState
ws `seq` setExpState (s {allWiresInCirc = ws})
return ()
removeFromAllWiresInCirc :: Int -> EvalCirc ()
removeFromAllWiresInCirc w = do
ws <- getAllWiresInCirc
setAllWiresInCirc $ IS.delete w ws
return ()
getExpMap :: EvalCirc (IM.IntMap (S.Set (Exp,Int)))
getExpMap = do
s <- getExpState
s `seq` return (expMap s)
setExpMap :: (IM.IntMap (S.Set (Exp,Int))) -> EvalCirc ()
setExpMap m = do
s <- getExpState
m `seq` setExpState (s { expMap = m } )
return ()
updateUsedControlWires :: (UsedWire -> UsedWire) -> EvalCirc ()
updateUsedControlWires f = do
s <- getExpState
let c = f $ usedControlWires s
c `seq` setExpState (s { usedControlWires = c } )
return ()
updateUsedNotWires :: (UsedWire -> UsedWire) -> EvalCirc ()
updateUsedNotWires f = do
s <- getExpState
let c = f $ usedNotWires s
c `seq` setExpState (s { usedNotWires = c } )
return ()
updateOutWires :: ([Wire] -> [Wire]) -> EvalCirc ()
updateOutWires f = do
s <- getExpState
let c = f $ outWires s
c `seq` setExpState (s { outWires = c } )
return ()
addToSkipGates :: GateId -> Gate -> EvalCirc ()
addToSkipGates id g = do
s <- getExpState
let c = IM.insert id g (gates_to_skip s)
c `seq` setExpState (s {gates_to_skip = c} )
return ()
sendEndOfTime :: Gate -> EvalCirc ()
sendEndOfTime g = do
s <- getExpState
changeFuture ((future s) ++ [g])
return ()
shiftGate :: Gate -> GateId -> EvalCirc ()
shiftGate g x = do
s <- getExpState
let (!head, !tail) = splitAt x (future s)
let z = head ++ [g] ++ tail
z `Seq.deepseq` changeFuture z
return ()
pairEqualExp :: (IM.IntMap [Exp]) -> (IM.IntMap [Exp]) -> [Wire] -> [(Wire,Wire)]
pairEqualExp m1 m2 ws =
L.map fst $ L.filter aux $ L.zip pair_ws (L.map value pair_ws)
where
all_pairs l = [(x,y) | x <- l, y <- l]
pair_ws = all_pairs ws
value (x,y) = (m2 IM.! x, m1 IM.! x, m1 IM.! y)
aux ((_,_),(a,b,c)) = a == b && b == c
pruneListExp :: Int -> S.Set (Exp,Int) -> S.Set (Exp,Int)
pruneListExp n l = S.filter (\x -> snd x <= n) l
| Perform a set of filters acting on one gate at a time , looking
stepEvalCirc :: EvalCirc Bool
stepEvalCirc = do
m_before <- getExpMap
trace ("the state of the system is " ++ (show $ m_before)) $ return ()
s <- getExpState
if ((gateId s) `mod` 1000 == 0) then trace ("Timestamp... " ++ (show (gateId s))) (return ()) else return ()
s <- getExpState
trace ("outside wires " ++ (show $ outWires s)) $ return ()
maybe_g <- pullNewGate
trace ("pulled new gate " ++ (show maybe_g)) $ return ()
s <- getExpState
case maybe_g of
Nothing -> return False
m_before <- getExpMap
let m_after = expEvalGate m_before g
case g of
NoOp -> error "stepEvalCirc cannot deal with NoOp"
Init b w | not ((IM.member w $ usedNotWires s) || (IM.member w $ usedControlWires s) || L.elem w (outWires s))-> do
trace "got an orphan init, removing it" $ return ()
incrGateId
removeFromAllWiresInCirc w
return True
Init _ _ -> do
trace "got a regular init" $ return ()
storeOldGate g
setExpMap m_after
incrGateId
return True
Cnot w _ | not $ S.null $ S.intersection (m_before IM.! w) (m_after IM.! w) -> do
trace "got a cnot where no change happened..." $ return ()
trace (show m_before) $ return ()
trace (show m_after) $ return ()
storeOldGate NoOp
incrGateId
return True
Cnot w [] | not (L.elem w $ outWires s) -> do
trace "got a not-gate that can be removed..." $ return ()
s <- getExpState
changeFuture $ L.map (flipCtl w) $ future s
s <- getExpState
trace (show $ future s) $ return ()
storeOldGate NoOp
incrGateId
return True
Cnot w ctls | otherwise -> do
trace "got a general cnot" $ return ()
trace ("state after the gate is " ++ (show m_after)) $ return ()
allWs <- getAllWiresInCirc
s <- getExpState
let my_elem x = not (L.elem x $ outWires s)
let all_ws = IS.toAscList $ IS.filter future_ctl $
not ( L.elem x $ outWires s ) ) $
IS.filter (\x -> not $ S.null $
S.intersection (m_after IM.! x)
(m_after IM.! w)) $
IS.fromList $ L.map fst ctls
where
future_ctl x =
(lastUsedWire (usedNotWires s) x) <= gateId s
&&
(lastUsedWire (usedNotWires s) w) <= gateId s
let all_ws_neg = IS.toAscList $ IS.filter future_ctl $
IS.filter (\x -> not (L.elem x $ outWires s)) $
IS.filter (\x -> not $ S.null $
S.intersection (m_after IM.! x)
(S.map (\(e,i) -> (exp_not e, i)) (m_after IM.! w))) $
IS.filter (w /=) $ IS.fromList $ L.map fst ctls
where
future_ctl x =
(lastUsedWire (usedNotWires s) x) <= gateId s
&&
(lastUsedWire (usedNotWires s) w) <= gateId s
trace ("List of outside wires: " ++ (show $ outWires s)) (return ())
trace ("List of available wires: " ++ (show all_ws)) (return ())
trace ("List of available wires with neg: " ++ (show all_ws_neg)) (return ())
case all_ws of
[] -> do
case all_ws_neg of
[] -> do
s <- getExpState
let getOlderCnot w = case (do set <- IM.lookup w (usedNotWires s); IS.lookupLT (gateId s) set) of
case ((past s) !! ((gateId s) - g' - 1)) of
Cnot _ [ctl] -> Just (g',ctl)
_ -> Nothing
other_ctl <- getOlderCnot w1
other_ctl `seq` return ((w,b),other_ctl)
getOlderCnot_actOnCtls _ _ = Nothing
if ( L.elem w $ outWires s ) then Nothing
((w2,b2),(g',(w3,b3))) <- getOlderCnot_actOnCtls w1 ctls
if (w2 == w3) then Nothing else return ()
let m = m_after
if (S.null $ S.intersection
[exp_xor x y | (x,_) <- m IM.! w2, (y,_) <- m IM.! w3]
[x | (x,_) <- m IM.! w1])
then Nothing
We have two CNOT candidates for hidden copy - cat .
else if ((not (L.elem w2 $ outWires s))
&&
(lastUsedWire (usedNotWires s) w2) <= gateId s
&&
(lastUsedWire (usedControlWires s) w2) <= gateId s)
then Just ((w2,b2),(w3,b3))
else if ((not (L.elem w3 $ outWires s))
&&
(lastUsedWire (usedNotWires s) w3) <= g'
&&
(lastUsedWire (usedControlWires s) w3) <= g')
then Just ((w3,b3),(w2,b2))
else Nothing
case retrieveHiddenCnot w ctls of
do
trace "found one hidden copy-cat" $ return ()
updateOutWires $ map (\x -> if x == w then w2 else x)
(gsModifCtls,gsModifNots) <- updateFuture $ moveWire w w2
trace ("moving " ++ (show w) ++ " to " ++ (show w2)) $ return ()
trace (show gsModifCtls) $ return ()
trace (show gsModifNots) $ return ()
s <- getExpState
trace ("before: usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls) w2 $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedControlWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w2 c
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (gateId s) gs
Nothing -> Just $ IS.singleton (gateId s)) w3 c
updateUsedNotWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifNots
Nothing -> Just gsModifNots) w2 $
IM.update (\gs -> Just $ IS.difference gs gsModifNots) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w $
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (gateId s) gs
Nothing -> Just $ IS.singleton (gateId s)) w2 c
s <- getExpState
trace ("after: usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
Update ExpMap
setExpMap $ IM.insert w (m_before IM.! w) $
IM.insert w2 (m_after IM.! w) m_after
storeOldGate $ Cnot w2 [(w3,True)]
incrGateId
return True
do
let mw = m_after IM.! w
f <- if ((S.foldl' (\a (_,i) -> min a i) 3 mw) <= 1)
then return id
else do
v <- newFreshVar
return (S.insert (exp_var $ fromIntegral v, 1))
setExpMap $ IM.adjust (\a -> pruneListExp 3 a) w $
IM.adjust f w m_after
storeOldGate g
incrGateId
return True
(w':_) -> do
s <- getExpState
updateOutWires $ map (\x -> if x == w then w' else x)
s <- getExpState
trace (show $ future s) $ return ()
(gsModifCtls,_) <- updateFuture $ moveWireFlip w w'
update expMap : now , w is null and w ' is not(old w )
expMap <- getExpMap
setExpMap $ IM.insert w (m_before IM.! w) $
IM.insert w' (S.map (\(e,i) -> (exp_not e,i)) (expMap IM.! w')) expMap
trace ("moving " ++ (show w) ++ " to " ++ (show w')) $ return ()
trace (show gsModifCtls) $ return ()
s <- getExpState
trace (show $ future s) $ return ()
s <- getExpState
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls) w' $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w c
incrGateId
return True
(w':_) -> do
s <- getExpState
updateOutWires $ map (\x -> if x == w then w' else x)
s <- getExpState
trace (show $ future s) $ return ()
trace ("usedNotWire = " ++ (show $ usedNotWires s)) $ return ()
(gsModifCtls,_) <- updateFuture $ moveWire w w'
trace ("moving " ++ (show w) ++ " to " ++ (show w')) $ return ()
trace (show gsModifCtls) $ return ()
s <- getExpState
trace (show $ future s) $ return ()
s <- getExpState
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.union gs gsModifCtls
Nothing -> Just gsModifCtls
) w' $
IM.update (\gs -> Just $ IS.difference gs gsModifCtls) w c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w c
incrGateId
return True
stepSwapCirc :: EvalCirc Bool
stepSwapCirc = do
s <- getExpState
case (IM.lookup (gateId s) (gates_to_skip s)) of
Just g -> do
storeOldGate g
incrGateId
return True
Nothing -> do
maybe_g <- pullNewGate
trace ("pulled new gate " ++ (show maybe_g)) $ return ()
s <- getExpState
case maybe_g of
Nothing -> return False
got a CNOT
trace ("got a cnot to analyze " ++ (show $ gateId s) ++ " " ++ (show $ gates_to_skip s)) $ return ()
let id = min (nextUsedGate (usedNotWires s) (gateId s) (1 + sizeCirc s) w2) $
(nextUsedGate (usedControlWires s) (gateId s) (1 + sizeCirc s) w1)
trace ("found id = " ++ (show id)) $ return ()
trace ("can be shifted to " ++ (show (id - 1))) $ return ()
addToSkipGates (id - 1) g
s <- getExpState
trace (show $ future s) $ return ()
Remove references to ( gateId s )
updateUsedControlWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w2 c
updateUsedNotWires $ \c ->
IM.update (\gs -> Just $ IS.delete (gateId s) gs) w1 c
Shift the ones between ( gateId s ) and i d
updateUsedNotWires $
IM.map $ IS.map $ \x -> if (x <= gateId s) || (x >= id) then x
else x - 1
updateUsedControlWires $
IM.map $ IS.map $ \x -> if (x <= gateId s) || (x >= id) then x
else x - 1
s <- getExpState
let z = IM.mapKeys (\x -> if (x <= gateId s) || (x >= id) then x
else x - 1) (gates_to_skip s) in
z `seq` setExpState (s { gates_to_skip = z} )
updateUsedControlWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (id - 1) gs
Nothing -> Just $ IS.singleton (id - 1)) w2 c
updateUsedNotWires $ \c ->
IM.alter (\maybe_gs -> case maybe_gs of
Just gs -> Just $ IS.insert (id - 1) gs
Nothing -> Just $ IS.singleton (id - 1)) w1 c
trace "cannot be shifted" $ return ()
storeOldGate g
incrGateId
return True
Just g -> do
trace ("got a random " ++ (show g)) $ return ()
storeOldGate g
incrGateId
return True
stepSwapCirc_simple :: EvalCirc Bool
stepSwapCirc_simple = do
maybe_g <- pullNewGate
trace ("pulled new gate " ++ (show maybe_g)) $ return ()
s <- getExpState
case maybe_g of
Nothing -> return False
Just g | (gateId s) == (length $ past s) + (length $ future s) -> do
storeOldGate g
return False
Just g@(Cnot w1 [(w2,b2)]) |
(lastUsedWire (usedNotWires s) w2) <= gateId s &&
(lastUsedWire (usedNotWires s) w1) <= gateId s &&
got a CNOT
trace "got a cnot that can be sent to the end" $ return ()
sendEndOfTime g
incrGateId
return True
Just g -> do
storeOldGate g
incrGateId
return True
-- | Repeatedly execute the action @c@, re-running it for as long as the
--   predicate @f@ holds of the result it produces.
runWhile :: Monad m => (a -> Bool) -> m a -> m ()
runWhile f c = loop
  where
    loop = do
      r <- c
      if f r
        then loop
        else return ()
-- | Drop every 'NoOp' placeholder gate from a gate list.
stripNoOp :: [Gate] -> [Gate]
stripNoOp gs = [g | g <- gs, g /= NoOp]
-- | Run the gate-by-gate simplification pass over a circuit.  The circuit
--   is evaluated with 'stepEvalCirc' until it reports no further progress,
--   after which the surviving gates (with 'NoOp's stripped) and the final
--   output wires are reassembled.
alg_simplify :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_simplify (gs, out) = (stripNoOp allGates, outWires finalState)
  where
    allGates   = reverse (past finalState) ++ future finalState
    inputWires = getAllWires gs
    finalState =
      runEvalCirc inputWires out gs $
        trace "Starting new circuit!" (runWhile id stepEvalCirc)
-- | Run the gate-swapping pass over a circuit.  Identical in shape to
--   'alg_simplify' but drives the evaluation with 'stepSwapCirc'.
alg_swap :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_swap (gs, out) = (stripNoOp allGates, outWires finalState)
  where
    allGates   = reverse (past finalState) ++ future finalState
    inputWires = getAllWires gs
    finalState =
      runEvalCirc inputWires out gs $
        trace "Starting new circuit!" (runWhile id stepSwapCirc)
-- | Run the simplified gate-swapping pass over a circuit.  Identical in
--   shape to 'alg_swap' but drives the evaluation with
--   'stepSwapCirc_simple'.
alg_swap_simple :: ([Gate],[Wire]) -> ([Gate],[Wire])
alg_swap_simple (gs, out) = (stripNoOp allGates, outWires finalState)
  where
    allGates   = reverse (past finalState) ++ future finalState
    inputWires = getAllWires gs
    finalState =
      runEvalCirc inputWires out gs $
        trace "Starting new circuit!" (runWhile id stepSwapCirc_simple)
-- | Compute the lengths of two lists, and their point-wise equality.
-- | Walk two lists in parallel, counting elements of the common prefix on
--   top of the running accumulator @n@.  Returns @(n + length, True)@ when
--   the lists are point-wise equal, and @(n + max remaining lengths, False)@
--   at the first position where they differ (or where one list runs out).
is_equal_list :: Eq a => [a] -> [a] -> Int -> (Int,Bool)
is_equal_list xs ys n =
  case (xs, ys) of
    ([], [])                   -> (n, True)
    (x : xs', y : ys') | x == y -> is_equal_list xs' ys' (n + 1)
    _                          -> (n + max (length xs) (length ys), False)
-- | Collect, in order, the wire of every 'Init' gate appearing in the list.
get_list_init :: [Gate] -> [Wire]
get_list_init gs = [w | Init _ w <- gs]
-- | Iterate 'alg_simplify' (interleaved with 'suppressGarbageGates') until
--   the gate list reaches a fixed point, i.e. a pass leaves it unchanged.
simplRec' :: ([Gate],[Wire]) -> ([Gate],[Wire])
simplRec' (l, output) =
  trace (show (l, output)) $
    let (l', output') = alg_simplify (l, output)
        (n, b)        = is_equal_list l l' 0
     in if b
          then (l, output)
          else trace (show n) simplRec' (suppressGarbageGates (l', output'))
-- | Do several passes of @'alg_swap'@ followed with @'simplRec''@.
-- | Alternate a swapping pass ('alg_swap') with full simplification
--   ('simplRec'') until the circuit stops changing.
simplRec :: ([Gate],[Wire]) -> ([Gate],[Wire])
simplRec (l1, o1)
  | converged = (l3, o3)
  | otherwise = trace "Swapping!" (simplRec (l3, o3))
  where
    (l3, o3) = simplRec' (alg_swap (l1, o1))
    (_, converged) = is_equal_list l1 l3 0
|
2961164b8a0d0e066dce3381b7717335155e1c9e9fa25965c714a71cd8d9f320 | dmitryvk/sbcl-win32-threads | vm.lisp | ;;;; miscellaneous VM definition noise for the Alpha
;;;; This software is part of the SBCL system. See the README file for
;;;; more information.
;;;;
;;;; This software is derived from the CMU CL system, which was
;;;; written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!VM")
;;;; defining the registers
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *register-names* (make-array 32 :initial-element nil)))
(macrolet ((defreg (name offset)
(let ((offset-sym (symbolicate name "-OFFSET")))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(def!constant ,offset-sym ,offset)
(setf (svref *register-names* ,offset-sym)
,(symbol-name name)))))
(defregset (name &rest regs)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter ,name
(list ,@(mapcar (lambda (name)
(symbolicate name "-OFFSET"))
regs))))))
c.f . / runtime / alpha - lispregs.h
;; Ra
(defreg lip 0)
Caller saved 0 - 7
(defreg a0 1)
(defreg a1 2)
(defreg a2 3)
(defreg a3 4)
(defreg a4 5)
(defreg a5 6)
(defreg l0 7)
(defreg nargs 8)
saved 0 - 6
(defreg csp 9)
(defreg cfp 10)
(defreg ocfp 11)
(defreg bsp 12)
(defreg lexenv 13)
(defreg code 14)
(defreg null 15)
;; Arg 0-5
(defreg nl0 16)
(defreg nl1 17)
(defreg nl2 18)
(defreg nl3 19)
(defreg nl4 20)
(defreg nl5 21)
Caller saved 8 - 11
(defreg alloc 22)
(defreg fdefn 23)
(defreg cfunc 24)
(defreg nfp 25)
;; Ra
(defreg lra 26)
Caller saved 12
(defreg l1 27)
Assembler temp ( at )
(defreg l2 28)
;; Global pointer (gp)
(defreg gp 29)
Stack pointer
(defreg nsp 30)
Wired zero
(defreg zero 31)
(defregset non-descriptor-regs
nl0 nl1 nl2 nl3 nl4 nl5 nfp cfunc)
(defregset descriptor-regs
fdefn lexenv nargs ocfp lra a0 a1 a2 a3 a4 a5 l0 l1 l2)
(defregset *register-arg-offsets*
a0 a1 a2 a3 a4 a5)
(defparameter register-arg-names '(a0 a1 a2 a3 a4 a5)))
(define-storage-base registers :finite :size 32)
(define-storage-base float-registers :finite :size 64)
(define-storage-base control-stack :unbounded :size 8)
(define-storage-base non-descriptor-stack :unbounded :size 0)
(define-storage-base constant :non-packed)
(define-storage-base immediate-constant :non-packed)
;;; a handy macro so we don't have to keep changing all the numbers
;;; whenever we insert a new storage class.
(defmacro !define-storage-classes (&rest classes)
(do ((forms (list 'progn)
(let* ((class (car classes))
(sc-name (car class))
(constant-name (intern (concatenate 'simple-string
(string sc-name)
"-SC-NUMBER"))))
(list* `(define-storage-class ,sc-name ,index
,@(cdr class))
`(def!constant ,constant-name ,index)
( The CMU CL version of this macro did
;; `(EXPORT ',CONSTANT-NAME)
here , but in SBCL we try to have package
structure described statically in one
;; master source file, instead of building it
;; dynamically by letting all the system code
;; modify it as the system boots.)
forms)))
(index 0 (1+ index))
(classes classes (cdr classes)))
((null classes)
(nreverse forms))))
(def!constant kludge-nondeterministic-catch-block-size 6)
(!define-storage-classes
;; non-immediate constants in the constant pool
(constant constant)
;; ZERO and NULL are in registers.
(zero immediate-constant)
(null immediate-constant)
(fp-single-zero immediate-constant)
(fp-double-zero immediate-constant)
;; Anything else that can be an immediate.
(immediate immediate-constant)
;; **** The stacks.
The control stack . ( Scanned by GC )
(control-stack control-stack)
We put ANY - REG and DESCRIPTOR - REG early so that their SC - NUMBER
;; is small and therefore the error trap information is smaller.
;; Moving them up here from their previous place down below saves
~250 K in core file size . --njf , 2006 - 01 - 27
Immediate descriptor objects . Do n't have to be seen by GC , but nothing
;; bad will happen if they are. (fixnums, characters, header values, etc).
(any-reg
registers
:locations #.(append non-descriptor-regs descriptor-regs)
:constant-scs (zero immediate)
:save-p t
:alternate-scs (control-stack))
Pointer descriptor objects . Must be seen by GC .
(descriptor-reg registers
:locations #.descriptor-regs
:constant-scs (constant null immediate)
:save-p t
:alternate-scs (control-stack))
;; The non-descriptor stacks.
(signed-stack non-descriptor-stack
:element-size 2 :alignment 2) ; (signed-byte 64)
(unsigned-stack non-descriptor-stack
:element-size 2 :alignment 2) ; (unsigned-byte 64)
(character-stack non-descriptor-stack) ; non-descriptor characters.
(sap-stack non-descriptor-stack
:element-size 2 :alignment 2) ; System area pointers.
(single-stack non-descriptor-stack) ; single-floats
(double-stack non-descriptor-stack
:element-size 2 :alignment 2) ; double floats.
(complex-single-stack non-descriptor-stack :element-size 2)
(complex-double-stack non-descriptor-stack :element-size 4 :alignment 2)
;; **** Things that can go in the integer registers.
;; Non-Descriptor characters
(character-reg registers
:locations #.non-descriptor-regs
:constant-scs (immediate)
:save-p t
:alternate-scs (character-stack))
Non - Descriptor SAP 's ( arbitrary pointers into address space )
(sap-reg registers
:locations #.non-descriptor-regs
:constant-scs (immediate)
:save-p t
:alternate-scs (sap-stack))
Non - Descriptor ( signed or unsigned ) numbers .
(signed-reg registers
:locations #.non-descriptor-regs
:constant-scs (zero immediate)
:save-p t
:alternate-scs (signed-stack))
(unsigned-reg registers
:locations #.non-descriptor-regs
:constant-scs (zero immediate)
:save-p t
:alternate-scs (unsigned-stack))
Random objects that must not be seen by GC . Used only as temporaries .
(non-descriptor-reg registers
:locations #.non-descriptor-regs)
Pointers to the interior of objects . Used only as an temporary .
(interior-reg registers
:locations (#.lip-offset))
;; **** Things that can go in the floating point registers.
Non - Descriptor single - floats .
(single-reg float-registers
:locations #.(loop for i from 4 to 30 collect i)
:constant-scs (fp-single-zero)
:save-p t
:alternate-scs (single-stack))
Non - Descriptor double - floats .
(double-reg float-registers
:locations #.(loop for i from 4 to 30 collect i)
:constant-scs (fp-double-zero)
:save-p t
:alternate-scs (double-stack))
(complex-single-reg float-registers
:locations #.(loop for i from 4 to 28 by 2 collect i)
:element-size 2
:constant-scs ()
:save-p t
:alternate-scs (complex-single-stack))
(complex-double-reg float-registers
:locations #.(loop for i from 4 to 28 by 2 collect i)
:element-size 2
:constant-scs ()
:save-p t
:alternate-scs (complex-double-stack))
;; A catch or unwind block.
(catch-block control-stack
:element-size kludge-nondeterministic-catch-block-size))
;;; Make some random tns for important registers.
;;; Define a wired TN named FOO-TN for each register the compiler needs to
;;; reference directly.  DEFREGTN pairs a register's FOO-OFFSET constant
;;; (defined above) with the given storage class.
(macrolet ((defregtn (name sc)
(let ((offset-sym (symbolicate name "-OFFSET"))
(tn-sym (symbolicate name "-TN")))
`(defparameter ,tn-sym
(make-random-tn :kind :normal
:sc (sc-or-lose ',sc)
:offset ,offset-sym)))))
;; These, we access by foo-TN only
(defregtn zero any-reg)
(defregtn null descriptor-reg)
(defregtn code descriptor-reg)
(defregtn alloc any-reg)
(defregtn bsp any-reg)
(defregtn csp any-reg)
(defregtn cfp any-reg)
(defregtn nsp any-reg)
;; These alias regular locations, so we have to make sure we don't bypass
;; the register allocator when using them.
(defregtn nargs any-reg)
(defregtn ocfp any-reg)
(defregtn lip interior-reg))
;; and some floating point values..
;;; Wired TN for the single-float zero constant, at float register 31.
(defparameter fp-single-zero-tn
(make-random-tn :kind :normal
:sc (sc-or-lose 'single-reg)
:offset 31))
;;; Wired TN for the double-float zero constant, at float register 31.
(defparameter fp-double-zero-tn
(make-random-tn :kind :normal
:sc (sc-or-lose 'double-reg)
:offset 31))
;;; If value can be represented as an immediate constant, then return
the appropriate SC number , otherwise return NIL .
;;; Return the storage-class number to use when VALUE can be represented
;;; as an immediate constant, or NIL when it cannot (and so must come
;;; from the constant pool instead).
(!def-vm-support-routine immediate-constant-sc (value)
(typecase value
;; the integer 0 maps to the ZERO storage class
((integer 0 0)
(sc-number-or-lose 'zero))
;; NIL maps to the NULL storage class
(null
(sc-number-or-lose 'null ))
;; fixnums and characters can be encoded as immediates
((or (integer #.sb!xc:most-negative-fixnum #.sb!xc:most-positive-fixnum)
character)
(sc-number-or-lose 'immediate ))
;; symbols are immediates only when they are statically allocated
(symbol
(if (static-symbol-p value)
(sc-number-or-lose 'immediate )
nil))
;; the floating-point zeros map to their dedicated storage classes
(single-float
(if (eql value 0f0)
(sc-number-or-lose 'fp-single-zero )
nil))
(double-float
(if (eql value 0d0)
(sc-number-or-lose 'fp-double-zero )
nil))))
;;;; function call parameters
the SC numbers for register and stack arguments / return values
(def!constant register-arg-scn (meta-sc-number-or-lose 'descriptor-reg))
(def!constant immediate-arg-scn (meta-sc-number-or-lose 'any-reg))
(def!constant control-stack-arg-scn (meta-sc-number-or-lose 'control-stack))
(eval-when (:compile-toplevel :load-toplevel :execute)
;;; offsets of special stack frame locations
(def!constant ocfp-save-offset 0)
(def!constant lra-save-offset 1)
(def!constant nfp-save-offset 2)
;;; the number of arguments/return values passed in registers
(def!constant register-arg-count 6)
;;; (Names to use for the argument registers would go here, but there
;;; are none.)
EVAL - WHEN
a list of TN 's describing the register arguments
(defparameter *register-arg-tns*
(mapcar (lambda (n)
(make-random-tn :kind :normal
:sc (sc-or-lose 'descriptor-reg)
:offset n))
*register-arg-offsets*))
;;; This is used by the debugger.
(def!constant single-value-return-byte-offset 4)
;;; This function is called by debug output routines that want a
pretty name for a TN 's location . It returns a thing that can be
printed with PRINC .
(!def-vm-support-routine location-print-name (tn)
( declare ( type ) )
(let ((sb (sb-name (sc-sb (tn-sc tn))))
(offset (tn-offset tn)))
(ecase sb
(registers (or (svref *register-names* offset)
(format nil "R~D" offset)))
(float-registers (format nil "F~D" offset))
(control-stack (format nil "CS~D" offset))
(non-descriptor-stack (format nil "NS~D" offset))
(constant (format nil "Const~D" offset))
(immediate-constant "Immed"))))
;;; Always answer :DEFAULT (no special-cased implementation) for any
;;; combination node on this backend; the node itself is ignored.
(!def-vm-support-routine combination-implementation-style (node)
(declare (type sb!c::combination node) (ignore node))
(values :default nil))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/compiler/alpha/vm.lisp | lisp | miscellaneous VM definition noise for the Alpha
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
defining the registers
Ra
Arg 0-5
Ra
Global pointer (gp)
a handy macro so we don't have to keep changing all the numbers
whenever we insert a new storage class.
`(EXPORT ',CONSTANT-NAME)
master source file, instead of building it
dynamically by letting all the system code
modify it as the system boots.)
non-immediate constants in the constant pool
ZERO and NULL are in registers.
Anything else that can be an immediate.
**** The stacks.
is small and therefore the error trap information is smaller.
Moving them up here from their previous place down below saves
bad will happen if they are. (fixnums, characters, header values, etc).
The non-descriptor stacks.
(signed-byte 64)
(unsigned-byte 64)
non-descriptor characters.
System area pointers.
single-floats
double floats.
**** Things that can go in the integer registers.
Non-Descriptor characters
**** Things that can go in the floating point registers.
A catch or unwind block.
Make some random tns for important registers.
These, we access by foo-TN only
These alias regular locations, so we have to make sure we don't bypass
the register allocator when using them.
and some floating point values..
If value can be represented as an immediate constant, then return
function call parameters
offsets of special stack frame locations
the number of arguments/return values passed in registers
(Names to use for the argument registers would go here, but there
are none.)
This is used by the debugger.
This function is called by debug output routines that want a |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!VM")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *register-names* (make-array 32 :initial-element nil)))
(macrolet ((defreg (name offset)
(let ((offset-sym (symbolicate name "-OFFSET")))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(def!constant ,offset-sym ,offset)
(setf (svref *register-names* ,offset-sym)
,(symbol-name name)))))
(defregset (name &rest regs)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter ,name
(list ,@(mapcar (lambda (name)
(symbolicate name "-OFFSET"))
regs))))))
c.f . / runtime / alpha - lispregs.h
(defreg lip 0)
Caller saved 0 - 7
(defreg a0 1)
(defreg a1 2)
(defreg a2 3)
(defreg a3 4)
(defreg a4 5)
(defreg a5 6)
(defreg l0 7)
(defreg nargs 8)
saved 0 - 6
(defreg csp 9)
(defreg cfp 10)
(defreg ocfp 11)
(defreg bsp 12)
(defreg lexenv 13)
(defreg code 14)
(defreg null 15)
(defreg nl0 16)
(defreg nl1 17)
(defreg nl2 18)
(defreg nl3 19)
(defreg nl4 20)
(defreg nl5 21)
Caller saved 8 - 11
(defreg alloc 22)
(defreg fdefn 23)
(defreg cfunc 24)
(defreg nfp 25)
(defreg lra 26)
Caller saved 12
(defreg l1 27)
Assembler temp ( at )
(defreg l2 28)
(defreg gp 29)
Stack pointer
(defreg nsp 30)
Wired zero
(defreg zero 31)
(defregset non-descriptor-regs
nl0 nl1 nl2 nl3 nl4 nl5 nfp cfunc)
(defregset descriptor-regs
fdefn lexenv nargs ocfp lra a0 a1 a2 a3 a4 a5 l0 l1 l2)
(defregset *register-arg-offsets*
a0 a1 a2 a3 a4 a5)
(defparameter register-arg-names '(a0 a1 a2 a3 a4 a5)))
(define-storage-base registers :finite :size 32)
(define-storage-base float-registers :finite :size 64)
(define-storage-base control-stack :unbounded :size 8)
(define-storage-base non-descriptor-stack :unbounded :size 0)
(define-storage-base constant :non-packed)
(define-storage-base immediate-constant :non-packed)
(defmacro !define-storage-classes (&rest classes)
(do ((forms (list 'progn)
(let* ((class (car classes))
(sc-name (car class))
(constant-name (intern (concatenate 'simple-string
(string sc-name)
"-SC-NUMBER"))))
(list* `(define-storage-class ,sc-name ,index
,@(cdr class))
`(def!constant ,constant-name ,index)
( The CMU CL version of this macro did
here , but in SBCL we try to have package
structure described statically in one
forms)))
(index 0 (1+ index))
(classes classes (cdr classes)))
((null classes)
(nreverse forms))))
(def!constant kludge-nondeterministic-catch-block-size 6)
(!define-storage-classes
(constant constant)
(zero immediate-constant)
(null immediate-constant)
(fp-single-zero immediate-constant)
(fp-double-zero immediate-constant)
(immediate immediate-constant)
The control stack . ( Scanned by GC )
(control-stack control-stack)
We put ANY - REG and DESCRIPTOR - REG early so that their SC - NUMBER
~250 K in core file size . --njf , 2006 - 01 - 27
Immediate descriptor objects . Do n't have to be seen by GC , but nothing
(any-reg
registers
:locations #.(append non-descriptor-regs descriptor-regs)
:constant-scs (zero immediate)
:save-p t
:alternate-scs (control-stack))
Pointer descriptor objects . Must be seen by GC .
(descriptor-reg registers
:locations #.descriptor-regs
:constant-scs (constant null immediate)
:save-p t
:alternate-scs (control-stack))
(signed-stack non-descriptor-stack
(unsigned-stack non-descriptor-stack
(sap-stack non-descriptor-stack
(double-stack non-descriptor-stack
(complex-single-stack non-descriptor-stack :element-size 2)
(complex-double-stack non-descriptor-stack :element-size 4 :alignment 2)
(character-reg registers
:locations #.non-descriptor-regs
:constant-scs (immediate)
:save-p t
:alternate-scs (character-stack))
Non - Descriptor SAP 's ( arbitrary pointers into address space )
(sap-reg registers
:locations #.non-descriptor-regs
:constant-scs (immediate)
:save-p t
:alternate-scs (sap-stack))
Non - Descriptor ( signed or unsigned ) numbers .
(signed-reg registers
:locations #.non-descriptor-regs
:constant-scs (zero immediate)
:save-p t
:alternate-scs (signed-stack))
(unsigned-reg registers
:locations #.non-descriptor-regs
:constant-scs (zero immediate)
:save-p t
:alternate-scs (unsigned-stack))
Random objects that must not be seen by GC . Used only as temporaries .
(non-descriptor-reg registers
:locations #.non-descriptor-regs)
Pointers to the interior of objects . Used only as an temporary .
(interior-reg registers
:locations (#.lip-offset))
Non - Descriptor single - floats .
(single-reg float-registers
:locations #.(loop for i from 4 to 30 collect i)
:constant-scs (fp-single-zero)
:save-p t
:alternate-scs (single-stack))
Non - Descriptor double - floats .
(double-reg float-registers
:locations #.(loop for i from 4 to 30 collect i)
:constant-scs (fp-double-zero)
:save-p t
:alternate-scs (double-stack))
(complex-single-reg float-registers
:locations #.(loop for i from 4 to 28 by 2 collect i)
:element-size 2
:constant-scs ()
:save-p t
:alternate-scs (complex-single-stack))
(complex-double-reg float-registers
:locations #.(loop for i from 4 to 28 by 2 collect i)
:element-size 2
:constant-scs ()
:save-p t
:alternate-scs (complex-double-stack))
(catch-block control-stack
:element-size kludge-nondeterministic-catch-block-size))
(macrolet ((defregtn (name sc)
(let ((offset-sym (symbolicate name "-OFFSET"))
(tn-sym (symbolicate name "-TN")))
`(defparameter ,tn-sym
(make-random-tn :kind :normal
:sc (sc-or-lose ',sc)
:offset ,offset-sym)))))
(defregtn zero any-reg)
(defregtn null descriptor-reg)
(defregtn code descriptor-reg)
(defregtn alloc any-reg)
(defregtn bsp any-reg)
(defregtn csp any-reg)
(defregtn cfp any-reg)
(defregtn nsp any-reg)
(defregtn nargs any-reg)
(defregtn ocfp any-reg)
(defregtn lip interior-reg))
(defparameter fp-single-zero-tn
(make-random-tn :kind :normal
:sc (sc-or-lose 'single-reg)
:offset 31))
(defparameter fp-double-zero-tn
(make-random-tn :kind :normal
:sc (sc-or-lose 'double-reg)
:offset 31))
the appropriate SC number , otherwise return NIL .
(!def-vm-support-routine immediate-constant-sc (value)
(typecase value
((integer 0 0)
(sc-number-or-lose 'zero))
(null
(sc-number-or-lose 'null ))
((or (integer #.sb!xc:most-negative-fixnum #.sb!xc:most-positive-fixnum)
character)
(sc-number-or-lose 'immediate ))
(symbol
(if (static-symbol-p value)
(sc-number-or-lose 'immediate )
nil))
(single-float
(if (eql value 0f0)
(sc-number-or-lose 'fp-single-zero )
nil))
(double-float
(if (eql value 0d0)
(sc-number-or-lose 'fp-double-zero )
nil))))
the SC numbers for register and stack arguments / return values
(def!constant register-arg-scn (meta-sc-number-or-lose 'descriptor-reg))
(def!constant immediate-arg-scn (meta-sc-number-or-lose 'any-reg))
(def!constant control-stack-arg-scn (meta-sc-number-or-lose 'control-stack))
(eval-when (:compile-toplevel :load-toplevel :execute)
(def!constant ocfp-save-offset 0)
(def!constant lra-save-offset 1)
(def!constant nfp-save-offset 2)
(def!constant register-arg-count 6)
EVAL - WHEN
a list of TN 's describing the register arguments
(defparameter *register-arg-tns*
(mapcar (lambda (n)
(make-random-tn :kind :normal
:sc (sc-or-lose 'descriptor-reg)
:offset n))
*register-arg-offsets*))
(def!constant single-value-return-byte-offset 4)
pretty name for a TN 's location . It returns a thing that can be
printed with PRINC .
(!def-vm-support-routine location-print-name (tn)
( declare ( type ) )
(let ((sb (sb-name (sc-sb (tn-sc tn))))
(offset (tn-offset tn)))
(ecase sb
(registers (or (svref *register-names* offset)
(format nil "R~D" offset)))
(float-registers (format nil "F~D" offset))
(control-stack (format nil "CS~D" offset))
(non-descriptor-stack (format nil "NS~D" offset))
(constant (format nil "Const~D" offset))
(immediate-constant "Immed"))))
(!def-vm-support-routine combination-implementation-style (node)
(declare (type sb!c::combination node) (ignore node))
(values :default nil))
|
4e128e1b8bd140a96336cad046d471419f3de743f71eb596247f15d93dbf0885 | Minoru/hakyll-convert | Blogger.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE StandaloneDeriving #
module Spec.Blogger (tests) where
import qualified Data.Text as T
import Hakyll.Convert.Blogger
import Hakyll.Convert.Common (DistilledPost (..))
import Spec.SpecHelpers
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.ExpectedFailure (expectFail)
import Test.Tasty.HUnit
import qualified Text.Atom.Feed as Atom
-- 'DistilledPost' ships without Eq/Show instances; derive them here so the
-- test assertions below can compare and print posts (needs StandaloneDeriving).
deriving instance Eq DistilledPost
deriving instance Show DistilledPost
-- | Top-level test tree for 'Hakyll.Convert.Blogger.distill'.
tests :: TestTree
tests =
  testGroup
    "Blogger.distill"
    [ extractsPostUri,
      extractsPostBody,
      extractsPostTitle,
      canSkipComments,
      canExtractComments,
      enumeratesAllCommentAuthors,
      errorsOnNonHtmlPost,
      errorsOnNonHtmlComment,
      turnsIncorrectDatesIntoEpochStart,
      parsesDates,
      extractsPostTags
    ]
-- | 'distill' must propagate the feed item's URI into 'dpUri' verbatim.
extractsPostUri :: TestTree
extractsPostUri =
  testGroup "extracts post's URI" (map checkUri sampleUris)
  where
    sampleUris =
      [ "-post-uris",
        "/~joe/posts.atom"
      ]
    -- One test case per sample URI, labelled with the URI itself.
    checkUri uri =
      testCase (T.unpack uri) (dpUri (distill False (postWithUri uri)) @?= uri)
    -- Minimal post carrying the URI under test and no comments.
    postWithUri uri =
      FullPost
        { fpPost =
            Atom.nullEntry
              ""
              (Atom.TextString "Test post")
              "2003-12-13T18:30:02Z",
          fpComments = [],
          fpUri = uri
        }
-- | 'distill' must copy the entry's HTML content into 'dpBody' unchanged.
extractsPostBody :: TestTree
extractsPostBody =
  testGroup "extracts post's body" (map checkBody sampleBodies)
  where
    sampleBodies =
      [ "<p>Today was a snowy day, and I decided to...</p>",
        "<h3>My opinion on current affairs</h3><p>So you see, I...</p>"
      ]
    -- One test case per sample body, labelled with the body itself.
    checkBody body =
      testCase (T.unpack body) (dpBody (distill False (postWithBody body)) @?= body)
    -- Minimal post whose entry carries the HTML body under test.
    postWithBody body =
      FullPost
        { fpPost =
            ( Atom.nullEntry
                ""
                (Atom.TextString "Test post")
                "2003-12-13T18:30:02Z"
            )
              { Atom.entryContent = Just (Atom.HTMLContent body)
              },
          fpComments = [],
          fpUri = ""
        }
-- | 'distill' must expose the entry's title as @Just@ in 'dpTitle'.
extractsPostTitle :: TestTree
extractsPostTitle =
  testGroup "extracts post's title" (map checkTitle sampleTitles)
  where
    sampleTitles =
      [ "First post",
        "You won't believe what happened to me today",
        "Trying out <i>things</i>…"
      ]
    -- One test case per sample title, labelled with the title itself.
    checkTitle title =
      testCase
        (T.unpack title)
        (dpTitle (distill False (postWithTitle title)) @?= Just title)
    -- Minimal post whose entry carries the title under test.
    postWithTitle title =
      FullPost
        { fpPost =
            Atom.nullEntry
              ""
              (Atom.TextString title)
              "2003-12-13T18:30:02Z",
          fpComments = [],
          fpUri = ""
        }
-- | With the first argument set to 'False', 'distill' must leave the post
-- body as-is and ignore the attached comment entirely.
canSkipComments :: TestTree
canSkipComments =
  testCase
    "does not extract comments if first argument is False"
    (dpBody (distill False input) @?= expected)
  where
    -- A post with exactly one comment attached.
    input =
      FullPost
        { fpPost = entry,
          fpComments = [comment],
          fpUri = ""
        }
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
          Atom.entryPublished = Just "2003-12-13T18:30:02Z"
        }
    comment =
      ( Atom.nullEntry
          "#comment1"
          (Atom.TextString "Nice")
          "2003-12-13T20:00:03Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>")
        }
    -- Only the original post body; the comment must not appear.
    expected = "<p>Hello, world!</p>"
-- | With the first argument set to 'True', 'distill' must append the comment
-- to the post body, rendering the comment's "published" date and author when
-- present and falling back to "unknown date" / an empty author otherwise.
-- The four sub-cases cover every date/author combination.
canExtractComments :: TestTree
canExtractComments =
  testGroup
    "extracts comments if first argument is True"
    [ noDateNoAuthor,
      dateNoAuthor,
      noDateAuthor,
      dateAuthor
    ]
  where
    -- Post under test: one body paragraph plus the given single comment.
    createInput comment =
      FullPost
        { fpPost = entry,
          fpComments = [comment],
          fpUri = ""
        }
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
          Atom.entryPublished = Just "2003-12-13T18:30:02Z"
        }
    -- No "published" date and no author on the comment.
    noDateNoAuthor =
      testCase
        "comments with no \"published\" date and no author"
        (dpBody (distill True (createInput commentNoDateNoAuthor)) @?= expectedNoDateNoAuthor)
    -- Base comment fixture; the other cases extend it via record update.
    commentNoDateNoAuthor =
      ( Atom.nullEntry
          "#comment1"
          (Atom.TextString "Nice")
          "2003-12-13T20:00:03Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>")
        }
    expectedNoDateNoAuthor =
      "<p>Hello, world!</p>\n\n\
      \<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
      \<div class='hakyll-convert-comment'>\n\
      \<p class='hakyll-convert-comment-date'>On unknown date, wrote:</p>\n\
      \<div class='hakyll-convert-comment-body'>\n\
      \<p>Nice post.</p>\n\
      \</div>\n\
      \</div>"
    -- "published" date present, author absent.
    dateNoAuthor =
      testCase
        "comments with a \"published\" date but no author"
        (dpBody (distill True (createInput commentDateNoAuthor)) @?= expectedDateNoAuthor)
    commentDateNoAuthor =
      commentNoDateNoAuthor
        { Atom.entryPublished = Just "2019-01-02T03:04:05Z"
        }
    expectedDateNoAuthor =
      "<p>Hello, world!</p>\n\n\
      \<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
      \<div class='hakyll-convert-comment'>\n\
      \<p class='hakyll-convert-comment-date'>On 2019-01-02T03:04:05Z, wrote:</p>\n\
      \<div class='hakyll-convert-comment-body'>\n\
      \<p>Nice post.</p>\n\
      \</div>\n\
      \</div>"
    -- Author present, "published" date absent.
    noDateAuthor =
      testCase
        "comments with no \"published\" date but with an author"
        (dpBody (distill True (createInput commentNoDateAuthor)) @?= expectedNoDateAuthor)
    commentNoDateAuthor =
      commentNoDateNoAuthor
        { Atom.entryAuthors = [Atom.nullPerson {Atom.personName = "John Doe"}]
        }
    expectedNoDateAuthor =
      "<p>Hello, world!</p>\n\n\
      \<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
      \<div class='hakyll-convert-comment'>\n\
      \<p class='hakyll-convert-comment-date'>On unknown date, John Doe wrote:</p>\n\
      \<div class='hakyll-convert-comment-body'>\n\
      \<p>Nice post.</p>\n\
      \</div>\n\
      \</div>"
    -- Both "published" date and author present.
    dateAuthor =
      testCase
        "comments with a \"published\" date and an author"
        (dpBody (distill True (createInput commentDateAuthor)) @?= expectedDateAuthor)
    commentDateAuthor =
      commentNoDateNoAuthor
        { Atom.entryPublished = Just "2019-01-02T03:04:05Z",
          Atom.entryAuthors = [Atom.nullPerson {Atom.personName = "John Doe"}]
        }
    expectedDateAuthor =
      "<p>Hello, world!</p>\n\n\
      \<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
      \<div class='hakyll-convert-comment'>\n\
      \<p class='hakyll-convert-comment-date'>On 2019-01-02T03:04:05Z, John Doe wrote:</p>\n\
      \<div class='hakyll-convert-comment-body'>\n\
      \<p>Nice post.</p>\n\
      \</div>\n\
      \</div>"
-- | A comment with several authors must list all of them in the rendered
-- comment header (here: "First Author Second Author", space-separated).
enumeratesAllCommentAuthors :: TestTree
enumeratesAllCommentAuthors =
  testCase
    "enumerates all authors of a multi-author comment"
    (dpBody (distill True input) @?= expected)
  where
    input =
      FullPost
        { fpPost = entry,
          fpComments = [comment],
          fpUri = ""
        }
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
          Atom.entryPublished = Just "2003-12-13T18:30:02Z"
        }
    -- Two authors, no "published" date (so the header says "unknown date").
    comment =
      ( Atom.nullEntry
          "#comment1"
          (Atom.TextString "Nice")
          "2103-05-11T18:37:49Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>"),
          Atom.entryAuthors =
            [ Atom.nullPerson {Atom.personName = "First Author"},
              Atom.nullPerson {Atom.personName = "Second Author"}
            ]
        }
    expected =
      "<p>Hello, world!</p>\n\n\
      \<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
      \<div class='hakyll-convert-comment'>\n\
      \<p class='hakyll-convert-comment-date'>On unknown date, First Author Second Author wrote:</p>\n\
      \<div class='hakyll-convert-comment-body'>\n\
      \<p>Nice post.</p>\n\
      \</div>\n\
      \</div>"
-- | Throwaway expected value for the @expectFail@ tests below; those tests
-- are expected to `error` before the comparison against this value runs.
-- The date matches the "2003-12-13T18:30:02Z" timestamp used by the fixtures.
nullDistilledPost :: DistilledPost
nullDistilledPost =
  DistilledPost
    { dpUri = "",
      dpBody = "",
      dpTitle = Nothing,
      dpTags = [],
      dpCategories = [],
      dpDate = fromGregorian 2003 12 13 18 30 2
    }
-- | 'distill' is expected to `error` when the post's content is not HTML
-- (here: 'Atom.TextContent'); @expectFail@ marks the failure as intended.
errorsOnNonHtmlPost :: TestTree
errorsOnNonHtmlPost =
  expectFail $
    testCase
      "`error`s if post has non-HTML body"
      (distill False input @?= nullDistilledPost)
  where
    input =
      FullPost
        { fpPost = entry,
          fpComments = [],
          fpUri = ""
        }
    -- Plain-text content instead of HTML triggers the failure.
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.TextContent "oops, this will fail")
        }
-- | 'distill' is also expected to `error` when a *comment* carries non-HTML
-- content; @expectFail@ marks the failure as intended.
--
-- NOTE(review): the first argument is 'False' here, so this relies on
-- comment bodies being inspected even when comments are skipped -- TODO
-- confirm against 'Hakyll.Convert.Blogger.distill'.
errorsOnNonHtmlComment :: TestTree
errorsOnNonHtmlComment =
  expectFail $
    testCase
      "`error`s if comment has non-HTML body"
      (distill False input @?= nullDistilledPost)
  where
    input =
      FullPost
        { fpPost = entry,
          fpComments = [comment],
          fpUri = ""
        }
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.TextContent "testing...")
        }
    -- Plain-text comment content triggers the failure.
    comment =
      ( Atom.nullEntry
          "#2"
          (Atom.TextString "test comment")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.TextContent "oops, this will fail")
        }
-- | Unparseable "published" dates must map to the Unix epoch start
-- (1970-01-01 00:00:00). The samples include near-misses of the supported
-- formats (wrong separators, missing seconds) as well as free-form text.
turnsIncorrectDatesIntoEpochStart :: TestTree
turnsIncorrectDatesIntoEpochStart =
  testGroup
    "turns incorrect \"published\" dates into Unix epoch start date"
    [ testCase (T.unpack date) (dpDate (distill False (createInput date)) @?= expected)
      | date <-
          [ "First of April",
            "2020.07.30",
            "2020.07.30 00:01",
            "2020-07-30 00:01",
            "2020-07-30T00:01",
            "2020-07-30T00:01Z",
            "Sun, 31st July, 2020"
          ]
    ]
  where
    createInput date =
      FullPost
        { fpPost = createEntry date,
          fpComments = [],
          fpUri = ""
        }
    createEntry date =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          date
      )
        { Atom.entryContent = Just (Atom.HTMLContent ""),
          Atom.entryPublished = Just date
        }
    expected = fromGregorian 1970 1 1 0 0 0
-- | Valid "published" dates must parse, with @+HHMM@/@-HHMM@ timezone
-- offsets normalized into UTC (the expected values below subtract/add the
-- offset accordingly).
parsesDates :: TestTree
parsesDates =
  testGroup
    "parses \"published\" dates"
    [ testCase (T.unpack dateStr) (dpDate (distill False (createInput dateStr)) @?= expected)
      | (dateStr, expected) <-
          [ ("2020-07-30T15:50:21Z", fromGregorian 2020 7 30 15 50 21),
            ("1015-02-18T01:04:13Z", fromGregorian 1015 2 18 1 4 13),
            ("2020-07-30T15:50:21+0000", fromGregorian 2020 7 30 15 50 21),
            ("1015-02-18T01:04:13+0000", fromGregorian 1015 2 18 1 4 13),
            ("1015-02-18T01:04:13+0001", fromGregorian 1015 2 18 1 (4 - 1) 13),
            ("1015-02-18T01:04:13-0001", fromGregorian 1015 2 18 1 (4 + 1) 13),
            ("1015-02-18T01:04:13+0100", fromGregorian 1015 2 18 (1 - 1) 4 13),
            ("1015-02-18T01:04:13-0100", fromGregorian 1015 2 18 (1 + 1) 4 13)
          ]
    ]
  where
    createInput date =
      FullPost
        { fpPost = createEntry date,
          fpComments = [],
          fpUri = ""
        }
    createEntry date =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          date
      )
        { Atom.entryContent = Just (Atom.HTMLContent ""),
          Atom.entryPublished = Just date
        }
-- | 'distill' must collect the entry's categories as tags while dropping the
-- category whose scheme marks it as Blogger's special "kind" category.
extractsPostTags :: TestTree
extractsPostTags =
  testCase "extracts post's tags" (dpTags (distill False input) @?= expected)
  where
    expected = ["first tag", "second tag", "third tag"]
    -- Carries the scheme that marks Blogger's internal "kind" category;
    -- it must not show up in the extracted tags.
    bloggerKindCategory =
      (Atom.newCategory "blogger category (should be ignored)")
        { Atom.catScheme = Just "#kind"
        }
    entry =
      ( Atom.nullEntry
          ""
          (Atom.TextString "First post")
          "2003-12-13T18:30:02Z"
      )
        { Atom.entryContent = Just (Atom.HTMLContent ""),
          Atom.entryCategories =
            [ Atom.newCategory "first tag",
              Atom.newCategory "second tag",
              Atom.newCategory "third tag",
              bloggerKindCategory
            ]
        }
    input =
      FullPost
        { fpPost = entry,
          fpComments = [],
          fpUri = ""
        }
| null | https://raw.githubusercontent.com/Minoru/hakyll-convert/aafe20ecbc4a0aec66092a890346ebb3850c74c0/test/spec/Spec/Blogger.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE StandaloneDeriving #
module Spec.Blogger (tests) where
import qualified Data.Text as T
import Hakyll.Convert.Blogger
import Hakyll.Convert.Common (DistilledPost (..))
import Spec.SpecHelpers
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.ExpectedFailure (expectFail)
import Test.Tasty.HUnit
import qualified Text.Atom.Feed as Atom
deriving instance Eq DistilledPost
deriving instance Show DistilledPost
tests :: TestTree
tests =
testGroup
"Blogger.distill"
[ extractsPostUri,
extractsPostBody,
extractsPostTitle,
canSkipComments,
canExtractComments,
enumeratesAllCommentAuthors,
errorsOnNonHtmlPost,
errorsOnNonHtmlComment,
turnsIncorrectDatesIntoEpochStart,
parsesDates,
extractsPostTags
]
extractsPostUri :: TestTree
extractsPostUri =
testGroup
"extracts post's URI"
[ testCase (T.unpack uri) (dpUri (distill False (createInput uri)) @?= uri)
| uri <-
[ "-post-uris",
"/~joe/posts.atom"
]
]
where
createInput uri =
FullPost
{ fpPost = entry,
fpComments = [],
fpUri = uri
}
entry =
Atom.nullEntry
""
(Atom.TextString "Test post")
"2003-12-13T18:30:02Z"
extractsPostBody :: TestTree
extractsPostBody =
testGroup
"extracts post's body"
[ testCase (T.unpack body) (dpBody (distill False (createInput body)) @?= body)
| body <-
[ "<p>Today was a snowy day, and I decided to...</p>",
"<h3>My opinion on current affairs</h3><p>So you see, I...</p>"
]
]
where
createInput body =
FullPost
{ fpPost = createEntry body,
fpComments = [],
fpUri = ""
}
createEntry body =
( Atom.nullEntry
""
(Atom.TextString "Test post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent body)
}
extractsPostTitle :: TestTree
extractsPostTitle =
testGroup
"extracts post's title"
[ testCase (T.unpack title) (dpTitle (distill False (createInput title)) @?= Just (title))
| title <-
[ "First post",
"You won't believe what happened to me today",
"Trying out <i>things</i>…"
]
]
where
createInput title =
FullPost
{ fpPost = createEntry title,
fpComments = [],
fpUri = ""
}
createEntry title =
Atom.nullEntry
""
(Atom.TextString title)
"2003-12-13T18:30:02Z"
canSkipComments :: TestTree
canSkipComments =
testCase
"does not extract comments if first argument is False"
(dpBody (distill False input) @?= expected)
where
input =
FullPost
{ fpPost = entry,
fpComments = [comment],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
Atom.entryPublished = Just "2003-12-13T18:30:02Z"
}
comment =
( Atom.nullEntry
"#comment1"
(Atom.TextString "Nice")
"2003-12-13T20:00:03Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>")
}
expected = "<p>Hello, world!</p>"
canExtractComments :: TestTree
canExtractComments =
testGroup
"extracts comments if first argument is True"
[ noDateNoAuthor,
dateNoAuthor,
noDateAuthor,
dateAuthor
]
where
createInput comment =
FullPost
{ fpPost = entry,
fpComments = [comment],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
Atom.entryPublished = Just "2003-12-13T18:30:02Z"
}
noDateNoAuthor =
testCase
"comments with no \"published\" date and no author"
(dpBody (distill True (createInput commentNoDateNoAuthor)) @?= expectedNoDateNoAuthor)
commentNoDateNoAuthor =
( Atom.nullEntry
"#comment1"
(Atom.TextString "Nice")
"2003-12-13T20:00:03Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>")
}
expectedNoDateNoAuthor =
"<p>Hello, world!</p>\n\n\
\<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
\<div class='hakyll-convert-comment'>\n\
\<p class='hakyll-convert-comment-date'>On unknown date, wrote:</p>\n\
\<div class='hakyll-convert-comment-body'>\n\
\<p>Nice post.</p>\n\
\</div>\n\
\</div>"
dateNoAuthor =
testCase
"comments with a \"published\" date but no author"
(dpBody (distill True (createInput commentDateNoAuthor)) @?= expectedDateNoAuthor)
commentDateNoAuthor =
commentNoDateNoAuthor
{ Atom.entryPublished = Just "2019-01-02T03:04:05Z"
}
expectedDateNoAuthor =
"<p>Hello, world!</p>\n\n\
\<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
\<div class='hakyll-convert-comment'>\n\
\<p class='hakyll-convert-comment-date'>On 2019-01-02T03:04:05Z, wrote:</p>\n\
\<div class='hakyll-convert-comment-body'>\n\
\<p>Nice post.</p>\n\
\</div>\n\
\</div>"
noDateAuthor =
testCase
"comments with no \"published\" date but with an author"
(dpBody (distill True (createInput commentNoDateAuthor)) @?= expectedNoDateAuthor)
commentNoDateAuthor =
commentNoDateNoAuthor
{ Atom.entryAuthors = [Atom.nullPerson {Atom.personName = "John Doe"}]
}
expectedNoDateAuthor =
"<p>Hello, world!</p>\n\n\
\<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
\<div class='hakyll-convert-comment'>\n\
\<p class='hakyll-convert-comment-date'>On unknown date, John Doe wrote:</p>\n\
\<div class='hakyll-convert-comment-body'>\n\
\<p>Nice post.</p>\n\
\</div>\n\
\</div>"
dateAuthor =
testCase
"comments with a \"published\" date and an author"
(dpBody (distill True (createInput commentDateAuthor)) @?= expectedDateAuthor)
commentDateAuthor =
commentNoDateNoAuthor
{ Atom.entryPublished = Just "2019-01-02T03:04:05Z",
Atom.entryAuthors = [Atom.nullPerson {Atom.personName = "John Doe"}]
}
expectedDateAuthor =
"<p>Hello, world!</p>\n\n\
\<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
\<div class='hakyll-convert-comment'>\n\
\<p class='hakyll-convert-comment-date'>On 2019-01-02T03:04:05Z, John Doe wrote:</p>\n\
\<div class='hakyll-convert-comment-body'>\n\
\<p>Nice post.</p>\n\
\</div>\n\
\</div>"
enumeratesAllCommentAuthors :: TestTree
enumeratesAllCommentAuthors =
testCase
"enumerates all authors of a multi-author comment"
(dpBody (distill True input) @?= expected)
where
input =
FullPost
{ fpPost = entry,
fpComments = [comment],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Hello, world!</p>"),
Atom.entryPublished = Just "2003-12-13T18:30:02Z"
}
comment =
( Atom.nullEntry
"#comment1"
(Atom.TextString "Nice")
"2103-05-11T18:37:49Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent "<p>Nice post.</p>"),
Atom.entryAuthors =
[ Atom.nullPerson {Atom.personName = "First Author"},
Atom.nullPerson {Atom.personName = "Second Author"}
]
}
expected =
"<p>Hello, world!</p>\n\n\
\<h3 id='hakyll-convert-comments-title'>Comments</h3>\n\
\<div class='hakyll-convert-comment'>\n\
\<p class='hakyll-convert-comment-date'>On unknown date, First Author Second Author wrote:</p>\n\
\<div class='hakyll-convert-comment-body'>\n\
\<p>Nice post.</p>\n\
\</div>\n\
\</div>"
nullDistilledPost :: DistilledPost
nullDistilledPost =
DistilledPost
{ dpUri = "",
dpBody = "",
dpTitle = Nothing,
dpTags = [],
dpCategories = [],
dpDate = fromGregorian 2003 12 13 18 30 2
}
errorsOnNonHtmlPost :: TestTree
errorsOnNonHtmlPost =
expectFail $
testCase
"`error`s if post has non-HTML body"
(distill False input @?= nullDistilledPost)
where
input =
FullPost
{ fpPost = entry,
fpComments = [],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.TextContent "oops, this will fail")
}
errorsOnNonHtmlComment :: TestTree
errorsOnNonHtmlComment =
expectFail $
testCase
"`error`s if comment has non-HTML body"
(distill False input @?= nullDistilledPost)
where
input =
FullPost
{ fpPost = entry,
fpComments = [comment],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.TextContent "testing...")
}
comment =
( Atom.nullEntry
"#2"
(Atom.TextString "test comment")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.TextContent "oops, this will fail")
}
turnsIncorrectDatesIntoEpochStart :: TestTree
turnsIncorrectDatesIntoEpochStart =
testGroup
"turns incorrect \"published\" dates into Unix epoch start date"
[ testCase (T.unpack date) (dpDate (distill False (createInput date)) @?= expected)
| date <-
[ "First of April",
"2020.07.30",
"2020.07.30 00:01",
"2020-07-30 00:01",
"2020-07-30T00:01",
"2020-07-30T00:01Z",
"Sun, 31st July, 2020"
]
]
where
createInput date =
FullPost
{ fpPost = createEntry date,
fpComments = [],
fpUri = ""
}
createEntry date =
( Atom.nullEntry
""
(Atom.TextString "First post")
date
)
{ Atom.entryContent = Just (Atom.HTMLContent ""),
Atom.entryPublished = Just date
}
expected = fromGregorian 1970 1 1 0 0 0
parsesDates :: TestTree
parsesDates =
testGroup
"parses \"published\" dates"
[ testCase (T.unpack dateStr) (dpDate (distill False (createInput dateStr)) @?= expected)
| (dateStr, expected) <-
[ ("2020-07-30T15:50:21Z", fromGregorian 2020 7 30 15 50 21),
("1015-02-18T01:04:13Z", fromGregorian 1015 2 18 1 4 13),
("2020-07-30T15:50:21+0000", fromGregorian 2020 7 30 15 50 21),
("1015-02-18T01:04:13+0000", fromGregorian 1015 2 18 1 4 13),
("1015-02-18T01:04:13+0001", fromGregorian 1015 2 18 1 (4 - 1) 13),
("1015-02-18T01:04:13-0001", fromGregorian 1015 2 18 1 (4 + 1) 13),
("1015-02-18T01:04:13+0100", fromGregorian 1015 2 18 (1 - 1) 4 13),
("1015-02-18T01:04:13-0100", fromGregorian 1015 2 18 (1 + 1) 4 13)
]
]
where
createInput date =
FullPost
{ fpPost = createEntry date,
fpComments = [],
fpUri = ""
}
createEntry date =
( Atom.nullEntry
""
(Atom.TextString "First post")
date
)
{ Atom.entryContent = Just (Atom.HTMLContent ""),
Atom.entryPublished = Just date
}
extractsPostTags :: TestTree
extractsPostTags =
testCase
"extracts post's tags"
(dpTags (distill False input) @?= expected)
where
input =
FullPost
{ fpPost = entry,
fpComments = [],
fpUri = ""
}
entry =
( Atom.nullEntry
""
(Atom.TextString "First post")
"2003-12-13T18:30:02Z"
)
{ Atom.entryContent = Just (Atom.HTMLContent ""),
Atom.entryCategories =
[ Atom.newCategory "first tag",
Atom.newCategory "second tag",
Atom.newCategory "third tag",
(Atom.newCategory "blogger category (should be ignored)")
{ Atom.catScheme = Just "#kind"
}
]
}
expected = ["first tag", "second tag", "third tag"]
|
e838d16d4ca1a866d5f11d632a77ac5a6d0515dd13758ac2149eaced831fbc1c | jobjo/popper | image.ml | module Image = struct
type color =
| White
| Black
| Transparent
type t =
{ width : int
; height : int
; get_pixel : x:int -> y:int -> color
}
let make ~width ~height get_pixel = { width; height; get_pixel }
let render { width; height; get_pixel } =
List.init height (fun y ->
List.init width (fun x ->
match get_pixel ~x ~y with
| White -> "w "
| Black -> "b "
| Transparent -> "- ")
|> String.concat "")
|> String.concat "\n"
let transpose { width; height; get_pixel } =
{ width = height
; height = width
; get_pixel = (fun ~x ~y -> get_pixel ~x:y ~y:x)
}
let invert { width; height; get_pixel } =
let get_pixel ~x ~y =
match get_pixel ~x ~y with
| White -> Black
| Black -> White
| Transparent -> White
in
{ width; height; get_pixel }
let next
{ width = w1; height = h1; get_pixel = g1 }
{ width = w2; height = h2; get_pixel = g2 }
=
let width = w1 + w2 in
let height = max h1 h2 in
let get_pixel ~x ~y =
if x < w1 then
if y < h1 then
g1 ~x ~y
else
Transparent
else if y < h2 then
g2 ~x:(x - w1) ~y
else
Transparent
in
{ width; height; get_pixel }
let above i1 i2 = transpose @@ next (transpose i1) (transpose i2)
end
open Popper
open Sample.Syntax
(* Popper sampler for images: width and height drawn from
   [Sample.Int.range 0 10], and a sampled function from (x, y) coordinates
   to one of the three colors. *)
let sample_img =
  let* width = Sample.Int.range 0 10 in
  let* height = Sample.Int.range 0 10 in
  let* lookup =
    Sample.fn
      (Sample.one_value_of [ Image.Black; Image.White; Image.Transparent ])
  in
  let get_pixel ~x ~y = lookup (x, y) in
  Sample.return (Image.make ~width ~height get_pixel)
(* Property: rendering an image equals rendering it after two inversions.
   NOTE(review): [Image.invert] maps Transparent to White, so any sampled
   image containing a Transparent pixel is a counterexample -- this example
   presumably demonstrates Popper's failure reporting. *)
let test_invert_twice =
  test ~config:Config.(all [ num_samples 100; verbose ]) @@ fun () ->
  let* img = sample_img in
  equal
    Comparator.string
    (Image.render img)
    (Image.render @@ Image.invert @@ Image.invert img)
(* The test suite exported by this example module. *)
let suite = suite [ ("Invert twice", test_invert_twice) ]
| null | https://raw.githubusercontent.com/jobjo/popper/33da372946d1d842f75994e086fa81c8cf62986e/examples/image.ml | ocaml | module Image = struct
type color =
| White
| Black
| Transparent
type t =
{ width : int
; height : int
; get_pixel : x:int -> y:int -> color
}
let make ~width ~height get_pixel = { width; height; get_pixel }
let render { width; height; get_pixel } =
List.init height (fun y ->
List.init width (fun x ->
match get_pixel ~x ~y with
| White -> "w "
| Black -> "b "
| Transparent -> "- ")
|> String.concat "")
|> String.concat "\n"
let transpose { width; height; get_pixel } =
{ width = height
; height = width
; get_pixel = (fun ~x ~y -> get_pixel ~x:y ~y:x)
}
let invert { width; height; get_pixel } =
let get_pixel ~x ~y =
match get_pixel ~x ~y with
| White -> Black
| Black -> White
| Transparent -> White
in
{ width; height; get_pixel }
let next
{ width = w1; height = h1; get_pixel = g1 }
{ width = w2; height = h2; get_pixel = g2 }
=
let width = w1 + w2 in
let height = max h1 h2 in
let get_pixel ~x ~y =
if x < w1 then
if y < h1 then
g1 ~x ~y
else
Transparent
else if y < h2 then
g2 ~x:(x - w1) ~y
else
Transparent
in
{ width; height; get_pixel }
let above i1 i2 = transpose @@ next (transpose i1) (transpose i2)
end
open Popper
open Sample.Syntax
let sample_img =
let* width = Sample.Int.range 0 10 in
let* height = Sample.Int.range 0 10 in
let* lookup =
Sample.fn
(Sample.one_value_of [ Image.Black; Image.White; Image.Transparent ])
in
let get_pixel ~x ~y = lookup (x, y) in
Sample.return (Image.make ~width ~height get_pixel)
let test_invert_twice =
test ~config:Config.(all [ num_samples 100; verbose ]) @@ fun () ->
let* img = sample_img in
equal
Comparator.string
(Image.render img)
(Image.render @@ Image.invert @@ Image.invert img)
let suite = suite [ ("Invert twice", test_invert_twice) ]
| |
a24ad46057494dca2be60f2cef525a6c65ab4f818b14f054fa34a2adfe2fcb9f | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | GetAccountsAccount.hs | {-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE MultiWayIf #-}
-- CHANGE WITH CAUTION: This is a generated code file generated by Haskell-OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator.
{-# LANGUAGE OverloadedStrings #-}
-- | Contains the different functions to run the operation getAccountsAccount
module StripeAPI.Operations.GetAccountsAccount where
import qualified Control.Monad.Fail
import qualified Control.Monad.Trans.Reader
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Either
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified Data.Vector
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified Network.HTTP.Client
import qualified Network.HTTP.Client as Network.HTTP.Client.Request
import qualified Network.HTTP.Client as Network.HTTP.Client.Types
import qualified Network.HTTP.Simple
import qualified Network.HTTP.Types
import qualified Network.HTTP.Types as Network.HTTP.Types.Status
import qualified Network.HTTP.Types as Network.HTTP.Types.URI
import qualified StripeAPI.Common
import StripeAPI.Types
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | > GET /v1/accounts/{account}
--
-- \<p\>Retrieves the details of an account.\<\/p\>
getAccountsAccount ::
  forall m.
  StripeAPI.Common.MonadHTTP m =>
  -- | Contains all available parameters of this operation (query and path parameters)
  GetAccountsAccountParameters ->
  -- | Monadic computation which returns the result of the operation
  StripeAPI.Common.ClientT m (Network.HTTP.Client.Types.Response GetAccountsAccountResponse)
-- Generated code (see the file header). A 200 response body is decoded as
-- 'Account'; any other status is decoded as the 'Error' envelope; a JSON
-- decoding failure surfaces as 'GetAccountsAccountResponseError'.
getAccountsAccount parameters =
  GHC.Base.fmap
    ( \response_0 ->
        GHC.Base.fmap
          ( Data.Either.either GetAccountsAccountResponseError GHC.Base.id
              GHC.Base.. ( \response body ->
                             if
                                 | (\status_1 -> Network.HTTP.Types.Status.statusCode status_1 GHC.Classes.== 200) (Network.HTTP.Client.Types.responseStatus response) ->
                                     GetAccountsAccountResponse200
                                       Data.Functor.<$> ( Data.Aeson.eitherDecodeStrict body ::
                                                            Data.Either.Either
                                                              GHC.Base.String
                                                              Account
                                                        )
                                 | GHC.Base.const GHC.Types.True (Network.HTTP.Client.Types.responseStatus response) ->
                                     GetAccountsAccountResponseDefault
                                       Data.Functor.<$> ( Data.Aeson.eitherDecodeStrict body ::
                                                            Data.Either.Either
                                                              GHC.Base.String
                                                              Error
                                                        )
                                 | GHC.Base.otherwise -> Data.Either.Left "Missing default response type"
                         )
                response_0
          )
          response_0
    )
    (StripeAPI.Common.doCallWithConfigurationM (Data.Text.toUpper GHC.Base.$ Data.Text.pack "GET") (Data.Text.pack ("/v1/accounts/" GHC.Base.++ (Data.ByteString.Char8.unpack (Network.HTTP.Types.URI.urlEncode GHC.Types.True GHC.Base.$ (Data.ByteString.Char8.pack GHC.Base.$ StripeAPI.Common.stringifyModel (getAccountsAccountParametersPathAccount parameters))) GHC.Base.++ ""))) [StripeAPI.Common.QueryParameter (Data.Text.pack "expand") (Data.Aeson.Types.ToJSON.toJSON Data.Functor.<$> getAccountsAccountParametersQueryExpand parameters) (Data.Text.pack "deepObject") GHC.Types.True])
-- | Defines the object schema located at @paths.\/v1\/accounts\/{account}.GET.parameters@ in the specification.
data GetAccountsAccountParameters = GetAccountsAccountParameters
  { -- | pathAccount: Represents the parameter named \'account\'
    --
    -- Constraints:
    --
    -- * Maximum length of 5000
    getAccountsAccountParametersPathAccount :: Data.Text.Internal.Text,
    -- | queryExpand: Represents the parameter named \'expand\'
    --
    -- Specifies which fields in the response should be expanded.
    getAccountsAccountParametersQueryExpand :: (GHC.Maybe.Maybe ([Data.Text.Internal.Text]))
  }
  deriving
    ( GHC.Show.Show,
      GHC.Classes.Eq
    )

instance Data.Aeson.Types.ToJSON.ToJSON GetAccountsAccountParameters where
  toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["pathAccount" Data.Aeson.Types.ToJSON..= getAccountsAccountParametersPathAccount obj] : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("queryExpand" Data.Aeson.Types.ToJSON..=)) (getAccountsAccountParametersQueryExpand obj) : GHC.Base.mempty))
  toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["pathAccount" Data.Aeson.Types.ToJSON..= getAccountsAccountParametersPathAccount obj] : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("queryExpand" Data.Aeson.Types.ToJSON..=)) (getAccountsAccountParametersQueryExpand obj) : GHC.Base.mempty)))

instance Data.Aeson.Types.FromJSON.FromJSON GetAccountsAccountParameters where
  parseJSON = Data.Aeson.Types.FromJSON.withObject "GetAccountsAccountParameters" (\obj -> (GHC.Base.pure GetAccountsAccountParameters GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "pathAccount")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "queryExpand"))
-- | Create a new 'GetAccountsAccountParameters' with all required fields.
mkGetAccountsAccountParameters ::
  -- | 'getAccountsAccountParametersPathAccount'
  Data.Text.Internal.Text ->
  GetAccountsAccountParameters
mkGetAccountsAccountParameters getAccountsAccountParametersPathAccount =
  GetAccountsAccountParameters
    { getAccountsAccountParametersPathAccount = getAccountsAccountParametersPathAccount,
      getAccountsAccountParametersQueryExpand = GHC.Maybe.Nothing
    }
-- | Represents a response of the operation 'getAccountsAccount'.
--
-- The response constructor is chosen by the status code of the response. If no case matches (no specific case for the response code, no range case, no default case), 'GetAccountsAccountResponseError' is used.
-- The @Default@ case covers every status code other than 200.
data GetAccountsAccountResponse
  = -- | Means either no matching case available or a parse error
    GetAccountsAccountResponseError GHC.Base.String
  | -- | Successful response.
    GetAccountsAccountResponse200 Account
  | -- | Error response.
    GetAccountsAccountResponseDefault Error
  deriving (GHC.Show.Show, GHC.Classes.Eq)
# LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Contains the different functions to run the operation getAccountsAccount
| > GET /v1/accounts/{account}
| Contains all available parameters of this operation (query and path parameters)
| Monadic computation which returns the result of the operation
| Defines the object schema located at @paths.\/v1\/accounts\/{account}.GET.parameters@ in the specification.
| pathAccount: Represents the parameter named \'account\'
Constraints:
| queryExpand: Represents the parameter named \'expand\'
Specifies which fields in the response should be expanded.
| Create a new 'GetAccountsAccountParameters' with all required fields.
| Represents a response of the operation 'getAccountsAccount'.
The response constructor is chosen by the status code of the response. If no case matches (no specific case for the response code, no range case, no default case), 'GetAccountsAccountResponseError' is used.
| Means either no matching case available or a parse error
| Successful response.
| Error response. | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
module StripeAPI.Operations.GetAccountsAccount where
import qualified Control.Monad.Fail
import qualified Control.Monad.Trans.Reader
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Either
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified Data.Vector
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified Network.HTTP.Client
import qualified Network.HTTP.Client as Network.HTTP.Client.Request
import qualified Network.HTTP.Client as Network.HTTP.Client.Types
import qualified Network.HTTP.Simple
import qualified Network.HTTP.Types
import qualified Network.HTTP.Types as Network.HTTP.Types.Status
import qualified Network.HTTP.Types as Network.HTTP.Types.URI
import qualified StripeAPI.Common
import StripeAPI.Types
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
\<p > Retrieves the details of an account.\<\/p >
getAccountsAccount ::
forall m.
StripeAPI.Common.MonadHTTP m =>
GetAccountsAccountParameters ->
StripeAPI.Common.ClientT m (Network.HTTP.Client.Types.Response GetAccountsAccountResponse)
getAccountsAccount parameters =
GHC.Base.fmap
( \response_0 ->
GHC.Base.fmap
( Data.Either.either GetAccountsAccountResponseError GHC.Base.id
GHC.Base.. ( \response body ->
if
| (\status_1 -> Network.HTTP.Types.Status.statusCode status_1 GHC.Classes.== 200) (Network.HTTP.Client.Types.responseStatus response) ->
GetAccountsAccountResponse200
Data.Functor.<$> ( Data.Aeson.eitherDecodeStrict body ::
Data.Either.Either
GHC.Base.String
Account
)
| GHC.Base.const GHC.Types.True (Network.HTTP.Client.Types.responseStatus response) ->
GetAccountsAccountResponseDefault
Data.Functor.<$> ( Data.Aeson.eitherDecodeStrict body ::
Data.Either.Either
GHC.Base.String
Error
)
| GHC.Base.otherwise -> Data.Either.Left "Missing default response type"
)
response_0
)
response_0
)
(StripeAPI.Common.doCallWithConfigurationM (Data.Text.toUpper GHC.Base.$ Data.Text.pack "GET") (Data.Text.pack ("/v1/accounts/" GHC.Base.++ (Data.ByteString.Char8.unpack (Network.HTTP.Types.URI.urlEncode GHC.Types.True GHC.Base.$ (Data.ByteString.Char8.pack GHC.Base.$ StripeAPI.Common.stringifyModel (getAccountsAccountParametersPathAccount parameters))) GHC.Base.++ ""))) [StripeAPI.Common.QueryParameter (Data.Text.pack "expand") (Data.Aeson.Types.ToJSON.toJSON Data.Functor.<$> getAccountsAccountParametersQueryExpand parameters) (Data.Text.pack "deepObject") GHC.Types.True])
data GetAccountsAccountParameters = GetAccountsAccountParameters
* Maximum length of 5000
getAccountsAccountParametersPathAccount :: Data.Text.Internal.Text,
getAccountsAccountParametersQueryExpand :: (GHC.Maybe.Maybe ([Data.Text.Internal.Text]))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON GetAccountsAccountParameters where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["pathAccount" Data.Aeson.Types.ToJSON..= getAccountsAccountParametersPathAccount obj] : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("queryExpand" Data.Aeson.Types.ToJSON..=)) (getAccountsAccountParametersQueryExpand obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["pathAccount" Data.Aeson.Types.ToJSON..= getAccountsAccountParametersPathAccount obj] : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("queryExpand" Data.Aeson.Types.ToJSON..=)) (getAccountsAccountParametersQueryExpand obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON GetAccountsAccountParameters where
parseJSON = Data.Aeson.Types.FromJSON.withObject "GetAccountsAccountParameters" (\obj -> (GHC.Base.pure GetAccountsAccountParameters GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "pathAccount")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "queryExpand"))
mkGetAccountsAccountParameters ::
| ' getAccountsAccountParametersPathAccount '
Data.Text.Internal.Text ->
GetAccountsAccountParameters
mkGetAccountsAccountParameters getAccountsAccountParametersPathAccount =
GetAccountsAccountParameters
{ getAccountsAccountParametersPathAccount = getAccountsAccountParametersPathAccount,
getAccountsAccountParametersQueryExpand = GHC.Maybe.Nothing
}
data GetAccountsAccountResponse
GetAccountsAccountResponseError GHC.Base.String
GetAccountsAccountResponse200 Account
GetAccountsAccountResponseDefault Error
deriving (GHC.Show.Show, GHC.Classes.Eq)
|
1c2c903facd57cc4dc3bf588c1406055c4f212a0d9b625c98e16458d38f22628 | raspasov/neversleep | b_plus_tree_sorted_map.clj | (ns neversleep-db.b-plus-tree-sorted-map
(:require [clojure.core.async :refer [chan go >! <! <!! >!! go-loop put! thread alts! alts!! timeout pipeline close!]]
[neversleep-db.println-m :refer [println-m]]
[neversleep-db.state :as state]))
(defn get-b-plus-tree [entity-id]
(get @state/all-b-plus-trees-in-mem entity-id))
(defn save-b-plus-tree [id b-plus-tree]
(alter state/all-b-plus-trees-in-mem (fn [x] (assoc x id b-plus-tree))))
;end
;constructors
(defn new-b-tree []
(sorted-map))
;end
(def io-assoc-agent (agent nil))
( defn io - assoc [ entity - id k v confirm - ch ]
; (dosync
; (let [tree (if-let [tree (get-b-plus-tree entity-id)]
; tree
; (new-b-tree))]
; (save-b-plus-tree entity-id (assoc tree k v))))
; (>!! confirm-ch true))
(defn io-assoc [entity-id k v confirm-ch]
(send io-assoc-agent (fn [_]
(dosync
(let [tree (if-let [tree (get-b-plus-tree entity-id)]
tree
(new-b-tree))]
(save-b-plus-tree entity-id (assoc tree k v))))
(>!! confirm-ch true)
nil)))
(defn io-find-range
"Searches the b-plus tree for a range of keys.
If key-start > key-end, search is DESC-like"
[entity-id key-start key-end limit responce-ch]
(if-let [tree (get-b-plus-tree entity-id)]
(>!! responce-ch
{:result
(->> (rsubseq tree >= key-end <= key-start)
(take limit)
(into []))})
;empty
(>!! responce-ch
{:result []})))
(defn clean-up-keys-less-than-or-equal-to [k entity-id]
(dosync
(when-let [tree (get-b-plus-tree entity-id)]
(let [key-vals-to-remove (take-while #(<= ^long (nth % 0) ^long k) tree)
keys-seq (map first key-vals-to-remove)
new-tree (apply dissoc tree keys-seq)]
(save-b-plus-tree entity-id new-tree)))))
| null | https://raw.githubusercontent.com/raspasov/neversleep/7fd968f4ab20fa6ef71e1049e3eec289ea6691e4/src/neversleep_db/b_plus_tree_sorted_map.clj | clojure | end
constructors
end
(dosync
(let [tree (if-let [tree (get-b-plus-tree entity-id)]
tree
(new-b-tree))]
(save-b-plus-tree entity-id (assoc tree k v))))
(>!! confirm-ch true))
empty | (ns neversleep-db.b-plus-tree-sorted-map
(:require [clojure.core.async :refer [chan go >! <! <!! >!! go-loop put! thread alts! alts!! timeout pipeline close!]]
[neversleep-db.println-m :refer [println-m]]
[neversleep-db.state :as state]))
(defn get-b-plus-tree [entity-id]
(get @state/all-b-plus-trees-in-mem entity-id))
(defn save-b-plus-tree [id b-plus-tree]
(alter state/all-b-plus-trees-in-mem (fn [x] (assoc x id b-plus-tree))))
(defn new-b-tree []
(sorted-map))
(def io-assoc-agent (agent nil))
( defn io - assoc [ entity - id k v confirm - ch ]
(defn io-assoc [entity-id k v confirm-ch]
(send io-assoc-agent (fn [_]
(dosync
(let [tree (if-let [tree (get-b-plus-tree entity-id)]
tree
(new-b-tree))]
(save-b-plus-tree entity-id (assoc tree k v))))
(>!! confirm-ch true)
nil)))
(defn io-find-range
"Searches the b-plus tree for a range of keys.
If key-start > key-end, search is DESC-like"
[entity-id key-start key-end limit responce-ch]
(if-let [tree (get-b-plus-tree entity-id)]
(>!! responce-ch
{:result
(->> (rsubseq tree >= key-end <= key-start)
(take limit)
(into []))})
(>!! responce-ch
{:result []})))
(defn clean-up-keys-less-than-or-equal-to [k entity-id]
(dosync
(when-let [tree (get-b-plus-tree entity-id)]
(let [key-vals-to-remove (take-while #(<= ^long (nth % 0) ^long k) tree)
keys-seq (map first key-vals-to-remove)
new-tree (apply dissoc tree keys-seq)]
(save-b-plus-tree entity-id new-tree)))))
|
c88851bd3f5e8c38f75a4c5e2c697aa12ac1e325271ec5201fd7af06febe7657 | mmottl/gsl-ocaml | vector_flat.ml | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
let () = Error.init ()
type double_vector_flat =
{ data : float array ;
off : int ;
len : int ;
stride : int ; }
type vector = double_vector_flat
let check v =
let size = Array.length v.data in
if v.off < 0 || v.len < 0 || v.stride < 1 ||
v.off + (v.len - 1) * v.stride >= size
then failwith "Vector_flat.check" ;
v
let create ?(init=0.) len =
{ data = Array.make len init;
off = 0;
len = len;
stride = 1; }
let of_array arr =
{ data = Array.copy arr; off = 0;
len = Array.length arr; stride = 1; }
let length { len = len } =
len
let get v i =
v.data.(v.off + i*v.stride)
let set v i d =
v.data.(v.off + i*v.stride) <- d
let set_all v d =
for i=0 to pred v.len do
set v i d
done
let set_zero v =
set_all v 0.
let set_basis v i =
set_zero v ;
set v i 1.
let to_array v =
Array.init v.len (get v)
let subvector ?(stride=1) v ~off ~len =
check
{ v with
off = off * v.stride + v.off ;
len = len ;
stride = stride * v.stride ; }
let view_array ?(stride=1) ?(off=0) ?len arr =
let len = match len with
| None -> Array.length arr
| Some l -> l in
check
{ data = arr ; off = off ;
stride = stride ; len = len }
let memcpy ~src:v ~dst:w =
if v.len <> w.len
then invalid_arg "Vector.memcpy" ;
for i=0 to pred v.len do
set w i (get v i)
done
let copy v =
{ v with data = Array.copy v.data }
let swap_element v i j =
let d = get v i in
let d' = get v j in
set v j d ;
set v i d'
let reverse v =
for i=0 to pred (v.len/2) do
swap_element v i (pred v.len - i)
done
external add : vector -> vector -> unit
= "ml_gsl_vector_add"
external sub : vector -> vector -> unit
= "ml_gsl_vector_sub"
external mul : vector -> vector -> unit
= "ml_gsl_vector_mul"
external div : vector -> vector -> unit
= "ml_gsl_vector_div"
external scale : vector -> float -> unit
= "ml_gsl_vector_scale"
external add_constant : vector -> float -> unit
= "ml_gsl_vector_add_constant"
external is_null : vector -> bool
= "ml_gsl_vector_isnull"
external max : vector -> float
= "ml_gsl_vector_max"
external min : vector -> float
= "ml_gsl_vector_min"
external minmax : vector -> float * float
= "ml_gsl_vector_minmax"
external max_index : vector -> int
= "ml_gsl_vector_maxindex"
external min_index : vector -> int
= "ml_gsl_vector_minindex"
external minmax_index : vector -> int * int
= "ml_gsl_vector_minmaxindex"
| null | https://raw.githubusercontent.com/mmottl/gsl-ocaml/76f8d93cccc1f23084f4a33d3e0a8f1289450580/src/vector_flat.ml | ocaml | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
let () = Error.init ()
type double_vector_flat =
{ data : float array ;
off : int ;
len : int ;
stride : int ; }
type vector = double_vector_flat
let check v =
let size = Array.length v.data in
if v.off < 0 || v.len < 0 || v.stride < 1 ||
v.off + (v.len - 1) * v.stride >= size
then failwith "Vector_flat.check" ;
v
let create ?(init=0.) len =
{ data = Array.make len init;
off = 0;
len = len;
stride = 1; }
let of_array arr =
{ data = Array.copy arr; off = 0;
len = Array.length arr; stride = 1; }
let length { len = len } =
len
let get v i =
v.data.(v.off + i*v.stride)
let set v i d =
v.data.(v.off + i*v.stride) <- d
let set_all v d =
for i=0 to pred v.len do
set v i d
done
let set_zero v =
set_all v 0.
let set_basis v i =
set_zero v ;
set v i 1.
let to_array v =
Array.init v.len (get v)
let subvector ?(stride=1) v ~off ~len =
check
{ v with
off = off * v.stride + v.off ;
len = len ;
stride = stride * v.stride ; }
let view_array ?(stride=1) ?(off=0) ?len arr =
let len = match len with
| None -> Array.length arr
| Some l -> l in
check
{ data = arr ; off = off ;
stride = stride ; len = len }
let memcpy ~src:v ~dst:w =
if v.len <> w.len
then invalid_arg "Vector.memcpy" ;
for i=0 to pred v.len do
set w i (get v i)
done
let copy v =
{ v with data = Array.copy v.data }
let swap_element v i j =
let d = get v i in
let d' = get v j in
set v j d ;
set v i d'
let reverse v =
for i=0 to pred (v.len/2) do
swap_element v i (pred v.len - i)
done
external add : vector -> vector -> unit
= "ml_gsl_vector_add"
external sub : vector -> vector -> unit
= "ml_gsl_vector_sub"
external mul : vector -> vector -> unit
= "ml_gsl_vector_mul"
external div : vector -> vector -> unit
= "ml_gsl_vector_div"
external scale : vector -> float -> unit
= "ml_gsl_vector_scale"
external add_constant : vector -> float -> unit
= "ml_gsl_vector_add_constant"
external is_null : vector -> bool
= "ml_gsl_vector_isnull"
external max : vector -> float
= "ml_gsl_vector_max"
external min : vector -> float
= "ml_gsl_vector_min"
external minmax : vector -> float * float
= "ml_gsl_vector_minmax"
external max_index : vector -> int
= "ml_gsl_vector_maxindex"
external min_index : vector -> int
= "ml_gsl_vector_minindex"
external minmax_index : vector -> int * int
= "ml_gsl_vector_minmaxindex"
| |
4a969b5837b5fe51cccdb8f1e343a3a58142d093208abe53705f5b1af684ab6d | klarna/snabbkaffe | snabbkaffe_nemesis.erl | Copyright 2019 - 2020 Klarna Bank AB
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc This module implements "nemesis" process that injects faults
%% into system under test in order to test its fault-tolerance.
%%
%% == Usage ==
%%
%% === Somewhere in the tested code ===
%%
%% ```
? maybe_crash(kind1 , # { data1 = > Foo , field2 = > Bar } )
%% '''
%%
%% === Somewhere in the run stage ===
%%
%% ```
? inject_crash ( # { ? snk_kind : = kind1 , : = 42 }
, snabbkaffe_nemesis : ( )
%% )
%% '''
%% @end
-module(snabbkaffe_nemesis).
-include("snabbkaffe_internal.hrl").
-behaviour(gen_server).
%% API
-export([ start_link/0
, inject_crash/2
, inject_crash/3
, fix_crash/1
, maybe_crash/2
%% Failure scenarios
, always_crash/0
, recover_after/1
, random_crash/1
, periodic_crash/3
]).
-export_type([fault_scenario/0]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2]).
-define(SERVER, ?MODULE).
-define(ERROR_TAB, snabbkaffe_injected_errors).
-define(STATE_TAB, snabbkaffe_fault_states).
-define(SINGLETON_KEY, 0).
%%%===================================================================
%%% Types
%%%===================================================================
%% @doc
%% Type of fault patterns, such as "always fail", "fail randomly" or
%% "recover after N attempts"
%% @end
%%
%% This type is pretty magical. For performance reasons, state of the
%% failure scenario is encoded as an integer counter, that is
incremented every time the scenario is run . ( BEAM VM can do this
%% atomically and fast). Therefore "failure scenario" should map
%% integer to boolean.
-opaque fault_scenario() :: fun((integer()) -> boolean()).
-type fault_key() :: term().
State of failure point ( it 's a simple counter , see above comment ):
-type fault_state() :: {fault_key(), integer()}.
%% Injected error:
-record(fault,
{ reference :: reference()
, predicate :: snabbkaffe:prediacate()
, scenario :: snabbkaffe:fault_scenario()
, reason :: term()
}).
%% Currently this gen_server just holds the ets tables and
%% synchronizes writes to the fault table, but in the future it may be
%% used to mess up the system in more interesting ways
-record(s,
{ injected_errors :: ets:tid()
, fault_states :: ets:tid()
}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Start the server
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
%% @equiv inject_crash(Predicate, Scenario, notmyday)
-spec inject_crash(snabbkaffe:predicate(), fault_scenario()) -> reference().
inject_crash(Predicate, Scenario) ->
inject_crash(Predicate, Scenario, notmayday).
%% @doc Inject crash into the system
-spec inject_crash(snabbkaffe:predicate(), fault_scenario(), term()) -> reference().
inject_crash(Predicate, Scenario, Reason) ->
Ref = make_ref(),
Crash = #fault{ reference = Ref
, predicate = Predicate
, scenario = Scenario
, reason = Reason
},
ok = gen_server:call(?SERVER, {inject_crash, Crash}, infinity),
Ref.
%% @doc Remove injected fault
-spec fix_crash(reference()) -> ok.
fix_crash(Ref) ->
gen_server:call(?SERVER, {fix_crash, Ref}, infinity).
%% @doc Check if there are any injected crashes that match this data,
%% and respond with the crash reason if so.
-spec maybe_crash(fault_key(), map()) -> ok.
maybe_crash(Key, Data) ->
[{_, Faults}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
%% Check if any of the injected errors have predicates matching my
%% data:
Fun = fun(#fault{predicate = P}) -> P(Data) end,
case lists:filter(Fun, Faults) of
[] ->
%% None of the injected faults match my data:
ok;
[#fault{scenario = S, reason = R}|_] ->
NewVal = ets:update_counter(?STATE_TAB, Key, {2, 1}, {Key, 0}),
%% Run fault_scenario function to see if we need to crash this
%% time:
case S(NewVal) of
true ->
snabbkaffe_collector:tp(snabbkaffe_crash, Data#{ crash_kind => Key
}),
error(R);
false ->
ok
end
end.
%%%===================================================================
%%% Fault scenarios
%%%===================================================================
-spec always_crash() -> fault_scenario().
always_crash() ->
fun(_) ->
true
end.
-spec recover_after(non_neg_integer()) -> fault_scenario().
recover_after(Times) ->
fun(X) ->
X =< Times
end.
-spec random_crash(float()) -> fault_scenario().
random_crash(CrashProbability) ->
fun(X) ->
Range = 2 bsl 16,
%% Turn a sequential number into a sufficiently plausible
%% pseudorandom one:
Val = erlang:phash2(X, Range),
Val < CrashProbability * Range
end.
%% @doc A type of fault that occurs and fixes periodically.
-spec periodic_crash(integer(), float(), float()) -> fault_scenario().
periodic_crash(Period, DutyCycle, Phase) ->
DC = DutyCycle * Period,
P = round(Phase/(math:pi()*2)*Period),
fun(X) ->
(X + P - 1) rem Period >= DC
end.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
@private
init([]) ->
ST = ets:new(?STATE_TAB, [ named_table
, {write_concurrency, true}
, {read_concurrency, true}
, public
]),
FT = ets:new(?ERROR_TAB, [ named_table
, {write_concurrency, false}
, {read_concurrency, true}
, protected
]),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, []}),
{ok, #s{ injected_errors = FT
, fault_states = ST
}}.
@private
handle_call({inject_crash, Crash}, _From, State) ->
[{_, Faults}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, [Crash|Faults]}),
{reply, ok, State};
handle_call({fix_crash, Ref}, _From, State) ->
[{_, Faults0}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
Faults = lists:keydelete(Ref, #fault.reference, Faults0),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, Faults}),
{reply, ok, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@private
handle_cast(_Request, State) ->
{noreply, State}.
@private
handle_info(_Info, State) ->
{noreply, State}.
@private
terminate(_Reason, _State) ->
ok.
%%%===================================================================
Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/klarna/snabbkaffe/2bdf6e842c825ca935b34884528f51158dd31e6e/src/snabbkaffe_nemesis.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc This module implements "nemesis" process that injects faults
into system under test in order to test its fault-tolerance.
== Usage ==
=== Somewhere in the tested code ===
```
'''
=== Somewhere in the run stage ===
```
)
'''
@end
API
Failure scenarios
gen_server callbacks
===================================================================
Types
===================================================================
@doc
Type of fault patterns, such as "always fail", "fail randomly" or
"recover after N attempts"
@end
This type is pretty magical. For performance reasons, state of the
failure scenario is encoded as an integer counter, that is
atomically and fast). Therefore "failure scenario" should map
integer to boolean.
Injected error:
Currently this gen_server just holds the ets tables and
synchronizes writes to the fault table, but in the future it may be
used to mess up the system in more interesting ways
===================================================================
API
===================================================================
@doc Start the server
@equiv inject_crash(Predicate, Scenario, notmyday)
@doc Inject crash into the system
@doc Remove injected fault
@doc Check if there are any injected crashes that match this data,
and respond with the crash reason if so.
Check if any of the injected errors have predicates matching my
data:
None of the injected faults match my data:
Run fault_scenario function to see if we need to crash this
time:
===================================================================
Fault scenarios
===================================================================
Turn a sequential number into a sufficiently plausible
pseudorandom one:
@doc A type of fault that occurs and fixes periodically.
===================================================================
gen_server callbacks
===================================================================
===================================================================
=================================================================== | Copyright 2019 - 2020 Klarna Bank AB
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
? maybe_crash(kind1 , # { data1 = > Foo , field2 = > Bar } )
? inject_crash ( # { ? snk_kind : = kind1 , : = 42 }
, snabbkaffe_nemesis : ( )
-module(snabbkaffe_nemesis).
-include("snabbkaffe_internal.hrl").
-behaviour(gen_server).
-export([ start_link/0
, inject_crash/2
, inject_crash/3
, fix_crash/1
, maybe_crash/2
, always_crash/0
, recover_after/1
, random_crash/1
, periodic_crash/3
]).
-export_type([fault_scenario/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2]).
-define(SERVER, ?MODULE).
-define(ERROR_TAB, snabbkaffe_injected_errors).
-define(STATE_TAB, snabbkaffe_fault_states).
-define(SINGLETON_KEY, 0).
incremented every time the scenario is run . ( BEAM VM can do this
-opaque fault_scenario() :: fun((integer()) -> boolean()).
-type fault_key() :: term().
State of failure point ( it 's a simple counter , see above comment ):
-type fault_state() :: {fault_key(), integer()}.
-record(fault,
{ reference :: reference()
, predicate :: snabbkaffe:prediacate()
, scenario :: snabbkaffe:fault_scenario()
, reason :: term()
}).
-record(s,
{ injected_errors :: ets:tid()
, fault_states :: ets:tid()
}).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec inject_crash(snabbkaffe:predicate(), fault_scenario()) -> reference().
inject_crash(Predicate, Scenario) ->
inject_crash(Predicate, Scenario, notmayday).
-spec inject_crash(snabbkaffe:predicate(), fault_scenario(), term()) -> reference().
inject_crash(Predicate, Scenario, Reason) ->
Ref = make_ref(),
Crash = #fault{ reference = Ref
, predicate = Predicate
, scenario = Scenario
, reason = Reason
},
ok = gen_server:call(?SERVER, {inject_crash, Crash}, infinity),
Ref.
-spec fix_crash(reference()) -> ok.
fix_crash(Ref) ->
gen_server:call(?SERVER, {fix_crash, Ref}, infinity).
-spec maybe_crash(fault_key(), map()) -> ok.
maybe_crash(Key, Data) ->
[{_, Faults}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
Fun = fun(#fault{predicate = P}) -> P(Data) end,
case lists:filter(Fun, Faults) of
[] ->
ok;
[#fault{scenario = S, reason = R}|_] ->
NewVal = ets:update_counter(?STATE_TAB, Key, {2, 1}, {Key, 0}),
case S(NewVal) of
true ->
snabbkaffe_collector:tp(snabbkaffe_crash, Data#{ crash_kind => Key
}),
error(R);
false ->
ok
end
end.
-spec always_crash() -> fault_scenario().
always_crash() ->
fun(_) ->
true
end.
-spec recover_after(non_neg_integer()) -> fault_scenario().
recover_after(Times) ->
fun(X) ->
X =< Times
end.
-spec random_crash(float()) -> fault_scenario().
random_crash(CrashProbability) ->
fun(X) ->
Range = 2 bsl 16,
Val = erlang:phash2(X, Range),
Val < CrashProbability * Range
end.
-spec periodic_crash(integer(), float(), float()) -> fault_scenario().
periodic_crash(Period, DutyCycle, Phase) ->
DC = DutyCycle * Period,
P = round(Phase/(math:pi()*2)*Period),
fun(X) ->
(X + P - 1) rem Period >= DC
end.
@private
init([]) ->
ST = ets:new(?STATE_TAB, [ named_table
, {write_concurrency, true}
, {read_concurrency, true}
, public
]),
FT = ets:new(?ERROR_TAB, [ named_table
, {write_concurrency, false}
, {read_concurrency, true}
, protected
]),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, []}),
{ok, #s{ injected_errors = FT
, fault_states = ST
}}.
@private
handle_call({inject_crash, Crash}, _From, State) ->
[{_, Faults}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, [Crash|Faults]}),
{reply, ok, State};
handle_call({fix_crash, Ref}, _From, State) ->
[{_, Faults0}] = ets:lookup(?ERROR_TAB, ?SINGLETON_KEY),
Faults = lists:keydelete(Ref, #fault.reference, Faults0),
ets:insert(?ERROR_TAB, {?SINGLETON_KEY, Faults}),
{reply, ok, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@private
handle_cast(_Request, State) ->
{noreply, State}.
@private
handle_info(_Info, State) ->
{noreply, State}.
@private
terminate(_Reason, _State) ->
ok.
Internal functions
|
1c01086cb9e5d4d257fd37e62667ad358ac94e5d59d6d53232be45ea1da19ee5 | brendanhay/amazonka | ImportCertificate.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
-- Module : Amazonka.Transfer.ImportCertificate
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Imports the signing and encryption certificates that you need to create
-- local (AS2) profiles and partner profiles.
module Amazonka.Transfer.ImportCertificate
( -- * Creating a Request
ImportCertificate (..),
newImportCertificate,
-- * Request Lenses
importCertificate_activeDate,
importCertificate_certificateChain,
importCertificate_description,
importCertificate_inactiveDate,
importCertificate_privateKey,
importCertificate_tags,
importCertificate_usage,
importCertificate_certificate,
-- * Destructuring the Response
ImportCertificateResponse (..),
newImportCertificateResponse,
-- * Response Lenses
importCertificateResponse_httpStatus,
importCertificateResponse_certificateId,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
-- | /See:/ 'newImportCertificate' smart constructor.
data ImportCertificate = ImportCertificate'
  { -- | An optional date that specifies when the certificate becomes active.
    activeDate :: Prelude.Maybe Data.POSIX,
    -- | An optional list of certificates that make up the chain for the
    -- certificate that\'s being imported.
    certificateChain :: Prelude.Maybe (Data.Sensitive Prelude.Text),
    -- | A short description that helps identify the certificate.
    description :: Prelude.Maybe Prelude.Text,
    -- | An optional date that specifies when the certificate becomes inactive.
    inactiveDate :: Prelude.Maybe Data.POSIX,
    -- | The file that contains the private key for the certificate that\'s being
    -- imported.  Wrapped in 'Data.Sensitive' (see that type for how sensitive
    -- values are rendered).
    privateKey :: Prelude.Maybe (Data.Sensitive Prelude.Text),
    -- | Key-value pairs that can be used to group and search for certificates.
    tags :: Prelude.Maybe (Prelude.NonEmpty Tag),
    -- | Specifies whether this certificate is used for signing or encryption.
    usage :: CertificateUsageType,
    -- | The file that contains the certificate to import.
    certificate :: Data.Sensitive Prelude.Text
  }
  deriving (Prelude.Eq, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'ImportCertificate' with all optional fields omitted;
-- only the two required fields are taken as arguments.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or
-- <https://hackage.haskell.org/package/optics optics> to modify other
-- optional fields.
--
-- The following record fields are available, with the corresponding lenses
-- provided for backwards compatibility:
--
-- 'activeDate', 'importCertificate_activeDate' - An optional date that
-- specifies when the certificate becomes active.
--
-- 'certificateChain', 'importCertificate_certificateChain' - An optional
-- list of certificates that make up the chain for the certificate that\'s
-- being imported.
--
-- 'description', 'importCertificate_description' - A short description
-- that helps identify the certificate.
--
-- 'inactiveDate', 'importCertificate_inactiveDate' - An optional date that
-- specifies when the certificate becomes inactive.
--
-- 'privateKey', 'importCertificate_privateKey' - The file that contains
-- the private key for the certificate that\'s being imported.
--
-- 'tags', 'importCertificate_tags' - Key-value pairs that can be used to
-- group and search for certificates.
--
-- 'usage', 'importCertificate_usage' - Specifies whether this certificate
-- is used for signing or encryption.
--
-- 'certificate', 'importCertificate_certificate' - The file that contains
-- the certificate to import.
newImportCertificate ::
  -- | 'usage'
  CertificateUsageType ->
  -- | 'certificate'
  Prelude.Text ->
  ImportCertificate
newImportCertificate usage' certificate' =
  ImportCertificate'
    { activeDate = Prelude.Nothing,
      certificateChain = Prelude.Nothing,
      description = Prelude.Nothing,
      inactiveDate = Prelude.Nothing,
      privateKey = Prelude.Nothing,
      tags = Prelude.Nothing,
      usage = usage',
      certificate = Data._Sensitive Lens.# certificate'
    }
-- | An optional date that specifies when the certificate becomes active.
importCertificate_activeDate :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.UTCTime)
importCertificate_activeDate =
  Lens.lens
    (\ImportCertificate' {activeDate} -> activeDate)
    (\s a -> s {activeDate = a} :: ImportCertificate)
    Prelude.. Lens.mapping Data._Time

-- | An optional list of certificates that make up the chain for the
-- certificate that\'s being imported.
importCertificate_certificateChain :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_certificateChain =
  Lens.lens
    (\ImportCertificate' {certificateChain} -> certificateChain)
    (\s a -> s {certificateChain = a} :: ImportCertificate)
    Prelude.. Lens.mapping Data._Sensitive

-- | A short description that helps identify the certificate.
importCertificate_description :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_description =
  Lens.lens
    (\ImportCertificate' {description} -> description)
    (\s a -> s {description = a} :: ImportCertificate)

-- | An optional date that specifies when the certificate becomes inactive.
importCertificate_inactiveDate :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.UTCTime)
importCertificate_inactiveDate =
  Lens.lens
    (\ImportCertificate' {inactiveDate} -> inactiveDate)
    (\s a -> s {inactiveDate = a} :: ImportCertificate)
    Prelude.. Lens.mapping Data._Time

-- | The file that contains the private key for the certificate that\'s being
-- imported.
importCertificate_privateKey :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_privateKey =
  Lens.lens
    (\ImportCertificate' {privateKey} -> privateKey)
    (\s a -> s {privateKey = a} :: ImportCertificate)
    Prelude.. Lens.mapping Data._Sensitive

-- | Key-value pairs that can be used to group and search for certificates.
importCertificate_tags :: Lens.Lens' ImportCertificate (Prelude.Maybe (Prelude.NonEmpty Tag))
importCertificate_tags =
  Lens.lens
    (\ImportCertificate' {tags} -> tags)
    (\s a -> s {tags = a} :: ImportCertificate)
    Prelude.. Lens.mapping Lens.coerced

-- | Specifies whether this certificate is used for signing or encryption.
importCertificate_usage :: Lens.Lens' ImportCertificate CertificateUsageType
importCertificate_usage =
  Lens.lens
    (\ImportCertificate' {usage} -> usage)
    (\s a -> s {usage = a} :: ImportCertificate)

-- | The file that contains the certificate to import.
importCertificate_certificate :: Lens.Lens' ImportCertificate Prelude.Text
importCertificate_certificate =
  Lens.lens
    (\ImportCertificate' {certificate} -> certificate)
    (\s a -> s {certificate = a} :: ImportCertificate)
    Prelude.. Data._Sensitive
instance Core.AWSRequest ImportCertificate where
  type
    AWSResponse ImportCertificate =
      ImportCertificateResponse

  -- Sent as a JSON POST against the configured Transfer service endpoint.
  request overrides =
    Request.postJSON (overrides defaultService)

  -- Build the response from the HTTP status and the mandatory
  -- @CertificateId@ field of the JSON body (headers @h@ are unused).
  response =
    Response.receiveJSON
      ( \s h x ->
          ImportCertificateResponse'
            Prelude.<$> (Prelude.pure (Prelude.fromEnum s))
            Prelude.<*> (x Data..:> "CertificateId")
      )
instance Prelude.Hashable ImportCertificate where
  hashWithSalt _salt ImportCertificate' {..} =
    -- Chain every field through the salt; the chaining order fixes the
    -- hash value, so keep it aligned with the record's field order.
    _salt
      `Prelude.hashWithSalt` activeDate
      `Prelude.hashWithSalt` certificateChain
      `Prelude.hashWithSalt` description
      `Prelude.hashWithSalt` inactiveDate
      `Prelude.hashWithSalt` privateKey
      `Prelude.hashWithSalt` tags
      `Prelude.hashWithSalt` usage
      `Prelude.hashWithSalt` certificate
instance Prelude.NFData ImportCertificate where
  rnf ImportCertificate' {..} =
    -- Force every field so no thunks survive a deepseq of the request.
    Prelude.rnf activeDate
      `Prelude.seq` Prelude.rnf certificateChain
      `Prelude.seq` Prelude.rnf description
      `Prelude.seq` Prelude.rnf inactiveDate
      `Prelude.seq` Prelude.rnf privateKey
      `Prelude.seq` Prelude.rnf tags
      `Prelude.seq` Prelude.rnf usage
      `Prelude.seq` Prelude.rnf certificate
instance Data.ToHeaders ImportCertificate where
  toHeaders =
    -- Static headers: X-Amz-Target routes the call to this operation and
    -- the content type selects the AWS JSON 1.1 wire protocol.
    Prelude.const
      ( Prelude.mconcat
          [ "X-Amz-Target"
              Data.=# ( "TransferService.ImportCertificate" ::
                          Prelude.ByteString
                      ),
            "Content-Type"
              Data.=# ( "application/x-amz-json-1.1" ::
                          Prelude.ByteString
                      )
          ]
      )
instance Data.ToJSON ImportCertificate where
  toJSON ImportCertificate' {..} =
    Data.object
      ( Prelude.catMaybes
          [ -- Optional (Maybe) fields are dropped from the payload when
            -- Nothing; the two required fields are always emitted.
            ("ActiveDate" Data..=) Prelude.<$> activeDate,
            ("CertificateChain" Data..=)
              Prelude.<$> certificateChain,
            ("Description" Data..=) Prelude.<$> description,
            ("InactiveDate" Data..=) Prelude.<$> inactiveDate,
            ("PrivateKey" Data..=) Prelude.<$> privateKey,
            ("Tags" Data..=) Prelude.<$> tags,
            Prelude.Just ("Usage" Data..= usage),
            Prelude.Just ("Certificate" Data..= certificate)
          ]
      )
-- Request data travels entirely in the JSON body: the path is always the
-- service root and the query string is empty.
instance Data.ToPath ImportCertificate where
  toPath = Prelude.const "/"

instance Data.ToQuery ImportCertificate where
  toQuery = Prelude.const Prelude.mempty
-- | /See:/ 'newImportCertificateResponse' smart constructor.
data ImportCertificateResponse = ImportCertificateResponse'
  { -- | The response's http status code.
    httpStatus :: Prelude.Int,
    -- | An array of identifiers for the imported certificates. You use this
    -- identifier for working with profiles and partner profiles.
    certificateId :: Prelude.Text
  }
  deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'ImportCertificateResponse' with all optional fields
-- omitted; both fields are required and taken as arguments.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or
-- <https://hackage.haskell.org/package/optics optics> to modify other
-- optional fields.
--
-- The following record fields are available, with the corresponding lenses
-- provided for backwards compatibility:
--
-- 'httpStatus', 'importCertificateResponse_httpStatus' - The response's
-- http status code.
--
-- 'certificateId', 'importCertificateResponse_certificateId' - An array of
-- identifiers for the imported certificates. You use this identifier for
-- working with profiles and partner profiles.
newImportCertificateResponse ::
  -- | 'httpStatus'
  Prelude.Int ->
  -- | 'certificateId'
  Prelude.Text ->
  ImportCertificateResponse
newImportCertificateResponse status certId =
  ImportCertificateResponse'
    { httpStatus = status,
      certificateId = certId
    }
-- | The response's http status code.
importCertificateResponse_httpStatus :: Lens.Lens' ImportCertificateResponse Prelude.Int
importCertificateResponse_httpStatus =
  Lens.lens
    (\ImportCertificateResponse' {httpStatus} -> httpStatus)
    (\s a -> s {httpStatus = a} :: ImportCertificateResponse)

-- | An array of identifiers for the imported certificates. You use this
-- identifier for working with profiles and partner profiles.
importCertificateResponse_certificateId :: Lens.Lens' ImportCertificateResponse Prelude.Text
importCertificateResponse_certificateId =
  Lens.lens
    (\ImportCertificateResponse' {certificateId} -> certificateId)
    (\s a -> s {certificateId = a} :: ImportCertificateResponse)
instance Prelude.NFData ImportCertificateResponse where
  rnf ImportCertificateResponse' {..} =
    -- Force both fields to normal form.
    Prelude.rnf httpStatus
      `Prelude.seq` Prelude.rnf certificateId
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-transfer/gen/Amazonka/Transfer/ImportCertificate.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Module : Amazonka.Transfer.ImportCertificate
Stability : auto-generated
Imports the signing and encryption certificates that you need to create
local (AS2) profiles and partner profiles.
* Creating a Request
* Request Lenses
* Destructuring the Response
* Response Lenses
| /See:/ 'newImportCertificate' smart constructor.
| An optional date that specifies when the certificate becomes active.
| An optional list of certificates that make up the chain for the
certificate that\'s being imported.
| A short description that helps identify the certificate.
| An optional date that specifies when the certificate becomes inactive.
| The file that contains the private key for the certificate that\'s being
imported.
| Key-value pairs that can be used to group and search for certificates.
| Specifies whether this certificate is used for signing or encryption.
| The file that contains the certificate to import.
|
Create a value of 'ImportCertificate' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'activeDate', 'importCertificate_activeDate' - An optional date that specifies when the certificate becomes active.
'certificateChain', 'importCertificate_certificateChain' - An optional list of certificates that make up the chain for the
certificate that\'s being imported.
'description', 'importCertificate_description' - A short description that helps identify the certificate.
'inactiveDate', 'importCertificate_inactiveDate' - An optional date that specifies when the certificate becomes inactive.
'privateKey', 'importCertificate_privateKey' - The file that contains the private key for the certificate that\'s being
imported.
'tags', 'importCertificate_tags' - Key-value pairs that can be used to group and search for certificates.
'usage', 'importCertificate_usage' - Specifies whether this certificate is used for signing or encryption.
'certificate', 'importCertificate_certificate' - The file that contains the certificate to import.
| 'usage'
| 'certificate'
| An optional date that specifies when the certificate becomes active.
| An optional list of certificates that make up the chain for the
certificate that\'s being imported.
| A short description that helps identify the certificate.
| An optional date that specifies when the certificate becomes inactive.
| The file that contains the private key for the certificate that\'s being
imported.
| Key-value pairs that can be used to group and search for certificates.
| Specifies whether this certificate is used for signing or encryption.
| The file that contains the certificate to import.
| /See:/ 'newImportCertificateResponse' smart constructor.
| The response's http status code.
| An array of identifiers for the imported certificates. You use this
identifier for working with profiles and partner profiles.
|
Create a value of 'ImportCertificateResponse' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'httpStatus', 'importCertificateResponse_httpStatus' - The response's http status code.
'certificateId', 'importCertificateResponse_certificateId' - An array of identifiers for the imported certificates. You use this
identifier for working with profiles and partner profiles.
| 'httpStatus'
| 'certificateId'
| The response's http status code.
| An array of identifiers for the imported certificates. You use this
identifier for working with profiles and partner profiles. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.Transfer.ImportCertificate
ImportCertificate (..),
newImportCertificate,
importCertificate_activeDate,
importCertificate_certificateChain,
importCertificate_description,
importCertificate_inactiveDate,
importCertificate_privateKey,
importCertificate_tags,
importCertificate_usage,
importCertificate_certificate,
ImportCertificateResponse (..),
newImportCertificateResponse,
importCertificateResponse_httpStatus,
importCertificateResponse_certificateId,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
data ImportCertificate = ImportCertificate'
activeDate :: Prelude.Maybe Data.POSIX,
certificateChain :: Prelude.Maybe (Data.Sensitive Prelude.Text),
description :: Prelude.Maybe Prelude.Text,
inactiveDate :: Prelude.Maybe Data.POSIX,
privateKey :: Prelude.Maybe (Data.Sensitive Prelude.Text),
tags :: Prelude.Maybe (Prelude.NonEmpty Tag),
usage :: CertificateUsageType,
certificate :: Data.Sensitive Prelude.Text
}
deriving (Prelude.Eq, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
newImportCertificate ::
CertificateUsageType ->
Prelude.Text ->
ImportCertificate
newImportCertificate pUsage_ pCertificate_ =
ImportCertificate'
{ activeDate = Prelude.Nothing,
certificateChain = Prelude.Nothing,
description = Prelude.Nothing,
inactiveDate = Prelude.Nothing,
privateKey = Prelude.Nothing,
tags = Prelude.Nothing,
usage = pUsage_,
certificate = Data._Sensitive Lens.# pCertificate_
}
importCertificate_activeDate :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.UTCTime)
importCertificate_activeDate = Lens.lens (\ImportCertificate' {activeDate} -> activeDate) (\s@ImportCertificate' {} a -> s {activeDate = a} :: ImportCertificate) Prelude.. Lens.mapping Data._Time
importCertificate_certificateChain :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_certificateChain = Lens.lens (\ImportCertificate' {certificateChain} -> certificateChain) (\s@ImportCertificate' {} a -> s {certificateChain = a} :: ImportCertificate) Prelude.. Lens.mapping Data._Sensitive
importCertificate_description :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_description = Lens.lens (\ImportCertificate' {description} -> description) (\s@ImportCertificate' {} a -> s {description = a} :: ImportCertificate)
importCertificate_inactiveDate :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.UTCTime)
importCertificate_inactiveDate = Lens.lens (\ImportCertificate' {inactiveDate} -> inactiveDate) (\s@ImportCertificate' {} a -> s {inactiveDate = a} :: ImportCertificate) Prelude.. Lens.mapping Data._Time
importCertificate_privateKey :: Lens.Lens' ImportCertificate (Prelude.Maybe Prelude.Text)
importCertificate_privateKey = Lens.lens (\ImportCertificate' {privateKey} -> privateKey) (\s@ImportCertificate' {} a -> s {privateKey = a} :: ImportCertificate) Prelude.. Lens.mapping Data._Sensitive
importCertificate_tags :: Lens.Lens' ImportCertificate (Prelude.Maybe (Prelude.NonEmpty Tag))
importCertificate_tags = Lens.lens (\ImportCertificate' {tags} -> tags) (\s@ImportCertificate' {} a -> s {tags = a} :: ImportCertificate) Prelude.. Lens.mapping Lens.coerced
importCertificate_usage :: Lens.Lens' ImportCertificate CertificateUsageType
importCertificate_usage = Lens.lens (\ImportCertificate' {usage} -> usage) (\s@ImportCertificate' {} a -> s {usage = a} :: ImportCertificate)
importCertificate_certificate :: Lens.Lens' ImportCertificate Prelude.Text
importCertificate_certificate = Lens.lens (\ImportCertificate' {certificate} -> certificate) (\s@ImportCertificate' {} a -> s {certificate = a} :: ImportCertificate) Prelude.. Data._Sensitive
instance Core.AWSRequest ImportCertificate where
type
AWSResponse ImportCertificate =
ImportCertificateResponse
request overrides =
Request.postJSON (overrides defaultService)
response =
Response.receiveJSON
( \s h x ->
ImportCertificateResponse'
Prelude.<$> (Prelude.pure (Prelude.fromEnum s))
Prelude.<*> (x Data..:> "CertificateId")
)
instance Prelude.Hashable ImportCertificate where
hashWithSalt _salt ImportCertificate' {..} =
_salt `Prelude.hashWithSalt` activeDate
`Prelude.hashWithSalt` certificateChain
`Prelude.hashWithSalt` description
`Prelude.hashWithSalt` inactiveDate
`Prelude.hashWithSalt` privateKey
`Prelude.hashWithSalt` tags
`Prelude.hashWithSalt` usage
`Prelude.hashWithSalt` certificate
instance Prelude.NFData ImportCertificate where
rnf ImportCertificate' {..} =
Prelude.rnf activeDate
`Prelude.seq` Prelude.rnf certificateChain
`Prelude.seq` Prelude.rnf description
`Prelude.seq` Prelude.rnf inactiveDate
`Prelude.seq` Prelude.rnf privateKey
`Prelude.seq` Prelude.rnf tags
`Prelude.seq` Prelude.rnf usage
`Prelude.seq` Prelude.rnf certificate
instance Data.ToHeaders ImportCertificate where
toHeaders =
Prelude.const
( Prelude.mconcat
[ "X-Amz-Target"
Data.=# ( "TransferService.ImportCertificate" ::
Prelude.ByteString
),
"Content-Type"
Data.=# ( "application/x-amz-json-1.1" ::
Prelude.ByteString
)
]
)
instance Data.ToJSON ImportCertificate where
toJSON ImportCertificate' {..} =
Data.object
( Prelude.catMaybes
[ ("ActiveDate" Data..=) Prelude.<$> activeDate,
("CertificateChain" Data..=)
Prelude.<$> certificateChain,
("Description" Data..=) Prelude.<$> description,
("InactiveDate" Data..=) Prelude.<$> inactiveDate,
("PrivateKey" Data..=) Prelude.<$> privateKey,
("Tags" Data..=) Prelude.<$> tags,
Prelude.Just ("Usage" Data..= usage),
Prelude.Just ("Certificate" Data..= certificate)
]
)
instance Data.ToPath ImportCertificate where
toPath = Prelude.const "/"
instance Data.ToQuery ImportCertificate where
toQuery = Prelude.const Prelude.mempty
data ImportCertificateResponse = ImportCertificateResponse'
httpStatus :: Prelude.Int,
certificateId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
newImportCertificateResponse ::
Prelude.Int ->
Prelude.Text ->
ImportCertificateResponse
newImportCertificateResponse
pHttpStatus_
pCertificateId_ =
ImportCertificateResponse'
{ httpStatus =
pHttpStatus_,
certificateId = pCertificateId_
}
importCertificateResponse_httpStatus :: Lens.Lens' ImportCertificateResponse Prelude.Int
importCertificateResponse_httpStatus = Lens.lens (\ImportCertificateResponse' {httpStatus} -> httpStatus) (\s@ImportCertificateResponse' {} a -> s {httpStatus = a} :: ImportCertificateResponse)
importCertificateResponse_certificateId :: Lens.Lens' ImportCertificateResponse Prelude.Text
importCertificateResponse_certificateId = Lens.lens (\ImportCertificateResponse' {certificateId} -> certificateId) (\s@ImportCertificateResponse' {} a -> s {certificateId = a} :: ImportCertificateResponse)
instance Prelude.NFData ImportCertificateResponse where
rnf ImportCertificateResponse' {..} =
Prelude.rnf httpStatus
`Prelude.seq` Prelude.rnf certificateId
|
;;; Taxon
;;; Classes, generic functions, methods and functions for working
;;; with taxonomy data
;;;
;;; Copyright (c) 2006-2008 Cyrus Harmon
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
(in-package :bio-taxonomy)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Taxon
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; rucksack storage and parsing parameters
;; Directory holding the unpacked NCBI taxonomy dump (nodes.dmp, names.dmp).
(defparameter *taxonomy-data-directory*
  (merge-pathnames #p"projects/cl-bio-data/taxonomy/"
                   (user-homedir-pathname)))

;; Symbol macros (not defparameters) so the paths track any runtime change
;; to *taxonomy-data-directory*.
(define-symbol-macro *tax-nodes-file*
    (merge-pathnames #p"nodes.dmp"
                     *taxonomy-data-directory*))

(define-symbol-macro *tax-names-file*
    (merge-pathnames #p"names.dmp"
                     *taxonomy-data-directory*))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; taxon classes
;; In-memory representation of one row of the NCBI nodes.dmp file.  The
;; persistent, indexed counterpart is P-TAXON below.
(defclass taxon (bio-object)
  ((tax-id :accessor tax-id :initarg :tax-id)
   (parent-id :accessor parent-id :initarg :parent-id)
   (rank :accessor rank :initarg :rank)
   (embl-code :accessor embl-code :initarg :embl-code)
   (division-id :accessor division-id :initarg :division-id)
   (division-inherited :accessor division-inherited :initarg :division-inherited)
   (genetic-code-id :accessor genetic-code-id :initarg :genetic-code-id)
   (genetic-code-inherited :accessor genetic-code-inherited :initarg :genetic-code-inherited)
   (mitochondrial-genetic-code-id
    :accessor mitochondrial-genetic-code-id
    :initarg :mitochondrial-genetic-code-id)
   (mitochondrial-genetic-code-inherited
    :accessor mitochondrial-genetic-code-inherited
    :initarg :mitochondrial-genetic-code-inherited)
   (genbank-hidden :accessor genbank-hidden :initarg :genbank-hidden)
   (hidden-subtree :accessor hidden-subtree :initarg :hidden-subtree)
   (comments :accessor comments :initarg :comments)))
;; One row of names.dmp: a (possibly non-unique) name attached to a taxon,
;; classified by NAME-CLASS (e.g. "scientific name" -- see
;; GET-PREFERRED-TAX-NAME below).
(defclass tax-name ()
  ((tax-id :accessor tax-id :initarg :tax-id)
   (name :accessor name :initarg :name)
   (unique-name :accessor unique-name :initarg :unique-name)
   (name-class :accessor name-class :initarg :name-class)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; rucksack macros
;; Open the shared *bio-rucksack* store, bind it to RUCKSACK, and run BODY
;; inside a fresh transaction.
(defmacro with-bio-rucksack ((rucksack) &body body)
  `(rucksack:with-rucksack (,rucksack *bio-rucksack*)
     (rucksack:with-transaction ()
       (progn
         ,@body))))
(defmacro maybe-with-rucksack ((rucksack) &body body)
  "Run BODY using RUCKSACK when the caller already holds an open rucksack
\(non-NIL), otherwise open the shared store via WITH-BIO-RUCKSACK.  Lets
the GET-* accessors below compose inside one transaction.

Fixed: the already-open test previously expanded to the literal symbol
RUCKSACK (missing unquote) rather than the caller-supplied variable; it
only worked because every call site happened to name its variable
RUCKSACK."
  `(if ,rucksack
       (progn
         ,@body)
       (with-bio-rucksack (,rucksack)
         ,@body)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; utility macros and functions
(defun tree-map (fn x)
  "Apply FN to every non-NIL atom of the cons tree X, preserving the
tree's structure; NIL leaves stay NIL."
  (cond ((null x) nil)
        ((atom x) (funcall fn x))
        (t (cons (tree-map fn (car x))
                 (tree-map fn (cdr x))))))
;; Interpret a dump-file flag string as a generalized boolean: NIL for "0",
;; T for any other integer text.  ARG is inserted exactly once, so it is
;; parsed once per expansion.
(defmacro string-int-boolean (arg)
  `(not (zerop (parse-integer ,arg))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; rucksack persistent classes
;; Persistent, indexed mirror of TAXON.  TAX-ID is a unique number index;
;; PARENT-ID is a (non-unique) number index used for child lookups.
;; Defined inside an open rucksack/transaction -- NOTE(review): presumably
;; because the persistent-class metaclass registers its indexes against the
;; store at class-definition time; confirm against rucksack's docs.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (rucksack:with-rucksack (rucksack *bio-rucksack*)
    (rucksack:with-transaction ()
      (defclass p-taxon (taxon)
        ((tax-id :accessor tax-id :initarg :tax-id :unique t :index :number-index)
         (parent-id :accessor parent-id :initarg :parent-id :index :number-index)
         (rank :accessor rank :initarg :rank)
         embl-code
         division-id
         division-inherited
         genetic-code-id
         genetic-code-inherited
         mitochondrial-genetic-code-id
         mitochondrial-genetic-code-inherited
         genbank-hidden
         hidden-subtree
         comments)
        (:index t)
        (:metaclass rucksack:persistent-class)))))
;; Persistent, indexed mirror of TAX-NAME.  TAX-ID (number index) supports
;; per-taxon name lists; NAME (string index) supports exact and prefix
;; lookups in LOOKUP-TAX-NAME.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (rucksack:with-rucksack (rucksack *bio-rucksack*)
    (rucksack:with-transaction ()
      (defclass p-tax-name (tax-name)
        ((tax-id :accessor tax-id :initarg :tax-id :index :number-index)
         (name :accessor name :initarg :name :index :string-index)
         unique-name
         name-class)
        (:index t)
        (:metaclass rucksack:persistent-class)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; flat file parsing
;; Rows committed per transaction while bulk-loading the dump files.
(defparameter *default-batch-size* 5000)

(defun parse-tax-nodes (&key (file *tax-nodes-file*))
  "Load nodes.dmp (fields separated by tab-|-tab) into P-TAXON instances,
committing one rucksack transaction per *DEFAULT-BATCH-SIZE* rows."
  (let ((batch-size *default-batch-size*))
    (flet ((parse-batch (stream)
             ;; Returns the last line read: non-NIL means more input may
             ;; remain; NIL signals end of file to the driver loop below.
             (rucksack:with-rucksack (rucksack *bio-rucksack*)
               (print 'new-transaction)
               (rucksack:with-transaction (:inhibit-gc t)
                 (loop for i below batch-size
                       for line = (read-line stream nil nil)
                       while line
                       do
                          (let ((strings (cl-ppcre:split "\\t\\|\\t" line)))
                            #+nil (print strings)
                            (destructuring-bind
                                (tax-id
                                 parent-id
                                 rank
                                 embl-code
                                 division-id
                                 division-inherited
                                 genetic-code-id
                                 genetic-code-inherited
                                 mitochondrial-genetic-code-id
                                 mitochondrial-genetic-code-inherited
                                 genbank-hidden
                                 hidden-subtree
                                 comments)
                                strings
                              (let ((tax-id (parse-integer tax-id))
                                    (parent-id (parse-integer parent-id)))
                                (make-instance 'p-taxon
                                               :tax-id tax-id
                                               :parent-id parent-id
                                               :rank rank
                                               :embl-code embl-code
                                               :division-id (parse-integer division-id)
                                               :division-inherited (string-int-boolean
                                                                    division-inherited)
                                               :genetic-code-id (parse-integer genetic-code-id)
                                               :genetic-code-inherited (string-int-boolean
                                                                        genetic-code-inherited)
                                               :mitochondrial-genetic-code-id (parse-integer
                                                                               mitochondrial-genetic-code-id)
                                               :mitochondrial-genetic-code-inherited
                                               (string-int-boolean mitochondrial-genetic-code-inherited)
                                               :genbank-hidden (string-int-boolean genbank-hidden)
                                               :hidden-subtree (string-int-boolean hidden-subtree)
                                               ;; Strip the trailing "\t|" row terminator.
                                               :comments (subseq comments 0 (- (length comments) 2))))))
                       finally (return line))))))
      (with-open-file (stream file)
        (loop with eof = nil
              while (not eof)
              ;; for i by batch-size below 20000
              ;; NOTE(review): restored as a comment; appears to be a
              ;; disabled debugging bound -- confirm against upstream.
              do (setf eof (not (parse-batch stream))))))))
(defun parse-tax-names (&key (file *tax-names-file*))
  "Load names.dmp (fields separated by tab-|-tab) into P-TAX-NAME
instances, committing one rucksack transaction per *DEFAULT-BATCH-SIZE*
rows."
  (let ((batch-size *default-batch-size*))
    (flet ((parse-batch (stream)
             ;; Returns the last line read: non-NIL means more input may
             ;; remain; NIL signals end of file to the driver loop below.
             (rucksack:with-rucksack (rucksack *bio-rucksack*)
               (print 'new-transaction)
               (rucksack:with-transaction (:inhibit-gc t)
                 (loop for i below batch-size
                       for line = (read-line stream nil nil)
                       while line
                       do
                          (let ((strings (cl-ppcre:split "\\t\\|\\t" line)))
                            #+nil (print strings)
                            (destructuring-bind
                                (tax-id
                                 name
                                 unique-name
                                 name-class)
                                strings
                              (let ((tax-id (parse-integer tax-id))
                                    ;; names.dmp leaves unique-name empty when
                                    ;; NAME itself is already unique.
                                    (unique-name (if (plusp (length unique-name))
                                                     unique-name
                                                     name)))
                                ;; Strip the trailing "\t|" row terminator.
                                (make-instance 'p-tax-name
                                               :tax-id tax-id
                                               :name name
                                               :unique-name unique-name
                                               :name-class (subseq name-class 0 (- (length name-class) 2))))))
                       finally (return line))))))
      (with-open-file (stream file)
        (loop with eof = nil
              ;; for i by batch-size below 20000
              ;; NOTE(review): restored as a comment; appears to be a
              ;; disabled debugging bound -- confirm against upstream.
              while (not eof)
              do (setf eof (not (parse-batch stream))))))))
(defun load-taxon-data ()
  "Bulk-load the taxonomy dump files into the rucksack: nodes, then names."
  (parse-tax-nodes)
  (parse-tax-names))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; taxonomy data retrieval
;;; since tax-id is unique, just return the single tax node
(defun get-tax-node (id &key rucksack)
  "Return the P-TAXON whose TAX-ID is ID, or NIL if none is stored.
RUCKSACK, when supplied, must be an already-open rucksack; otherwise a
fresh transaction is opened."
  (maybe-with-rucksack (rucksack)
    (let ((objects))
      (rucksack:rucksack-map-slot
       rucksack 'p-taxon 'tax-id
       (lambda (x)
         (push x objects))
       :equal id)
      (car objects))))
(defun get-tax-node-children (id &key rucksack)
  "Return the taxon nodes whose PARENT-ID is ID, excluding the
self-referential root row (the root lists itself as its own parent)."
  (maybe-with-rucksack (rucksack)
    (let ((objects))
      (rucksack:rucksack-map-slot
       rucksack 'p-taxon 'parent-id
       (lambda (x)
         (unless (= id (tax-id x))
           (push x objects)))
       :equal id)
      (nreverse objects))))
(defun get-sibling-tax-nodes (id &key rucksack)
  "Return the taxon nodes sharing ID's parent (ID's own node included).
Returns NIL when no node with ID exists, instead of erroring on
\(PARENT-ID NIL) as the previous version did.  RUCKSACK, when supplied,
keeps both lookups in the caller's transaction (new, optional, so
existing callers are unaffected)."
  (maybe-with-rucksack (rucksack)
    (let ((taxnode (get-tax-node id :rucksack rucksack)))
      (when taxnode
        (get-tax-node-children (parent-id taxnode) :rucksack rucksack)))))
(defun get-tax-node-ancestors (id &key rucksack)
  "Return the list of tax-ids from ID up to the root, inclusive.  The
root is detected by a node being its own parent.  Returns NIL when ID
does not name a stored taxon."
  (maybe-with-rucksack (rucksack)
    (labels ((%get-tax-node-ancestors (id)
               (let ((node (get-tax-node id :rucksack rucksack)))
                 (when node
                   (if (= (parent-id node) id)
                       (list id)
                       (cons id (%get-tax-node-ancestors (parent-id node))))))))
      (%get-tax-node-ancestors id))))
(defun get-tax-node-descendents (id &key rucksack)
  "Return the subtree of taxa below ID as nested lists, each element of
the form (NODE . DESCENDENTS-OF-NODE).

Fixed: the previous version contained a body-less (UNLESS (= (PARENT-ID
NODE) ID)) form -- a misplaced-paren no-op that never guarded anything.
It is removed: every child returned by GET-TAX-NODE-CHILDREN has
PARENT-ID = ID by construction, and that function already filters the
self-referential root row, so plain recursion terminates."
  (labels ((%descend (id rucksack)
             (mapcar (lambda (node)
                       (cons node (%descend (tax-id node) rucksack)))
                     (get-tax-node-children id :rucksack rucksack))))
    (maybe-with-rucksack (rucksack)
      (%descend id rucksack))))
(defun get-tax-names (id &key rucksack)
  "Return all P-TAX-NAME records attached to taxon ID, in store order."
  (maybe-with-rucksack (rucksack)
    (let ((objects))
      (rucksack:rucksack-map-slot
       rucksack 'p-tax-name 'tax-id
       (lambda (x)
         (push x objects))
       :equal id)
      (nreverse objects))))
(defun get-preferred-tax-name (id &key rucksack)
  "Return the \"scientific name\" string for taxon ID, falling back to
the first stored name; NIL when the taxon has no names at all."
  (maybe-with-rucksack (rucksack)
    (let ((names (get-tax-names id :rucksack rucksack)))
      (when names
        (name (or (find "scientific name" names :test 'equal :key #'name-class)
                  (car names)))))))
(defun lookup-tax-name (name &key rucksack partial)
  "Return the P-TAX-NAME records whose name equals NAME, or -- when
PARTIAL is true -- begins with NAME (a range scan over the string index).
NOTE(review): the exclusive upper bound for the prefix scan is built by
incrementing the code of NAME's last character; this assumes that
character is not the maximum code point and that the index's collation
matches CHAR-CODE order -- confirm for non-ASCII queries."
  (maybe-with-rucksack (rucksack)
    (let ((objects))
      (apply
       #'rucksack:rucksack-map-slot
       rucksack 'p-tax-name 'name
       (lambda (x)
         (push x objects))
       (if partial
           (list :min name :include-min t
                 :max (let ((max-name (copy-seq name))
                            (len (length name)))
                        (setf (elt max-name (1- len))
                              (code-char (1+ (char-code (elt max-name (1- len))))))
                        max-name))
           (list :equal name)))
      (nreverse objects))))
(defun get-tax-node-ancestor-names (id &key rucksack)
  "Return the \"scientific name\" of each ancestor of taxon ID (self
first, root last); NIL stands in for an ancestor without a scientific
name.

Fixed: each ancestor id was previously re-fetched via GET-TAX-NODE only
to read back the very same TAX-ID, costing one extra index lookup per
level and erroring if the node had vanished; the ancestor id is now used
directly."
  (maybe-with-rucksack (rucksack)
    (mapcar (lambda (ancestor-id)
              (let ((tax-name
                      (find "scientific name"
                            (get-tax-names ancestor-id :rucksack rucksack)
                            :test 'equal :key #'name-class)))
                (when tax-name (name tax-name))))
            (get-tax-node-ancestors id :rucksack rucksack))))
| null | https://raw.githubusercontent.com/slyrus/cl-bio/e6de2bc7f4accaa11466902407e43fae3184973f/taxonomy/taxon.lisp | lisp | Taxon
Classes, generic functions, methods and functions for working
with taxonomy data
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Taxon
rucksack storage and parsing parameters
taxon classes
rucksack macros
utility macros and functions
rucksack persistent classes
flat file parsing
taxonomy data retrieval
since tax-id is unique, just return the single tax node | Copyright ( c ) 2006 - 2008 ( )
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :bio-taxonomy)
(defparameter *taxonomy-data-directory*
(merge-pathnames #p"projects/cl-bio-data/taxonomy/"
(user-homedir-pathname)))
(define-symbol-macro *tax-nodes-file*
(merge-pathnames #p"nodes.dmp"
*taxonomy-data-directory*))
(define-symbol-macro *tax-names-file*
(merge-pathnames #p"names.dmp"
*taxonomy-data-directory*))
(defclass taxon (bio-object)
((tax-id :accessor tax-id :initarg :tax-id)
(parent-id :accessor parent-id :initarg :parent-id)
(rank :accessor rank :initarg :rank)
(embl-code :accessor embl-code :initarg :embl-code)
(division-id :accessor division-id :initarg :division-id)
(division-inherited :accessor division-inherited :initarg :division-inherited)
(genetic-code-id :accessor genetic-code-id :initarg :genetic-code-id)
(genetic-code-inherited :accessor genetic-code-inherited :initarg :genetic-code-inherited)
(mitochondrial-genetic-code-id
:accessor mitochondrial-genetic-code-id
:initarg :mitochondrial-genetic-code-id)
(mitochondrial-genetic-code-inherited
:accessor mitochondrial-genetic-code-inherited
:initarg :mitochondrial-genetic-code-inherited)
(genbank-hidden :accessor genbank-hidden :initarg :genbank-hidden)
(hidden-subtree :accessor hidden-subtree :initarg :hidden-subtree)
(comments :accessor comments :initarg :comments)))
(defclass tax-name ()
((tax-id :accessor tax-id :initarg :tax-id)
(name :accessor name :initarg :name)
(unique-name :accessor unique-name :initarg :unique-name)
(name-class :accessor name-class :initarg :name-class)))
(defmacro with-bio-rucksack ((rucksack) &body body)
`(rucksack:with-rucksack (,rucksack *bio-rucksack*)
(rucksack:with-transaction ()
(progn
,@body))))
(defmacro maybe-with-rucksack ((rucksack) &body body)
`(if rucksack
(progn
,@body)
(with-bio-rucksack (,rucksack)
,@body)))
(defun tree-map (fn x)
(if (atom x)
(when x (funcall fn x))
(cons (tree-map fn (car x))
(tree-map fn (cdr x)))))
(defmacro string-int-boolean (arg)
`(not (zerop (parse-integer ,arg))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(rucksack:with-rucksack (rucksack *bio-rucksack*)
(rucksack:with-transaction ()
(defclass p-taxon (taxon)
((tax-id :accessor tax-id :initarg :tax-id :unique t :index :number-index)
(parent-id :accessor parent-id :initarg :parent-id :index :number-index)
(rank :accessor rank :initarg :rank)
embl-code
division-id
division-inherited
genetic-code-id
genetic-code-inherited
mitochondrial-genetic-code-id
mitochondrial-genetic-code-inherited
genbank-hidden
hidden-subtree
comments)
(:index t)
(:metaclass rucksack:persistent-class)))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(rucksack:with-rucksack (rucksack *bio-rucksack*)
(rucksack:with-transaction ()
(defclass p-tax-name (tax-name)
((tax-id :accessor tax-id :initarg :tax-id :index :number-index)
(name :accessor name :initarg :name :index :string-index)
unique-name
name-class)
(:index t)
(:metaclass rucksack:persistent-class)))))
(defparameter *default-batch-size* 5000)
(defun parse-tax-nodes (&key (file *tax-nodes-file*))
(let ((batch-size *default-batch-size*))
(flet ((parse-batch (stream)
(rucksack:with-rucksack (rucksack *bio-rucksack*)
(print 'new-transaction)
(rucksack:with-transaction (:inhibit-gc t)
(loop for i below batch-size
for line = (read-line stream nil nil)
while line
do
(let ((strings (cl-ppcre:split "\\t\\|\\t" line)))
#+nil (print strings)
(destructuring-bind
(tax-id
parent-id
rank
embl-code
division-id
division-inherited
genetic-code-id
genetic-code-inherited
mitochondrial-genetic-code-id
mitochondrial-genetic-code-inherited
genbank-hidden
hidden-subtree
comments)
strings
(let ((tax-id (parse-integer tax-id))
(parent-id (parse-integer parent-id)))
(make-instance 'p-taxon
:tax-id tax-id
:parent-id parent-id
:rank rank
:embl-code embl-code
:division-id (parse-integer division-id)
:division-inherited (string-int-boolean
division-inherited)
:genetic-code-id (parse-integer genetic-code-id)
:genetic-code-inherited (string-int-boolean
genetic-code-inherited)
:mitochondrial-genetic-code-id (parse-integer
mitochondrial-genetic-code-id)
:mitochondrial-genetic-code-inherited
(string-int-boolean mitochondrial-genetic-code-inherited)
:genbank-hidden (string-int-boolean genbank-hidden)
:hidden-subtree (string-int-boolean hidden-subtree)
:comments (subseq comments 0 (- (length comments) 2))))))
finally (return line))))))
(with-open-file (stream file)
(loop with eof = nil
while (not eof)
for i by batch - size below 20000
do (setf eof (not (parse-batch stream))))))))
(defun parse-tax-names (&key (file *tax-names-file*))
(let ((batch-size *default-batch-size*))
(flet ((parse-batch (stream)
(rucksack:with-rucksack (rucksack *bio-rucksack*)
(print 'new-transaction)
(rucksack:with-transaction (:inhibit-gc t)
(loop for i below batch-size
for line = (read-line stream nil nil)
while line
do
(let ((strings (cl-ppcre:split "\\t\\|\\t" line)))
#+nil (print strings)
(destructuring-bind
(tax-id
name
unique-name
name-class)
strings
(let ((tax-id (parse-integer tax-id))
(unique-name (if (plusp (length unique-name))
unique-name
name)))
(make-instance 'p-tax-name
:tax-id tax-id
:name name
:unique-name unique-name
:name-class (subseq name-class 0 (- (length name-class) 2))))))
finally (return line))))))
(with-open-file (stream file)
(loop with eof = nil
for i by batch - size below 20000
while (not eof)
do (setf eof (not (parse-batch stream))))))))
(defun load-taxon-data ()
(parse-tax-nodes)
(parse-tax-names))
(defun get-tax-node (id &key rucksack)
(maybe-with-rucksack (rucksack)
(let ((objects))
(rucksack:rucksack-map-slot
rucksack 'p-taxon 'tax-id
(lambda (x)
(push x objects))
:equal id)
(car objects))))
(defun get-tax-node-children (id &key rucksack)
(maybe-with-rucksack (rucksack)
(let ((objects))
(rucksack:rucksack-map-slot
rucksack 'p-taxon 'parent-id
(lambda (x)
(unless (= id (tax-id x))
(push x objects)))
:equal id)
(nreverse objects))))
(defun get-sibling-tax-nodes (id)
(let ((taxnode (get-tax-node id)))
(get-tax-node-children (parent-id taxnode))))
(defun get-tax-node-ancestors (id &key rucksack)
(maybe-with-rucksack (rucksack)
(labels ((%get-tax-node-ancestors (id)
(let ((node (get-tax-node id :rucksack rucksack)))
(when node (if (= (parent-id node) id)
(list id)
(cons id (%get-tax-node-ancestors (parent-id node))))))))
(%get-tax-node-ancestors id))))
(defun get-tax-node-descendents (id &key rucksack)
(labels ((%get-tax-node-descendents (id rucksack)
(let ((children (get-tax-node-children id :rucksack rucksack)))
(mapcar #'(lambda (node)
(when node (unless (= (parent-id node) id))
(let ((subs (%get-tax-node-descendents (tax-id node) rucksack)))
(cons node subs))))
children))))
(maybe-with-rucksack (rucksack)
(%get-tax-node-descendents id rucksack))))
(defun get-tax-names (id &key rucksack)
(maybe-with-rucksack (rucksack)
(let ((objects))
(rucksack:rucksack-map-slot
rucksack 'p-tax-name 'tax-id
(lambda (x)
(push x objects))
:equal id)
(nreverse objects))))
(defun get-preferred-tax-name (id &key rucksack)
(maybe-with-rucksack (rucksack)
(let ((names (get-tax-names id :rucksack rucksack)))
(when names
(name (or (find "scientific name" names :test 'equal :key #'name-class)
(car names)))))))
(defun lookup-tax-name (name &key rucksack partial)
(maybe-with-rucksack (rucksack)
(let ((objects))
(apply
#'rucksack:rucksack-map-slot
rucksack 'p-tax-name 'name
(lambda (x)
(push x objects))
(if partial
(list :min name :include-min t
:max (let ((max-name (copy-seq name))
(len (length name)))
(setf (elt max-name (1- len))
(code-char (1+ (char-code (elt max-name (1- len))))))
max-name))
(list :equal name)))
(nreverse objects))))
(defun get-tax-node-ancestor-names (id &key rucksack)
(maybe-with-rucksack (rucksack)
(mapcar #'(lambda (x)
(let ((name
(let ((names (get-tax-names
(tax-id
(get-tax-node x))
:rucksack rucksack)))
(find "scientific name" names :test 'equal :key #'name-class))))
(when name (name name))))
(get-tax-node-ancestors id :rucksack rucksack))))
|
887a75e538840835f0b61b7bd30a771f1d3e6f8ae930c61be3ff6535e21b203f | icicle-lang/zebra-ambiata | Index.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Zebra.Factset.Block.Index (
BlockIndex(..)
, Tombstone(..)
, indicesOfFacts
) where
import Data.Vector.Unboxed.Deriving (derivingUnbox)
import GHC.Generics (Generic)
import P
import qualified X.Data.Vector as Boxed
import qualified X.Data.Vector.Unboxed as Unboxed
import Zebra.Factset.Data
import Zebra.Factset.Fact
-- FIXME Might be good if this were using 3x Storable.Vector instead of a
FIXME single Unboxed . Vector , as it would make translation to C smoother .
data BlockIndex =
BlockIndex {
indexTime :: !Time
, indexFactsetId :: !FactsetId
, indexTombstone :: !Tombstone
} deriving (Eq, Ord, Show, Generic)
indicesOfFacts :: Boxed.Vector Fact -> Unboxed.Vector BlockIndex
indicesOfFacts =
let
fromFact fact =
BlockIndex
(factTime fact)
(factFactsetId fact)
(maybe' Tombstone (const NotTombstone) $ factValue fact)
in
Unboxed.convert . fmap fromFact
derivingUnbox "BlockIndex"
[t| BlockIndex -> (Time, FactsetId, Tombstone) |]
[| \(BlockIndex x y z) -> (x, y, z) |]
[| \(x, y, z) -> BlockIndex x y z |]
| null | https://raw.githubusercontent.com/icicle-lang/zebra-ambiata/394ee5f98b4805df2c76abb52cdaad9fd7825f81/zebra-core/src/Zebra/Factset/Block/Index.hs | haskell | # OPTIONS_GHC -funbox-strict-fields #
FIXME Might be good if this were using 3x Storable.Vector instead of a | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module Zebra.Factset.Block.Index (
BlockIndex(..)
, Tombstone(..)
, indicesOfFacts
) where
import Data.Vector.Unboxed.Deriving (derivingUnbox)
import GHC.Generics (Generic)
import P
import qualified X.Data.Vector as Boxed
import qualified X.Data.Vector.Unboxed as Unboxed
import Zebra.Factset.Data
import Zebra.Factset.Fact
FIXME single Unboxed . Vector , as it would make translation to C smoother .
data BlockIndex =
BlockIndex {
indexTime :: !Time
, indexFactsetId :: !FactsetId
, indexTombstone :: !Tombstone
} deriving (Eq, Ord, Show, Generic)
indicesOfFacts :: Boxed.Vector Fact -> Unboxed.Vector BlockIndex
indicesOfFacts =
let
fromFact fact =
BlockIndex
(factTime fact)
(factFactsetId fact)
(maybe' Tombstone (const NotTombstone) $ factValue fact)
in
Unboxed.convert . fmap fromFact
derivingUnbox "BlockIndex"
[t| BlockIndex -> (Time, FactsetId, Tombstone) |]
[| \(BlockIndex x y z) -> (x, y, z) |]
[| \(x, y, z) -> BlockIndex x y z |]
|
f66f50437f607b8b783095535f05d945fc16903c528c742adfdd747060bab026 | RJMetrics/sweet-liberty | t_request_interpretation.clj | (ns com.rjmetrics.sweet-liberty.unit.t-request-interpretation
(:require [midje.sweet :refer :all]
[com.rjmetrics.sweet-liberty.request-interpretation :refer :all]))
(facts "about extract-merged-body-params"
(fact "just form-params"
(extract-merged-body-params {:form-params {:a 1}}
{})
=> {:a 1})
(fact "just body-params as a map"
(extract-merged-body-params {:body-params {:a 1}}
{})
=> {:a 1})
(fact "just body-params as a vector"
(extract-merged-body-params {:body-params [{:a 1} {:a 2}]}
{})
=> [{:a 1} {:a 2}])
(fact "just url-params"
(extract-merged-body-params {}
{:a 1})
=> nil)
(fact "form-params and body-params as a map"
(extract-merged-body-params {:form-params {:a 1 :b 3}
:body-params {:a 2 :c 4}}
{})
=> {:a 1 :b 3})
(fact "form-params and body-params as a vector"
(extract-merged-body-params {:form-params {:a 1 :b 3}
:body-params [{:a 2 :c 4}{:a 5 :c 6}]}
{})
=> {:a 1 :b 3})
(fact "form-params and url-params"
(extract-merged-body-params {:form-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 1 :b 3 :c 4})
(fact "body-params as map and url-params"
(extract-merged-body-params {:body-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 1 :b 3 :c 4})
(fact "body-params as vector and url-params"
(extract-merged-body-params {:body-params [{:a 2 :c 4}{:a 5 :c 6}]}
{:a 2 :c 4})
=> [{:a 2 :c 4}{:a 5 :c 6}]))
(facts "about extract-merged-query-params"
(fact "just query-params"
(extract-merged-query-params {:query-params {:a 1}}
{})
=> {:a 1})
(fact "just route-params"
(extract-merged-query-params {:route-params {:a 1}}
{})
=> {:a 1})
(fact "just url-params"
(extract-merged-query-params {}
{:a 1})
=> {:a 1})
(fact "query-params and route-params"
(extract-merged-query-params {:query-params {:a 1 :b 3}
:route-params {:a 2 :c 4}}
{})
=> {:a 2 :b 3 :c 4})
(fact "query-params and url-params"
(extract-merged-query-params {:query-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 2 :b 3 :c 4})
(fact "route-params and url-params"
(extract-merged-query-params {:route-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 2 :b 3 :c 4})) | null | https://raw.githubusercontent.com/RJMetrics/sweet-liberty/812a1caee1a6ef2053d0545a9e05d83e03011212/test/com/rjmetrics/sweet_liberty/unit/t_request_interpretation.clj | clojure | (ns com.rjmetrics.sweet-liberty.unit.t-request-interpretation
(:require [midje.sweet :refer :all]
[com.rjmetrics.sweet-liberty.request-interpretation :refer :all]))
(facts "about extract-merged-body-params"
(fact "just form-params"
(extract-merged-body-params {:form-params {:a 1}}
{})
=> {:a 1})
(fact "just body-params as a map"
(extract-merged-body-params {:body-params {:a 1}}
{})
=> {:a 1})
(fact "just body-params as a vector"
(extract-merged-body-params {:body-params [{:a 1} {:a 2}]}
{})
=> [{:a 1} {:a 2}])
(fact "just url-params"
(extract-merged-body-params {}
{:a 1})
=> nil)
(fact "form-params and body-params as a map"
(extract-merged-body-params {:form-params {:a 1 :b 3}
:body-params {:a 2 :c 4}}
{})
=> {:a 1 :b 3})
(fact "form-params and body-params as a vector"
(extract-merged-body-params {:form-params {:a 1 :b 3}
:body-params [{:a 2 :c 4}{:a 5 :c 6}]}
{})
=> {:a 1 :b 3})
(fact "form-params and url-params"
(extract-merged-body-params {:form-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 1 :b 3 :c 4})
(fact "body-params as map and url-params"
(extract-merged-body-params {:body-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 1 :b 3 :c 4})
(fact "body-params as vector and url-params"
(extract-merged-body-params {:body-params [{:a 2 :c 4}{:a 5 :c 6}]}
{:a 2 :c 4})
=> [{:a 2 :c 4}{:a 5 :c 6}]))
(facts "about extract-merged-query-params"
(fact "just query-params"
(extract-merged-query-params {:query-params {:a 1}}
{})
=> {:a 1})
(fact "just route-params"
(extract-merged-query-params {:route-params {:a 1}}
{})
=> {:a 1})
(fact "just url-params"
(extract-merged-query-params {}
{:a 1})
=> {:a 1})
(fact "query-params and route-params"
(extract-merged-query-params {:query-params {:a 1 :b 3}
:route-params {:a 2 :c 4}}
{})
=> {:a 2 :b 3 :c 4})
(fact "query-params and url-params"
(extract-merged-query-params {:query-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 2 :b 3 :c 4})
(fact "route-params and url-params"
(extract-merged-query-params {:route-params {:a 1 :b 3}}
{:a 2 :c 4})
=> {:a 2 :b 3 :c 4})) | |
e481b17bccd462379abca3a987b650466a99565ab1e0f2a7d5baa8d1f6365560 | ocaml-flambda/ocaml-jst | quickcheck_lists_arrays_haskell_python.ml | -*- compile - command : " ocamlopt -w + A-4 - 40 - 42 - 44 str.cmxa unix.cmxa quickcheck_lists_arrays_haskell_python.ml -o quickcheck - lists - arrays - haskell - python & & ./quickcheck - lists - arrays - haskell - python " ; -*-
CR aspectorzabusky : This file was great for validating comprehensions , but we
ca n't put it in the compiler test suite : it spins up three different REPL
processes , one each for , Python , and OCaml , and we ca n't count on the
first two existing . But it would be a shame to delete this code and just
leave ` comprehensions_from_quickcheck.ml ` ; if things change , it 's good to
have access to QuickCheck . What should we do with this , do we think ?
can't put it in the compiler test suite: it spins up three different REPL
processes, one each for Haskell, Python, and OCaml, and we can't count on the
first two existing. But it would be a shame to delete this code and just
leave `comprehensions_from_quickcheck.ml`; if things change, it's good to
have access to QuickCheck. What should we do with this, do we think? *)
module No_polymorphic_compare = struct
let ( = ) = Int.equal
let ( < ) x y = Int.compare x y < 0
let ( > ) x y = Int.compare x y > 0
let ( <= ) x y = Int.compare x y <= 0
let ( >= ) x y = Int.compare x y >= 0
end
open No_polymorphic_compare
module Util = struct
module List_monad = struct
let pure x = [x]
let bind xs f = List.concat_map f xs
let (let*) = bind
let (let+) xs f = List.map f xs
(* I think this is right *)
let (and*) xs ys =
let* x = xs in
let+ y = ys in
x,y
let (and+) = (and*)
let rec traverse f = function
| [] ->
pure []
| x :: xs ->
let+ y = f x
and+ ys = traverse f xs in
y :: ys
end
let rec take_while p = function
| x :: xs when p x -> x :: take_while p xs
| _ -> []
let guard c x = if c then [x] else []
let max x y = if x > y then x else y
let range_to start stop =
List.init (max 0 (stop - start + 1)) (fun i -> start + i)
let range_downto start stop =
List.init (max 0 (start - stop + 1)) (fun i -> start - i)
(* For repeatability *)
external random_seed : unit -> int array = "caml_sys_random_seed"
let output_line oc str = begin
output_string oc str;
output_char oc '\n';
flush oc
end
end
module QuickCheck = struct
type 'a prop_result =
| OK
| Failed_with of 'a
type 'a failure_data =
| Data of 'a
| Exception of exn
type ('a, 'b) failure =
{ counterexample : 'a
; data : 'b failure_data
; tests : int
; shrinks : int }
type ('a, 'b) result =
| Passed
| Failed of ('a, 'b) failure
module Print (Printf : sig
type destination
type result
val printf : destination -> ('a, destination, result) format -> 'a
end) =
struct
(* This only works with some words but that's ok *)
let quantity dst (n, thing) =
Printf.printf dst "%d %s%s" n thing (if n = 1 then "" else "s")
let tests dst tests = quantity dst (tests, "test")
let and_shrinks dst = function
| 0 -> Printf.printf dst ""
| shrinks -> Printf.printf dst " and %a" quantity (shrinks, "shrink")
let and_shrinks_and_iteration dst = function
| shrinks, 0 ->
and_shrinks dst shrinks
| shrinks, iteration ->
Printf.printf dst " and %d.%d shrinks" shrinks iteration
end
module SPrint = Print (struct
type destination = unit
type result = string
let printf () = Printf.sprintf
end)
module FPrint = Print (struct
type destination = out_channel
type result = unit
let printf = Printf.fprintf
end)
module Reporter = struct
type t =
{ report_test : int -> unit
; report_shrink : tests:int -> shrinks:int -> iteration:int -> unit
; finish_reporting : unit -> unit
}
let silent =
{ report_test = Fun.const ()
; report_shrink = (fun ~tests:_ ~shrinks:_ ~iteration:_ -> ())
; finish_reporting = Fun.const ()
}
type interactive_output_mode =
| Backspace_moves
| Backspace_deletes
let interactive_output_mode oc =
match Unix.(isatty (descr_of_out_channel oc)) with
| true ->
Some (if Option.is_some (Sys.getenv_opt "INSIDE_EMACS") ||
Option.is_some (Sys.getenv_opt "EMACS")
then Backspace_deletes
else Backspace_moves)
| false | exception _ ->
None
let interactive_main iom oc =
This line - clearing technique was taken from Haskell 's QuickCheck ,
although sadly it does n't work in Emacs
although sadly it doesn't work in Emacs *)
let string_as_char s c = String.make (String.length s) c in
let backspace_prev_line = ref "" in
let clear_prev_line = match iom with
| Backspace_moves -> fun () ->
output_string oc (string_as_char !backspace_prev_line ' ');
output_string oc !backspace_prev_line
| Backspace_deletes -> fun () ->
output_string oc !backspace_prev_line
in
let move_cursor_for_this_line = match iom with
| Backspace_moves -> output_string oc
| Backspace_deletes -> Fun.const ()
in
let report fstr =
Printf.ksprintf
(fun line ->
clear_prev_line ();
let backspace_this_line = string_as_char line '\b' in
output_string oc line;
move_cursor_for_this_line backspace_this_line;
flush oc;
backspace_prev_line := backspace_this_line)
fstr
in
{ report_test = (fun tests ->
report "(%a...)" SPrint.tests tests)
; report_shrink = (fun ~tests ~shrinks ~iteration ->
report "Failed! (%a%a...)"
SPrint.tests tests
SPrint.and_shrinks_and_iteration (shrinks, iteration))
; finish_reporting = (fun () ->
clear_prev_line ();
flush oc)
}
let main oc = match interactive_output_mode oc with
| Some iom -> interactive_main iom oc
| None -> silent
end
let rec find_counterexample ~report iteration prop = function
| [] -> None
| x :: xs ->
report ~iteration;
match prop x with
| OK -> find_counterexample ~report (iteration+1) prop xs
| Failed_with data -> Some (x, Data data)
| exception exn -> Some (x, Exception exn)
let find_counterexample ?(report = fun ~iteration:_ -> ()) prop =
find_counterexample ~report 0 prop
let rec minimize
?(report = fun ~shrinks:_ ~iteration:_ -> ()) shrink prop failure =
match
find_counterexample ~report:(report ~shrinks:failure.shrinks)
prop (shrink failure.counterexample)
with
| Some (counterexample, data) ->
minimize ~report shrink prop
{ failure with counterexample; data; shrinks = failure.shrinks + 1 }
| None ->
failure
let test (type a b) ?(reporter = Reporter.silent) n gen shrink prop =
let exception Counterexample of (a, b) failure in
let result =
match
for tests = 1 to n do
reporter.report_test tests;
let x = gen () in
let stop_with_this_counterexample data =
raise (Counterexample
{ counterexample = x; data = data; tests; shrinks = 0 })
in
match prop x with
| OK -> ()
| Failed_with data -> stop_with_this_counterexample (Data data)
| exception exn -> stop_with_this_counterexample (Exception exn)
done
with
| () ->
Passed
| exception Counterexample failure ->
Failed (minimize ~report:(reporter.report_shrink ~tests:failure.tests)
shrink prop failure)
in
reporter.finish_reporting ();
result
let main
?(seed = Util.random_seed ()) ?(output = stdout)
max_tests gen shrink print_failure prop =
let printf fstr = Printf.fprintf output fstr in
Random.full_init seed;
match test ~reporter:(Reporter.main output) max_tests gen shrink prop with
| Passed ->
printf "OK, passed %a.\n" FPrint.tests max_tests
| Failed { counterexample; data; tests; shrinks } ->
let what, odata, print_extra_information = match data with
| Data data ->
"Counterexample",
Some data,
(fun () -> ())
| Exception exn ->
"Exception",
None,
(fun () ->
printf " Exception:\n %s\n"
(exn
|> Printexc.to_string
|> Str.global_replace (Str.regexp "\n") "\n "))
in
printf "Failed with seed [|%s|]!\n"
(String.concat "; " (Array.to_list (Array.map Int.to_string seed)));
printf "%s (after %a%a):\n"
what
FPrint.tests tests
FPrint.and_shrinks shrinks;
print_failure output counterexample odata;
print_extra_information ()
module Generator = struct
let replicateG n g =
Array.make n Fun.id |> Array.to_list |> List.map (fun _ -> g ())
let pick_without_replacement xs =
let rec go i xs = match i, xs with
| 0, x :: xs -> x, xs
| i, x :: xs -> let y, ys = go (i-1) xs
in y, x :: ys
| _, [] -> assert false
in
go (Random.int (List.length xs)) xs
let pick xs = List.nth xs (Random.int (List.length xs))
let small_int () = Random.int 7 - 3 (* [-3,3] *)
end
module Shrink = struct
let rec del1_and_shrink1 shrink = function
| [] ->
[], []
| x :: xs ->
let del, shrunk = del1_and_shrink1 shrink xs in
let cons_x xs' = x :: xs' in
( xs :: List.map cons_x del
, List.map (fun x' -> x' :: xs) (shrink x) @ List.map cons_x shrunk
)
let nonempty_list shrink xs =
match del1_and_shrink1 shrink xs with
| [[]], shrunk -> shrunk
| del, shrunk -> del @ shrunk
let list shrink xs =
let del, shrunk = del1_and_shrink1 shrink xs in
del @ shrunk
From Haskell 's QuickCheck : make it positive , 0 , then smaller by jumping
half the distance each time
half the distance each time *)
let int i =
let rec halves = function
| 0 -> []
| d -> i - d :: halves (d/2)
in
Util.guard (i < 0 && i <> Int.min_int) (-i) @
Util.guard (i <> 0) 0 @
halves (i/2)
Allow either one or two shrinks from the given shrinker
let shrink2 shrink x =
let shrink1 = shrink x in
shrink1 @ List.concat_map shrink shrink1
end
end
module Var : sig
type t = string
module Set : Set.S with type elt := t
module Map : Map.S with type key := t
val equal : t -> t -> bool
val vars : t list
val wildcard : t
val pattern_vars : t list
end = struct
type t = string
module Set = Set.Make(String)
module Map = Map.Make(String)
let equal = String.equal
let vars =
List.init 26 (fun i -> String.make 1 (Char.chr (Char.code 'a' + i)))
let wildcard = "_"
let pattern_vars = wildcard :: vars
end
module Environment : sig
type t
val empty : t
val of_variables : Var.t list -> t
val add : Var.t -> t -> t
val union : t -> t -> t
val is_empty : t -> bool
val is_bound : Var.t -> t -> bool
val is_free : Var.t -> t -> bool
val variables : t -> Var.t list
val variables_seq : t -> Var.t Seq.t
end = struct
include Var.Set
let of_variables = of_list
let is_bound = mem
let is_free x env = not (is_bound x env)
let variables = elements
let variables_seq = to_seq
end
module Substitution : sig
type binding =
| Deleted
| Renamed of Var.t
type t
val identity : t
val delete : Var.t -> t
val rename : Var.t -> Var.t -> t
val delete_env : Environment.t -> t
val rename_env : Environment.t -> (Var.t -> Var.t) -> t
val shadow_env : Environment.t -> t -> t
val apply : t -> Var.t -> binding option
end = struct
type binding =
| Deleted
| Renamed of Var.t
include Var.Map
type nonrec t = binding t
let identity = empty
let delete x = singleton x Deleted
let rename x y = singleton x (Renamed y)
let create_with_env f env =
of_seq (Seq.map (fun x -> x, f x) (Environment.variables_seq env))
let delete_env = create_with_env (Fun.const Deleted)
let rename_env env f = create_with_env (fun x -> Renamed (f x)) env
let shadow_env env = filter (fun x _ -> Environment.is_free x env)
let apply subst x = find_opt x subst
end
module Comprehension = struct
type int_term =
| Literal of int
| Variable of Var.t
type direction =
| To
| Downto
type iterator =
| Range of { start : int_term
; direction : direction
; stop : int_term }
| Sequence of int_term list
type binding = { var : Var.t; iterator : iterator }
type predicate =
| Positive
| Negative
| Nonzero
| Even
| Odd
let all_predicates = [Positive; Negative; Nonzero; Even; Odd]
type clause =
| For of binding list
| When of predicate * Var.t
(* We assume the body is a tuple of all the variables in the environment *)
type t = { env : Environment.t ; clauses : clause list }
module Bound_vars = struct
let bindings bs =
bs |>
List.filter_map (fun {var; iterator = _} ->
if Var.equal var Var.wildcard
then None
else Some var) |>
Environment.of_variables
let clauses =
List.fold_left
(fun env -> function
| For bs -> Environment.union (bindings bs) env
| When _ -> env)
Environment.empty
end
module Generator = struct
open QuickCheck.Generator
let in_scope_var env = pick (Environment.variables env)
let int_term env =
if not (Environment.is_empty env) && Random.int 10 < 1 then
Variable (in_scope_var env)
else
Literal (small_int ())
let iterator env =
if Random.bool ()
then Range { start = int_term env
; direction = if Random.bool () then To else Downto
; stop = int_term env }
else Sequence (replicateG (Random.int 8) (fun () -> int_term env))
Both Ranges and Sequences can range from length 0 to 7 ( inclusive ) ,
although with different probabilities
although with different probabilities *)
let predicate () =
match Random.int 5 with
| 0 -> Positive
| 1 -> Negative
| 2 -> Nonzero
| 3 -> Even
| 4 -> Odd
| _ -> assert false
(* Generates bindings that don't share variables *)
let bindings env sz =
let rec go ~bindings ~available ~used = function
| 0 ->
We reverse the list because [ _ ] becomes slightly more likely for
later - generated values , and this shifts them towards the end of
the for - and clause
later-generated values, and this shifts them towards the end of
the for-and clause *)
List.rev bindings, used
| n ->
let var, available = pick_without_replacement available in
let available, used =
if Var.equal var Var.wildcard
then Var.wildcard :: available, used
else available, Environment.add var used
in
let bindings = { var; iterator = iterator env } :: bindings in
go ~bindings ~available ~used (n-1)
in
go
~bindings:[]
~available:Var.pattern_vars
~used:Environment.empty
(Random.int sz + 1)
let clause env sz =
if not (Environment.is_empty env) && Random.int 4 < 1 then
When(predicate (), in_scope_var env), env
else
let bs, env' = bindings env sz in
For bs, Environment.union env env'
let comprehension () =
[ 1,5 ]
[ 2,6 ]
let rec go env i =
if i = clause_n then
[], env
else
let b, env' = clause env for_max in
let bs, env'' = go (Environment.union env env') (i+1) in
b :: bs, env''
in
let clauses, env = go Environment.empty 0 in
{env; clauses}
end
module Shrink = struct
open QuickCheck.Shrink
(* [-3,3], in increasing order of "complexity" *)
let all_small_ints =
let pos = List.init 3 (( + ) 1) in
let neg = List.map Int.neg pos in
0 :: (pos @ neg)
let all_small_int_lits = List.map (fun n -> Literal n) all_small_ints
let pattern_var x = Util.take_while (fun p -> x <> p) Var.pattern_vars
let int_term = function
| Literal n -> List.map (fun n -> Literal n) (int n)
| Variable _ -> all_small_int_lits
let iterator = function
| Range { start; direction; stop } ->
[Sequence [start]; Sequence [stop]] @
Util.guard
(match direction with Downto -> true | To -> false)
(Range { start = stop; direction = To; stop = start }) @
List.map
(fun start -> Range { start; direction; stop })
(int_term start) @
List.map
(fun stop -> Range { start; direction; stop })
(int_term stop) @
(match start, stop with
| Literal start, Literal stop ->
let range = match direction with
| To -> Util.range_to
| Downto -> Util.range_downto
in
[Sequence (List.map (fun n -> Literal n) (range start stop))]
| Variable _, _ | _, Variable _ -> [])
| Sequence seq ->
List.map (fun seq -> Sequence seq) (list int_term seq)
let binding ({var = x; iterator = i} as b) =
List.map (fun iterator -> {b with iterator}) (iterator i) @
List.map (fun var -> {b with var}) (pattern_var x)
let predicate p =
Util.take_while (fun p' -> p <> p') all_predicates
let parallel_bindings bs =
(* I think preventing name collisions genuinely requires a separate
traversal *)
let env = Bound_vars.bindings bs in
let rec del1_shrink1 = function
| [] ->
[], []
| ({var = x; iterator = i} as b) :: bs ->
let del, shrunk = del1_shrink1 bs in
let cons_b (bs', subst) = b :: bs', subst in
( (bs, Substitution.delete x) :: List.map cons_b del
, List.map
(fun iterator -> {b with iterator} :: bs, Substitution.identity)
(iterator i) @
List.filter_map
(fun var ->
if Environment.is_bound var env
then None
else Some ({b with var} :: bs,
if Var.equal var Var.wildcard
then Substitution.delete x
else Substitution.rename x var))
(pattern_var x) @
List.map cons_b shrunk )
in
match del1_shrink1 bs with
| [[], _], shrunk -> shrunk
| del, shrunk -> del @ shrunk
  (* Shrinking-specific substitution: deleted variables become every possible
     value *)
  module Substitute = struct
    open Util.List_monad
    (* Apply [elt subst] across a list, producing all combinations. *)
    let list elt subst = traverse (elt subst)
    (* Substitute in an integer term; a deleted variable expands to every
       small literal. *)
    let int_term subst = function
      | Literal n -> pure (Literal n)
      | Variable x -> match Substitution.apply subst x with
        | None -> pure (Variable x)
        | Some Deleted -> all_small_int_lits
        | Some (Renamed x') -> pure (Variable x')
    (* Substitute inside an iterator's terms. *)
    let iterator subst = function
      | Range { start; direction; stop } ->
        let+ start = int_term subst start
        and+ stop = int_term subst stop in
        Range { start; direction; stop }
      | Sequence seq ->
        let+ seq = list int_term subst seq in
        Sequence seq
    (* Substitute through a binding group, also returning the environment of
       variables the group binds (which shadow the substitution below). *)
    let rec parallel_bindings subst = function
      | [] ->
        (pure [], Environment.empty)
      | ({var; iterator = i} as b) :: bs ->
        let bss, env = parallel_bindings subst bs in
        ( (let+ iterator = iterator subst i
           and+ bs = bss in
           {b with iterator} :: bs)
        , Environment.add var env )
    (* Substitute through a clause list; [For] bindings shadow the
       substitution for later clauses, and a [When] on a deleted variable is
       dropped entirely. *)
    let rec clauses subst = function
      | [] ->
        pure []
      | For bs :: cs ->
        let bss, env = parallel_bindings subst bs in
        let subst = Substitution.shadow_env env subst in
        let+ cs = clauses subst cs
        and+ bs = bss in
        For bs :: cs
      | (When(pred, x) as c) :: cs ->
        let css = clauses subst cs in
        match Substitution.apply subst x with
        | None ->
          let+ cs = css in
          c :: cs
        | Some Deleted ->
          css
        | Some (Renamed x') ->
          let+ cs = css in
          When(pred, x') :: cs
  end
    (* Shrink a clause list: delete a clause (propagating the deletion of its
       bound variables into later clauses), or shrink one clause in place. *)
    let clauses cs =
      let rec del1_shrink1 = function
        | [] ->
          [], []
        | (For bs as c) :: cs ->
          let env = Bound_vars.bindings bs in
          let bss_substs = parallel_bindings bs in
          let del, shrunk = del1_shrink1 cs in
          let cons_c cs' = c :: cs' in
          ( Substitute.clauses (Substitution.delete_env env) cs @
            List.map cons_c del
          , (let open Util.List_monad in
             let* bs, subst = bss_substs in
             let+ cs = Substitute.clauses subst cs in
             For bs :: cs) @
            List.map cons_c shrunk )
        | (When(pred, x) as c) :: cs ->
          (* By the time we get here, [x] is guaranteed to be in scope;
             otherwise, [Substitute.clauses] would have deleted it *)
          let del, shrunk = del1_shrink1 cs in
          let cons_c cs' = c :: cs' in
          ( cs :: List.map cons_c del
          , List.map (fun pred -> When(pred, x) :: cs) (predicate pred) @
            List.map cons_c shrunk )
      in
      match del1_shrink1 cs with
      (* Never shrink all the way to the empty clause list *)
      | [[]], shrunk -> shrunk
      | del, shrunk -> del @ shrunk
let comprehension {env = _; clauses = cs} =
I do n't think there 's a nice way to either ( 1 ) rule out empty lists of
clauses ahead of time , or ( 2 ) compute the environment along the way , so
we handle both directly via post - processing here .
clauses ahead of time, or (2) compute the environment along the way, so
we handle both directly via post-processing here. *)
List.filter_map
(fun clauses ->
match clauses with
| [] -> None
| _ :: _ -> Some { env = Bound_vars.clauses clauses; clauses })
(clauses cs)
Shrinking twice simplifies both bugs this found on its first go - round ,
since this way we can shrink both the endpoints of a to / downto range or
shrink two parallel variable names at once .
since this way we can shrink both the endpoints of a to/downto range or
shrink two parallel variable names at once. *)
let comprehension = QuickCheck.Shrink.shrink2 comprehension
end
module To_string = struct
    (* Which OCaml comprehension flavour to print. *)
    type ocaml_type =
      | List
      | Mutable_array
      | Immutable_array
    (* The target language/syntax for pretty-printing a comprehension. *)
    type format =
      | OCaml of ocaml_type
      | Haskell
      | Python
let surround o c s = o ^ s ^ c
let parenthesize = surround "(" ")"
let bracket = surround "[" "]"
let spaced = surround " " " "
let tokens = String.concat " "
let comma_separated = String.concat ", "
    (* How the clauses of a comprehension are joined in each language. *)
    let comprehension_clauses o = match o with
      | OCaml _ | Python -> tokens
      | Haskell -> comma_separated
    (* Render the body tuple; a single variable needs no parentheses. *)
    let tuple = function
      | [tok] -> tok
      | toks -> toks |> comma_separated |> parenthesize
    (* The sequence delimiters for each language/flavour. *)
    let sequence = function
      | OCaml List | Haskell | Python -> bracket
      | OCaml Mutable_array -> surround "[|" "|]"
      | OCaml Immutable_array -> surround "[:" ":]"
    (* Per-language spellings of the modulus and (in)equality operators. *)
    let mod_ = function
      | OCaml _ -> "mod"
      | Haskell -> "`mod`"
      | Python -> "%"
    let eq = function
      | OCaml _ -> "="
      | Haskell | Python -> "=="
    let neq = function
      | OCaml _ -> "<>"
      | Haskell -> "/="
      | Python -> "!="
    (* Render an integer term, and its successor/predecessor (used for
       inclusive/exclusive range-endpoint adjustments). *)
    let int_term = function
      | Literal n -> Int.to_string n
      | Variable x -> x
    let succ_int_term = function
      | Literal n -> Int.to_string (n + 1)
      | Variable x -> x ^ "+1"
    let pred_int_term = function
      | Literal n -> Int.to_string (n - 1)
      | Variable x -> x ^ "-1"
    (* Tokens for "[mod] 2 [=] tgt", the parity test. *)
    let modulo_check o tgt = [mod_ o; "2"; eq o; tgt]
    (* Render a predicate as (prefix-function tokens, postfix-operator
       tokens) around the scrutinised variable. *)
    let predicate o = function
      | Positive -> [], [">"; "0"]
      | Negative -> [], ["<"; "0"]
      | Nonzero -> [], [neq o; "0"]
      | Even -> begin
          match o with
          (* OCaml's [mod] can return negative results, hence [abs] *)
          | OCaml _ -> ["abs"], modulo_check o "0"
          | Haskell -> ["even"], []
          | Python -> [], modulo_check o "0"
        end
      | Odd -> begin
          match o with
          | OCaml _ -> ["abs"], modulo_check o "1"
          | Haskell -> ["odd"], []
          | Python -> [], modulo_check o "1"
        end
    (* OCaml's for-loop direction keywords. *)
    let ocaml_direction = function
      | To -> "to"
      | Downto -> "downto"
let binding o {var; iterator} =
let iter = match iterator with
| Range {start; direction; stop} -> begin
match o with
| OCaml _ ->
tokens [ "="
; int_term start
; ocaml_direction direction
; int_term stop ]
| Haskell ->
let step_sep, format_dotdot = match stop with
| Literal n when n < 0 -> " ", spaced
| _ -> "", Fun.id
in
let step = match direction with
| To -> ""
| Downto -> "," ^ step_sep ^ pred_int_term start
in
tokens [ "<-"
; "[" ^
int_term start ^ step ^
format_dotdot ".." ^
int_term stop ^
"]" ]
| Python ->
let stop, step = match direction with
| To -> succ_int_term stop, []
| Downto -> pred_int_term stop, ["-1"]
in
"in range" ^ tuple ([int_term start; stop] @ step)
end
| Sequence seq ->
There is one edge case where can report an ambiguous type
error : if two variables are drawn from empty lists , and then one
is enumerated to the other , such as in
[ [ ( a , b , c ) | a < - [ ] , b < - [ ] , c < - [ a .. b ] ] ] , or even more simply
in [ [ ( a , b ) | a < - [ ] , b < - [ a .. a ] ] ] . Thus , if we have an empty
list in Haskell , we give it a type .
error: if two variables are drawn from empty lists, and then one
is enumerated to the other, such as in
[[(a,b,c) | a <- [], b <- [], c <- [a..b]]], or even more simply
in [[(a,b) | a <- [], b <- [a..a]]]. Thus, if we have an empty
list in Haskell, we give it a type. *)
let maybe_type_annotation = match o, seq with
| Haskell, [] -> ["::"; "[Int]"]
| _, _ -> []
in
let sep = match o with
| OCaml _ -> ";"
| Haskell | Python -> ","
in
let seq = seq
|> List.map int_term
|> String.concat (sep ^ " ")
|> sequence o
in
let bind = match o with
| OCaml _ | Python -> "in"
| Haskell -> "<-"
in
tokens ([bind; seq] @ maybe_type_annotation)
in
tokens [var; iter]
    (* In Haskell and Python, parallel bindings are interpreted as sequential
       bindings. Python has other problems, so we need a heavier hammer (see
       [Make_all_variables_unique]), but for Haskell, this is the only
       difference we need to address. It doesn't cause problems unless (1) a
       variable [x] is in scope for the parallel bindings, (2) one of the
       parallel bindings binds [x] to something new, and (3) [x] is used on the
       right-hand side of a later binding. In this case, Haskell will see the
       new binding of [x], which will shadow the old one; in OCaml, as these are
       all in parallel, this is not the case. This function renames all such
       variables to [outer_x], with the given let-binding construct. *)
    (* See the comment above: rename each use of a variable that is shadowed by
       an earlier parallel binding to [outer_x], and return the [let]-clauses
       (built with [let_clause]) that introduce those [outer_x] names, together
       with the rewritten bindings. *)
    let protect_parallel_bindings let_clause bindings =
      let (_bound_vars, _free_vars, outer_lets), bindings =
        List.fold_left_map
          (fun (shadowed, free_vars, outer_lets) {var; iterator} ->
             (* Rewrite one term, emitting at most one new outer let *)
             let protect free_vars = function
               | Variable x when Environment.is_bound x shadowed ->
                 let outer = "outer_" ^ x in
                 let free_vars, outer_let =
                   (* Only introduce [outer_x] once per variable *)
                   if Environment.is_bound x free_vars
                   then free_vars,
                        None
                   else Environment.add x free_vars,
                        Some (let_clause outer x)
                 in
                 Variable outer, free_vars, outer_let
               | t ->
                 t, free_vars, None
             in
             let iterator, free_vars, outer_lets' =
               match iterator with
               | Range { start; direction; stop } ->
                 let start, free_vars, start_outer =
                   protect free_vars start
                 in
                 let stop, free_vars, stop_outer =
                   protect free_vars stop
                 in
                 let outer_lets' =
                   List.filter_map Fun.id [start_outer; stop_outer]
                 in
                 Range { start; direction; stop }, free_vars, outer_lets'
               | Sequence seq ->
                 let rev_seq, free_vars, outer_lets' =
                   List.fold_left
                     (fun (rev_ts, free_vars, outer_lets') t ->
                        let t, free_vars, outer = protect free_vars t in
                        t :: rev_ts,
                        free_vars,
                        Option.fold
                          ~none:Fun.id ~some:List.cons outer outer_lets')
                     ([], free_vars, [])
                     seq
                 in
                 Sequence (List.rev rev_seq), free_vars, outer_lets'
             in
             ( ( Environment.add var shadowed
               , free_vars
               , outer_lets' :: outer_lets )
             , {var; iterator} ))
          (Environment.empty, Environment.empty, [])
          bindings
      in
      (* Flatten the accumulated (reversed) per-binding let-lists back into
         source order *)
      let outer_lets =
        let rec rev_rev_concat acc = function
          | [] -> acc
          | xs :: xss -> rev_rev_concat (List.rev_append xs acc) xss
        in rev_rev_concat [] outer_lets
      in
      outer_lets, bindings
    (* Python doesn't shadow variables which have the same name, it reuses the
       same mutable cell. Thus, in the Python list comprehension
       [[a for a in [0] for _ in [0, 0] for a in [a, 1]]], the second [a]
       clobbers the first, and the result is [[0, 1, 1, 1]] instead of (as it
       would be in OCaml or Haskell) [[0, 1, 0, 1]]. To avoid this, we make
       every variable in a Python comprehension unique; the above comprehension
       would become [[a for a2 in [0] for _ in [0, 0] for a in [a2, 1]]]. *)
    module Make_all_variables_unique = struct
      (* Apply a variable renaming through terms and iterators. *)
      module Rename = struct
        let var renaming x =
          Option.value ~default:x (Var.Map.find_opt x renaming)
        let int_term renaming = function
          | Literal n -> Literal n
          | Variable x -> Variable (var renaming x)
        let iterator renaming = function
          | Range { start; direction; stop } ->
            Range { start = int_term renaming start
                  ; direction
                  ; stop = int_term renaming stop }
          | Sequence seq ->
            Sequence (List.map (int_term renaming) seq)
      end
      (* Count how many times each variable is bound across all [For]
         clauses, keeping only those bound more than once. *)
      let duplicate_bindings clauses =
        let merge_counts f =
          List.fold_left
            (fun m x -> Var.Map.union (fun _ n1 n2 -> Some (n1 + n2)) (f x) m)
            Var.Map.empty
        in
        Var.Map.filter
          (fun _ n -> n > 1)
          (merge_counts
             (function
               | For bs ->
                 merge_counts (fun {var; _} -> Var.Map.singleton var 1) bs
               | When _ ->
                 Var.Map.empty)
             clauses)
      (* Rename duplicated binders: the k-th remaining duplicate of [x]
         becomes [xk]; the final occurrence keeps its original name. *)
      let bindings dups renaming =
        List.fold_left_map
          (fun (dups, renaming') {var; iterator} ->
             (* Uses of variables refer to the *previous* clause group, so
                rename with the incoming [renaming], not [renaming'] *)
             let iterator = Rename.iterator renaming iterator in
             match Var.Map.find_opt var dups with
             | Some n ->
               let var' = var ^ Int.to_string n in
               let renaming' = Var.Map.add var var' renaming' in
               let dups =
                 Var.Map.update
                   var
                   (function
                     | Some 2 -> None
                     | Some n -> Some (n-1)
                     | None -> assert false)
                   dups
               in
               (dups, renaming'), {var = var'; iterator}
             | None ->
               (dups, Var.Map.remove var renaming'), {var; iterator})
          (dups, renaming)
      (* Thread the renaming through the whole clause list. *)
      let clauses cs =
        cs |>
        List.fold_left_map
          (fun ((dups, renaming) as acc) -> function
            | For bs ->
              let (dups, renaming), bs = bindings dups renaming bs in
              (dups, renaming), For bs
            | When(pred, x) ->
              acc, When(pred, Rename.var renaming x))
          (duplicate_bindings cs, Var.Map.empty) |>
        snd
    end
    (* Render one clause in the target language. *)
    let clause o = function
      | For bindings ->
        let intro, sep, (extra_clauses, bindings) =
          match o with
          | OCaml _ ->
            ["for"], " and ", ([], bindings)
          | Haskell ->
            [],
            ", ",
            protect_parallel_bindings
              (fun x e -> tokens ["let"; x; "="; e])
              bindings
          | Python ->
            (* [Make_all_variables_unique] has already been applied, so we
               don't need to call [protect_parallel_bindings] *)
            ["for"], " for ", ([], bindings)
        in
        comprehension_clauses o
          (extra_clauses @
           intro @
           [bindings |> List.map (binding o) |> String.concat sep])
      | When(pred, x) ->
        let kwd = match o with
          | OCaml _ -> ["when"]
          | Haskell -> []
          | Python -> ["if"]
        in
        let pred_pre, pred_post = predicate o pred in
        tokens (kwd @ pred_pre @ (x :: pred_post))
    (* Render the whole comprehension.  The body is a tuple of all the
       variables bound in the environment. *)
    let comprehension o {env; clauses} =
      let clauses = match o with
        | OCaml _ | Haskell -> clauses
        | Python -> Make_all_variables_unique.clauses clauses
      in
      let body = tuple (Environment.variables env) in
      let clauses = comprehension_clauses o (List.map (clause o) clauses) in
      let sep = match o with
        | OCaml _ | Python -> " "
        | Haskell -> " | "
      in
      sequence o (body ^ sep ^ clauses)
end
  (* The public generator/shrinker/printer for [Comprehension.t]. *)
  let generator = Generator.comprehension
  let shrink = Shrink.comprehension
  let to_string = To_string.comprehension
end
module Interactive_command = struct
  (* Spawn [cmd args] as a subprocess REPL.  [setup] primes it via the
     write-side, [output] post-processes each line we send, [input] reads back
     one result; [f] is given an [interact] function that sends a line and
     reads the reply.  The process is closed on both normal and exceptional
     exit. *)
  let command cmd args ~setup ~input ~output ~f =
    let inch, outch =
      Unix.open_process_args cmd (Array.of_list (cmd :: args))
    in
    let output str = Util.output_line outch (output str) in
    let interact str =
      output str;
      input inch
    in
    let cleanup () = ignore (Unix.close_process (inch, outch)) in
    match setup output; f interact with
    | result -> cleanup (); result
    | exception e -> cleanup (); raise e
  (* We need to read every comprehension's output in a character-wise identical
     way. We settle on Python's list syntax: square brackets (no pipes),
     comma-separated, with spaces after all the commas. This choice is because
     (1) it's easier to replace all of OCaml's semicolons with commas than it is
     to replace *some* of Haskell/Python's commas with semicolons; and (2) it
     looks nicer to have spaces after commas (like Python, as well as OCaml)
     than to not do so (like Haskell). *)
  (* This custom printer is necessary because long lists cause the default
     printer to stack overflow. Since we're writing our own, we use commas as a
     separator here, a la Python, rather than relying on the substitution later.
     (We do still have to substitute later, though, for arrays.) *)
  (* OCaml source, injected into the toplevel via [#install_printer]. *)
  let ocaml_code_pp_list_as_python = {|
    let pp_list pp_elt fmt xs =
      let buf = Buffer.create 256 in
      let rec fill_buf prefix = function
        | x :: xs ->
          let fbuf = Format.formatter_of_buffer buf in
          Format.pp_set_max_indent fbuf Int.max_int;
          Buffer.add_string buf prefix;
          Format.fprintf fbuf "%a%!" pp_elt x;
          fill_buf ", " xs
        | [] ->
          ();
      in
      Buffer.add_char buf '[';
      fill_buf "" xs;
      Buffer.add_char buf ']';
      Format.fprintf fmt "%s" (Buffer.contents buf)
  |}
  (* Read the toplevel's (possibly multi-line) list/array output and
     normalise it to Python list syntax: strip array delimiters, collapse
     whitespace, and turn semicolons into commas. *)
  let input_ocaml_list_or_array_as_python_list i =
    let input = Buffer.create 16 in
    let rec input_lines () =
      let line = input_line i in
      Buffer.add_string input line;
      (* Keep reading until the closing bracket appears *)
      if not (String.contains line ']') then input_lines ()
    in
    input_lines ();
    let raw_list = Buffer.contents input in
    let start = String.index raw_list '[' in
    let stop = String.rindex raw_list ']' in
    let list = String.sub raw_list start (stop - start + 1) in
    list
    |> Str.global_replace (Str.regexp "[ \n]+") " "
    |> Str.global_replace (Str.regexp "\\[[|:]") "["
    |> Str.global_replace (Str.regexp "[|:]\\]") "]"
    |> Str.global_replace (Str.regexp ";") ","
  (* GHCi prints [1,2]; add the space after each comma to match Python. *)
  let input_haskell_list_as_python_list i =
    i |> input_line |> Str.global_replace (Str.regexp ",") ", "
  (* An OCaml toplevel with the comprehension extensions enabled, primed with
     the custom list printer above.  Each sent phrase is terminated with
     [;;]. *)
  let ocaml ~f =
    command
      "../../../ocaml"
      [ "-extension"; "comprehensions"
      ; "-extension"; "immutable_arrays"
      ; "-noprompt"; "-no-version"
      ; "-w"; "no-unused-var" ]
      ~setup:(fun output ->
        output ("#print_length " ^ Int.to_string Int.max_int);
        output ocaml_code_pp_list_as_python;
        output "#install_printer pp_list")
      ~input:input_ocaml_list_or_array_as_python_list
      ~output:(fun str -> str ^ ";;")
      ~f
  (* If GHCi isn't on a tty, it doesn't display a prompt, AFAICT *)
  (* A quiet GHCi session (no banner, no .ghci). *)
  let haskell ~f =
    command
      "/usr/bin/ghci"
      ["-v0"; "-ignore-dot-ghci"]
      ~setup:(Fun.const ())
      ~input:input_haskell_list_as_python_list
      ~output:Fun.id
      ~f
  (* A Python REPL with the [>>> ] prompt suppressed. *)
  let python ~f =
    command
      "/usr/bin/python3"
      ["-qic"; "import sys\nsys.ps1 = ''"]
      ~setup:(Fun.const ())
      ~input:input_line
      ~output:Fun.id
      ~f
end
module Log_test_cases = struct
  (* Log every test case sent to the OCaml toplevel into [file] in expect-test
     format.  Fix: the header string literal below had been mangled by a
     comment-stripping pass (its emitted [(*] openers were lost and two
     comments were duplicated); reconstructed here — TODO confirm the exact
     original wording against upstream. *)
  let to_file file f =
    let oc = open_out file in
    try
      output_string oc
        {|(* TEST
 flags = "-extension comprehensions -extension immutable_arrays"
 * expect
*)

(* Generated by quickcheck_lists_arrays_haskell_python.ml; filtered down to all
   tests of reasonable size; and reflowed to fit in 80-character lines. *)

(* NOTE: If you're saving these tests, don't forget to make those last two
   changes! *)|};
      (* [f] receives a logger that appends "<test>;;" plus an empty expect
         block, flushing eagerly *)
      let r = f (Printf.fprintf oc "\n\n%s;;\n[%%%%expect{||}];;%!") in
      output_char oc '\n';
      close_out oc;
      r
    with
    | exn ->
      close_out_noerr oc;
      raise exn
end
module Main = struct
  (* The five REPL outputs for one comprehension. *)
  type output = { ocaml_list : string
                ; ocaml_mutable_array : string
                ; ocaml_immutable_array : string
                ; haskell : string
                ; python : string }
  (* Select the output field corresponding to a printing format. *)
  let output_for o output =
    match (o : Comprehension.To_string.format) with
    | OCaml List -> output.ocaml_list
    | OCaml Mutable_array -> output.ocaml_mutable_array
    | OCaml Immutable_array -> output.ocaml_immutable_array
    | Haskell -> output.haskell
    | Python -> output.python
  (* Print a failing comprehension in every syntax; when [data] is available,
     also print what each REPL actually produced. *)
  let print_counterexample oc counterexample data =
    let printf format_string = Printf.fprintf oc format_string in
    let output_for, printf_for_data = match data with
      | Some data -> (fun o -> output_for o data), printf
      | None -> (fun _ -> ""), Printf.ifprintf oc
    in
    let print_comprehension tag align o =
      let counterexample_str = Comprehension.to_string o counterexample in
      let indent = String.make (String.length tag) ' ' in
      printf " %s:%s %s\n" tag align counterexample_str;
      printf_for_data " %s %s = %s\n" indent align (output_for o)
    in
    (* [align] pads the shorter tags so the columns line up *)
    print_comprehension "OCaml list" " " (OCaml List);
    print_comprehension "OCaml array" " " (OCaml Mutable_array);
    print_comprehension "OCaml iarray" "" (OCaml Immutable_array);
    print_comprehension "Haskell" " " Haskell;
    print_comprehension "Python" " " Python
  (* The property under test: a random comprehension, rendered in all five
     syntaxes and evaluated by the three REPLs, must produce character-wise
     identical output everywhere.  Every phrase sent to the OCaml toplevel is
     also logged to a file. *)
  let different_comprehensions_agree ?seed ?output max_tests =
    let ( = ) = String.equal in
    Interactive_command.ocaml ~f:(fun ocaml ->
      Interactive_command.haskell ~f:(fun haskell ->
        Interactive_command.python ~f:(fun python ->
          Log_test_cases.to_file "comprehensions_from_quickcheck-log.ml" (fun log ->
            let ocaml comp = log comp; ocaml comp in
            QuickCheck.main
              ?seed ?output
              max_tests
              Comprehension.generator Comprehension.shrink
              print_counterexample
              (fun c ->
                 let run repl fmt = repl (Comprehension.to_string fmt c) in
                 let ocaml_list = run ocaml (OCaml List) in
                 let ocaml_mutable_array = run ocaml (OCaml Mutable_array) in
                 let ocaml_immutable_array = run ocaml (OCaml Immutable_array) in
                 let haskell = run haskell Haskell in
                 let python = run python Python in
                 if ocaml_list = ocaml_mutable_array &&
                    ocaml_mutable_array = ocaml_immutable_array &&
                    ocaml_immutable_array = haskell &&
                    haskell = python
                 then OK
                 else Failed_with
                     { ocaml_list
                     ; ocaml_mutable_array
                     ; ocaml_immutable_array
                     ; haskell
                     ; python })))))
end
let () = Main.different_comprehensions_agree 1_000
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/b0a649516b33b83b7290c6fa5d7cb4c8c343df8c/testsuite/tests/comprehensions/quickcheck_lists_arrays_haskell_python.ml | ocaml | I think this is right
For repeatability
This only works with some words but that's ok
[-3,3]
We assume the body is a tuple of all the variables in the environment
Generates bindings that don't share variables
[-3,3], in increasing order of "complexity"
I think preventing name collisions genuinely requires a separate
traversal
Shrinking-specific substitution: deleted variables become every possible
value
By the time we get here, [x] is guaranteed to be in scope;
otherwise, [Substitute.clauses] would have deleted it
[Make_all_variables_unique] has already been applied, so we
don't need to call [protect_parallel_bindings]
This custom printer is necessary because long lists cause the default
printer to stack overflow. Since we're writing our own, we use commas as a
separator here, a la Python, rather than relying on the substitution later.
(We do still have to substitute later, though, for arrays.)
TEST
flags = "-extension comprehensions -extension immutable_arrays"
* expect
(* -*- compile-command: "ocamlopt -w +A-4-40-42-44 str.cmxa unix.cmxa quickcheck_lists_arrays_haskell_python.ml -o quickcheck-lists-arrays-haskell-python && ./quickcheck-lists-arrays-haskell-python"; -*- *)

(* CR aspectorzabusky: This file was great for validating comprehensions, but we
   can't put it in the compiler test suite: it spins up three different REPL
   processes, one each for Haskell, Python, and OCaml, and we can't count on the
   first two existing. But it would be a shame to delete this code and just
   leave `comprehensions_from_quickcheck.ml`; if things change, it's good to
   have access to QuickCheck. What should we do with this, do we think? *)
(* Int-specialised comparison operators.  Opening this module shadows the
   polymorphic comparisons from [Stdlib], so accidental structural comparison
   of non-int values becomes a type error. *)
module No_polymorphic_compare = struct
  let ( = ) x y = Int.equal x y
  let ( < ) x y = Stdlib.( < ) (Int.compare x y) 0
  let ( > ) x y = Stdlib.( > ) (Int.compare x y) 0
  let ( <= ) x y = Stdlib.( <= ) (Int.compare x y) 0
  let ( >= ) x y = Stdlib.( >= ) (Int.compare x y) 0
end
open No_polymorphic_compare
module Util = struct
  (* The list monad (nondeterminism), with binding operators. *)
  module List_monad = struct
    let pure x = [x]
    let bind xs f = List.concat_map f xs
    let (let*) = bind
    let (let+) xs f = List.map f xs
    (* Cartesian product of two computations *)
    let (and*) xs ys =
      let* x = xs in
      let+ y = ys in
      x,y
    let (and+) = (and*)
    (* All ways of choosing one result of [f] per element *)
    let rec traverse f = function
      | [] ->
        pure []
      | x :: xs ->
        let+ y = f x
        and+ ys = traverse f xs in
        y :: ys
  end
  (* Longest prefix satisfying [p]. *)
  let rec take_while p = function
    | x :: xs when p x -> x :: take_while p xs
    | _ -> []
  (* [x] if [c] holds, else nothing. *)
  let guard c x = if c then [x] else []
  let max x y = if x > y then x else y
  (* Inclusive integer ranges in each direction (empty when inverted). *)
  let range_to start stop =
    List.init (max 0 (stop - start + 1)) (fun i -> start + i)
  let range_downto start stop =
    List.init (max 0 (start - stop + 1)) (fun i -> start - i)
  (* System entropy, used to seed the PRNG reproducibly-on-report. *)
  external random_seed : unit -> int array = "caml_sys_random_seed"
  (* Write [str] plus a newline and flush immediately (REPL interaction). *)
  let output_line oc str = begin
    output_string oc str;
    output_char oc '\n';
    flush oc
  end
end
module QuickCheck = struct
  (* What a single property evaluation yields. *)
  type 'a prop_result =
    | OK
    | Failed_with of 'a
  (* Why a test failed: property data, or an uncaught exception. *)
  type 'a failure_data =
    | Data of 'a
    | Exception of exn
  (* A minimised failure, with the test/shrink counts that produced it. *)
  type ('a, 'b) failure =
    { counterexample : 'a
    ; data : 'b failure_data
    ; tests : int
    ; shrinks : int }
  type ('a, 'b) result =
    | Passed
    | Failed of ('a, 'b) failure
  (* Shared pretty-printing of test/shrink counts, abstracted over the
     printf-style destination (string or channel). *)
  module Print (Printf : sig
      type destination
      type result
      val printf : destination -> ('a, destination, result) format -> 'a
    end) =
  struct
    (* "3 tests", "1 shrink", ... with correct pluralisation *)
    let quantity dst (n, thing) =
      Printf.printf dst "%d %s%s" n thing (if n = 1 then "" else "s")
    let tests dst tests = quantity dst (tests, "test")
    let and_shrinks dst = function
      | 0 -> Printf.printf dst ""
      | shrinks -> Printf.printf dst " and %a" quantity (shrinks, "shrink")
    let and_shrinks_and_iteration dst = function
      | shrinks, 0 ->
        and_shrinks dst shrinks
      | shrinks, iteration ->
        Printf.printf dst " and %d.%d shrinks" shrinks iteration
  end
  (* Instantiations printing to a string and to an out_channel. *)
  module SPrint = Print (struct
      type destination = unit
      type result = string
      let printf () = Printf.sprintf
    end)
  module FPrint = Print (struct
      type destination = out_channel
      type result = unit
      let printf = Printf.fprintf
    end)
module Reporter = struct
type t =
{ report_test : int -> unit
; report_shrink : tests:int -> shrinks:int -> iteration:int -> unit
; finish_reporting : unit -> unit
}
let silent =
{ report_test = Fun.const ()
; report_shrink = (fun ~tests:_ ~shrinks:_ ~iteration:_ -> ())
; finish_reporting = Fun.const ()
}
type interactive_output_mode =
| Backspace_moves
| Backspace_deletes
let interactive_output_mode oc =
match Unix.(isatty (descr_of_out_channel oc)) with
| true ->
Some (if Option.is_some (Sys.getenv_opt "INSIDE_EMACS") ||
Option.is_some (Sys.getenv_opt "EMACS")
then Backspace_deletes
else Backspace_moves)
| false | exception _ ->
None
let interactive_main iom oc =
This line - clearing technique was taken from Haskell 's QuickCheck ,
although sadly it does n't work in Emacs
although sadly it doesn't work in Emacs *)
let string_as_char s c = String.make (String.length s) c in
let backspace_prev_line = ref "" in
let clear_prev_line = match iom with
| Backspace_moves -> fun () ->
output_string oc (string_as_char !backspace_prev_line ' ');
output_string oc !backspace_prev_line
| Backspace_deletes -> fun () ->
output_string oc !backspace_prev_line
in
let move_cursor_for_this_line = match iom with
| Backspace_moves -> output_string oc
| Backspace_deletes -> Fun.const ()
in
let report fstr =
Printf.ksprintf
(fun line ->
clear_prev_line ();
let backspace_this_line = string_as_char line '\b' in
output_string oc line;
move_cursor_for_this_line backspace_this_line;
flush oc;
backspace_prev_line := backspace_this_line)
fstr
in
{ report_test = (fun tests ->
report "(%a...)" SPrint.tests tests)
; report_shrink = (fun ~tests ~shrinks ~iteration ->
report "Failed! (%a%a...)"
SPrint.tests tests
SPrint.and_shrinks_and_iteration (shrinks, iteration))
; finish_reporting = (fun () ->
clear_prev_line ();
flush oc)
}
let main oc = match interactive_output_mode oc with
| Some iom -> interactive_main iom oc
| None -> silent
end
  (* Scan candidates for the first one that fails [prop] (or raises),
     reporting each attempt's index. *)
  let rec find_counterexample ~report iteration prop = function
    | [] -> None
    | x :: xs ->
      report ~iteration;
      match prop x with
      | OK -> find_counterexample ~report (iteration+1) prop xs
      | Failed_with data -> Some (x, Data data)
      | exception exn -> Some (x, Exception exn)
  let find_counterexample ?(report = fun ~iteration:_ -> ()) prop =
    find_counterexample ~report 0 prop
  (* Greedily shrink a failure: as long as some shrink of the counterexample
     still fails, adopt it and repeat. *)
  let rec minimize
      ?(report = fun ~shrinks:_ ~iteration:_ -> ()) shrink prop failure =
    match
      find_counterexample ~report:(report ~shrinks:failure.shrinks)
        prop (shrink failure.counterexample)
    with
    | Some (counterexample, data) ->
      minimize ~report shrink prop
        { failure with counterexample; data; shrinks = failure.shrinks + 1 }
    | None ->
      failure
  (* Run up to [n] random tests; on the first failure, minimise it.  Uses a
     local exception to break out of the counting loop. *)
  let test (type a b) ?(reporter = Reporter.silent) n gen shrink prop =
    let exception Counterexample of (a, b) failure in
    let result =
      match
        for tests = 1 to n do
          reporter.report_test tests;
          let x = gen () in
          let stop_with_this_counterexample data =
            raise (Counterexample
                     { counterexample = x; data = data; tests; shrinks = 0 })
          in
          match prop x with
          | OK -> ()
          | Failed_with data -> stop_with_this_counterexample (Data data)
          | exception exn -> stop_with_this_counterexample (Exception exn)
        done
      with
      | () ->
        Passed
      | exception Counterexample failure ->
        Failed (minimize ~report:(reporter.report_shrink ~tests:failure.tests)
                  shrink prop failure)
    in
    reporter.finish_reporting ();
    result
  (* Top-level driver: seed the PRNG, run the tests, and print either success
     or the (minimised) counterexample, including the seed so the run can be
     reproduced. *)
  let main
      ?(seed = Util.random_seed ()) ?(output = stdout)
      max_tests gen shrink print_failure prop =
    let printf fstr = Printf.fprintf output fstr in
    Random.full_init seed;
    match test ~reporter:(Reporter.main output) max_tests gen shrink prop with
    | Passed ->
      printf "OK, passed %a.\n" FPrint.tests max_tests
    | Failed { counterexample; data; tests; shrinks } ->
      let what, odata, print_extra_information = match data with
        | Data data ->
          "Counterexample",
          Some data,
          (fun () -> ())
        | Exception exn ->
          "Exception",
          None,
          (fun () ->
             printf " Exception:\n %s\n"
               (exn
                |> Printexc.to_string
                |> Str.global_replace (Str.regexp "\n") "\n "))
      in
      printf "Failed with seed [|%s|]!\n"
        (String.concat "; " (Array.to_list (Array.map Int.to_string seed)));
      printf "%s (after %a%a):\n"
        what
        FPrint.tests tests
        FPrint.and_shrinks shrinks;
      print_failure output counterexample odata;
      print_extra_information ()
module Generator = struct
let replicateG n g =
Array.make n Fun.id |> Array.to_list |> List.map (fun _ -> g ())
let pick_without_replacement xs =
let rec go i xs = match i, xs with
| 0, x :: xs -> x, xs
| i, x :: xs -> let y, ys = go (i-1) xs
in y, x :: ys
| _, [] -> assert false
in
go (Random.int (List.length xs)) xs
let pick xs = List.nth xs (Random.int (List.length xs))
end
module Shrink = struct
    (* All one-step list shrinks: delete one element, or shrink one element
       (paired so callers can try deletions before element shrinks). *)
    let rec del1_and_shrink1 shrink = function
      | [] ->
        [], []
      | x :: xs ->
        let del, shrunk = del1_and_shrink1 shrink xs in
        let cons_x xs' = x :: xs' in
        ( xs :: List.map cons_x del
        , List.map (fun x' -> x' :: xs) (shrink x) @ List.map cons_x shrunk
        )
    (* Shrink a list but never to the empty list. *)
    let nonempty_list shrink xs =
      match del1_and_shrink1 shrink xs with
      | [[]], shrunk -> shrunk
      | del, shrunk -> del @ shrunk
    (* Shrink a list, allowing the empty result. *)
    let list shrink xs =
      let del, shrunk = del1_and_shrink1 shrink xs in
      del @ shrunk
From Haskell 's QuickCheck : make it positive , 0 , then smaller by jumping
half the distance each time
half the distance each time *)
let int i =
let rec halves = function
| 0 -> []
| d -> i - d :: halves (d/2)
in
Util.guard (i < 0 && i <> Int.min_int) (-i) @
Util.guard (i <> 0) 0 @
halves (i/2)
Allow either one or two shrinks from the given shrinker
let shrink2 shrink x =
let shrink1 = shrink x in
shrink1 @ List.concat_map shrink shrink1
end
end
(* Variable names (strings), with the pool of names the generator draws from. *)
module Var : sig
  type t = string
  module Set : Set.S with type elt := t
  module Map : Map.S with type key := t
  val equal : t -> t -> bool
  val vars : t list
  val wildcard : t
  val pattern_vars : t list
end = struct
  type t = string
  module Set = Set.Make(String)
  module Map = Map.Make(String)
  let equal = String.equal
  (* "a" .. "z" *)
  let vars =
    List.init 26 (fun i -> String.make 1 (Char.chr (Char.code 'a' + i)))
  (* The wildcard pattern binds nothing *)
  let wildcard = "_"
  let pattern_vars = wildcard :: vars
end
(* The set of variables currently in scope. *)
module Environment : sig
  type t
  val empty : t
  val of_variables : Var.t list -> t
  val add : Var.t -> t -> t
  val union : t -> t -> t
  val is_empty : t -> bool
  val is_bound : Var.t -> t -> bool
  val is_free : Var.t -> t -> bool
  val variables : t -> Var.t list
  val variables_seq : t -> Var.t Seq.t
end = struct
  (* Just a set of names, with domain-flavoured aliases *)
  include Var.Set
  let of_variables = of_list
  let is_bound = mem
  let is_free x env = not (is_bound x env)
  let variables = elements
  let variables_seq = to_seq
end
(* A finite map from variables to delete/rename actions, applied during
   shrinking. *)
module Substitution : sig
  type binding =
    | Deleted
    | Renamed of Var.t
  type t
  val identity : t
  val delete : Var.t -> t
  val rename : Var.t -> Var.t -> t
  val delete_env : Environment.t -> t
  val rename_env : Environment.t -> (Var.t -> Var.t) -> t
  val shadow_env : Environment.t -> t -> t
  val apply : t -> Var.t -> binding option
end = struct
  type binding =
    | Deleted
    | Renamed of Var.t
  include Var.Map
  type nonrec t = binding t
  let identity = empty
  let delete x = singleton x Deleted
  let rename x y = singleton x (Renamed y)
  (* Build a substitution covering every variable of an environment *)
  let create_with_env f env =
    of_seq (Seq.map (fun x -> x, f x) (Environment.variables_seq env))
  let delete_env = create_with_env (Fun.const Deleted)
  let rename_env env f = create_with_env (fun x -> Renamed (f x)) env
  (* Drop entries for variables re-bound by [env] (they are shadowed) *)
  let shadow_env env = filter (fun x _ -> Environment.is_free x env)
  let apply subst x = find_opt x subst
end
module Comprehension = struct
  (* The abstract syntax of the comprehensions we generate. *)
  type int_term =
    | Literal of int
    | Variable of Var.t
  type direction =
    | To
    | Downto
  (* What a binding iterates over: an integer range or an explicit list. *)
  type iterator =
    | Range of { start : int_term
               ; direction : direction
               ; stop : int_term }
    | Sequence of int_term list
  type binding = { var : Var.t; iterator : iterator }
  (* Unary integer predicates available in all three languages. *)
  type predicate =
    | Positive
    | Negative
    | Nonzero
    | Even
    | Odd
  let all_predicates = [Positive; Negative; Nonzero; Even; Odd]
  (* A clause: parallel [for] bindings, or a [when] guard on a variable. *)
  type clause =
    | For of binding list
    | When of predicate * Var.t
  (* A comprehension: its bound-variable environment plus its clauses. *)
  type t = { env : Environment.t ; clauses : clause list }
  (* Compute which variables a binding group / clause list brings into
     scope (the wildcard binds nothing). *)
  module Bound_vars = struct
    let bindings bs =
      bs |>
      List.filter_map (fun {var; iterator = _} ->
        if Var.equal var Var.wildcard
        then None
        else Some var) |>
      Environment.of_variables
    let clauses =
      List.fold_left
        (fun env -> function
          | For bs -> Environment.union (bindings bs) env
          | When _ -> env)
        Environment.empty
  end
(* Random generation of comprehensions.  The extraction that produced this
   file stripped the [(* *)] delimiters from comments (leaving bare prose
   lines) and dropped two local bindings in [comprehension]; both are
   restored below. *)
module Generator = struct
open QuickCheck.Generator
(* Pick a variable already bound by an enclosing clause. *)
let in_scope_var env = pick (Environment.variables env)
(* 1-in-10 chance of an in-scope variable, otherwise a small literal. *)
let int_term env =
if not (Environment.is_empty env) && Random.int 10 < 1 then
Variable (in_scope_var env)
else
Literal (small_int ())
let iterator env =
if Random.bool ()
then Range { start = int_term env
; direction = if Random.bool () then To else Downto
; stop = int_term env }
else Sequence (replicateG (Random.int 8) (fun () -> int_term env))
(* Both Ranges and Sequences can range from length 0 to 7 (inclusive),
   although with different probabilities *)
let predicate () =
match Random.int 5 with
| 0 -> Positive
| 1 -> Negative
| 2 -> Nonzero
| 3 -> Even
| 4 -> Odd
| _ -> assert false
(* Generate between 1 and [sz] parallel bindings, drawing binder names
   without replacement (except the wildcard, which may repeat). *)
let bindings env sz =
let rec go ~bindings ~available ~used = function
| 0 ->
(* We reverse the list because [_] becomes slightly more likely for
   later-generated values, and this shifts them towards the end of
   the for-and clause *)
List.rev bindings, used
| n ->
let var, available = pick_without_replacement available in
let available, used =
if Var.equal var Var.wildcard
then Var.wildcard :: available, used
else available, Environment.add var used
in
let bindings = { var; iterator = iterator env } :: bindings in
go ~bindings ~available ~used (n-1)
in
go
~bindings:[]
~available:Var.pattern_vars
~used:Environment.empty
(Random.int sz + 1)
(* Guard clauses need an in-scope variable, so only appear once one exists. *)
let clause env sz =
if not (Environment.is_empty env) && Random.int 4 < 1 then
When(predicate (), in_scope_var env), env
else
let bs, env' = bindings env sz in
For bs, Environment.union env env'
let comprehension () =
(* NOTE(review): the two bindings below were lost during extraction; only
   their range comments ([1,5] and [2,6]) survived.  The ranges are
   reconstructed to match those comments -- confirm against upstream. *)
let clause_n = Random.int 5 + 1 (* [1,5] *) in
let for_max = Random.int 5 + 2 (* [2,6] *) in
let rec go env i =
if i = clause_n then
[], env
else
let b, env' = clause env for_max in
let bs, env'' = go (Environment.union env env') (i+1) in
b :: bs, env''
in
let clauses, env = go Environment.empty 0 in
{env; clauses}
end
(* Shrinking of comprehensions.  Two comments in this module had their
   [(* *)] delimiters stripped during extraction, producing bare prose
   lines (syntax errors); they are restored below.  All code is unchanged. *)
module Shrink = struct
open QuickCheck.Shrink
(* Candidate replacement literals: 0, +/-1, +/-2, +/-3. *)
let all_small_ints =
let pos = List.init 3 (( + ) 1) in
let neg = List.map Int.neg pos in
0 :: (pos @ neg)
let all_small_int_lits = List.map (fun n -> Literal n) all_small_ints
(* Shrink a pattern variable towards earlier names in [Var.pattern_vars]. *)
let pattern_var x = Util.take_while (fun p -> x <> p) Var.pattern_vars
let int_term = function
| Literal n -> List.map (fun n -> Literal n) (int n)
| Variable _ -> all_small_int_lits
let iterator = function
| Range { start; direction; stop } ->
[Sequence [start]; Sequence [stop]] @
Util.guard
(match direction with Downto -> true | To -> false)
(Range { start = stop; direction = To; stop = start }) @
List.map
(fun start -> Range { start; direction; stop })
(int_term start) @
List.map
(fun stop -> Range { start; direction; stop })
(int_term stop) @
(match start, stop with
| Literal start, Literal stop ->
let range = match direction with
| To -> Util.range_to
| Downto -> Util.range_downto
in
[Sequence (List.map (fun n -> Literal n) (range start stop))]
| Variable _, _ | _, Variable _ -> [])
| Sequence seq ->
List.map (fun seq -> Sequence seq) (list int_term seq)
let binding ({var = x; iterator = i} as b) =
List.map (fun iterator -> {b with iterator}) (iterator i) @
List.map (fun var -> {b with var}) (pattern_var x)
let predicate p =
Util.take_while (fun p' -> p <> p') all_predicates
(* Shrink a parallel-binding group: either delete one binding (pairing the
   result with the substitution that erases its variable) or shrink one
   binding in place. *)
let parallel_bindings bs =
let env = Bound_vars.bindings bs in
let rec del1_shrink1 = function
| [] ->
[], []
| ({var = x; iterator = i} as b) :: bs ->
let del, shrunk = del1_shrink1 bs in
let cons_b (bs', subst) = b :: bs', subst in
( (bs, Substitution.delete x) :: List.map cons_b del
, List.map
(fun iterator -> {b with iterator} :: bs, Substitution.identity)
(iterator i) @
List.filter_map
(fun var ->
if Environment.is_bound var env
then None
else Some ({b with var} :: bs,
if Var.equal var Var.wildcard
then Substitution.delete x
else Substitution.rename x var))
(pattern_var x) @
List.map cons_b shrunk )
in
match del1_shrink1 bs with
| [[], _], shrunk -> shrunk
| del, shrunk -> del @ shrunk
(* Push a substitution through terms/clauses, enumerating every choice of
   replacement literal via the list monad. *)
module Substitute = struct
open Util.List_monad
let list elt subst = traverse (elt subst)
let int_term subst = function
| Literal n -> pure (Literal n)
| Variable x -> match Substitution.apply subst x with
| None -> pure (Variable x)
| Some Deleted -> all_small_int_lits
| Some (Renamed x') -> pure (Variable x')
let iterator subst = function
| Range { start; direction; stop } ->
let+ start = int_term subst start
and+ stop = int_term subst stop in
Range { start; direction; stop }
| Sequence seq ->
let+ seq = list int_term subst seq in
Sequence seq
let rec parallel_bindings subst = function
| [] ->
(pure [], Environment.empty)
| ({var; iterator = i} as b) :: bs ->
let bss, env = parallel_bindings subst bs in
( (let+ iterator = iterator subst i
and+ bs = bss in
{b with iterator} :: bs)
, Environment.add var env )
let rec clauses subst = function
| [] ->
pure []
| For bs :: cs ->
let bss, env = parallel_bindings subst bs in
let subst = Substitution.shadow_env env subst in
let+ cs = clauses subst cs
and+ bs = bss in
For bs :: cs
| (When(pred, x) as c) :: cs ->
let css = clauses subst cs in
match Substitution.apply subst x with
| None ->
let+ cs = css in
c :: cs
| Some Deleted ->
css
| Some (Renamed x') ->
let+ cs = css in
When(pred, x') :: cs
end
let clauses cs =
let rec del1_shrink1 = function
| [] ->
[], []
| (For bs as c) :: cs ->
let env = Bound_vars.bindings bs in
let bss_substs = parallel_bindings bs in
let del, shrunk = del1_shrink1 cs in
let cons_c cs' = c :: cs' in
( Substitute.clauses (Substitution.delete_env env) cs @
List.map cons_c del
, (let open Util.List_monad in
let* bs, subst = bss_substs in
let+ cs = Substitute.clauses subst cs in
For bs :: cs) @
List.map cons_c shrunk )
| (When(pred, x) as c) :: cs ->
let del, shrunk = del1_shrink1 cs in
let cons_c cs' = c :: cs' in
( cs :: List.map cons_c del
, List.map (fun pred -> When(pred, x) :: cs) (predicate pred) @
List.map cons_c shrunk )
in
match del1_shrink1 cs with
| [[]], shrunk -> shrunk
| del, shrunk -> del @ shrunk
let comprehension {env = _; clauses = cs} =
(* I don't think there's a nice way to either (1) rule out empty lists of
   clauses ahead of time, or (2) compute the environment along the way, so
   we handle both directly via post-processing here. *)
List.filter_map
(fun clauses ->
match clauses with
| [] -> None
| _ :: _ -> Some { env = Bound_vars.clauses clauses; clauses })
(clauses cs)
(* Shrinking twice simplifies both bugs this found on its first go-round,
   since this way we can shrink both the endpoints of a to/downto range or
   shrink two parallel variable names at once. *)
let comprehension = QuickCheck.Shrink.shrink2 comprehension
end
module To_string = struct
(* The three OCaml sequence flavours a comprehension can target. *)
type ocaml_type =
| List
| Mutable_array
| Immutable_array
(* Output language for pretty-printing a comprehension. *)
type format =
| OCaml of ocaml_type
| Haskell
| Python
(* Small string-assembly combinators. *)
let surround o c s = o ^ s ^ c
let parenthesize = surround "(" ")"
let bracket = surround "[" "]"
let spaced = surround " " " "
let tokens = String.concat " "
let comma_separated = String.concat ", "
(* How a language separates the clauses of a comprehension. *)
let comprehension_clauses o = match o with
| OCaml _ | Python -> tokens
| Haskell -> comma_separated
(* One variable prints bare; several print as a parenthesised tuple. *)
let tuple = function
| [tok] -> tok
| toks -> toks |> comma_separated |> parenthesize
(* Sequence delimiters per language/flavour. *)
let sequence = function
| OCaml List | Haskell | Python -> bracket
| OCaml Mutable_array -> surround "[|" "|]"
| OCaml Immutable_array -> surround "[:" ":]"
(* Per-language spellings of arithmetic/comparison operators. *)
let mod_ = function
| OCaml _ -> "mod"
| Haskell -> "`mod`"
| Python -> "%"
let eq = function
| OCaml _ -> "="
| Haskell | Python -> "=="
let neq = function
| OCaml _ -> "<>"
| Haskell -> "/="
| Python -> "!="
let int_term = function
| Literal n -> Int.to_string n
| Variable x -> x
(* Print a term plus/minus one; literals are folded at print time. *)
let succ_int_term = function
| Literal n -> Int.to_string (n + 1)
| Variable x -> x ^ "+1"
let pred_int_term = function
| Literal n -> Int.to_string (n - 1)
| Variable x -> x ^ "-1"
let modulo_check o tgt = [mod_ o; "2"; eq o; tgt]
(* Render a predicate as (tokens before the variable, tokens after). *)
let predicate o = function
| Positive -> [], [">"; "0"]
| Negative -> [], ["<"; "0"]
| Nonzero -> [], [neq o; "0"]
| Even -> begin
match o with
| OCaml _ -> ["abs"], modulo_check o "0"
| Haskell -> ["even"], []
| Python -> [], modulo_check o "0"
end
| Odd -> begin
match o with
| OCaml _ -> ["abs"], modulo_check o "1"
| Haskell -> ["odd"], []
| Python -> [], modulo_check o "1"
end
let ocaml_direction = function
| To -> "to"
| Downto -> "downto"
(* Render one binding ([x = e downto e'], [x <- [e..e']], [x in range(...)],
   etc.) in the target language.  The only change from the extracted source
   is restoring the [(* *)] delimiters of the embedded comment, which had
   been stripped (leaving bare prose lines that do not parse). *)
let binding o {var; iterator} =
let iter = match iterator with
| Range {start; direction; stop} -> begin
match o with
| OCaml _ ->
tokens [ "="
; int_term start
; ocaml_direction direction
; int_term stop ]
| Haskell ->
let step_sep, format_dotdot = match stop with
| Literal n when n < 0 -> " ", spaced
| _ -> "", Fun.id
in
let step = match direction with
| To -> ""
| Downto -> "," ^ step_sep ^ pred_int_term start
in
tokens [ "<-"
; "[" ^
int_term start ^ step ^
format_dotdot ".." ^
int_term stop ^
"]" ]
| Python ->
let stop, step = match direction with
| To -> succ_int_term stop, []
| Downto -> pred_int_term stop, ["-1"]
in
"in range" ^ tuple ([int_term start; stop] @ step)
end
| Sequence seq ->
(* There is one edge case where Haskell can report an ambiguous type
   error: if two variables are drawn from empty lists, and then one
   is enumerated to the other, such as in
   [[(a,b,c) | a <- [], b <- [], c <- [a..b]]], or even more simply
   in [[(a,b) | a <- [], b <- [a..a]]]. Thus, if we have an empty
   list in Haskell, we give it a type. *)
let maybe_type_annotation = match o, seq with
| Haskell, [] -> ["::"; "[Int]"]
| _, _ -> []
in
let sep = match o with
| OCaml _ -> ";"
| Haskell | Python -> ","
in
let seq = seq
|> List.map int_term
|> String.concat (sep ^ " ")
|> sequence o
in
let bind = match o with
| OCaml _ | Python -> "in"
| Haskell -> "<-"
in
tokens ([bind; seq] @ maybe_type_annotation)
in
tokens [var; iter]
(* In Haskell and Python, parallel bindings are interpreted as sequential
   bindings. Python has other problems, so we need a heavier hammer (see
   [Make_all_variables_unique]), but for Haskell, this is the only
   difference we need to address. It doesn't cause problems unless (1) a
   variable [x] is in scope for the parallel bindings, (2) one of the
   parallel bindings binds [x] to something new, and (3) [x] is used on the
   right-hand side of a later binding. In this case, Haskell will see the
   new binding of [x], which will shadow the old one; in OCaml, as these are
   all in parallel, this is not the case. This function renames all such
   variables to [outer_x], with the given let-binding construct. *)
(* Returns (extra let-clauses, rewritten bindings).  Walks the bindings
   left to right, tracking which variables have been shadowed so far; any
   use of a shadowed variable is redirected to a fresh [outer_x], and a
   let-clause capturing the old value is emitted (once per variable). *)
let protect_parallel_bindings let_clause bindings =
let (_bound_vars, _free_vars, outer_lets), bindings =
List.fold_left_map
(fun (shadowed, free_vars, outer_lets) {var; iterator} ->
(* Rewrite a single term, producing the (possibly renamed) term, the
   updated set of already-protected variables, and an optional new
   let-clause. *)
let protect free_vars = function
| Variable x when Environment.is_bound x shadowed ->
let outer = "outer_" ^ x in
let free_vars, outer_let =
if Environment.is_bound x free_vars
then free_vars,
None
else Environment.add x free_vars,
Some (let_clause outer x)
in
Variable outer, free_vars, outer_let
| t ->
t, free_vars, None
in
let iterator, free_vars, outer_lets' =
match iterator with
| Range { start; direction; stop } ->
let start, free_vars, start_outer =
protect free_vars start
in
let stop, free_vars, stop_outer =
protect free_vars stop
in
let outer_lets' =
List.filter_map Fun.id [start_outer; stop_outer]
in
Range { start; direction; stop }, free_vars, outer_lets'
| Sequence seq ->
let rev_seq, free_vars, outer_lets' =
List.fold_left
(fun (rev_ts, free_vars, outer_lets') t ->
let t, free_vars, outer = protect free_vars t in
t :: rev_ts,
free_vars,
Option.fold
~none:Fun.id ~some:List.cons outer outer_lets')
([], free_vars, [])
seq
in
Sequence (List.rev rev_seq), free_vars, outer_lets'
in
( ( Environment.add var shadowed
, free_vars
, outer_lets' :: outer_lets )
, {var; iterator} ))
(Environment.empty, Environment.empty, [])
bindings
in
(* Flatten the per-binding clause lists, restoring generation order. *)
let outer_lets =
let rec rev_rev_concat acc = function
| [] -> acc
| xs :: xss -> rev_rev_concat (List.rev_append xs acc) xss
in rev_rev_concat [] outer_lets
in
outer_lets, bindings
(* Python doesn't shadow variables which have the same name, it reuses the
   same mutable cell. Thus, in the Python list comprehension
   [[a for a in [0] for _ in [0, 0] for a in [a, 1]]], the second [a]
   clobbers the first, and the result is [[0, 1, 1, 1]] instead of (as it
   would be in OCaml or Haskell) [[0, 1, 0, 1]]. To avoid this, we make
   every variable in a Python comprehension unique; the above comprehension
   would become [[a for a2 in [0] for _ in [0, 0] for a in [a2, 1]]]. *)
module Make_all_variables_unique = struct
(* Apply a variable renaming to terms and iterators. *)
module Rename = struct
let var renaming x =
Option.value ~default:x (Var.Map.find_opt x renaming)
let int_term renaming = function
| Literal n -> Literal n
| Variable x -> Variable (var renaming x)
let iterator renaming = function
| Range { start; direction; stop } ->
Range { start = int_term renaming start
; direction
; stop = int_term renaming stop }
| Sequence seq ->
Sequence (List.map (int_term renaming) seq)
end
(* Map each variable bound more than once across all [For] clauses to its
   number of occurrences. *)
let duplicate_bindings clauses =
let merge_counts f =
List.fold_left
(fun m x -> Var.Map.union (fun _ n1 n2 -> Some (n1 + n2)) (f x) m)
Var.Map.empty
in
Var.Map.filter
(fun _ n -> n > 1)
(merge_counts
(function
| For bs ->
merge_counts (fun {var; _} -> Var.Map.singleton var 1) bs
| When _ ->
Var.Map.empty)
clauses)
(* Rename all but the last occurrence of each duplicated binder to
   [xN] (N = remaining occurrence count), threading the renaming so later
   uses see the fresh name. *)
let bindings dups renaming =
List.fold_left_map
(fun (dups, renaming') {var; iterator} ->
let iterator = Rename.iterator renaming iterator in
match Var.Map.find_opt var dups with
| Some n ->
let var' = var ^ Int.to_string n in
let renaming' = Var.Map.add var var' renaming' in
let dups =
Var.Map.update
var
(function
| Some 2 -> None
| Some n -> Some (n-1)
| None -> assert false)
dups
in
(dups, renaming'), {var = var'; iterator}
| None ->
(dups, Var.Map.remove var renaming'), {var; iterator})
(dups, renaming)
let clauses cs =
cs |>
List.fold_left_map
(fun ((dups, renaming) as acc) -> function
| For bs ->
let (dups, renaming), bs = bindings dups renaming bs in
(dups, renaming), For bs
| When(pred, x) ->
acc, When(pred, Rename.var renaming x))
(duplicate_bindings cs, Var.Map.empty) |>
snd
end
(* Render one clause in the target language; Haskell parallel bindings are
   first rewritten via [protect_parallel_bindings]. *)
let clause o = function
| For bindings ->
let intro, sep, (extra_clauses, bindings) =
match o with
| OCaml _ ->
["for"], " and ", ([], bindings)
| Haskell ->
[],
", ",
protect_parallel_bindings
(fun x e -> tokens ["let"; x; "="; e])
bindings
| Python ->
["for"], " for ", ([], bindings)
in
comprehension_clauses o
(extra_clauses @
intro @
[bindings |> List.map (binding o) |> String.concat sep])
| When(pred, x) ->
let kwd = match o with
| OCaml _ -> ["when"]
| Haskell -> []
| Python -> ["if"]
in
let pred_pre, pred_post = predicate o pred in
tokens (kwd @ pred_pre @ (x :: pred_post))
(* Render a whole comprehension; Python first gets its binders uniquified
   (see [Make_all_variables_unique]). *)
let comprehension o {env; clauses} =
let clauses = match o with
| OCaml _ | Haskell -> clauses
| Python -> Make_all_variables_unique.clauses clauses
in
let body = tuple (Environment.variables env) in
let clauses = comprehension_clauses o (List.map (clause o) clauses) in
let sep = match o with
| OCaml _ | Python -> " "
| Haskell -> " | "
in
sequence o (body ^ sep ^ clauses)
end
(* Public QuickCheck plumbing for comprehensions. *)
let generator = Generator.comprehension
let shrink = Shrink.comprehension
let to_string = To_string.comprehension
end
module Interactive_command = struct
(* Spawn [cmd args] as an interactive subprocess.  [output] post-processes
   each line sent to it, [input] reads one reply from its stdout, [setup]
   primes the REPL, and [f] receives a send-then-read function.  The
   process is closed on both normal return and exception. *)
let command cmd args ~setup ~input ~output ~f =
let inch, outch =
Unix.open_process_args cmd (Array.of_list (cmd :: args))
in
let output str = Util.output_line outch (output str) in
let interact str =
output str;
input inch
in
let cleanup () = ignore (Unix.close_process (inch, outch)) in
match setup output; f interact with
| result -> cleanup (); result
| exception e -> cleanup (); raise e
(* We need to read every comprehension's output in a character-wise identical
   way. We settle on Python's list syntax: square brackets (no pipes),
   comma-separated, with spaces after all the commas. This choice is because
   (1) it's easier to replace all of OCaml's semicolons with commas than it
   is to replace *some* of Haskell/Python's commas with semicolons; and (2) it
   looks nicer to have spaces after commas (like Python, as well as OCaml)
   than to not do so (like Haskell). *)
(* OCaml source, injected into the toplevel, that prints lists in Python's
   ["[a, b, c]"] style so outputs can be compared byte-for-byte. *)
let ocaml_code_pp_list_as_python = {|
let pp_list pp_elt fmt xs =
let buf = Buffer.create 256 in
let rec fill_buf prefix = function
| x :: xs ->
let fbuf = Format.formatter_of_buffer buf in
Format.pp_set_max_indent fbuf Int.max_int;
Buffer.add_string buf prefix;
Format.fprintf fbuf "%a%!" pp_elt x;
fill_buf ", " xs
| [] ->
();
in
Buffer.add_char buf '[';
fill_buf "" xs;
Buffer.add_char buf ']';
Format.fprintf fmt "%s" (Buffer.contents buf)
|}
(* Read an OCaml list/array result (possibly spanning lines) and normalise
   it to Python list syntax: strip [| |] / [: :] delimiters, collapse
   whitespace, and turn semicolons into commas. *)
let input_ocaml_list_or_array_as_python_list i =
let input = Buffer.create 16 in
let rec input_lines () =
let line = input_line i in
Buffer.add_string input line;
if not (String.contains line ']') then input_lines ()
in
input_lines ();
let raw_list = Buffer.contents input in
let start = String.index raw_list '[' in
let stop = String.rindex raw_list ']' in
let list = String.sub raw_list start (stop - start + 1) in
list
|> Str.global_replace (Str.regexp "[ \n]+") " "
|> Str.global_replace (Str.regexp "\\[[|:]") "["
|> Str.global_replace (Str.regexp "[|:]\\]") "]"
|> Str.global_replace (Str.regexp ";") ","
(* GHC prints lists without spaces after commas; add them. *)
let input_haskell_list_as_python_list i =
i |> input_line |> Str.global_replace (Str.regexp ",") ", "
(* An OCaml toplevel with comprehensions/iarrays enabled and the Python-style
   list printer installed; each sent phrase is terminated with ";;". *)
let ocaml ~f =
command
"../../../ocaml"
[ "-extension"; "comprehensions"
; "-extension"; "immutable_arrays"
; "-noprompt"; "-no-version"
; "-w"; "no-unused-var" ]
~setup:(fun output ->
output ("#print_length " ^ Int.to_string Int.max_int);
output ocaml_code_pp_list_as_python;
output "#install_printer pp_list")
~input:input_ocaml_list_or_array_as_python_list
~output:(fun str -> str ^ ";;")
~f
(* If GHCi isn't on a tty, it doesn't display a prompt, AFAICT *)
(* A quiet GHCi session (no banner, no .ghci). *)
let haskell ~f =
command
"/usr/bin/ghci"
["-v0"; "-ignore-dot-ghci"]
~setup:(Fun.const ())
~input:input_haskell_list_as_python_list
~output:Fun.id
~f
(* An interactive Python with its ">>> " prompt suppressed. *)
let python ~f =
command
"/usr/bin/python3"
["-qic"; "import sys\nsys.ps1 = ''"]
~setup:(Fun.const ())
~input:input_line
~output:Fun.id
~f
end
module Log_test_cases = struct
(* Tee every OCaml test phrase into [file]; [f] receives a logging function
   and its result is returned after the log is flushed and closed (the
   channel is also closed on exception). *)
let to_file file f =
let oc = open_out file in
try
output_string oc
(* NOTE(review): this header string literal was corrupted during
   extraction; reconstructed from the surviving fragments -- confirm
   against upstream. *)
{|(* Generated by quickcheck_lists_arrays_haskell_python.ml; filtered down to all
   tests of reasonable size; and reflowed to fit in 80-character lines. *)

(* NOTE: If you're saving these tests, don't forget to make those last two
   changes! *)|};
let r = f (Printf.fprintf oc "\n\n%s;;\n[%%%%expect{||}];;%!") in
output_char oc '\n';
close_out oc;
r
with
| exn ->
close_out_noerr oc;
raise exn
end
module Main = struct
(* The five outputs produced for one comprehension, one per target. *)
type output = { ocaml_list : string
; ocaml_mutable_array : string
; ocaml_immutable_array : string
; haskell : string
; python : string }
let output_for o output =
match (o : Comprehension.To_string.format) with
| OCaml List -> output.ocaml_list
| OCaml Mutable_array -> output.ocaml_mutable_array
| OCaml Immutable_array -> output.ocaml_immutable_array
| Haskell -> output.haskell
| Python -> output.python
(* Print a failing comprehension in all five syntaxes; when [data] is
   present, also print what each target actually produced. *)
let print_counterexample oc counterexample data =
let printf format_string = Printf.fprintf oc format_string in
let output_for, printf_for_data = match data with
| Some data -> (fun o -> output_for o data), printf
| None -> (fun _ -> ""), Printf.ifprintf oc
in
let print_comprehension tag align o =
let counterexample_str = Comprehension.to_string o counterexample in
let indent = String.make (String.length tag) ' ' in
printf " %s:%s %s\n" tag align counterexample_str;
printf_for_data " %s %s = %s\n" indent align (output_for o)
in
print_comprehension "OCaml list" " " (OCaml List);
print_comprehension "OCaml array" " " (OCaml Mutable_array);
print_comprehension "OCaml iarray" "" (OCaml Immutable_array);
print_comprehension "Haskell" " " Haskell;
print_comprehension "Python" " " Python
(* The property under test: all three OCaml flavours, GHCi, and Python
   print the same (normalised) value for every generated comprehension. *)
let different_comprehensions_agree ?seed ?output max_tests =
let ( = ) = String.equal in
Interactive_command.ocaml ~f:(fun ocaml ->
Interactive_command.haskell ~f:(fun haskell ->
Interactive_command.python ~f:(fun python ->
Log_test_cases.to_file "comprehensions_from_quickcheck-log.ml" (fun log ->
let ocaml comp = log comp; ocaml comp in
QuickCheck.main
?seed ?output
max_tests
Comprehension.generator Comprehension.shrink
print_counterexample
(fun c ->
let run repl fmt = repl (Comprehension.to_string fmt c) in
let ocaml_list = run ocaml (OCaml List) in
let ocaml_mutable_array = run ocaml (OCaml Mutable_array) in
let ocaml_immutable_array = run ocaml (OCaml Immutable_array) in
let haskell = run haskell Haskell in
let python = run python Python in
if ocaml_list = ocaml_mutable_array &&
ocaml_mutable_array = ocaml_immutable_array &&
ocaml_immutable_array = haskell &&
haskell = python
then OK
else Failed_with
{ ocaml_list
; ocaml_mutable_array
; ocaml_immutable_array
; haskell
; python })))))
end
let () = Main.different_comprehensions_agree 1_000
|
4a97d8b2c8d64c687b3475cb91c1845f317c7c52c216d2c2d6b8a9abc44b3f9e | mejgun/haskell-tdlib | AddedReactions.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.AddedReactions where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.AddedReaction as AddedReaction
import qualified Utils as U
-- |
-- | Represents a list of reactions added to a message @total_count The total number of found reactions @reactions The list of added reactions @next_offset The offset for the next request. If empty, there are no more results
--
-- NOTE(review): extraction dropped the @data AddedReactions =@ head and the
-- @--@ comment markers; restored here to match the generated tdlib style.
data AddedReactions = AddedReactions
  { -- | The offset for the next request; if empty, there are no more results
    next_offset :: Maybe String,
    -- | The list of added reactions
    reactions :: Maybe [AddedReaction.AddedReaction],
    -- | The total number of found reactions
    total_count :: Maybe Int
  }
  deriving (Eq)
-- | Debug rendering via the project's 'U.cc' / 'U.p' helpers.
instance Show AddedReactions where
  show
    AddedReactions
      { next_offset = next_offset_,
        reactions = reactions_,
        total_count = total_count_
      } =
      "AddedReactions"
        ++ U.cc
          [ U.p "next_offset" next_offset_,
            U.p "reactions" reactions_,
            U.p "total_count" total_count_
          ]
-- | Accepts only objects whose @\@type@ tag is @addedReactions@; all fields
-- are optional ('A..:?').
instance T.FromJSON AddedReactions where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String
    case t of
      "addedReactions" -> parseAddedReactions v
      _ -> mempty
    where
      parseAddedReactions :: A.Value -> T.Parser AddedReactions
      parseAddedReactions = A.withObject "AddedReactions" $ \o -> do
        next_offset_ <- o A..:? "next_offset"
        reactions_ <- o A..:? "reactions"
        total_count_ <- o A..:? "total_count"
        return $ AddedReactions {next_offset = next_offset_, reactions = reactions_, total_count = total_count_}
  parseJSON _ = mempty
-- | Emits the @\@type@ discriminator expected by the tdlib JSON protocol.
instance T.ToJSON AddedReactions where
  toJSON
    AddedReactions
      { next_offset = next_offset_,
        reactions = reactions_,
        total_count = total_count_
      } =
      A.object
        [ "@type" A..= T.String "addedReactions",
          "next_offset" A..= next_offset_,
          "reactions" A..= reactions_,
          "total_count" A..= total_count_
        ]
|
|
|
|
| |
module TD.Data.AddedReactions where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.AddedReaction as AddedReaction
import qualified Utils as U
| Represents a list of reactions added to a message @total_count The total number of found reactions @reactions The list of added reactions @next_offset The offset for the next request . If empty , there are no more results
AddedReactions
next_offset :: Maybe String,
reactions :: Maybe [AddedReaction.AddedReaction],
total_count :: Maybe Int
}
deriving (Eq)
instance Show AddedReactions where
show
AddedReactions
{ next_offset = next_offset_,
reactions = reactions_,
total_count = total_count_
} =
"AddedReactions"
++ U.cc
[ U.p "next_offset" next_offset_,
U.p "reactions" reactions_,
U.p "total_count" total_count_
]
instance T.FromJSON AddedReactions where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"addedReactions" -> parseAddedReactions v
_ -> mempty
where
parseAddedReactions :: A.Value -> T.Parser AddedReactions
parseAddedReactions = A.withObject "AddedReactions" $ \o -> do
next_offset_ <- o A..:? "next_offset"
reactions_ <- o A..:? "reactions"
total_count_ <- o A..:? "total_count"
return $ AddedReactions {next_offset = next_offset_, reactions = reactions_, total_count = total_count_}
parseJSON _ = mempty
instance T.ToJSON AddedReactions where
toJSON
AddedReactions
{ next_offset = next_offset_,
reactions = reactions_,
total_count = total_count_
} =
A.object
[ "@type" A..= T.String "addedReactions",
"next_offset" A..= next_offset_,
"reactions" A..= reactions_,
"total_count" A..= total_count_
]
|
8c26405034ce7c4a7fde65a753c2552e0e347516212d1015cf5e2e006e18d550 | owlbarn/owl_opt | gd_d.ml | (** Vanilla gradient descent *)
module Make = Gd.Make (Owl.Algodiff.D)
| null | https://raw.githubusercontent.com/owlbarn/owl_opt/c3b34072dddbce2d70e1698c5f1fd84d783f9cef/src/opt/gd/gd_d.ml | ocaml | * Vanilla gradient descent | module Make = Gd.Make (Owl.Algodiff.D)
|
09e937a63fd07f82a62d0492ca5488c2b387d91d3b7a3604b6c0db92b016789e | melange-re/ppx_jsx_embed | test.expected.ml | let x ~children =
((div ~id:(("omg")[@reason.raw_literal "omg"]) ~children:[] ())[@JSX ])
[@@react.component ]
let x ~some_prop ~children =
((div ~some_prop ~other_prop:some_prop ~children:[] ())[@JSX ])[@@react.component
]
| null | https://raw.githubusercontent.com/melange-re/ppx_jsx_embed/6952024fdc5b82efa909fd6fe65f09b6ec92214f/test/test.expected.ml | ocaml | let x ~children =
((div ~id:(("omg")[@reason.raw_literal "omg"]) ~children:[] ())[@JSX ])
[@@react.component ]
let x ~some_prop ~children =
((div ~some_prop ~other_prop:some_prop ~children:[] ())[@JSX ])[@@react.component
]
| |
f2799d58da1f55043e9e47b56e87eac1821f7ef59e6d57349799a7d18a5d3091 | mgmillani/pictikz | Graph.hs | Copyright 2017
This file is part of pictikz .
pictikz is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
pictikz is distributed in the hope that it will be useful ,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-- along with pictikz. If not, see </>.
module Pictikz.Graph where
import Pictikz.Elements
import qualified Pictikz.Geometry as G
import qualified Pictikz.Text as T
import qualified Pictikz.Output.Tikz as Tikz
import Pictikz.Parser
import Data.List
-- | A node: x/y position, identifier, label lines, styles, and the
-- (first, last) animation frames in which it is visible.
data Node a = Node a a String [T.Text] [GraphStyle] (Int, Int) deriving (Show, Read, Eq, Ord)
-- | An edge between two node identifiers, with styles and visibility frames.
data Edge = Edge String String [GraphStyle] (Int, Int) deriving (Show, Read, Eq, Ord)
data Graph a = Graph [Node a] [Edge] deriving (Show, Read, Eq, Ord)
instance Positionable Node where
  getPos (Node x y _ _ _ _) = (x,y)
  fPos f (Node x y id name style time) = let (x1,y1) = f (x,y) in Node x1 y1 id name style time
instance Temporal (Node a) where
  getTime (Node _ _ _ _ _ time) = time
  fTime f (Node x y id name style time) = (Node x y id name style (f time))
instance Temporal Edge where
  getTime (Edge _ _ _ time) = time
  fTime f (Edge n1 n2 style time) = (Edge n1 n2 style (f time))
-- | Each style renders as a TikZ option, prefixed with ", " so options can
-- be concatenated and the leading separator dropped afterwards.
instance Tikz.Drawable GraphStyle where
  draw Dotted = ", pictikz-dotted"
  draw Dashed = ", pictikz-dashed"
  draw Thick = ", pictikz-thick"
  draw Rectangle = ", pictikz-rectangle"
  draw Circle = ", pictikz-node"
  draw (Fill c) = ", fill=" ++ c
  draw (Stroke c) = ", draw=" ++ c
  draw (Arrow ArrowNone) = ""
  draw (Arrow t) = ", " ++ show t
  draw LeftAligned = ", left"
  draw RightAligned = ", right"
  draw Centered = ", center"
-- | Render a node as a TikZ \node command; alignment styles are pulled out
-- of the option list and emitted as the [align=...] key instead.  When the
-- node is animated (t1 > 0) the command is wrapped in beamer's \uncover.
instance (Num a, Show a) => Tikz.Drawable (Node a) where
  draw (Node x y id name style (t0, t1)) = concat
    [ if t1 > 0 then "\\uncover<" ++ show t0 ++ "-" ++ show t1 ++ ">{ " else ""
    , "\\node["
    , drop 2 $ concatMap Tikz.draw $ filter (\s -> not $ s `elem` [LeftAligned, Centered, RightAligned]) style
    , "] ("
    , id
    , ") at ("
    , show x
    , ", "
    , show y
    , ") [align=" ++ (drop 2 $ Tikz.draw alignment) ++ "]{"
    , escapeLines $ concatMap Tikz.draw name
    , "};"
    , if t1 > 0 then " }\n" else "\n"
    ]
    where
      -- Turn newlines into LaTeX line breaks, dropping a lone trailing one.
      escapeLines "\n" = []
      escapeLines ('\n':r) = "\\\\ " ++ escapeLines r
      escapeLines (a:r) = a : escapeLines r
      escapeLines [] = []
      -- First alignment style wins; default is LeftAligned.
      alignment = head $ filter (\f -> f `elem` [LeftAligned, RightAligned, Centered]) (style ++ [LeftAligned])
-- | Render an edge as a TikZ \draw ... edge command; when the edge is
-- animated (till > 0) the command is wrapped in beamer's \uncover.
instance Tikz.Drawable Edge where
  draw (Edge src dst styles (from, till)) =
    prefix ++ "\\draw[" ++ options ++ "] (" ++ src ++ ") edge (" ++ dst ++ ");" ++ suffix
    where
      animated = till > 0
      -- Drop the leading ", " produced by the first style option.
      options  = drop 2 (concatMap Tikz.draw styles)
      prefix   = if animated then "\\uncover<" ++ show from ++ "-" ++ show till ++ ">{ " else ""
      suffix   = if animated then " }\n" else "\n"
-- | A graph is drawn by emitting all nodes first, then all edges.
instance (Num a, Show a) => Tikz.Drawable (Graph a) where
  draw (Graph nodes edges) = concat $ map Tikz.draw nodes ++ map Tikz.draw edges
-- | Build one Graph per layer; the layer index becomes the animation frame.
makeGraph layers colors = map (makeGraph' colors) $ zip layers [0..]
-- Convert one SVG layer into a graph: objects become nodes (y is negated to
-- flip SVG's downward axis into TikZ's upward one), lines become edges
-- attached to their nearest endpoints, and free text is attached to the
-- closest object as its label.
makeGraph' colors ((Layer elements), t) =
  let gnames = filter isText elements
      gnodes = assignNames (fixGraphStyle colors (fixIDs $ filter isObject elements)) gnames
      gedges = map (closest t gnodes) $ fixGraphStyle colors $ filter isLine elements
  in Graph (map (fPos (\(x,y) -> (x,-y))) (map (toNode t) gnodes)) (gedges) :: Graph Double
-- Attach each text paragraph to its nearest object; an object receives at
-- most one label (matched objects are removed from further consideration).
assignNames [] _ = []
assignNames gnodes [] = gnodes
assignNames gnodes (t:ts) =
  let (Object s iD n style) = assignName gnodes t
  in (Object s iD n style) : assignNames (filter (\(Object _ iD1 _ _) -> iD1 /= iD) gnodes) ts
-- Pick the object closest (by squared distance) to the paragraph's anchor
-- and merge the paragraph's formatting into the object's style.
assignName gnodes (Paragraph x0 y0 text format) =
  let dist s = G.squareDistance (x0,y0) s
      (Object s iD _ style) = minimumBy (\(Object s0 _ _ _) (Object s1 _ _ _) -> compare (dist s0) (dist s1) ) gnodes
      style' = format ++ style
  in (Object s iD text style')
-- | Translate raw SVG style attributes into parsed pictikz styles:
-- stroke widths are classified as Thick relative to the document's
-- min/max widths, dash lengths become Dashed/Dotted, and fill/stroke
-- colours are snapped to the nearest named colour in [colors].
--
-- NOTE(review): 'minimum'/'maximum' here are partial; if no element carries
-- a Thick raw style but some element is Dashed/Fill/Stroke, forcing
-- minStroke would crash.  Presumably every stroked SVG element reports a
-- stroke width, so strokeWs is non-empty in practice -- confirm upstream.
fixGraphStyle :: (Ord a, Floating a) => [(Color, String)] -> [Element a] -> [Element a]
fixGraphStyle colors ls =
  let strokeWs = map (\(RawGraphStyle s v) -> v) $ filter (\(RawGraphStyle s v) -> s == Thick) $ concatMap getStyle ls
      minStroke = minimum strokeWs
      maxStroke = maximum strokeWs
      midStroke = minStroke + (maxStroke - minStroke) / 2
      -- Fold one raw style list into parsed styles, accumulating arrow ends
      -- (both line tips) into a single Arrow style appended at the end.
      fixLine arrow [] = [Parsed $ Arrow arrow]
      fixLine arrow (s:ss) = case s of
        RawGraphStyle Thick v -> if v > midStroke && v > minStroke * 1.4 then Parsed Thick : fixLine arrow ss else (fixLine arrow ss)
        RawGraphStyle Dashed v -> (Parsed $ if v > 2*minStroke then Dashed else Dotted) : (fixLine arrow ss)
        RawGraphStyle (Arrow a) _ -> fixLine (joinArrow a arrow) ss
        RawGraphStyle (Fill c) _ ->
          let color = readColor c
              dists = map (\(c', n) -> (rgbDist c' color, n)) colors
              (_, cname) = minimumBy (\(c0, _) (c1, _) -> compare c0 c1) dists
          in Parsed (Fill cname) : fixLine arrow ss
        RawGraphStyle (Stroke c) _ ->
          let color = readColor c
              dists = map (\(c', n) -> (rgbDist c' color, n)) colors
              (_, cname) = minimumBy (\(c0, _) (c1, _) -> compare c0 c1) dists
          in Parsed (Stroke cname) : fixLine arrow ss
        RawGraphStyle s _ -> Parsed s : (fixLine arrow ss)
  in map (fStyle (fixLine ArrowNone)) ls
-- Give objects with an empty id a positional one (their 1-based index).
fixIDs objs = zipWith (\(Object s iD name style) i -> if null iD then (Object s (show i) name style) else (Object s iD name style)) objs [1..]
closest t vertices (Line x0 y0 x1 y1 a) =
let p0 = (x0, y0)
p1 = (x1, y1)
(n0,_) = minimumBy (\p q -> compare (snd p) (snd q)) $ map (\(Object shape id _ _) -> (id, G.squareDistance p0 shape)) vertices
(n1,_) = minimumBy (\p q -> compare (snd p) (snd q)) $ map (\(Object shape id _ _) -> (id, G.squareDistance p1 shape)) vertices
in Edge n0 n1 (map graphStyle a) (t,t)
toNode t (Object (G.Rectangle x y w h) id name style) = Node (x + w/2) (y + h/2) id name (map graphStyle style) (t,t)
toNode t (Object (G.Ellipsis x y _ _) id name style) = Node x y id name (map graphStyle style) (t,t)
| null | https://raw.githubusercontent.com/mgmillani/pictikz/3bae9db14a8563f5b2c97bb5d9d30ba54f75b62f/src/Pictikz/Graph.hs | haskell | (at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with pictikz. If not, see </>. | Copyright 2017
This file is part of pictikz .
pictikz is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
pictikz is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
module Pictikz.Graph where
import Pictikz.Elements
import qualified Pictikz.Geometry as G
import qualified Pictikz.Text as T
import qualified Pictikz.Output.Tikz as Tikz
import Pictikz.Parser
import Data.List
data Node a = Node a a String [T.Text] [GraphStyle] (Int, Int) deriving (Show, Read, Eq, Ord)
data Edge = Edge String String [GraphStyle] (Int, Int) deriving (Show, Read, Eq, Ord)
data Graph a = Graph [Node a] [Edge] deriving (Show, Read, Eq, Ord)
instance Positionable Node where
getPos (Node x y _ _ _ _) = (x,y)
fPos f (Node x y id name style time) = let (x1,y1) = f (x,y) in Node x1 y1 id name style time
instance Temporal (Node a) where
getTime (Node _ _ _ _ _ time) = time
fTime f (Node x y id name style time) = (Node x y id name style (f time))
instance Temporal Edge where
getTime (Edge _ _ _ time) = time
fTime f (Edge n1 n2 style time) = (Edge n1 n2 style (f time))
instance Tikz.Drawable GraphStyle where
draw Dotted = ", pictikz-dotted"
draw Dashed = ", pictikz-dashed"
draw Thick = ", pictikz-thick"
draw Rectangle = ", pictikz-rectangle"
draw Circle = ", pictikz-node"
draw (Fill c) = ", fill=" ++ c
draw (Stroke c) = ", draw=" ++ c
draw (Arrow ArrowNone) = ""
draw (Arrow t) = ", " ++ show t
draw LeftAligned = ", left"
draw RightAligned = ", right"
draw Centered = ", center"
instance (Num a, Show a) => Tikz.Drawable (Node a) where
draw (Node x y id name style (t0, t1)) = concat
[ if t1 > 0 then "\\uncover<" ++ show t0 ++ "-" ++ show t1 ++ ">{ " else ""
, "\\node["
, drop 2 $ concatMap Tikz.draw $ filter (\s -> not $ s `elem` [LeftAligned, Centered, RightAligned]) style
, "] ("
, id
, ") at ("
, show x
, ", "
, show y
, ") [align=" ++ (drop 2 $ Tikz.draw alignment) ++ "]{"
, escapeLines $ concatMap Tikz.draw name
, "};"
, if t1 > 0 then " }\n" else "\n"
]
where
escapeLines "\n" = []
escapeLines ('\n':r) = "\\\\ " ++ escapeLines r
escapeLines (a:r) = a : escapeLines r
escapeLines [] = []
alignment = head $ filter (\f -> f `elem` [LeftAligned, RightAligned, Centered]) (style ++ [LeftAligned])
instance Tikz.Drawable Edge where
draw (Edge n1 n2 style (t0, t1)) = concat
[ if t1 > 0 then "\\uncover<" ++ show t0 ++ "-" ++ show t1 ++">{ " else ""
, "\\draw["
, drop 2 $ concatMap Tikz.draw style
, "] ("
, n1
, ") edge ("
, n2
, ");"
, if t1 > 0 then " }\n" else "\n"
]
instance (Num a, Show a) => Tikz.Drawable (Graph a) where
draw (Graph nodes edges) = concat $ map Tikz.draw nodes ++ map Tikz.draw edges
makeGraph layers colors = map (makeGraph' colors) $ zip layers [0..]
makeGraph' colors ((Layer elements), t) =
let gnames = filter isText elements
gnodes = assignNames (fixGraphStyle colors (fixIDs $ filter isObject elements)) gnames
gedges = map (closest t gnodes) $ fixGraphStyle colors $ filter isLine elements
in Graph (map (fPos (\(x,y) -> (x,-y))) (map (toNode t) gnodes)) (gedges) :: Graph Double
assignNames [] _ = []
assignNames gnodes [] = gnodes
assignNames gnodes (t:ts) =
let (Object s iD n style) = assignName gnodes t
in (Object s iD n style) : assignNames (filter (\(Object _ iD1 _ _) -> iD1 /= iD) gnodes) ts
assignName gnodes (Paragraph x0 y0 text format) =
let dist s = G.squareDistance (x0,y0) s
(Object s iD _ style) = minimumBy (\(Object s0 _ _ _) (Object s1 _ _ _) -> compare (dist s0) (dist s1) ) gnodes
style' = format ++ style
in (Object s iD text style')
fixGraphStyle :: (Ord a, Floating a) => [(Color, String)] -> [Element a] -> [Element a]
fixGraphStyle colors ls =
let strokeWs = map (\(RawGraphStyle s v) -> v) $ filter (\(RawGraphStyle s v) -> s == Thick) $ concatMap getStyle ls
minStroke = minimum strokeWs
maxStroke = maximum strokeWs
midStroke = minStroke + (maxStroke - minStroke) / 2
fixLine arrow [] = [Parsed $ Arrow arrow]
fixLine arrow (s:ss) = case s of
RawGraphStyle Thick v -> if v > midStroke && v > minStroke * 1.4 then Parsed Thick : fixLine arrow ss else (fixLine arrow ss)
RawGraphStyle Dashed v -> (Parsed $ if v > 2*minStroke then Dashed else Dotted) : (fixLine arrow ss)
RawGraphStyle (Arrow a) _ -> fixLine (joinArrow a arrow) ss
RawGraphStyle (Fill c) _ ->
let color = readColor c
dists = map (\(c', n) -> (rgbDist c' color, n)) colors
(_, cname) = minimumBy (\(c0, _) (c1, _) -> compare c0 c1) dists
in Parsed (Fill cname) : fixLine arrow ss
RawGraphStyle (Stroke c) _ ->
let color = readColor c
dists = map (\(c', n) -> (rgbDist c' color, n)) colors
(_, cname) = minimumBy (\(c0, _) (c1, _) -> compare c0 c1) dists
in Parsed (Stroke cname) : fixLine arrow ss
RawGraphStyle s _ -> Parsed s : (fixLine arrow ss)
in map (fStyle (fixLine ArrowNone)) ls
fixIDs objs = zipWith (\(Object s iD name style) i -> if null iD then (Object s (show i) name style) else (Object s iD name style)) objs [1..]
closest t vertices (Line x0 y0 x1 y1 a) =
let p0 = (x0, y0)
p1 = (x1, y1)
(n0,_) = minimumBy (\p q -> compare (snd p) (snd q)) $ map (\(Object shape id _ _) -> (id, G.squareDistance p0 shape)) vertices
(n1,_) = minimumBy (\p q -> compare (snd p) (snd q)) $ map (\(Object shape id _ _) -> (id, G.squareDistance p1 shape)) vertices
in Edge n0 n1 (map graphStyle a) (t,t)
toNode t (Object (G.Rectangle x y w h) id name style) = Node (x + w/2) (y + h/2) id name (map graphStyle style) (t,t)
toNode t (Object (G.Ellipsis x y _ _) id name style) = Node x y id name (map graphStyle style) (t,t)
|
c44e4cb8ee2a939a0b285e482c892206dff44c965ae15bc21c7a92c04822ffd9 | EMSL-NMR-EPR/Haskell-MFAPipe-Executable | FluxCovarianceMatrix.hs | module MFAPipe.Csv.Types.FluxCovarianceMatrix
( FluxCovarianceMatrixRecords(..)
, FluxCovarianceMatrixRecord(..)
, encode
, encodeWith
) where
import Control.Applicative (liftA2)
import Data.ByteString.Lazy (ByteString)
import qualified Data.Csv
import Data.Csv (ToField(), ToNamedRecord(toNamedRecord), EncodeOptions, Header)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict
import Data.Set (Set)
import qualified Data.Set
import qualified GHC.Exts
import MFAPipe.Csv.Constants
data FluxCovarianceMatrixRecords i e = FluxCovarianceMatrixRecords (Set i) (Map i (Map i e))
deriving (Eq, Ord, Read, Show)
data FluxCovarianceMatrixRecord i e = FluxCovarianceMatrixRecord (Set i) i (Map i e)
deriving (Eq, Ord, Read, Show)
instance (ToField i, ToField e, Ord i, Num e) => ToNamedRecord (FluxCovarianceMatrixRecord i e) where
toNamedRecord (FluxCovarianceMatrixRecord ixs ix covarMap) = Data.Csv.namedRecord $
Data.Csv.namedField cFluxVarFieldName ix
: GHC.Exts.build (\cons nil -> Data.Set.foldr (\ix' -> let covar = Data.Map.Strict.findWithDefault 0 ix' covarMap in cons (Data.Csv.namedField (Data.Csv.toField ix') covar)) nil ixs)
encode :: (ToField i, ToField e, Ord i, Num e) => FluxCovarianceMatrixRecords i e -> ByteString
encode = encodeWith Data.Csv.defaultEncodeOptions
encodeWith :: (ToField i, ToField e, Ord i, Num e) => EncodeOptions -> FluxCovarianceMatrixRecords i e -> ByteString
encodeWith opts = liftA2 (Data.Csv.encodeByNameWith opts) toHeader toList
toHeader :: (ToField i) => FluxCovarianceMatrixRecords i e -> Header
toHeader (FluxCovarianceMatrixRecords ixs _) = Data.Csv.header $
cFluxVarFieldName
: GHC.Exts.build (\cons nil -> Data.Set.foldr (\ix -> cons (Data.Csv.toField ix)) nil ixs)
toList :: FluxCovarianceMatrixRecords i e -> [FluxCovarianceMatrixRecord i e]
toList (FluxCovarianceMatrixRecords ixs m) = GHC.Exts.build (\cons nil -> Data.Map.Strict.foldrWithKey (\ix covarMap -> cons (FluxCovarianceMatrixRecord ixs ix covarMap)) nil m)
| null | https://raw.githubusercontent.com/EMSL-NMR-EPR/Haskell-MFAPipe-Executable/8a7fd13202d3b6b7380af52d86e851e995a9b53e/MFAPipe/app/MFAPipe/Csv/Types/FluxCovarianceMatrix.hs | haskell | module MFAPipe.Csv.Types.FluxCovarianceMatrix
( FluxCovarianceMatrixRecords(..)
, FluxCovarianceMatrixRecord(..)
, encode
, encodeWith
) where
import Control.Applicative (liftA2)
import Data.ByteString.Lazy (ByteString)
import qualified Data.Csv
import Data.Csv (ToField(), ToNamedRecord(toNamedRecord), EncodeOptions, Header)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict
import Data.Set (Set)
import qualified Data.Set
import qualified GHC.Exts
import MFAPipe.Csv.Constants
data FluxCovarianceMatrixRecords i e = FluxCovarianceMatrixRecords (Set i) (Map i (Map i e))
deriving (Eq, Ord, Read, Show)
data FluxCovarianceMatrixRecord i e = FluxCovarianceMatrixRecord (Set i) i (Map i e)
deriving (Eq, Ord, Read, Show)
instance (ToField i, ToField e, Ord i, Num e) => ToNamedRecord (FluxCovarianceMatrixRecord i e) where
toNamedRecord (FluxCovarianceMatrixRecord ixs ix covarMap) = Data.Csv.namedRecord $
Data.Csv.namedField cFluxVarFieldName ix
: GHC.Exts.build (\cons nil -> Data.Set.foldr (\ix' -> let covar = Data.Map.Strict.findWithDefault 0 ix' covarMap in cons (Data.Csv.namedField (Data.Csv.toField ix') covar)) nil ixs)
encode :: (ToField i, ToField e, Ord i, Num e) => FluxCovarianceMatrixRecords i e -> ByteString
encode = encodeWith Data.Csv.defaultEncodeOptions
encodeWith :: (ToField i, ToField e, Ord i, Num e) => EncodeOptions -> FluxCovarianceMatrixRecords i e -> ByteString
encodeWith opts = liftA2 (Data.Csv.encodeByNameWith opts) toHeader toList
toHeader :: (ToField i) => FluxCovarianceMatrixRecords i e -> Header
toHeader (FluxCovarianceMatrixRecords ixs _) = Data.Csv.header $
cFluxVarFieldName
: GHC.Exts.build (\cons nil -> Data.Set.foldr (\ix -> cons (Data.Csv.toField ix)) nil ixs)
toList :: FluxCovarianceMatrixRecords i e -> [FluxCovarianceMatrixRecord i e]
toList (FluxCovarianceMatrixRecords ixs m) = GHC.Exts.build (\cons nil -> Data.Map.Strict.foldrWithKey (\ix covarMap -> cons (FluxCovarianceMatrixRecord ixs ix covarMap)) nil m)
| |
e2376ae508383bde8a03482711ad34e5a2452f5774e0e9db49997795f8f50cfe | raaz-crypto/raaz | Instances.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - orphans #
-- | Some common instances that are required by the test cases.
module Tests.Core.Instances () where
import Tests.Core.Imports
import Raaz.Primitive.Poly1305.Internal as Poly1305
import Raaz.Core.Types.Internal
import Raaz.Primitive.Keyed.Internal as Keyed
instance Arbitrary w => Arbitrary (LE w) where
arbitrary = littleEndian <$> arbitrary
instance Arbitrary w => Arbitrary (BE w) where
arbitrary = bigEndian <$> arbitrary
instance Arbitrary w => Arbitrary (BYTES w) where
arbitrary = BYTES <$> arbitrary
instance Arbitrary ByteString where
arbitrary = pack <$> arbitrary
--------------- Arbitrary instances for Hashes ----------------
instance Arbitrary Sha256 where
arbitrary = genEncodable
instance Arbitrary Sha512 where
arbitrary = genEncodable
instance Arbitrary Blake2b where
arbitrary = genEncodable
instance Arbitrary Blake2s where
arbitrary = genEncodable
-------------- Parameter block for -------------
---------------- Arbitrary instaces of encoded data --------------
instance Arbitrary Base16 where
arbitrary = encodeByteString . pack <$> listOf arbitrary
instance Arbitrary Base64 where
arbitrary = encodeByteString . pack <$> listOf arbitrary
------------------ For ChaCha20 types -------------------------
instance Arbitrary (Key ChaCha20) where
arbitrary = genEncodable
instance Arbitrary (Nounce ChaCha20) where
arbitrary = genEncodable
instance Arbitrary (BlockCount ChaCha20) where
arbitrary = toEnum <$> arbitrary
------------------ For XChaCha20 types -------------------------
instance Arbitrary (Key XChaCha20) where
arbitrary = genEncodable
instance Arbitrary (Key (Keyed prim)) where
arbitrary = Keyed.Key . pack <$> listOf1 arbitrary
instance Arbitrary (Nounce XChaCha20) where
arbitrary = genEncodable
instance Arbitrary (BlockCount XChaCha20) where
arbitrary = toEnum <$> arbitrary
---------------- Arbitrary instances for -------------
instance Arbitrary Poly1305.R where
arbitrary = genEncodable
instance Arbitrary Poly1305.S where
arbitrary = genEncodable
instance Arbitrary Poly1305.Poly1305 where
arbitrary = genEncodable
instance Arbitrary (Key Poly1305) where
arbitrary = Poly1305.Key <$> arbitrary <*> arbitrary
genEncodable :: (Encodable a, Storable a) => Gen a
genEncodable = go undefined
where go :: (Encodable a, Storable a) => a -> Gen a
go x = unsafeFromByteString . pack <$> vector (fromEnum $ sizeOf $ pure x)
| null | https://raw.githubusercontent.com/raaz-crypto/raaz/1d17ead6d33c5441a59dbc4ff33197e2bd6eb6ec/tests/core/Tests/Core/Instances.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DataKinds #
| Some common instances that are required by the test cases.
------------- Arbitrary instances for Hashes ----------------
------------ Parameter block for -------------
-------------- Arbitrary instaces of encoded data --------------
---------------- For ChaCha20 types -------------------------
---------------- For XChaCha20 types -------------------------
-------------- Arbitrary instances for ------------- | # LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - orphans #
module Tests.Core.Instances () where
import Tests.Core.Imports
import Raaz.Primitive.Poly1305.Internal as Poly1305
import Raaz.Core.Types.Internal
import Raaz.Primitive.Keyed.Internal as Keyed
instance Arbitrary w => Arbitrary (LE w) where
arbitrary = littleEndian <$> arbitrary
instance Arbitrary w => Arbitrary (BE w) where
arbitrary = bigEndian <$> arbitrary
instance Arbitrary w => Arbitrary (BYTES w) where
arbitrary = BYTES <$> arbitrary
instance Arbitrary ByteString where
arbitrary = pack <$> arbitrary
instance Arbitrary Sha256 where
arbitrary = genEncodable
instance Arbitrary Sha512 where
arbitrary = genEncodable
instance Arbitrary Blake2b where
arbitrary = genEncodable
instance Arbitrary Blake2s where
arbitrary = genEncodable
instance Arbitrary Base16 where
arbitrary = encodeByteString . pack <$> listOf arbitrary
instance Arbitrary Base64 where
arbitrary = encodeByteString . pack <$> listOf arbitrary
instance Arbitrary (Key ChaCha20) where
arbitrary = genEncodable
instance Arbitrary (Nounce ChaCha20) where
arbitrary = genEncodable
instance Arbitrary (BlockCount ChaCha20) where
arbitrary = toEnum <$> arbitrary
instance Arbitrary (Key XChaCha20) where
arbitrary = genEncodable
instance Arbitrary (Key (Keyed prim)) where
arbitrary = Keyed.Key . pack <$> listOf1 arbitrary
instance Arbitrary (Nounce XChaCha20) where
arbitrary = genEncodable
instance Arbitrary (BlockCount XChaCha20) where
arbitrary = toEnum <$> arbitrary
instance Arbitrary Poly1305.R where
arbitrary = genEncodable
instance Arbitrary Poly1305.S where
arbitrary = genEncodable
instance Arbitrary Poly1305.Poly1305 where
arbitrary = genEncodable
instance Arbitrary (Key Poly1305) where
arbitrary = Poly1305.Key <$> arbitrary <*> arbitrary
genEncodable :: (Encodable a, Storable a) => Gen a
genEncodable = go undefined
where go :: (Encodable a, Storable a) => a -> Gen a
go x = unsafeFromByteString . pack <$> vector (fromEnum $ sizeOf $ pure x)
|
b332885f1dc7d8f5778d1ee71678ea13b9345aeeb7dc938d71e5cd65f355de1a | Decentralized-Pictures/T4L3NT | test_endorsement.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
* Testing
-------
Component : Protocol ( endorsement )
Invocation : dune exec src / proto_alpha / lib_protocol / test / main.exe -- test " ^endorsement$ "
Subject : Endorsing a block adds an extra layer of confidence
to the Tezos ' PoS algorithm . The block endorsing
operation must be included in the following block .
-------
Component: Protocol (endorsement)
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test "^endorsement$"
Subject: Endorsing a block adds an extra layer of confidence
to the Tezos' PoS algorithm. The block endorsing
operation must be included in the following block.
*)
open Protocol
open Alpha_context
let init_genesis ?policy () =
Context.init ~consensus_threshold:0 5 >>=? fun (genesis, _) ->
Block.bake ?policy genesis >>=? fun b -> return (genesis, b)
(** inject an endorsement and return the block with the endorsement and its
parent. *)
let inject_the_first_endorsement () =
init_genesis () >>=? fun (genesis, b) ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>=? fun b' -> return (b', b)
(****************************************************************)
(* Tests *)
(****************************************************************)
(** Apply a single endorsement from the slot 0 endorser. *)
let test_simple_endorsement () =
inject_the_first_endorsement () >>=? fun _ -> return_unit
(****************************************************************)
(* The following test scenarios are supposed to raise errors. *)
(****************************************************************)
(** Apply an endorsement with a negative slot. *)
let test_negative_slot () =
Context.init 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b ->
Context.get_endorser (B b) >>=? fun (delegate, _slots) ->
Lwt.catch
(fun () ->
Op.endorsement
~delegate:(delegate, [Slot.of_int_do_not_use_except_for_parameters (-1)])
~endorsed_block:b
(B genesis)
()
>>=? fun _ ->
failwith "negative slot should not be accepted by the binary format")
(function
| Data_encoding.Binary.Write_error _ -> return_unit | e -> Lwt.fail e)
(** Apply an endorsement with a non-normalized slot (that is, not the smallest
possible). *)
let test_non_normalized_slot () =
Context.init 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b ->
Context.get_endorsers (B b) >>=? fun endorsers_list ->
find an endorsers with more than 1 slot
List.find_map
(function
| {Plugin.RPC.Validators.delegate; slots; _} ->
if Compare.List_length_with.(slots > 1) then Some (delegate, slots)
else None)
endorsers_list
|> function
| None -> assert false
| Some (delegate, slots) ->
let set_slots = Slot.Set.of_list slots in
(* no duplicated slots *)
Assert.equal_int
~loc:__LOC__
(Slot.Set.cardinal set_slots)
(List.length slots)
>>=? fun () ->
the first slot should be the smallest slot
Assert.equal
~loc:__LOC__
(fun x y -> Slot.compare x y = 0)
"the first slot is not the smallest"
Slot.pp
(WithExceptions.Option.get ~loc:__LOC__ @@ List.hd slots)
(WithExceptions.Option.get ~loc:__LOC__ @@ Slot.Set.min_elt set_slots)
>>=? fun () ->
Op.endorsement
~delegate:(delegate, List.rev slots)
~endorsed_block:b
(B genesis)
()
>>=? fun op ->
let policy = Block.Excluding [delegate] in
Block.bake ~policy ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function err ->
let error_info =
Error_monad.find_info_of_error (Environment.wrap_tzerror err)
in
error_info.title = "wrong slot")
(** Wrong endorsement predecessor : apply an endorsement with an
incorrect block predecessor. *)
let test_wrong_endorsement_predecessor () =
init_genesis () >>=? fun (genesis, b) ->
Op.endorsement ~endorsed_block:b (B genesis) ~signing_context:(B b) ()
>>=? fun operation ->
let operation = Operation.pack operation in
Block.bake ~operation b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_consensus_operation_branch _ -> true
| _ -> false)
(** Invalid_endorsement_level: apply an endorsement with an incorrect
level (i.e. the predecessor level). *)
let test_invalid_endorsement_level () =
init_genesis () >>=? fun (genesis, b) ->
Context.get_level (B genesis) >>?= fun genesis_level ->
Op.endorsement ~level:genesis_level ~endorsed_block:b (B genesis) ()
>>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
(** Duplicate endorsement : apply an endorsement that has already been applied. *)
let test_duplicate_endorsement () =
init_genesis () >>=? fun (genesis, b) ->
Incremental.begin_construction b >>=? fun inc ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun operation ->
let operation = Operation.pack operation in
Incremental.add_operation inc operation >>=? fun inc ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun operation ->
let operation = Operation.pack operation in
Incremental.add_operation inc operation >>= fun res ->
Assert.proto_error_with_info
~loc:__LOC__
res
"double inclusion of consensus operation"
(** Consensus operation for future level : apply an endorsement with a level in the future *)
let test_consensus_operation_endorsement_for_future_level () =
init_genesis () >>=? fun (genesis, pred) ->
let raw_level = Raw_level.of_int32 (Int32.of_int 10) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Consensus operation for future level"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
* Consensus operation for old level : apply an endorsement one level in the past
let test_consensus_operation_endorsement_for_predecessor_level () =
init_genesis () >>=? fun (genesis, pred) ->
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Endorsement for previous level"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
* Consensus operation for old level : apply an endorsement with more than one level in the past
let test_consensus_operation_endorsement_for_old_level () =
init_genesis () >>=? fun (genesis, pred) ->
Block.bake genesis >>=? fun next_block ->
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Consensus operation for old level"
~context:(Context.B next_block)
~construction_mode:(pred, None)
()
(** Consensus operation for future round : apply an endorsement with a round in the future *)
let test_consensus_operation_endorsement_for_future_round () =
init_genesis () >>=? fun (genesis, pred) ->
Environment.wrap_tzresult (Round.of_int 21) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~round
~error_title:"Consensus operation for future round"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
(** Consensus operation for old round : apply an endorsement with a round in the past *)
let test_consensus_operation_endorsement_for_old_round () =
init_genesis ~policy:(By_round 10) () >>=? fun (genesis, pred) ->
Environment.wrap_tzresult (Round.of_int 0) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~round
~error_title:"Consensus operation for old round"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
(** Consensus operation on competing proposal : apply an endorsement on a competing proposal *)
let test_consensus_operation_endorsement_on_competing_proposal () =
init_genesis () >>=? fun (genesis, pred) ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~block_payload_hash:Block_payload_hash.zero
~error_title:"Consensus operation on competing proposal"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
(** Wrong round : apply an endorsement with an incorrect round *)
let test_wrong_round () =
init_genesis () >>=? fun (genesis, b) ->
Environment.wrap_tzresult (Round.of_int 2) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~round
~error_title:"wrong round for consensus operation"
~context:(Context.B genesis)
()
(** Wrong level : apply an endorsement with an incorrect level *)
let test_wrong_level () =
init_genesis () >>=? fun (genesis, b) ->
let context = Context.B genesis in
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~level
~error_title:"wrong level for consensus operation"
~context
()
(** Wrong payload hash : apply an endorsement with an incorrect payload hash *)
let test_wrong_payload_hash () =
init_genesis () >>=? fun (genesis, b) ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~block_payload_hash:Block_payload_hash.zero
~error_title:"wrong payload hash for consensus operation"
~context:(Context.B genesis)
()
let test_wrong_slot_used () =
init_genesis () >>=? fun (genesis, b) ->
Context.get_endorser (B b) >>=? fun (_, slots) ->
(match slots with
| _x :: y :: _ -> return y
| _ -> failwith "Slots size should be at least of 2 ")
>>=? fun slot ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~slot
~error_title:"wrong slot"
~context:(Context.B genesis)
()
(** Check that:
- a block with not enough endorsement cannot be baked;
- a block with enough endorsement is baked. *)
let test_endorsement_threshold ~sufficient_threshold () =
We choose a relative large number of accounts so that the probability that
any delegate has [ consensus_threshold ] slots is low and most delegates have
about 1 slot so we can get closer to the limit of [ consensus_threshold ] : we
check that a block with endorsing power [ consensus_threshold - 1 ] wo n't be
baked .
any delegate has [consensus_threshold] slots is low and most delegates have
about 1 slot so we can get closer to the limit of [consensus_threshold]: we
check that a block with endorsing power [consensus_threshold - 1] won't be
baked. *)
Context.init 10 >>=? fun (genesis, _contracts) ->
Block.bake genesis >>=? fun b ->
Context.get_constants (B b)
>>=? fun {parametric = {consensus_threshold; _}; _} ->
Context.get_endorsers (B b) >>=? fun endorsers_list ->
Block.get_round b >>?= fun round ->
List.fold_left_es
(fun (counter, endos) {Plugin.RPC.Validators.delegate; slots; _} ->
let new_counter = counter + List.length slots in
if
(sufficient_threshold && counter < consensus_threshold)
|| ((not sufficient_threshold) && new_counter < consensus_threshold)
then
Op.endorsement
~round
~delegate:(delegate, slots)
~endorsed_block:b
(B genesis)
()
>>=? fun endo -> return (new_counter, Operation.pack endo :: endos)
else return (counter, endos))
(0, [])
endorsers_list
>>=? fun (_, endos) ->
Block.bake ~operations:endos b >>= fun b ->
if sufficient_threshold then return_unit
else
Assert.proto_error ~loc:__LOC__ b (function err ->
let error_info =
Error_monad.find_info_of_error (Environment.wrap_tzerror err)
in
error_info.title = "Not enough endorsements")
* Fitness gap : this is a straightforward update from to , that
is , check that the level is incremented in a child block .
is, check that the level is incremented in a child block. *)
let test_fitness_gap () =
inject_the_first_endorsement () >>=? fun (b, pred_b) ->
let fitness =
match Fitness.from_raw b.header.shell.fitness with
| Ok fitness -> fitness
| _ -> assert false
in
let pred_fitness =
match Fitness.from_raw pred_b.header.shell.fitness with
| Ok fitness -> fitness
| _ -> assert false
in
let level = Fitness.level fitness in
let pred_level = Fitness.level pred_fitness in
let level_diff =
Int32.sub (Raw_level.to_int32 level) (Raw_level.to_int32 pred_level)
in
Assert.equal_int32 ~loc:__LOC__ level_diff 1l
let test_preendorsement_endorsement_same_level () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b1 ->
Incremental.begin_construction ~mempool_mode:true ~policy:(By_round 2) b1
>>=? fun i ->
Op.endorsement ~endorsed_block:b1 (B genesis) () >>=? fun op_endo ->
let op_endo = Alpha_context.Operation.pack op_endo in
Incremental.add_operation i op_endo >>=? fun _i ->
Op.preendorsement ~endorsed_block:b1 (B genesis) () >>=? fun op_preendo ->
let op_preendo = Alpha_context.Operation.pack op_preendo in
Incremental.add_operation i op_preendo >>=? fun _i -> return ()
(** Test for endorsement injection with wrong slot in mempool mode. This
test is expected to fail *)
let test_wrong_endorsement_slot_in_mempool_mode () =
Context.init ~consensus_threshold:1 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b1 ->
let module V = Plugin.RPC.Validators in
(Context.get_endorsers (B b1) >>=? function
| {V.slots = _ :: non_canonical_slot :: _; _} :: _ ->
(* we didn't use min slot for the injection. It's bad !*)
return (Some non_canonical_slot)
| _ -> assert false)
>>=? fun slot ->
Op.endorsement ~endorsed_block:b1 (B genesis) ?slot () >>=? fun endo ->
let endo = Operation.pack endo in
Incremental.begin_construction ~mempool_mode:true b1 >>=? fun i ->
Incremental.add_operation i endo >>= fun res ->
Assert.proto_error_with_info ~loc:__LOC__ res "wrong slot"
(** Endorsement for next level *)
let test_endorsement_for_next_level () =
init_genesis () >>=? fun (genesis, _) ->
Consensus_helpers.test_consensus_op_for_next
~genesis
~kind:`Endorsement
~next:`Level
(** Endorsement for next round *)
let test_endorsement_for_next_round () =
init_genesis () >>=? fun (genesis, _) ->
Consensus_helpers.test_consensus_op_for_next
~genesis
~kind:`Endorsement
~next:`Round
(** Endorsement of grandparent *)
let test_endorsement_grandparent () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
(* Endorsement on grandparent *)
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
(* Endorsement on parent *)
Op.endorsement ~endorsed_block:b (B b_gp) () >>=? fun op2 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
Both should be accepted by the
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
(** Double inclusion of grandparent endorsement *)
let test_double_endorsement_grandparent () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
(* Endorsement on grandparent *)
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
(* Endorsement on parent *)
Op.endorsement ~endorsed_block:b (B b_gp) () >>=? fun op2 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
The first grand parent endorsement should be accepted by the
mempool but the second rejected .
mempool but the second rejected. *)
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op1 >>= fun res ->
Assert.proto_error_with_info
~loc:__LOC__
res
"double inclusion of consensus operation"
>>=? fun () ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
(** Endorsement of grandparent on same slot as parent *)
let test_endorsement_grandparent_same_slot () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
(* Endorsement on parent *)
Consensus_helpers.delegate_of_first_slot (B b) >>=? fun (delegate, slot) ->
Op.endorsement ~endorsed_block:b ~delegate (B b_gp) () >>=? fun op2 ->
(* Endorsement on grandparent *)
Consensus_helpers.delegate_of_slot slot (B b_gp) >>=? fun delegate ->
Op.endorsement ~endorsed_block:b_gp ~delegate (B genesis) () >>=? fun op1 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
Both should be accepted by the
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
(** Endorsement of grandparent in application mode should be rejected *)
let test_endorsement_grandparent_application () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
(** Endorsement of grandparent in full construction mode should be rejected *)
let test_endorsement_grandparent_full_construction () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction b >>=? fun i ->
(* Endorsement on grandparent *)
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
let op1 = Alpha_context.Operation.pack op1 in
Incremental.add_operation i op1 >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
let tests =
[
Tztest.tztest "Simple endorsement" `Quick test_simple_endorsement;
Tztest.tztest "Endorsement with slot -1" `Quick test_negative_slot;
Tztest.tztest
"Endorsement wrapped with non-normalized slot"
`Quick
test_non_normalized_slot;
Tztest.tztest "Fitness gap" `Quick test_fitness_gap;
(* Fail scenarios *)
Tztest.tztest
"Wrong endorsement predecessor"
`Quick
test_wrong_endorsement_predecessor;
Tztest.tztest
"Invalid endorsement level"
`Quick
test_invalid_endorsement_level;
Tztest.tztest "Duplicate endorsement" `Quick test_duplicate_endorsement;
Tztest.tztest
"Endorsement for future level"
`Quick
test_consensus_operation_endorsement_for_future_level;
Tztest.tztest
"Endorsement for predecessor level"
`Quick
test_consensus_operation_endorsement_for_old_level;
Tztest.tztest
"Endorsement for old level"
`Quick
test_consensus_operation_endorsement_for_old_level;
Tztest.tztest
"Endorsement for future round"
`Quick
test_consensus_operation_endorsement_for_future_round;
Tztest.tztest
"Endorsement for old round"
`Quick
test_consensus_operation_endorsement_for_old_round;
Tztest.tztest
"Endorsement on competing proposal"
`Quick
test_consensus_operation_endorsement_on_competing_proposal;
Tztest.tztest "Wrong level for consensus operation" `Quick test_wrong_level;
Tztest.tztest "Wrong round for consensus operation" `Quick test_wrong_round;
Tztest.tztest
"Wrong payload hash for consensus operation"
`Quick
test_wrong_payload_hash;
Tztest.tztest
"Wrong slot used for consensus operation"
`Quick
test_wrong_slot_used;
Tztest.tztest
"sufficient endorsement threshold"
`Quick
(test_endorsement_threshold ~sufficient_threshold:true);
Tztest.tztest
"insufficient endorsement threshold"
`Quick
(test_endorsement_threshold ~sufficient_threshold:false);
Tztest.tztest
"Endorsement/Preendorsement at same level"
`Quick
test_preendorsement_endorsement_same_level;
Tztest.tztest
"Wrong endorsement slot in mempool mode"
`Quick
test_wrong_endorsement_slot_in_mempool_mode;
Tztest.tztest
"Endorsement for next level"
`Quick
test_endorsement_for_next_level;
Tztest.tztest
"Endorsement for next round"
`Quick
test_endorsement_for_next_round;
Tztest.tztest
"Endorsement for grandparent"
`Quick
test_endorsement_grandparent;
Tztest.tztest
"Double endorsement of grandparent"
`Quick
test_double_endorsement_grandparent;
Tztest.tztest
"Endorsement for grandparent on same slot as parent"
`Quick
test_endorsement_grandparent_same_slot;
Tztest.tztest
"Endorsement for grandparent in application mode"
`Quick
test_endorsement_grandparent_application;
Tztest.tztest
"Endorsement for grandparent in full construction mode"
`Quick
test_endorsement_grandparent_full_construction;
]
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_alpha/lib_protocol/test/test_endorsement.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* inject an endorsement and return the block with the endorsement and its
parent.
**************************************************************
Tests
**************************************************************
* Apply a single endorsement from the slot 0 endorser.
**************************************************************
The following test scenarios are supposed to raise errors.
**************************************************************
* Apply an endorsement with a negative slot.
* Apply an endorsement with a non-normalized slot (that is, not the smallest
possible).
no duplicated slots
* Wrong endorsement predecessor : apply an endorsement with an
incorrect block predecessor.
* Invalid_endorsement_level: apply an endorsement with an incorrect
level (i.e. the predecessor level).
* Duplicate endorsement : apply an endorsement that has already been applied.
* Consensus operation for future level : apply an endorsement with a level in the future
* Consensus operation for future round : apply an endorsement with a round in the future
* Consensus operation for old round : apply an endorsement with a round in the past
* Consensus operation on competing proposal : apply an endorsement on a competing proposal
* Wrong round : apply an endorsement with an incorrect round
* Wrong level : apply an endorsement with an incorrect level
* Wrong payload hash : apply an endorsement with an incorrect payload hash
* Check that:
- a block with not enough endorsement cannot be baked;
- a block with enough endorsement is baked.
* Test for endorsement injection with wrong slot in mempool mode. This
test is expected to fail
we didn't use min slot for the injection. It's bad !
* Endorsement for next level
* Endorsement for next round
* Endorsement of grandparent
Endorsement on grandparent
Endorsement on parent
* Double inclusion of grandparent endorsement
Endorsement on grandparent
Endorsement on parent
* Endorsement of grandparent on same slot as parent
Endorsement on parent
Endorsement on grandparent
* Endorsement of grandparent in application mode should be rejected
* Endorsement of grandparent in full construction mode should be rejected
Endorsement on grandparent
Fail scenarios | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* Testing
-------
Component : Protocol ( endorsement )
Invocation : dune exec src / proto_alpha / lib_protocol / test / main.exe -- test " ^endorsement$ "
Subject : Endorsing a block adds an extra layer of confidence
to the Tezos ' PoS algorithm . The block endorsing
operation must be included in the following block .
-------
Component: Protocol (endorsement)
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test "^endorsement$"
Subject: Endorsing a block adds an extra layer of confidence
to the Tezos' PoS algorithm. The block endorsing
operation must be included in the following block.
*)
open Protocol
open Alpha_context
let init_genesis ?policy () =
Context.init ~consensus_threshold:0 5 >>=? fun (genesis, _) ->
Block.bake ?policy genesis >>=? fun b -> return (genesis, b)
let inject_the_first_endorsement () =
init_genesis () >>=? fun (genesis, b) ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>=? fun b' -> return (b', b)
let test_simple_endorsement () =
inject_the_first_endorsement () >>=? fun _ -> return_unit
let test_negative_slot () =
Context.init 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b ->
Context.get_endorser (B b) >>=? fun (delegate, _slots) ->
Lwt.catch
(fun () ->
Op.endorsement
~delegate:(delegate, [Slot.of_int_do_not_use_except_for_parameters (-1)])
~endorsed_block:b
(B genesis)
()
>>=? fun _ ->
failwith "negative slot should not be accepted by the binary format")
(function
| Data_encoding.Binary.Write_error _ -> return_unit | e -> Lwt.fail e)
let test_non_normalized_slot () =
Context.init 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b ->
Context.get_endorsers (B b) >>=? fun endorsers_list ->
find an endorsers with more than 1 slot
List.find_map
(function
| {Plugin.RPC.Validators.delegate; slots; _} ->
if Compare.List_length_with.(slots > 1) then Some (delegate, slots)
else None)
endorsers_list
|> function
| None -> assert false
| Some (delegate, slots) ->
let set_slots = Slot.Set.of_list slots in
Assert.equal_int
~loc:__LOC__
(Slot.Set.cardinal set_slots)
(List.length slots)
>>=? fun () ->
the first slot should be the smallest slot
Assert.equal
~loc:__LOC__
(fun x y -> Slot.compare x y = 0)
"the first slot is not the smallest"
Slot.pp
(WithExceptions.Option.get ~loc:__LOC__ @@ List.hd slots)
(WithExceptions.Option.get ~loc:__LOC__ @@ Slot.Set.min_elt set_slots)
>>=? fun () ->
Op.endorsement
~delegate:(delegate, List.rev slots)
~endorsed_block:b
(B genesis)
()
>>=? fun op ->
let policy = Block.Excluding [delegate] in
Block.bake ~policy ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function err ->
let error_info =
Error_monad.find_info_of_error (Environment.wrap_tzerror err)
in
error_info.title = "wrong slot")
let test_wrong_endorsement_predecessor () =
init_genesis () >>=? fun (genesis, b) ->
Op.endorsement ~endorsed_block:b (B genesis) ~signing_context:(B b) ()
>>=? fun operation ->
let operation = Operation.pack operation in
Block.bake ~operation b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_consensus_operation_branch _ -> true
| _ -> false)
let test_invalid_endorsement_level () =
init_genesis () >>=? fun (genesis, b) ->
Context.get_level (B genesis) >>?= fun genesis_level ->
Op.endorsement ~level:genesis_level ~endorsed_block:b (B genesis) ()
>>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
let test_duplicate_endorsement () =
init_genesis () >>=? fun (genesis, b) ->
Incremental.begin_construction b >>=? fun inc ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun operation ->
let operation = Operation.pack operation in
Incremental.add_operation inc operation >>=? fun inc ->
Op.endorsement ~endorsed_block:b (B genesis) () >>=? fun operation ->
let operation = Operation.pack operation in
Incremental.add_operation inc operation >>= fun res ->
Assert.proto_error_with_info
~loc:__LOC__
res
"double inclusion of consensus operation"
let test_consensus_operation_endorsement_for_future_level () =
init_genesis () >>=? fun (genesis, pred) ->
let raw_level = Raw_level.of_int32 (Int32.of_int 10) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Consensus operation for future level"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
* Consensus operation for old level : apply an endorsement one level in the past
let test_consensus_operation_endorsement_for_predecessor_level () =
init_genesis () >>=? fun (genesis, pred) ->
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Endorsement for previous level"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
* Consensus operation for old level : apply an endorsement with more than one level in the past
let test_consensus_operation_endorsement_for_old_level () =
init_genesis () >>=? fun (genesis, pred) ->
Block.bake genesis >>=? fun next_block ->
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~level
~error_title:"Consensus operation for old level"
~context:(Context.B next_block)
~construction_mode:(pred, None)
()
let test_consensus_operation_endorsement_for_future_round () =
init_genesis () >>=? fun (genesis, pred) ->
Environment.wrap_tzresult (Round.of_int 21) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~round
~error_title:"Consensus operation for future round"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
let test_consensus_operation_endorsement_for_old_round () =
init_genesis ~policy:(By_round 10) () >>=? fun (genesis, pred) ->
Environment.wrap_tzresult (Round.of_int 0) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~round
~error_title:"Consensus operation for old round"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
let test_consensus_operation_endorsement_on_competing_proposal () =
init_genesis () >>=? fun (genesis, pred) ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:pred
~block_payload_hash:Block_payload_hash.zero
~error_title:"Consensus operation on competing proposal"
~context:(Context.B genesis)
~construction_mode:(pred, None)
()
let test_wrong_round () =
init_genesis () >>=? fun (genesis, b) ->
Environment.wrap_tzresult (Round.of_int 2) >>?= fun round ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~round
~error_title:"wrong round for consensus operation"
~context:(Context.B genesis)
()
let test_wrong_level () =
init_genesis () >>=? fun (genesis, b) ->
let context = Context.B genesis in
let raw_level = Raw_level.of_int32 (Int32.of_int 0) in
let level = match raw_level with Ok l -> l | Error _ -> assert false in
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~level
~error_title:"wrong level for consensus operation"
~context
()
let test_wrong_payload_hash () =
init_genesis () >>=? fun (genesis, b) ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~block_payload_hash:Block_payload_hash.zero
~error_title:"wrong payload hash for consensus operation"
~context:(Context.B genesis)
()
let test_wrong_slot_used () =
init_genesis () >>=? fun (genesis, b) ->
Context.get_endorser (B b) >>=? fun (_, slots) ->
(match slots with
| _x :: y :: _ -> return y
| _ -> failwith "Slots size should be at least of 2 ")
>>=? fun slot ->
Consensus_helpers.test_consensus_operation
~loc:__LOC__
~is_preendorsement:false
~endorsed_block:b
~slot
~error_title:"wrong slot"
~context:(Context.B genesis)
()
let test_endorsement_threshold ~sufficient_threshold () =
We choose a relative large number of accounts so that the probability that
any delegate has [ consensus_threshold ] slots is low and most delegates have
about 1 slot so we can get closer to the limit of [ consensus_threshold ] : we
check that a block with endorsing power [ consensus_threshold - 1 ] wo n't be
baked .
any delegate has [consensus_threshold] slots is low and most delegates have
about 1 slot so we can get closer to the limit of [consensus_threshold]: we
check that a block with endorsing power [consensus_threshold - 1] won't be
baked. *)
Context.init 10 >>=? fun (genesis, _contracts) ->
Block.bake genesis >>=? fun b ->
Context.get_constants (B b)
>>=? fun {parametric = {consensus_threshold; _}; _} ->
Context.get_endorsers (B b) >>=? fun endorsers_list ->
Block.get_round b >>?= fun round ->
List.fold_left_es
(fun (counter, endos) {Plugin.RPC.Validators.delegate; slots; _} ->
let new_counter = counter + List.length slots in
if
(sufficient_threshold && counter < consensus_threshold)
|| ((not sufficient_threshold) && new_counter < consensus_threshold)
then
Op.endorsement
~round
~delegate:(delegate, slots)
~endorsed_block:b
(B genesis)
()
>>=? fun endo -> return (new_counter, Operation.pack endo :: endos)
else return (counter, endos))
(0, [])
endorsers_list
>>=? fun (_, endos) ->
Block.bake ~operations:endos b >>= fun b ->
if sufficient_threshold then return_unit
else
Assert.proto_error ~loc:__LOC__ b (function err ->
let error_info =
Error_monad.find_info_of_error (Environment.wrap_tzerror err)
in
error_info.title = "Not enough endorsements")
* Fitness gap : this is a straightforward update from to , that
is , check that the level is incremented in a child block .
is, check that the level is incremented in a child block. *)
let test_fitness_gap () =
inject_the_first_endorsement () >>=? fun (b, pred_b) ->
let fitness =
match Fitness.from_raw b.header.shell.fitness with
| Ok fitness -> fitness
| _ -> assert false
in
let pred_fitness =
match Fitness.from_raw pred_b.header.shell.fitness with
| Ok fitness -> fitness
| _ -> assert false
in
let level = Fitness.level fitness in
let pred_level = Fitness.level pred_fitness in
let level_diff =
Int32.sub (Raw_level.to_int32 level) (Raw_level.to_int32 pred_level)
in
Assert.equal_int32 ~loc:__LOC__ level_diff 1l
let test_preendorsement_endorsement_same_level () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b1 ->
Incremental.begin_construction ~mempool_mode:true ~policy:(By_round 2) b1
>>=? fun i ->
Op.endorsement ~endorsed_block:b1 (B genesis) () >>=? fun op_endo ->
let op_endo = Alpha_context.Operation.pack op_endo in
Incremental.add_operation i op_endo >>=? fun _i ->
Op.preendorsement ~endorsed_block:b1 (B genesis) () >>=? fun op_preendo ->
let op_preendo = Alpha_context.Operation.pack op_preendo in
Incremental.add_operation i op_preendo >>=? fun _i -> return ()
let test_wrong_endorsement_slot_in_mempool_mode () =
Context.init ~consensus_threshold:1 5 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b1 ->
let module V = Plugin.RPC.Validators in
(Context.get_endorsers (B b1) >>=? function
| {V.slots = _ :: non_canonical_slot :: _; _} :: _ ->
return (Some non_canonical_slot)
| _ -> assert false)
>>=? fun slot ->
Op.endorsement ~endorsed_block:b1 (B genesis) ?slot () >>=? fun endo ->
let endo = Operation.pack endo in
Incremental.begin_construction ~mempool_mode:true b1 >>=? fun i ->
Incremental.add_operation i endo >>= fun res ->
Assert.proto_error_with_info ~loc:__LOC__ res "wrong slot"
let test_endorsement_for_next_level () =
init_genesis () >>=? fun (genesis, _) ->
Consensus_helpers.test_consensus_op_for_next
~genesis
~kind:`Endorsement
~next:`Level
let test_endorsement_for_next_round () =
init_genesis () >>=? fun (genesis, _) ->
Consensus_helpers.test_consensus_op_for_next
~genesis
~kind:`Endorsement
~next:`Round
let test_endorsement_grandparent () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
Op.endorsement ~endorsed_block:b (B b_gp) () >>=? fun op2 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
Both should be accepted by the
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
let test_double_endorsement_grandparent () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
Op.endorsement ~endorsed_block:b (B b_gp) () >>=? fun op2 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
The first grand parent endorsement should be accepted by the
mempool but the second rejected .
mempool but the second rejected. *)
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op1 >>= fun res ->
Assert.proto_error_with_info
~loc:__LOC__
res
"double inclusion of consensus operation"
>>=? fun () ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
let test_endorsement_grandparent_same_slot () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction ~mempool_mode:true b >>=? fun i ->
Consensus_helpers.delegate_of_first_slot (B b) >>=? fun (delegate, slot) ->
Op.endorsement ~endorsed_block:b ~delegate (B b_gp) () >>=? fun op2 ->
Consensus_helpers.delegate_of_slot slot (B b_gp) >>=? fun delegate ->
Op.endorsement ~endorsed_block:b_gp ~delegate (B genesis) () >>=? fun op1 ->
let op1 = Alpha_context.Operation.pack op1 in
let op2 = Alpha_context.Operation.pack op2 in
Both should be accepted by the
Incremental.add_operation i op1 >>=? fun i ->
Incremental.add_operation i op2 >>=? fun _i -> return ()
let test_endorsement_grandparent_application () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op ->
Block.bake ~operations:[Operation.pack op] b >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
let test_endorsement_grandparent_full_construction () =
Context.init ~consensus_threshold:0 1 >>=? fun (genesis, _) ->
Block.bake genesis >>=? fun b_gp ->
Block.bake b_gp >>=? fun b ->
Incremental.begin_construction b >>=? fun i ->
Op.endorsement ~endorsed_block:b_gp (B genesis) () >>=? fun op1 ->
let op1 = Alpha_context.Operation.pack op1 in
Incremental.add_operation i op1 >>= fun res ->
Assert.proto_error ~loc:__LOC__ res (function
| Apply.Wrong_level_for_consensus_operation _ -> true
| _ -> false)
let tests =
[
Tztest.tztest "Simple endorsement" `Quick test_simple_endorsement;
Tztest.tztest "Endorsement with slot -1" `Quick test_negative_slot;
Tztest.tztest
"Endorsement wrapped with non-normalized slot"
`Quick
test_non_normalized_slot;
Tztest.tztest "Fitness gap" `Quick test_fitness_gap;
Tztest.tztest
"Wrong endorsement predecessor"
`Quick
test_wrong_endorsement_predecessor;
Tztest.tztest
"Invalid endorsement level"
`Quick
test_invalid_endorsement_level;
Tztest.tztest "Duplicate endorsement" `Quick test_duplicate_endorsement;
Tztest.tztest
"Endorsement for future level"
`Quick
test_consensus_operation_endorsement_for_future_level;
Tztest.tztest
"Endorsement for predecessor level"
`Quick
test_consensus_operation_endorsement_for_old_level;
Tztest.tztest
"Endorsement for old level"
`Quick
test_consensus_operation_endorsement_for_old_level;
Tztest.tztest
"Endorsement for future round"
`Quick
test_consensus_operation_endorsement_for_future_round;
Tztest.tztest
"Endorsement for old round"
`Quick
test_consensus_operation_endorsement_for_old_round;
Tztest.tztest
"Endorsement on competing proposal"
`Quick
test_consensus_operation_endorsement_on_competing_proposal;
Tztest.tztest "Wrong level for consensus operation" `Quick test_wrong_level;
Tztest.tztest "Wrong round for consensus operation" `Quick test_wrong_round;
Tztest.tztest
"Wrong payload hash for consensus operation"
`Quick
test_wrong_payload_hash;
Tztest.tztest
"Wrong slot used for consensus operation"
`Quick
test_wrong_slot_used;
Tztest.tztest
"sufficient endorsement threshold"
`Quick
(test_endorsement_threshold ~sufficient_threshold:true);
Tztest.tztest
"insufficient endorsement threshold"
`Quick
(test_endorsement_threshold ~sufficient_threshold:false);
Tztest.tztest
"Endorsement/Preendorsement at same level"
`Quick
test_preendorsement_endorsement_same_level;
Tztest.tztest
"Wrong endorsement slot in mempool mode"
`Quick
test_wrong_endorsement_slot_in_mempool_mode;
Tztest.tztest
"Endorsement for next level"
`Quick
test_endorsement_for_next_level;
Tztest.tztest
"Endorsement for next round"
`Quick
test_endorsement_for_next_round;
Tztest.tztest
"Endorsement for grandparent"
`Quick
test_endorsement_grandparent;
Tztest.tztest
"Double endorsement of grandparent"
`Quick
test_double_endorsement_grandparent;
Tztest.tztest
"Endorsement for grandparent on same slot as parent"
`Quick
test_endorsement_grandparent_same_slot;
Tztest.tztest
"Endorsement for grandparent in application mode"
`Quick
test_endorsement_grandparent_application;
Tztest.tztest
"Endorsement for grandparent in full construction mode"
`Quick
test_endorsement_grandparent_full_construction;
]
|
4a26aa5b3fed51770d1eb7fafb047331eef62c22f3450aaff3b5c017b07f79e0 | ANSSI-FR/xsvgen | meta_val_re.ml | (***********************************************************************)
(* *)
(* XML Schema Validator Generator *)
(* *)
( SafeRiver )
(* *)
Copyright 2012 , ANSSI and SafeRiver .
(* *)
(***********************************************************************)
$ I d : meta_val_re.ml 1365 2012 - 04 - 03 15:23:30Z maarek $
let (_ : unit) =
Meta_lib.print_header "val_re"
let (_ : unit) =
Meta_lib.print_test_charset
"st_string"
(Meta_lib.xsd_re_of_range Xsd_datatypes.range_st_string)
let (_ : unit) =
Meta_lib.print_test_charset
"st_boolean"
Xsd_datatypes.st_boolean
let (_ : unit) =
Meta_lib.print_test_charset
"st_decimal"
Xsd_datatypes.st_decimal
let (_ : unit) =
Meta_lib.print_test_charset
"st_float"
Xsd_datatypes.st_float
let (_ : unit) =
Meta_lib.print_test_charset
"st_double"
Xsd_datatypes.st_double
let (_ : unit) =
Meta_lib.print_test_charset
"st_duration"
Xsd_datatypes.st_duration
let (_ : unit) =
Meta_lib.print_test_charset
"st_dateTime"
Xsd_datatypes.st_dateTime
let (_ : unit) =
Meta_lib.print_test_charset
"st_time"
Xsd_datatypes.st_time
let (_ : unit) =
Meta_lib.print_test_charset
"st_date"
Xsd_datatypes.st_date
let (_ : unit) =
Meta_lib.print_test_charset
"st_gYearMonth"
Xsd_datatypes.st_gYearMonth
let (_ : unit) =
Meta_lib.print_test_charset
"st_gYear"
Xsd_datatypes.st_gYear
let (_ : unit) =
Meta_lib.print_test_charset
"st_gMonthDay"
Xsd_datatypes.st_gMonthDay
let (_ : unit) =
Meta_lib.print_test_charset
"st_gDay"
Xsd_datatypes.st_gDay
let (_ : unit) =
Meta_lib.print_test_charset
"st_gMonth"
Xsd_datatypes.st_gMonth
let (_ : unit) =
Meta_lib.print_test_charset
"st_hexBinary"
Xsd_datatypes.st_hexBinary
let (_ : unit) =
Meta_lib.print_test_charset
"st_base64Binary"
Xsd_datatypes.st_base64Binary
let (_ : unit) =
Meta_lib.print_test_charset
"st_anyURI"
(Meta_lib.xsd_re_of_range Xsd_datatypes.st_anyURI)
let (_ : unit) =
Meta_lib.print_test_charset
"st_NMTOKEN"
Xsd_datatypes.st_NMTOKEN
let (_ : unit) =
Meta_lib.print_test_charset
"st_Name"
Xsd_datatypes.st_Name
let (_ : unit) =
Meta_lib.print_test_charset
"st_NCName"
Xsd_datatypes.st_NCName
let (_ : unit) =
Meta_lib.print_test_charset
"st_integer"
Xsd_datatypes.st_integer
let (_ : unit) =
Meta_lib.print_test_charset
"st_yearMonthDuration"
Xsd_datatypes.st_yearMonthDuration
let (_ : unit) =
Meta_lib.print_test_charset
"st_dayTimeDuration"
Xsd_datatypes.st_dayTimeDuration
let (_ : unit) =
Meta_lib.print_test_charset
"st_explicit_timestamp"
Xsd_datatypes.st_explicit_timestamp
| null | https://raw.githubusercontent.com/ANSSI-FR/xsvgen/3c2b5e43e7adcb856a3a2aa01bfc039bf3c6459b/meta/meta_val_re.ml | ocaml | *********************************************************************
XML Schema Validator Generator
********************************************************************* | ( SafeRiver )
Copyright 2012 , ANSSI and SafeRiver .
$ I d : meta_val_re.ml 1365 2012 - 04 - 03 15:23:30Z maarek $
let (_ : unit) =
Meta_lib.print_header "val_re"
let (_ : unit) =
Meta_lib.print_test_charset
"st_string"
(Meta_lib.xsd_re_of_range Xsd_datatypes.range_st_string)
let (_ : unit) =
Meta_lib.print_test_charset
"st_boolean"
Xsd_datatypes.st_boolean
let (_ : unit) =
Meta_lib.print_test_charset
"st_decimal"
Xsd_datatypes.st_decimal
let (_ : unit) =
Meta_lib.print_test_charset
"st_float"
Xsd_datatypes.st_float
let (_ : unit) =
Meta_lib.print_test_charset
"st_double"
Xsd_datatypes.st_double
let (_ : unit) =
Meta_lib.print_test_charset
"st_duration"
Xsd_datatypes.st_duration
let (_ : unit) =
Meta_lib.print_test_charset
"st_dateTime"
Xsd_datatypes.st_dateTime
let (_ : unit) =
Meta_lib.print_test_charset
"st_time"
Xsd_datatypes.st_time
let (_ : unit) =
Meta_lib.print_test_charset
"st_date"
Xsd_datatypes.st_date
let (_ : unit) =
Meta_lib.print_test_charset
"st_gYearMonth"
Xsd_datatypes.st_gYearMonth
let (_ : unit) =
Meta_lib.print_test_charset
"st_gYear"
Xsd_datatypes.st_gYear
let (_ : unit) =
Meta_lib.print_test_charset
"st_gMonthDay"
Xsd_datatypes.st_gMonthDay
let (_ : unit) =
Meta_lib.print_test_charset
"st_gDay"
Xsd_datatypes.st_gDay
let (_ : unit) =
Meta_lib.print_test_charset
"st_gMonth"
Xsd_datatypes.st_gMonth
let (_ : unit) =
Meta_lib.print_test_charset
"st_hexBinary"
Xsd_datatypes.st_hexBinary
let (_ : unit) =
Meta_lib.print_test_charset
"st_base64Binary"
Xsd_datatypes.st_base64Binary
let (_ : unit) =
Meta_lib.print_test_charset
"st_anyURI"
(Meta_lib.xsd_re_of_range Xsd_datatypes.st_anyURI)
let (_ : unit) =
Meta_lib.print_test_charset
"st_NMTOKEN"
Xsd_datatypes.st_NMTOKEN
let (_ : unit) =
Meta_lib.print_test_charset
"st_Name"
Xsd_datatypes.st_Name
let (_ : unit) =
Meta_lib.print_test_charset
"st_NCName"
Xsd_datatypes.st_NCName
let (_ : unit) =
Meta_lib.print_test_charset
"st_integer"
Xsd_datatypes.st_integer
let (_ : unit) =
Meta_lib.print_test_charset
"st_yearMonthDuration"
Xsd_datatypes.st_yearMonthDuration
let (_ : unit) =
Meta_lib.print_test_charset
"st_dayTimeDuration"
Xsd_datatypes.st_dayTimeDuration
let (_ : unit) =
Meta_lib.print_test_charset
"st_explicit_timestamp"
Xsd_datatypes.st_explicit_timestamp
|
59483cd67daa5e560b144780ae360069f9221ce9a8b5b5aa577a6c389be1cdf6 | vouch-opensource/vouch-load-tests | reporter.clj | (ns com.example.io.reporter
(:require
[clojure.core.async :refer [<! chan go]]
[clojure.data.csv :as csv]
[clojure.java.io :as io]
[clojure.tools.logging :as log]))
(defn create-csv-reporter
([log-file-name] (create-csv-reporter log-file-name nil false))
([log-file-name buf-or-n errors-to-console?]
(let [reporting (chan buf-or-n)]
(go
(with-open [writer (io/writer log-file-name)]
(csv/write-csv writer
[["method" "duration" "executor" "status"]])
(loop []
(when-let [{:keys [duration executor error]
{:keys [task]} :task} (<! reporting)]
(csv/write-csv writer
[[task duration executor (ex-message error)]])
(when (and errors-to-console? error)
(log/error (:id executor) "Failed to handle task" task error))
(recur)))))
reporting)))
(defn create-log-reporter
([] (create-log-reporter nil))
([buffer]
(let [reporting (chan buffer)]
(go
(loop []
(when-let [{:keys [duration executor error]
{:keys [task]} :task} (<! reporting)]
(if error
(log/error executor "Failed to handle task" task error)
(log/info (format "%s took %dms %s" task duration executor)))
(recur))))
reporting)))
| null | https://raw.githubusercontent.com/vouch-opensource/vouch-load-tests/502a6f8eb880cb76a4fbaea279e826a5362d6351/dev/com/example/io/reporter.clj | clojure | (ns com.example.io.reporter
(:require
[clojure.core.async :refer [<! chan go]]
[clojure.data.csv :as csv]
[clojure.java.io :as io]
[clojure.tools.logging :as log]))
(defn create-csv-reporter
([log-file-name] (create-csv-reporter log-file-name nil false))
([log-file-name buf-or-n errors-to-console?]
(let [reporting (chan buf-or-n)]
(go
(with-open [writer (io/writer log-file-name)]
(csv/write-csv writer
[["method" "duration" "executor" "status"]])
(loop []
(when-let [{:keys [duration executor error]
{:keys [task]} :task} (<! reporting)]
(csv/write-csv writer
[[task duration executor (ex-message error)]])
(when (and errors-to-console? error)
(log/error (:id executor) "Failed to handle task" task error))
(recur)))))
reporting)))
(defn create-log-reporter
([] (create-log-reporter nil))
([buffer]
(let [reporting (chan buffer)]
(go
(loop []
(when-let [{:keys [duration executor error]
{:keys [task]} :task} (<! reporting)]
(if error
(log/error executor "Failed to handle task" task error)
(log/info (format "%s took %dms %s" task duration executor)))
(recur))))
reporting)))
| |
fdb1d4c0bc12f1f1f046d236bb14d7f3e19202fd264bd809bd2a4e210c18a9c1 | deadpendency/deadpendency | Dependency.hs | # OPTIONS_GHC -fno - warn - missing - export - lists #
module CommonTest.Gen.Model.Dependency where
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.DependencyIdentifier
import Common.Model.Dependency.DependencyName
import Common.Model.Dependency.DependencyType
import Common.Model.Dependency.Enriched.EnrichedDependency
import Common.Model.Dependency.Enriched.EnrichedRepoDependencies
import Common.Model.Dependency.Errored.ErroredDependency
import Common.Model.Dependency.Errored.ErroredReason
import Common.Model.Dependency.Errored.ErroredRepoDependencies
import Common.Model.Dependency.File.DependenciesFileLoad
import Common.Model.Dependency.File.DependenciesFileLoadDetails
import Common.Model.Dependency.File.DependenciesFileType
import Common.Model.Dependency.Ignored.IgnoredDependency
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Common.Model.Dependency.Registry.DependencyRegistryInfo
import Common.Model.Dependency.Registry.RegistryAlivenessStatus
import Common.Model.Dependency.Repo.DependencyRepoCommit
import Common.Model.Dependency.Repo.DependencyRepoStats
import CommonTest.Gen.General
import CommonTest.Gen.Model.Ecosystem
import CommonTest.Gen.Model.Git
import Hedgehog
import Hedgehog.Gen qualified as Gen
import Hedgehog.Range qualified as Range
genBasicRepoDependencies :: Gen BasicRepoDependencies
genBasicRepoDependencies = BasicRepoDependencies <$> genNonEmptyVector (Range.constant 1 10) genBasicDependency
genBasicDependency :: Gen BasicDependency
genBasicDependency = do
programmingLanguage <- genProgrammingLanguage
depIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
pure $
BasicDependency programmingLanguage depIdentifier depType
genDependencyIdentifier :: Gen DependencyIdentifier
genDependencyIdentifier =
Gen.choice
[ genDependencyIdentifierNamed,
genDependencyIdentifierRepo
]
genDependencyIdentifierNamed :: Gen DependencyIdentifier
genDependencyIdentifierNamed = DependencyIdentifierNamed <$> genDependencyName
genDependencyIdentifierRepo :: Gen DependencyIdentifier
genDependencyIdentifierRepo = DependencyIdentifierRepo <$> genQualifiedRepo <*> Gen.maybe genDependencyName
genIgnoredRepoDependencies :: Gen IgnoredRepoDependencies
genIgnoredRepoDependencies = IgnoredRepoDependencies <$> genVector (Range.constant 1 10) genIgnoredDependency
-- genDependencyIdentifierOnlyNamed :: Gen DependencyIdentifier
-- genDependencyIdentifierOnlyNamed =
-- Gen.choice
-- [ genDependencyIdentifierNamed,
-- DependencyIdentifierRepo <$> genQualifiedRepo <*> (Just <$> genDependencyName)
-- ]
genIgnoredDependency :: Gen IgnoredDependency
genIgnoredDependency = do
programmingLanguage <- genProgrammingLanguage
dependencyIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
pure
IgnoredDependency
{ _programmingLanguage = programmingLanguage,
_dependencyIdentifier = dependencyIdentifier,
_dependencyType = depType
}
genErroredRepoDependencies :: Gen ErroredRepoDependencies
genErroredRepoDependencies = ErroredRepoDependencies <$> genVector (Range.constant 1 10) genErroredDependency
genErroredDependency :: Gen ErroredDependency
genErroredDependency = do
dependencyIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
programmingLanguage <- genProgrammingLanguage
registryInfo <- Gen.maybe genDependencyRegistryInfo
failureReason <- genErroredReason
pure
ErroredDependency
{ _dependencyIdentifier = dependencyIdentifier,
_dependencyType = depType,
_programmingLanguage = programmingLanguage,
_registryInfo = registryInfo,
_erroredReason = failureReason
}
genErroredReason :: Gen ErroredReason
genErroredReason =
Gen.choice
[ UnexpectedFailureToParseRegistryEntry <$> genAlphaText,
UnexpectedFailureRegistryDataInconsistent <$> genAlphaText,
Gen.constant NoRegistryOrRepoData
]
genDependencyType :: Gen DependencyType
genDependencyType = Gen.element [CoreDependency, DevDependency]
genDependencyName :: Gen DependencyName
genDependencyName =
DependencyName
<$> Gen.text
(Range.constant 1 10)
( Gen.choice
[ Gen.alphaNum,
Gen.constant '-',
Gen.constant '_',
Gen.constant '@',
Gen.constant '.',
Gen.constant '/'
]
)
genEnrichedRepoDependencies :: Gen EnrichedRepoDependencies
genEnrichedRepoDependencies = EnrichedRepoDependencies <$> genNonEmptyVector (Range.constant 1 10) genEnrichedDependency
genEnrichedDependency :: Gen EnrichedDependency
genEnrichedDependency = do
programmingLanguage <- genProgrammingLanguage
dependencyIdentifier <- genDependencyIdentifier
dependencyType <- Gen.maybe genDependencyType
theseData <- genThese genDependencyRegistryInfo genDependencyRepoStats
pure
EnrichedDependency
{ _programmingLanguage = programmingLanguage,
_dependencyIdentifier = dependencyIdentifier,
_dependencyType = dependencyType,
_data = theseData
}
genDependencyRegistryInfo :: Gen DependencyRegistryInfo
genDependencyRegistryInfo =
DependencyRegistryInfo
<$> genRegistry
<*> Gen.maybe genRepo
<*> genRegistryAlivenessStatus
<*> Gen.maybe genUTCTime
genRegistryAlivenessStatus :: Gen RegistryAlivenessStatus
genRegistryAlivenessStatus =
Gen.choice
[ Gen.constant RASAlive,
RASDeprecated <$> genRegistryAlivenessStatusType <*> Gen.maybe genAlphaText <*> genVector (Range.constant 0 10) genDependencyName
]
genRegistryAlivenessStatusType :: Gen RegistryAlivenessStatusType
genRegistryAlivenessStatusType = Gen.enumBounded
genDependencyRepoStats :: Gen DependencyRepoStats
genDependencyRepoStats = do
commits <- genVector (Range.constant 0 10) genDependencyRepoCommit
isArchived <- Gen.bool
isFork <- Gen.bool
pure
DependencyRepoStats
{ _twoYearlyCommitHistory = commits,
_isArchived = isArchived,
_isFork = isFork
}
genDependencyRepoCommit :: Gen DependencyRepoCommit
genDependencyRepoCommit =
DependencyRepoCommit
<$> genUTCTime
<*> Gen.maybe genAlphaText
genDependenciesFileType :: Gen DependenciesFileType
genDependenciesFileType = Gen.enumBounded
genDependenciesFile :: Gen DependenciesFileLoad
genDependenciesFile =
DependenciesFileLoad
<$> genDependenciesFileType
<*> genDependenciesFileLoadDetails
genDependenciesFileLoadDetails :: Gen DependenciesFileLoadDetails
genDependenciesFileLoadDetails =
Gen.choice
[ DFLDSpecific <$> genAlphaText,
DFLDSearch <$> genGitFileMatch,
DFLDDirectorySearch <$> genGitPath <*> genGitFileMatch
]
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common-test/src/CommonTest/Gen/Model/Dependency.hs | haskell | genDependencyIdentifierOnlyNamed :: Gen DependencyIdentifier
genDependencyIdentifierOnlyNamed =
Gen.choice
[ genDependencyIdentifierNamed,
DependencyIdentifierRepo <$> genQualifiedRepo <*> (Just <$> genDependencyName)
] | # OPTIONS_GHC -fno - warn - missing - export - lists #
module CommonTest.Gen.Model.Dependency where
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.DependencyIdentifier
import Common.Model.Dependency.DependencyName
import Common.Model.Dependency.DependencyType
import Common.Model.Dependency.Enriched.EnrichedDependency
import Common.Model.Dependency.Enriched.EnrichedRepoDependencies
import Common.Model.Dependency.Errored.ErroredDependency
import Common.Model.Dependency.Errored.ErroredReason
import Common.Model.Dependency.Errored.ErroredRepoDependencies
import Common.Model.Dependency.File.DependenciesFileLoad
import Common.Model.Dependency.File.DependenciesFileLoadDetails
import Common.Model.Dependency.File.DependenciesFileType
import Common.Model.Dependency.Ignored.IgnoredDependency
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Common.Model.Dependency.Registry.DependencyRegistryInfo
import Common.Model.Dependency.Registry.RegistryAlivenessStatus
import Common.Model.Dependency.Repo.DependencyRepoCommit
import Common.Model.Dependency.Repo.DependencyRepoStats
import CommonTest.Gen.General
import CommonTest.Gen.Model.Ecosystem
import CommonTest.Gen.Model.Git
import Hedgehog
import Hedgehog.Gen qualified as Gen
import Hedgehog.Range qualified as Range
genBasicRepoDependencies :: Gen BasicRepoDependencies
genBasicRepoDependencies = BasicRepoDependencies <$> genNonEmptyVector (Range.constant 1 10) genBasicDependency
genBasicDependency :: Gen BasicDependency
genBasicDependency = do
programmingLanguage <- genProgrammingLanguage
depIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
pure $
BasicDependency programmingLanguage depIdentifier depType
genDependencyIdentifier :: Gen DependencyIdentifier
genDependencyIdentifier =
Gen.choice
[ genDependencyIdentifierNamed,
genDependencyIdentifierRepo
]
genDependencyIdentifierNamed :: Gen DependencyIdentifier
genDependencyIdentifierNamed = DependencyIdentifierNamed <$> genDependencyName
genDependencyIdentifierRepo :: Gen DependencyIdentifier
genDependencyIdentifierRepo = DependencyIdentifierRepo <$> genQualifiedRepo <*> Gen.maybe genDependencyName
genIgnoredRepoDependencies :: Gen IgnoredRepoDependencies
genIgnoredRepoDependencies = IgnoredRepoDependencies <$> genVector (Range.constant 1 10) genIgnoredDependency
genIgnoredDependency :: Gen IgnoredDependency
genIgnoredDependency = do
programmingLanguage <- genProgrammingLanguage
dependencyIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
pure
IgnoredDependency
{ _programmingLanguage = programmingLanguage,
_dependencyIdentifier = dependencyIdentifier,
_dependencyType = depType
}
genErroredRepoDependencies :: Gen ErroredRepoDependencies
genErroredRepoDependencies = ErroredRepoDependencies <$> genVector (Range.constant 1 10) genErroredDependency
genErroredDependency :: Gen ErroredDependency
genErroredDependency = do
dependencyIdentifier <- genDependencyIdentifier
depType <- Gen.maybe genDependencyType
programmingLanguage <- genProgrammingLanguage
registryInfo <- Gen.maybe genDependencyRegistryInfo
failureReason <- genErroredReason
pure
ErroredDependency
{ _dependencyIdentifier = dependencyIdentifier,
_dependencyType = depType,
_programmingLanguage = programmingLanguage,
_registryInfo = registryInfo,
_erroredReason = failureReason
}
genErroredReason :: Gen ErroredReason
genErroredReason =
Gen.choice
[ UnexpectedFailureToParseRegistryEntry <$> genAlphaText,
UnexpectedFailureRegistryDataInconsistent <$> genAlphaText,
Gen.constant NoRegistryOrRepoData
]
genDependencyType :: Gen DependencyType
genDependencyType = Gen.element [CoreDependency, DevDependency]
genDependencyName :: Gen DependencyName
genDependencyName =
DependencyName
<$> Gen.text
(Range.constant 1 10)
( Gen.choice
[ Gen.alphaNum,
Gen.constant '-',
Gen.constant '_',
Gen.constant '@',
Gen.constant '.',
Gen.constant '/'
]
)
genEnrichedRepoDependencies :: Gen EnrichedRepoDependencies
genEnrichedRepoDependencies = EnrichedRepoDependencies <$> genNonEmptyVector (Range.constant 1 10) genEnrichedDependency
genEnrichedDependency :: Gen EnrichedDependency
genEnrichedDependency = do
programmingLanguage <- genProgrammingLanguage
dependencyIdentifier <- genDependencyIdentifier
dependencyType <- Gen.maybe genDependencyType
theseData <- genThese genDependencyRegistryInfo genDependencyRepoStats
pure
EnrichedDependency
{ _programmingLanguage = programmingLanguage,
_dependencyIdentifier = dependencyIdentifier,
_dependencyType = dependencyType,
_data = theseData
}
genDependencyRegistryInfo :: Gen DependencyRegistryInfo
genDependencyRegistryInfo =
DependencyRegistryInfo
<$> genRegistry
<*> Gen.maybe genRepo
<*> genRegistryAlivenessStatus
<*> Gen.maybe genUTCTime
genRegistryAlivenessStatus :: Gen RegistryAlivenessStatus
genRegistryAlivenessStatus =
Gen.choice
[ Gen.constant RASAlive,
RASDeprecated <$> genRegistryAlivenessStatusType <*> Gen.maybe genAlphaText <*> genVector (Range.constant 0 10) genDependencyName
]
genRegistryAlivenessStatusType :: Gen RegistryAlivenessStatusType
genRegistryAlivenessStatusType = Gen.enumBounded
genDependencyRepoStats :: Gen DependencyRepoStats
genDependencyRepoStats = do
commits <- genVector (Range.constant 0 10) genDependencyRepoCommit
isArchived <- Gen.bool
isFork <- Gen.bool
pure
DependencyRepoStats
{ _twoYearlyCommitHistory = commits,
_isArchived = isArchived,
_isFork = isFork
}
genDependencyRepoCommit :: Gen DependencyRepoCommit
genDependencyRepoCommit =
DependencyRepoCommit
<$> genUTCTime
<*> Gen.maybe genAlphaText
genDependenciesFileType :: Gen DependenciesFileType
genDependenciesFileType = Gen.enumBounded
genDependenciesFile :: Gen DependenciesFileLoad
genDependenciesFile =
DependenciesFileLoad
<$> genDependenciesFileType
<*> genDependenciesFileLoadDetails
genDependenciesFileLoadDetails :: Gen DependenciesFileLoadDetails
genDependenciesFileLoadDetails =
Gen.choice
[ DFLDSpecific <$> genAlphaText,
DFLDSearch <$> genGitFileMatch,
DFLDDirectorySearch <$> genGitPath <*> genGitFileMatch
]
|
1c95a3fb91ed4a7822bbcc575ac4390c1fd0e74f2c25e7c96f3aa783487df573 | codereport/SICP-2020 | conor_hoekstra_solutions_i.rkt | Exercise 2.17
;; Solution 1
(require threading)
(define (last-pair lst)
(~> lst
reverse
car))
;; Solution 2
(define (last-pair lst)
(list-ref lst (- (length lst) 1)))
;; Solution 3
(define (last-pair lst)
(if (null? (cdr lst))
(car lst)
(last-pair (cdr lst))))
Exercise 2.18
(define (reverse lst)
(define (iter lst acc)
(if (null? lst)
acc
(iter (cdr lst) (cons (car lst) acc))))
(iter lst '()))
Exercise 2.20
(define (same-parity x . xs)
(filter (λ (n) (= (remainder x 2)
(remainder n 2)))
xs))
> ( same - parity 1 1 2 3 4 5 )
;; '(1 3 5)
> ( same - parity 2 1 2 3 4 5 )
' ( 2 4 )
Exercise 2.21
(define (sq x) (* x x))
(define (square-list items)
(if (null? items)
'()
(cons (sq (car items))
(square-list (cdr items)))))
(define (square-list2 items)
(map sq items))
Exercise 2.23
;; this fails :(
(define (for-each proc lst)
(if (null? lst)
(λ (x) (x))
((proc (car lst))
(for-each proc (cdr lst)))))
;; this prints a #t at the end :(
(define (for-each proc lst)
(cond ((null? lst) #t)
(else (proc (car lst))
(for-each proc (cdr lst)))))
;; this works
(define (for-each proc lst)
(cond ((null? (cdr lst)) (proc (car lst)))
(else (proc (car lst))
(for-each proc (cdr lst)))))
Exercise 2.25
> (define x '(1 3 (5 7) 9))
> (car (cdaddr x))
7
> (define x '((7)))
> (caar x)
7
(require threading)
> (define x '(1 (2 (3 (4 (5 (6 7)))))))
> (~> x
cadadr
cadadr
cadadr)
7
Exercise 2.27
(define (deep-reverse lst)
(define (iter lst acc)
(if (null? lst)
acc
(let ((fst (car lst)))
(iter (cdr lst)
(cons (if (list? fst)
(reverse fst)
fst)
acc)))))
(iter lst '()))
Exercise 2.28
;; Solution 1
(define fringe flatten) ; :p
;; Solution 2
(define (fringe tree)
(if (null? tree)
'()
(let ((x (car tree)))
(append (if (list? x)
(fringe x)
(list x))
(fringe (cdr tree))))))
;; Example from book
(define (scale-tree tree factor)
(cond ((null? tree) '())
((not (pair? tree)) (* tree factor))
(else (cons (scale-tree (car tree) factor)
(scale-tree (cdr tree) factor)))))
;; Exercise 2.30 (direct)
(define (square-tree tree)
(cond ((null? tree) '())
((not (pair? tree)) (sq tree))
(else (cons (square-tree (car tree))
(square-tree (cdr tree))))))
;; Example from book
(define (scale-tree tree factor)
(map (λ (sub-tree)
(if (pair? sub-tree)
(scale-tree sub-tree factor)
(* sub-tree factor)))
tree))
;; Exercise 2.30 (map)
(define (square-tree tree)
(map (λ (sub-tree)
(if (pair? sub-tree)
(square-tree sub-tree)
(sq sub-tree)))
tree))
Exercise 2.31
(define (tree-map tree proc)
(map (λ (sub-tree)
(if (pair? sub-tree)
(tree-map sub-tree proc)
(proc sub-tree)))
tree))
(define (square-tree tree) (tree-map tree sq))
(define (scale-tree tree factor) (tree-map tree (λ (x) (* x factor))))
Exercise 2.32
(define (subsets s)
(if (null? s)
(list '())
(let ((rest (subsets (cdr s))))
(append rest (map (λ (x) (cons (car s) x)) rest)))))
> ( subsets ( range 4 ) )
;; '(()
( 3 )
( 2 )
( 2 3 )
( 1 )
( 1 3 )
( 1 2 )
;; (1 2 3)
;; (0)
( 0 3 )
( 0 2 )
;; (0 2 3)
;; (0 1)
;; (0 1 3)
;; (0 1 2)
;; (0 1 2 3))
| null | https://raw.githubusercontent.com/codereport/SICP-2020/2d1e60048db89678830d93fcc558a846b7f57b76/Chapter%202.2%20Solutions/conor_hoekstra_solutions_i.rkt | racket | Solution 1
Solution 2
Solution 3
'(1 3 5)
this fails :(
this prints a #t at the end :(
this works
Solution 1
:p
Solution 2
Example from book
Exercise 2.30 (direct)
Example from book
Exercise 2.30 (map)
'(()
(1 2 3)
(0)
(0 2 3)
(0 1)
(0 1 3)
(0 1 2)
(0 1 2 3)) | Exercise 2.17
(require threading)
(define (last-pair lst)
(~> lst
reverse
car))
(define (last-pair lst)
(list-ref lst (- (length lst) 1)))
(define (last-pair lst)
(if (null? (cdr lst))
(car lst)
(last-pair (cdr lst))))
Exercise 2.18
(define (reverse lst)
(define (iter lst acc)
(if (null? lst)
acc
(iter (cdr lst) (cons (car lst) acc))))
(iter lst '()))
Exercise 2.20
(define (same-parity x . xs)
(filter (λ (n) (= (remainder x 2)
(remainder n 2)))
xs))
> ( same - parity 1 1 2 3 4 5 )
> ( same - parity 2 1 2 3 4 5 )
' ( 2 4 )
Exercise 2.21
(define (sq x) (* x x))
(define (square-list items)
(if (null? items)
'()
(cons (sq (car items))
(square-list (cdr items)))))
(define (square-list2 items)
(map sq items))
Exercise 2.23
(define (for-each proc lst)
(if (null? lst)
(λ (x) (x))
((proc (car lst))
(for-each proc (cdr lst)))))
(define (for-each proc lst)
(cond ((null? lst) #t)
(else (proc (car lst))
(for-each proc (cdr lst)))))
(define (for-each proc lst)
(cond ((null? (cdr lst)) (proc (car lst)))
(else (proc (car lst))
(for-each proc (cdr lst)))))
Exercise 2.25
> (define x '(1 3 (5 7) 9))
> (car (cdaddr x))
7
> (define x '((7)))
> (caar x)
7
(require threading)
> (define x '(1 (2 (3 (4 (5 (6 7)))))))
> (~> x
cadadr
cadadr
cadadr)
7
Exercise 2.27
(define (deep-reverse lst)
(define (iter lst acc)
(if (null? lst)
acc
(let ((fst (car lst)))
(iter (cdr lst)
(cons (if (list? fst)
(reverse fst)
fst)
acc)))))
(iter lst '()))
Exercise 2.28
(define (fringe tree)
(if (null? tree)
'()
(let ((x (car tree)))
(append (if (list? x)
(fringe x)
(list x))
(fringe (cdr tree))))))
(define (scale-tree tree factor)
(cond ((null? tree) '())
((not (pair? tree)) (* tree factor))
(else (cons (scale-tree (car tree) factor)
(scale-tree (cdr tree) factor)))))
(define (square-tree tree)
(cond ((null? tree) '())
((not (pair? tree)) (sq tree))
(else (cons (square-tree (car tree))
(square-tree (cdr tree))))))
(define (scale-tree tree factor)
(map (λ (sub-tree)
(if (pair? sub-tree)
(scale-tree sub-tree factor)
(* sub-tree factor)))
tree))
(define (square-tree tree)
(map (λ (sub-tree)
(if (pair? sub-tree)
(square-tree sub-tree)
(sq sub-tree)))
tree))
Exercise 2.31
(define (tree-map tree proc)
(map (λ (sub-tree)
(if (pair? sub-tree)
(tree-map sub-tree proc)
(proc sub-tree)))
tree))
(define (square-tree tree) (tree-map tree sq))
(define (scale-tree tree factor) (tree-map tree (λ (x) (* x factor))))
Exercise 2.32
(define (subsets s)
(if (null? s)
(list '())
(let ((rest (subsets (cdr s))))
(append rest (map (λ (x) (cons (car s) x)) rest)))))
> ( subsets ( range 4 ) )
( 3 )
( 2 )
( 2 3 )
( 1 )
( 1 3 )
( 1 2 )
( 0 3 )
( 0 2 )
|
2856d7151dc8f50be426d6f5a8475f70982d53844e857f69813b399144d16d26 | 2600hz-archive/whistle | crossbar_util.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2010 - 2011 VoIP INC
%%% @doc
%%%
%%% @end
Created : 14 Dec 2010 by < >
%%%-------------------------------------------------------------------
-module(crossbar_util).
-export([response/2, response/3, response/4, response/5]).
-export([response_deprecated/1, response_deprecated_redirect/2, response_deprecated_redirect/3]).
-export([response_faulty_request/1]).
-export([response_bad_identifier/2]).
-export([response_conflicting_docs/1]).
-export([response_datastore_timeout/1]).
-export([response_datastore_conn_refused/1]).
-export([response_invalid_data/2]).
-export([response_missing_view/1]).
-export([response_db_missing/1]).
-export([response_db_fatal/1]).
-export([binding_heartbeat/1, binding_heartbeat/2]).
-export([put_reqid/1]).
-export([store/3, fetch/2, get_abs_url/2]).
-include("../include/crossbar.hrl").
-include_lib("webmachine/include/webmachine.hrl").
%%--------------------------------------------------------------------
@public
%% @doc
%% This function set the response status to success, and load the provided
%% data.
%% @end
%%--------------------------------------------------------------------
-spec(response/2 :: (JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response(JTerm, Context) ->
create_response(success, undefined, undefined, JTerm, Context).
%%--------------------------------------------------------------------
@public
%% @doc
This function load the error message into a 500 response , of type
%% fatal or error.
%% @end
%%--------------------------------------------------------------------
-spec(response/3 :: (Status :: error|fatal, Msg :: json_string(), Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Context) ->
create_response(error, Msg, 500, [], Context);
response(fatal, Msg, Context) ->
create_response(fatal, Msg, 500, [], Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% This function load the error message into a specifc code response,
%% of type fatal or error.
%% @end
%%--------------------------------------------------------------------
-spec(response/4 :: (Status :: error|fatal, Msg :: json_string(), Code :: integer()|undefined, Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Code, Context) ->
create_response(error, Msg, Code, [], Context);
response(fatal, Msg, Code, Context) ->
create_response(fatal, Msg, Code, [], Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% This function load the error message into a specifc code response,
%% of type fatal or error with additional data
%% @end
%%--------------------------------------------------------------------
-spec(response/5 :: (Status :: error|fatal, Msg :: json_string(), Code :: integer()|undefined, JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Code, JTerm, Context) ->
create_response(error, Msg, Code, JTerm, Context);
response(fatal, Msg, Code, JTerm, Context) ->
create_response(fatal, Msg, Code, JTerm, Context).
%%--------------------------------------------------------------------
@public
%% @doc
This function loads the response vars in Context , soon it will
%% make smarter chooices about formating resp_data and filtering
%% other parameters.
%% @end
%%--------------------------------------------------------------------
-spec create_response/5 :: (error|fatal|success, json_string(), integer()|undefined, json_term(), #cb_context{}) -> #cb_context{}.
create_response(Status, Msg, Code, JTerm, Context) ->
Context#cb_context {
resp_status = Status
,resp_error_msg = Msg
,resp_error_code = Code
,resp_data = JTerm
}.
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the request is faulty (doesnt have a
%% match in validation, or someother issue with it keeps it from being
%% processed, like nonsensical chains)
%% @end
%%--------------------------------------------------------------------
-spec response_faulty_request/1 :: (#cb_context{}) -> #cb_context{}.
response_faulty_request(Context) ->
response(error, <<"faulty request">>, 400, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% When a module is no longer valid, alert the client of the deprecated status
by either sending a 410 Gone or a 301 Redirct ( when using the arity
3 version .
%%
The RedirectUrl should be relative to the accessed URL . So , if the
%% URL accessed that is deprecated is:
%% /v1/account/{AID}/module/{MID}
and that MID moved to , the RedirectURL should be :
%% ../../module2/{MID}
%%
If redirecting from module1 to module2 , RedirectURL should be :
%% ../module2
%% @end
%%--------------------------------------------------------------------
-spec response_deprecated(#cb_context{}) -> #cb_context{}.
response_deprecated(Context) ->
create_response(error, <<"deprecated">>, 410, wh_json:new(), Context).
-spec response_deprecated_redirect(#cb_context{}, json_string()) -> #cb_context{}.
-spec response_deprecated_redirect(#cb_context{}, json_string(), json_object()) -> #cb_context{}.
response_deprecated_redirect(Context, RedirectUrl) ->
response_deprecated_redirect(Context, RedirectUrl, wh_json:new()).
response_deprecated_redirect(#cb_context{resp_headers=RespHeaders}=Context, RedirectUrl, JObj) ->
create_response(error, <<"deprecated">>, 301, JObj
,Context#cb_context{resp_headers=[{"Location", RedirectUrl} | RespHeaders]}).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the requested ID did not match a
data record . Using 404 as 410 is a permanent Gone , while 404 is
%% a softer not found now.
%% @end
%%--------------------------------------------------------------------
-spec(response_bad_identifier/2 :: (Id :: binary(), Context :: #cb_context{}) -> #cb_context{}).
response_bad_identifier(Id, Context) ->
response(error, <<"bad identifier">>, 404, [Id], Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the requested resource update fails
%% because of a conflict in the DB
%% @end
%%--------------------------------------------------------------------
-spec(response_conflicting_docs/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_conflicting_docs(Context) ->
response(error, <<"conflicting documents">>, 409, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the requested data query was missing
%% @end
%%--------------------------------------------------------------------
-spec(response_missing_view/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_missing_view(Context) ->
response(fatal, <<"datastore missing view">>, 500, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the datastore time'd out
%% @end
%%--------------------------------------------------------------------
-spec(response_datastore_timeout/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_datastore_timeout(Context) ->
response(error, <<"datastore timeout">>, 503, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the datastore time'd out
%% @end
%%--------------------------------------------------------------------
-spec(response_datastore_conn_refused/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_datastore_conn_refused(Context) ->
response(error, <<"datastore connection refused">>, 503, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the provided data did not validate
%% @end
%%--------------------------------------------------------------------
-spec(response_invalid_data/2 :: (JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response_invalid_data(JTerm, Context) ->
response(error, <<"invalid data">>, 400, JTerm, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the datastore does not have the requested
%% record collection
%% @end
%%--------------------------------------------------------------------
-spec(response_db_missing/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_db_missing(Context) ->
response(fatal, <<"data collection missing: database not found">>, 503, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a standard response if the datastore does not have the requested
%% record collection
%% @end
%%--------------------------------------------------------------------
-spec(response_db_fatal/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_db_fatal(Context) ->
response(fatal, <<"datastore fatal error">>, 500, Context).
%%--------------------------------------------------------------------
@public
%% @doc
%% This spawns a function that will monitor the parent and hearbeat
%% the crossbar_binding PID provided as long as the parent lives
%% @end
%%--------------------------------------------------------------------
-spec binding_heartbeat/1 :: (BPid) -> pid() when
BPid :: pid().
binding_heartbeat(BPid) ->
binding_heartbeat(BPid, 10000).
-spec binding_heartbeat/2 :: (BPid, Timeout) -> pid() when
BPid :: pid(),
Timeout :: non_neg_integer() | 'infinity'.
binding_heartbeat(BPid, Timeout) ->
PPid = self(),
?LOG("Starting binding heartbeat for ~p", [PPid]),
spawn(fun() ->
Ref = erlang:monitor(process, PPid),
{ok, Tref} = timer:send_interval(250, BPid, heartbeat),
ok = receive
{'DOWN', Ref, process, _, normal} ->
ok;
{'DOWN', Ref, process, _, Reason} ->
?LOG("Bound client (~p) down for non-normal reason: ~p", [PPid, Reason]),
BPid ! {binding_error, Reason};
_ -> ok
after Timeout ->
?LOG("Bound client (~p) too slow, timed out after ~p", [PPid, Timeout]),
ok
end,
timer:cancel(Tref)
end).
%%--------------------------------------------------------------------
@public
%% @doc
%% This function extracts the request ID and sets it as 'callid' in
%% the process dictionary, where the logger expects it.
%% @end
%%--------------------------------------------------------------------
-spec put_reqid/1 :: (Context) -> no_return() when
Context :: #cb_context{}.
put_reqid(#cb_context{req_id=ReqId}) ->
put(callid, ReqId).
%%--------------------------------------------------------------------
@public
%% @doc
%% Sets a value in the crossbar context for later retrieval during
%% this request.
%% @end
%%--------------------------------------------------------------------
-spec store/3 :: (Key, Data, Context) -> #cb_context{} when
Key :: term(),
Data :: term(),
Context :: #cb_context{}.
store(Key, Data, #cb_context{storage=Storage}=Context) ->
Context#cb_context{storage=[{Key, Data}|proplists:delete(Key, Storage)]}.
%%--------------------------------------------------------------------
@public
%% @doc
a previously stored value from the current request .
%% @end
%%--------------------------------------------------------------------
-spec fetch/2 :: (Key, Context) -> term() when
Key :: term(),
Context :: #cb_context{}.
fetch(Key, #cb_context{storage=Storage}) ->
proplists:get_value(Key, Storage).
-spec get_abs_url/2 :: (#wm_reqdata{}, ne_binary()) -> string().
get_abs_url(RD, Url) ->
%% :port/"
Port = case wrq:port(RD) of
80 -> "";
P -> [":", wh_util:to_list(P)]
end,
Host = ["http://", string:join(lists:reverse(wrq:host_tokens(RD)), "."), Port, "/"],
?LOG("host: ~s", [Host]),
PathTokensRev = lists:reverse(string:tokens(wrq:path(RD), "/")),
get_abs_url(Host, PathTokensRev, Url).
%% Request: http[s]:port/v1/accounts/acct_id/module
%%
%% Host : http[s]:port/
%% PathTokensRev: /v1/accounts/acct_id/module => [module, acct_id, accounts, v1]
%% Url: ../other_mod
%%
Result : http[s] : port / v1 / accounts / acct_id / other_mod
-spec get_abs_url/3 :: (iolist(), [ne_binary(),...], ne_binary() | string()) -> string().
get_abs_url(Host, PathTokensRev, Url) ->
UrlTokens = string:tokens(wh_util:to_list(Url), "/"),
?LOG("path: ~p", [PathTokensRev]),
?LOG("rel: ~p", [UrlTokens]),
Url1 = string:join(
lists:reverse(
lists:foldl(fun("..", []) -> [];
("..", [_ | PathTokens]) -> PathTokens;
(".", PathTokens) -> PathTokens;
(Segment, PathTokens) -> [Segment | PathTokens]
end, PathTokensRev, UrlTokens)
), "/"),
?LOG("final url: ~p", [Url1]),
binary_to_list(list_to_binary([Host, Url1])).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
get_abs_url_test() ->
Host = ["http://", "some.host.com", ":8000", "/"],
PTs = ["module", "acct_id", "accounts", "v1"],
Url = "../other_mod",
?assertEqual(get_abs_url(Host, PTs, Url), ":8000/v1/accounts/acct_id/other_mod"),
?assertEqual(get_abs_url(Host, ["mod_id" | PTs], "../../other_mod"++"/mod_id"), ":8000/v1/accounts/acct_id/other_mod/mod_id").
-endif.
| null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/whistle_apps/apps/crossbar/src/crossbar_util.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function set the response status to success, and load the provided
data.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
fatal or error.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function load the error message into a specifc code response,
of type fatal or error.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function load the error message into a specifc code response,
of type fatal or error with additional data
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
make smarter chooices about formating resp_data and filtering
other parameters.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the request is faulty (doesnt have a
match in validation, or someother issue with it keeps it from being
processed, like nonsensical chains)
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
When a module is no longer valid, alert the client of the deprecated status
URL accessed that is deprecated is:
/v1/account/{AID}/module/{MID}
../../module2/{MID}
../module2
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the requested ID did not match a
a softer not found now.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the requested resource update fails
because of a conflict in the DB
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the requested data query was missing
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the datastore time'd out
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the datastore time'd out
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the provided data did not validate
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the datastore does not have the requested
record collection
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Create a standard response if the datastore does not have the requested
record collection
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This spawns a function that will monitor the parent and hearbeat
the crossbar_binding PID provided as long as the parent lives
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function extracts the request ID and sets it as 'callid' in
the process dictionary, where the logger expects it.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Sets a value in the crossbar context for later retrieval during
this request.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
:port/"
Request: http[s]:port/v1/accounts/acct_id/module
Host : http[s]:port/
PathTokensRev: /v1/accounts/acct_id/module => [module, acct_id, accounts, v1]
Url: ../other_mod
| @author < >
( C ) 2010 - 2011 VoIP INC
Created : 14 Dec 2010 by < >
-module(crossbar_util).
-export([response/2, response/3, response/4, response/5]).
-export([response_deprecated/1, response_deprecated_redirect/2, response_deprecated_redirect/3]).
-export([response_faulty_request/1]).
-export([response_bad_identifier/2]).
-export([response_conflicting_docs/1]).
-export([response_datastore_timeout/1]).
-export([response_datastore_conn_refused/1]).
-export([response_invalid_data/2]).
-export([response_missing_view/1]).
-export([response_db_missing/1]).
-export([response_db_fatal/1]).
-export([binding_heartbeat/1, binding_heartbeat/2]).
-export([put_reqid/1]).
-export([store/3, fetch/2, get_abs_url/2]).
-include("../include/crossbar.hrl").
-include_lib("webmachine/include/webmachine.hrl").
@public
-spec(response/2 :: (JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response(JTerm, Context) ->
create_response(success, undefined, undefined, JTerm, Context).
@public
This function load the error message into a 500 response , of type
-spec(response/3 :: (Status :: error|fatal, Msg :: json_string(), Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Context) ->
create_response(error, Msg, 500, [], Context);
response(fatal, Msg, Context) ->
create_response(fatal, Msg, 500, [], Context).
@public
-spec(response/4 :: (Status :: error|fatal, Msg :: json_string(), Code :: integer()|undefined, Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Code, Context) ->
create_response(error, Msg, Code, [], Context);
response(fatal, Msg, Code, Context) ->
create_response(fatal, Msg, Code, [], Context).
@public
-spec(response/5 :: (Status :: error|fatal, Msg :: json_string(), Code :: integer()|undefined, JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response(error, Msg, Code, JTerm, Context) ->
create_response(error, Msg, Code, JTerm, Context);
response(fatal, Msg, Code, JTerm, Context) ->
create_response(fatal, Msg, Code, JTerm, Context).
@public
This function loads the response vars in Context , soon it will
-spec create_response/5 :: (error|fatal|success, json_string(), integer()|undefined, json_term(), #cb_context{}) -> #cb_context{}.
create_response(Status, Msg, Code, JTerm, Context) ->
Context#cb_context {
resp_status = Status
,resp_error_msg = Msg
,resp_error_code = Code
,resp_data = JTerm
}.
@public
-spec response_faulty_request/1 :: (#cb_context{}) -> #cb_context{}.
response_faulty_request(Context) ->
response(error, <<"faulty request">>, 400, Context).
@public
by either sending a 410 Gone or a 301 Redirct ( when using the arity
3 version .
The RedirectUrl should be relative to the accessed URL . So , if the
and that MID moved to , the RedirectURL should be :
If redirecting from module1 to module2 , RedirectURL should be :
-spec response_deprecated(#cb_context{}) -> #cb_context{}.
response_deprecated(Context) ->
create_response(error, <<"deprecated">>, 410, wh_json:new(), Context).
-spec response_deprecated_redirect(#cb_context{}, json_string()) -> #cb_context{}.
-spec response_deprecated_redirect(#cb_context{}, json_string(), json_object()) -> #cb_context{}.
response_deprecated_redirect(Context, RedirectUrl) ->
response_deprecated_redirect(Context, RedirectUrl, wh_json:new()).
response_deprecated_redirect(#cb_context{resp_headers=RespHeaders}=Context, RedirectUrl, JObj) ->
create_response(error, <<"deprecated">>, 301, JObj
,Context#cb_context{resp_headers=[{"Location", RedirectUrl} | RespHeaders]}).
@public
data record . Using 404 as 410 is a permanent Gone , while 404 is
-spec(response_bad_identifier/2 :: (Id :: binary(), Context :: #cb_context{}) -> #cb_context{}).
response_bad_identifier(Id, Context) ->
response(error, <<"bad identifier">>, 404, [Id], Context).
@public
-spec(response_conflicting_docs/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_conflicting_docs(Context) ->
response(error, <<"conflicting documents">>, 409, Context).
@public
-spec(response_missing_view/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_missing_view(Context) ->
response(fatal, <<"datastore missing view">>, 500, Context).
@public
-spec(response_datastore_timeout/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_datastore_timeout(Context) ->
response(error, <<"datastore timeout">>, 503, Context).
@public
-spec(response_datastore_conn_refused/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_datastore_conn_refused(Context) ->
response(error, <<"datastore connection refused">>, 503, Context).
@public
-spec(response_invalid_data/2 :: (JTerm :: json_term(), Context :: #cb_context{}) -> #cb_context{}).
response_invalid_data(JTerm, Context) ->
response(error, <<"invalid data">>, 400, JTerm, Context).
@public
-spec(response_db_missing/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_db_missing(Context) ->
response(fatal, <<"data collection missing: database not found">>, 503, Context).
@public
-spec(response_db_fatal/1 :: (Context :: #cb_context{}) -> #cb_context{}).
response_db_fatal(Context) ->
response(fatal, <<"datastore fatal error">>, 500, Context).
@public
-spec binding_heartbeat/1 :: (BPid) -> pid() when
BPid :: pid().
binding_heartbeat(BPid) ->
binding_heartbeat(BPid, 10000).
-spec binding_heartbeat/2 :: (BPid, Timeout) -> pid() when
BPid :: pid(),
Timeout :: non_neg_integer() | 'infinity'.
binding_heartbeat(BPid, Timeout) ->
PPid = self(),
?LOG("Starting binding heartbeat for ~p", [PPid]),
spawn(fun() ->
Ref = erlang:monitor(process, PPid),
{ok, Tref} = timer:send_interval(250, BPid, heartbeat),
ok = receive
{'DOWN', Ref, process, _, normal} ->
ok;
{'DOWN', Ref, process, _, Reason} ->
?LOG("Bound client (~p) down for non-normal reason: ~p", [PPid, Reason]),
BPid ! {binding_error, Reason};
_ -> ok
after Timeout ->
?LOG("Bound client (~p) too slow, timed out after ~p", [PPid, Timeout]),
ok
end,
timer:cancel(Tref)
end).
@public
-spec put_reqid/1 :: (Context) -> no_return() when
Context :: #cb_context{}.
put_reqid(#cb_context{req_id=ReqId}) ->
put(callid, ReqId).
@public
-spec store/3 :: (Key, Data, Context) -> #cb_context{} when
Key :: term(),
Data :: term(),
Context :: #cb_context{}.
store(Key, Data, #cb_context{storage=Storage}=Context) ->
Context#cb_context{storage=[{Key, Data}|proplists:delete(Key, Storage)]}.
@public
a previously stored value from the current request .
-spec fetch/2 :: (Key, Context) -> term() when
Key :: term(),
Context :: #cb_context{}.
fetch(Key, #cb_context{storage=Storage}) ->
proplists:get_value(Key, Storage).
-spec get_abs_url/2 :: (#wm_reqdata{}, ne_binary()) -> string().
get_abs_url(RD, Url) ->
Port = case wrq:port(RD) of
80 -> "";
P -> [":", wh_util:to_list(P)]
end,
Host = ["http://", string:join(lists:reverse(wrq:host_tokens(RD)), "."), Port, "/"],
?LOG("host: ~s", [Host]),
PathTokensRev = lists:reverse(string:tokens(wrq:path(RD), "/")),
get_abs_url(Host, PathTokensRev, Url).
Result : http[s] : port / v1 / accounts / acct_id / other_mod
-spec get_abs_url/3 :: (iolist(), [ne_binary(),...], ne_binary() | string()) -> string().
get_abs_url(Host, PathTokensRev, Url) ->
UrlTokens = string:tokens(wh_util:to_list(Url), "/"),
?LOG("path: ~p", [PathTokensRev]),
?LOG("rel: ~p", [UrlTokens]),
Url1 = string:join(
lists:reverse(
lists:foldl(fun("..", []) -> [];
("..", [_ | PathTokens]) -> PathTokens;
(".", PathTokens) -> PathTokens;
(Segment, PathTokens) -> [Segment | PathTokens]
end, PathTokensRev, UrlTokens)
), "/"),
?LOG("final url: ~p", [Url1]),
binary_to_list(list_to_binary([Host, Url1])).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
get_abs_url_test() ->
Host = ["http://", "some.host.com", ":8000", "/"],
PTs = ["module", "acct_id", "accounts", "v1"],
Url = "../other_mod",
?assertEqual(get_abs_url(Host, PTs, Url), ":8000/v1/accounts/acct_id/other_mod"),
?assertEqual(get_abs_url(Host, ["mod_id" | PTs], "../../other_mod"++"/mod_id"), ":8000/v1/accounts/acct_id/other_mod/mod_id").
-endif.
|
665ff42c34b29432c08f8437854a880ca1d05202e1f70e055b734d92b370c1ee | jaspervdj/hakyll | Tests.hs | --------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Core.Provider.Tests
( tests
) where
--------------------------------------------------------------------------------
import Hakyll.Core.Metadata
import Hakyll.Core.Provider
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, testCase, (@=?))
import TestSuite.Util
--------------------------------------------------------------------------------
tests :: TestTree
tests = testGroup "Hakyll.Core.Provider.Tests"
[ testCase "case01" case01
]
--------------------------------------------------------------------------------
case01 :: Assertion
case01 = do
store <- newTestStore
provider <- newTestProvider store
True @=? resourceExists provider "example.md"
metadata <- resourceMetadata provider "example.md"
Just "An example" @=? lookupString "title" metadata
Just "External data" @=? lookupString "external" metadata
doesntExist <- resourceMetadata provider "doesntexist.md"
mempty @=? doesntExist
cleanTestEnv
| null | https://raw.githubusercontent.com/jaspervdj/hakyll/af9e29b5456c105dc948bc46c93e989a650b5ed1/tests/Hakyll/Core/Provider/Tests.hs | haskell | ------------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | module Hakyll.Core.Provider.Tests
( tests
) where
import Hakyll.Core.Metadata
import Hakyll.Core.Provider
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, testCase, (@=?))
import TestSuite.Util
tests :: TestTree
tests = testGroup "Hakyll.Core.Provider.Tests"
[ testCase "case01" case01
]
case01 :: Assertion
case01 = do
store <- newTestStore
provider <- newTestProvider store
True @=? resourceExists provider "example.md"
metadata <- resourceMetadata provider "example.md"
Just "An example" @=? lookupString "title" metadata
Just "External data" @=? lookupString "external" metadata
doesntExist <- resourceMetadata provider "doesntexist.md"
mempty @=? doesntExist
cleanTestEnv
|
d230ca9672f3efdc558871280c579f1f2e0bd1d7ebd29fa26273cf845ff0b5b0 | John-Nagle/nqthm | monotonicity-macros.lisp | Copyright ( C ) 1990 - 1994 Computational Logic , Inc. All Rights
;;; Reserved. See the file LICENSE in this directory for the
;;; complete license agreement.
;;;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
;;;
;;; MONOTONICITY-MACROS.LISP
;;;
;;;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(in-package "USER")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; MONOTONICITY LEMMAS FOR BOOLEAN FUNCTIONS ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; e.g.:
;;; >(macroexpand-1 '(monotonicity-lemma f-and3))
( PROVE - LEMMA F - AND3 - MONOTONE ( REWRITE )
;;; (IMPLIES (AND (B-APPROX A1 A2) (B-APPROX B1 B2) (B-APPROX C1 C2))
( B - APPROX ( F - AND3 A1 B1 C1 ) ( F - AND3 A2 B2 C2 ) ) ) )
;;; T
;;;
;;; >
(defun monotonicity-lemma-fn (name &optional hints)
(let* ((ev (get name 'event))
(args (caddr ev))
(args1
(iterate for arg in args
collect (pack (list arg 1))))
(args2
(iterate for arg in args
collect (pack (list arg 2))))
(conjuncts
(iterate for arg1 in args1
as arg2 in args2
collect (list 'b-approx arg1 arg2))))
(if args
`(prove-lemma ,(pack (list name '-monotone)) (rewrite)
(implies
,(if (consp (cdr args))
(cons 'and conjuncts)
(car conjuncts))
(b-approx (,name ,@args1) (,name ,@args2)))
,@(and hints (list hints)))
t)))
(defmacro monotonicity-lemma (name &optional hints)
(monotonicity-lemma-fn name hints))
(defmacro monotonicity-lemmas (names &optional hints)
(list 'do-events-recursive
(list 'quote
(iterate for name in names
collect
(monotonicity-lemma-fn name hints)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; DISABLE-ALL ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro disable-all (&rest names)
(list 'do-events-recursive
(list 'quote
(iterate for name in names
collect
(list 'disable name)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; PROVE-PRIMITIVE-MONOTONICITY ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; >(macroexpand-1 '(prove-primitive-monotonicity (AO2 AO4)))
;;; (DO-EVENTS-RECURSIVE
;;; '((PROVE-LEMMA DUAL-EVAL-AO2-VALUE (REWRITE)
;;; (EQUAL (DUAL-EVAL 0 'AO2 ARGS STATE NETLIST)
;;; (LET ((A (CAR ARGS)) (B (CAR (CDR ARGS)))
;;; (C (CAR (CDR (CDR ARGS))))
;;; (D (CAR (CDR (CDR (CDR ARGS))))))
;;; (CONS (F-NOR (F-AND A B) (F-AND C D)) 'NIL)))
;;; ((ENABLE DUAL-EVAL DUAL-APPLY-VALUE)))
;;; (PROVE-LEMMA DUAL-EVAL-AO2-STATE (REWRITE)
;;; (EQUAL (DUAL-EVAL 2 'AO2 ARGS STATE NETLIST) 0)
;;; ((ENABLE DUAL-EVAL DUAL-APPLY-STATE)))
;;; (PROVE-LEMMA AO2-MONOTONE (REWRITE)
;;; (AND (MONOTONICITY-PROPERTY 0 'AO2 NETLIST A1 A2 S1 S2)
;;; (MONOTONICITY-PROPERTY 2 'AO2 NETLIST A1 A2 S1 S2))
;;; ((DISABLE-THEORY T)
( ENABLE - THEORY GROUND - ZERO MONOTONICITY - LEMMAS )
;;; (ENABLE *1*B-APPROX *1*V-APPROX *1*S-APPROX V-APPROX
;;; MONOTONICITY-PROPERTY-OPENER-0
;;; MONOTONICITY-PROPERTY-OPENER-2 DUAL-EVAL-AO2-VALUE
;;; DUAL-EVAL-AO2-STATE S-APPROX-IMPLIES-B-APPROX
;;; FOURP-IMPLIES-S-APPROX-IS-B-APPROX FOURP-F-BUF
;;; FOURP-F-IF)
;;; (EXPAND (V-APPROX A1 A2) (V-APPROX (CDR A1) (CDR A2))
;;; (V-APPROX (CDR (CDR A1)) (CDR (CDR A2)))
;;; (V-APPROX (CDR (CDR (CDR A1))) (CDR (CDR (CDR A2)))))))
;;; (PROVE-LEMMA DUAL-EVAL-AO4-VALUE (REWRITE)
;;; (EQUAL (DUAL-EVAL 0 'AO4 ARGS STATE NETLIST)
;;; (LET ((A (CAR ARGS)) (B (CAR (CDR ARGS)))
;;; (C (CAR (CDR (CDR ARGS))))
;;; (D (CAR (CDR (CDR (CDR ARGS))))))
;;; (CONS (F-NAND (F-OR A B) (F-OR C D)) 'NIL)))
;;; ((ENABLE DUAL-EVAL DUAL-APPLY-VALUE)))
;;; (PROVE-LEMMA DUAL-EVAL-AO4-STATE (REWRITE)
;;; (EQUAL (DUAL-EVAL 2 'AO4 ARGS STATE NETLIST) 0)
;;; ((ENABLE DUAL-EVAL DUAL-APPLY-STATE)))
;;; (PROVE-LEMMA AO4-MONOTONE (REWRITE)
;;; (AND (MONOTONICITY-PROPERTY 0 'AO4 NETLIST A1 A2 S1 S2)
;;; (MONOTONICITY-PROPERTY 2 'AO4 NETLIST A1 A2 S1 S2))
;;; ((DISABLE-THEORY T)
( ENABLE - THEORY GROUND - ZERO MONOTONICITY - LEMMAS )
;;; (ENABLE *1*B-APPROX *1*V-APPROX *1*S-APPROX V-APPROX
;;; MONOTONICITY-PROPERTY-OPENER-0
;;; MONOTONICITY-PROPERTY-OPENER-2 DUAL-EVAL-AO4-VALUE
;;; DUAL-EVAL-AO4-STATE S-APPROX-IMPLIES-B-APPROX
;;; FOURP-IMPLIES-S-APPROX-IS-B-APPROX FOURP-F-BUF
;;; FOURP-F-IF)
;;; (EXPAND (V-APPROX A1 A2) (V-APPROX (CDR A1) (CDR A2))
;;; (V-APPROX (CDR (CDR A1)) (CDR (CDR A2)))
;;; (V-APPROX (CDR (CDR (CDR A1)))
;;; (CDR (CDR (CDR A2)))))))))
;;; T
;;;
;;; >
(defun dual-eval-name-state* (name)
(pack (list 'dual-eval- name '-state)))
(defun dual-eval-name-value* (name)
(pack (list 'dual-eval- name '-value)))
(defun name-value-let-bindings (inputs)
(iterate for i in inputs
with x = 'args
collect
(prog1 (list i (list 'car x))
(setq x (list 'cdr x)))))
(defun dual-eval-state-lemma (name inputs states new-states)
`(prove-lemma ,(dual-eval-name-state* name) (rewrite)
(equal (dual-eval 2 ',name args state netlist)
,(if states
`(let ,(name-value-let-bindings inputs)
,new-states)
0))
((enable dual-eval dual-apply-state))))
(defun dual-eval-value-lemma (name inputs states results)
(declare (ignore states))
`(prove-lemma ,(dual-eval-name-value* name) (rewrite)
(equal (dual-eval 0 ',name args state netlist)
(let ,(name-value-let-bindings inputs)
,results))
((enable dual-eval dual-apply-value))))
(defun device-monotonicity-lemma-expand-hint (name inputs states)
(declare (ignore states))
(cons 'expand
(iterate for x in inputs
with a1 = 'a1
and a2 = 'a2
collect
(prog1
(list 'v-approx a1 a2)
(ignore-variable x)
(setq a1 (list 'cdr a1))
(setq a2 (list 'cdr a2))))))
(defun device-monotonicity-lemma (name inputs states)
`(prove-lemma
,(pack (list name '-monotone))
(rewrite)
(and (monotonicity-property 0 ',name netlist a1 a2 s1 s2)
(monotonicity-property 2 ',name netlist a1 a2 s1 s2))
((disable-theory t)
(enable-theory ground-zero monotonicity-lemmas)
(enable *1*b-approx *1*v-approx *1*s-approx v-approx
monotonicity-property-opener-0 monotonicity-property-opener-2
,(dual-eval-name-value* name) ,(dual-eval-name-state* name)
s-approx-implies-b-approx
fourp-implies-s-approx-is-b-approx fourp-f-buf fourp-f-if)
,(device-monotonicity-lemma-expand-hint name inputs states))))
(defun prove-primitive-monotonicity-events (name)
(let ((entry (cdr (assoc name common-lisp-primp-database))))
(let ((inputs (cdr (assoc 'inputs entry)))
;; (outputs (cdr (assoc 'outputs entry)))
(results (cdr (assoc 'results entry)))
;; the following appears to always be 'state or nil
(states (cdr (assoc 'states entry)))
(new-states (cdr (assoc 'new-states entry))))
(list (dual-eval-value-lemma name inputs states results)
(dual-eval-state-lemma name inputs states new-states)
(device-monotonicity-lemma name inputs states)))))
(defmacro prove-primitive-monotonicity (x)
(let ((lst (if (consp x) x (list x))))
(list 'do-events-recursive
(list 'quote
(iterate for name in lst
nconc (prove-primitive-monotonicity-events name))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; REVERTING THE DISABLED/ENABLED STATE ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun old-state-events (ev-name)
(let (on-off)
(iterate for x in chronology
until (eq x ev-name)
with ans and z and ev
do
(setq ev (get x 'event))
(when (and (eq (car ev) 'prove-lemma)
(member-eq 'rewrite (caddr ev)))
(setq ans (cons `(disable ,x) ans)))
(when (match ev (toggle & z on-off))
(if on-off
(setq ans (cons `(enable ,z) ans))
(setq ans (cons `(disable ,z) ans))))
finally (return ans))))
(defmacro revert-state (ev-name)
`(do-events-recursive ',(old-state-events ev-name)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; PROVE-DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-2 ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This little hack is simply to save room in the files.
;;; (PROVE-LEMMA DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-2 (REWRITE)
;;; (EQUAL (DUAL-APPLY-VALUE 'DP-RAM-16X32 A S)
( DUAL - PORT - RAM - VALUE 32 4
;;; (LIST (EVAL$ T 'READ-A0
( ( PAIRLIST
;;; '(READ-A0 READ-A1 READ-A2 READ-A3
;;; WRITE-B0 WRITE-B1 WRITE-B2
;;; WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
;;; D23 D24 D25 D26 D27 D28 D29 D30
;;; D31)
;;; A)
;;; (PAIRSTATES 'STATE S)))
;;; (EVAL$ T 'READ-A1
( ( PAIRLIST
;;; '(READ-A0 READ-A1 READ-A2 READ-A3
;;; WRITE-B0 WRITE-B1 WRITE-B2
;;; WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
;;; D23 D24 D25 D26 D27 D28 D29 D30
;;; D31)
;;; A)
;;; (PAIRSTATES 'STATE S)))
;;; ....
;;; (EVAL$ T 'D31
( ( PAIRLIST
;;; '(READ-A0 READ-A1 READ-A2 READ-A3
;;; WRITE-B0 WRITE-B1 WRITE-B2
;;; WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
;;; D23 D24 D25 D26 D27 D28 D29 D30
;;; D31)
;;; A)
;;; (PAIRSTATES 'STATE S))))
;;; (EVAL$ T 'STATE
( ( PAIRLIST
;;; '(READ-A0 READ-A1 READ-A2 READ-A3
;;; WRITE-B0 WRITE-B1 WRITE-B2 WRITE-B3
;;; WEN D0 D1 D2 D3 D4 D5 D6 D7 D8 D9
;;; D10 D11 D12 D13 D14 D15 D16 D17 D18
;;; D19 D20 D21 D22 D23 D24 D25 D26 D27
;;; D28 D29 D30 D31)
;;; A)
( ' STATE S ) ) ) ) )
;;; ((DISABLE-THEORY T)
( ENABLE * 1*PRIMP * 1*LOOKUP - MODULE * 1*CDR * 1*CAR * 1*EVAL$
;;; DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-1 EVAL$-APPEND
;;; EVAL$-APPEND-2 EVAL$-QUOTE)))
(defmacro prove-dual-apply-value-or-state-dp-ram-16x32-lemma-2 (value-or-state)
(let ((name1 (if (eq value-or-state 'value)
'dual-apply-value-dp-ram-16x32-lemma-2
'dual-apply-state-dp-ram-16x32-lemma-2))
(name2 (if (eq value-or-state 'value)
'dual-apply-value
'dual-apply-state))
(name3 (if (eq value-or-state 'value)
'dual-port-ram-value
'dual-port-ram-state))
(name4 (if (eq value-or-state 'value)
'dual-apply-value-dp-ram-16x32-lemma-1
'dual-apply-state-dp-ram-16x32-lemma-1)))
`(prove-lemma ,name1 (rewrite)
(equal (,name2 'dp-ram-16x32 a s)
(,name3
32 4
,(cons 'list
(iterate for input in dp-ram-16x32-inputs
collect (list 'eval$ 't (list 'quote input)
(list 'append
(list 'pairlist
(list 'quote
dp-ram-16x32-inputs)
'a)
'(pairstates 'state s)))))
(eval$ t 'state
,(list 'append
(list 'pairlist
(list 'quote dp-ram-16x32-inputs)
'a)
'(pairstates 'state s)))))
((disable-theory t)
(enable *1*primp *1*lookup-module *1*cdr *1*car *1*eval$
,name4 eval$-append rewrite-eval$
eval$-append-2 eval$-quote)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
GOOD - STATE LEMMAS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun device-good-s-lemma (name inputs states)
(declare (ignore inputs states))
`(prove-lemma
,(pack (list name '-preserves-good-s))
(rewrite)
(implies (good-s s)
(good-s (dual-apply-state ',name args s)))
((disable-theory t)
(enable-theory ground-zero)
(enable *1*b-approx *1*v-approx *1*s-approx v-approx dual-apply-state
*1*primp2 f-buf-preserves-good-s f-if-preserves-good-s good-s-0
,(dual-eval-name-value* name) ,(dual-eval-name-state* name))
;,(device-monotonicity-lemma-expand-hint name inputs states)
)))
(defun prove-primitive-preserves-good-s-events (name)
(let ((entry (cdr (assoc name common-lisp-primp-database))))
(let ((inputs (cdr (assoc 'inputs entry)))
;; (outputs (cdr (assoc 'outputs entry)))
;; (results (cdr (assoc 'results entry)))
;; the following appears to always be 'state or nil
;; (new-states (cdr (assoc 'new-states entry)))
(states (cdr (assoc 'states entry))))
(list ;(dual-eval-value-lemma name inputs states results)
;(dual-eval-state-lemma name inputs states new-states)
(device-good-s-lemma name inputs states)))))
(defmacro prove-primitive-preserves-good-s (x)
(let ((lst (if (consp x) x (list x))))
(list 'do-events-recursive
(list 'quote
(iterate for name in lst
nconc
(prove-primitive-preserves-good-s-events name))))))
| null | https://raw.githubusercontent.com/John-Nagle/nqthm/aeafa016e424e9fba968a48bf7dab45ab96a3020/nqthm-1992/examples/fm9001-piton/fm9001/monotonicity-macros.lisp | lisp | Reserved. See the file LICENSE in this directory for the
complete license agreement.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MONOTONICITY-MACROS.LISP
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MONOTONICITY LEMMAS FOR BOOLEAN FUNCTIONS ;;;;;
e.g.:
>(macroexpand-1 '(monotonicity-lemma f-and3))
(IMPLIES (AND (B-APPROX A1 A2) (B-APPROX B1 B2) (B-APPROX C1 C2))
T
>
DISABLE-ALL ;;;;;
PROVE-PRIMITIVE-MONOTONICITY ;;;;;
>(macroexpand-1 '(prove-primitive-monotonicity (AO2 AO4)))
(DO-EVENTS-RECURSIVE
'((PROVE-LEMMA DUAL-EVAL-AO2-VALUE (REWRITE)
(EQUAL (DUAL-EVAL 0 'AO2 ARGS STATE NETLIST)
(LET ((A (CAR ARGS)) (B (CAR (CDR ARGS)))
(C (CAR (CDR (CDR ARGS))))
(D (CAR (CDR (CDR (CDR ARGS))))))
(CONS (F-NOR (F-AND A B) (F-AND C D)) 'NIL)))
((ENABLE DUAL-EVAL DUAL-APPLY-VALUE)))
(PROVE-LEMMA DUAL-EVAL-AO2-STATE (REWRITE)
(EQUAL (DUAL-EVAL 2 'AO2 ARGS STATE NETLIST) 0)
((ENABLE DUAL-EVAL DUAL-APPLY-STATE)))
(PROVE-LEMMA AO2-MONOTONE (REWRITE)
(AND (MONOTONICITY-PROPERTY 0 'AO2 NETLIST A1 A2 S1 S2)
(MONOTONICITY-PROPERTY 2 'AO2 NETLIST A1 A2 S1 S2))
((DISABLE-THEORY T)
(ENABLE *1*B-APPROX *1*V-APPROX *1*S-APPROX V-APPROX
MONOTONICITY-PROPERTY-OPENER-0
MONOTONICITY-PROPERTY-OPENER-2 DUAL-EVAL-AO2-VALUE
DUAL-EVAL-AO2-STATE S-APPROX-IMPLIES-B-APPROX
FOURP-IMPLIES-S-APPROX-IS-B-APPROX FOURP-F-BUF
FOURP-F-IF)
(EXPAND (V-APPROX A1 A2) (V-APPROX (CDR A1) (CDR A2))
(V-APPROX (CDR (CDR A1)) (CDR (CDR A2)))
(V-APPROX (CDR (CDR (CDR A1))) (CDR (CDR (CDR A2)))))))
(PROVE-LEMMA DUAL-EVAL-AO4-VALUE (REWRITE)
(EQUAL (DUAL-EVAL 0 'AO4 ARGS STATE NETLIST)
(LET ((A (CAR ARGS)) (B (CAR (CDR ARGS)))
(C (CAR (CDR (CDR ARGS))))
(D (CAR (CDR (CDR (CDR ARGS))))))
(CONS (F-NAND (F-OR A B) (F-OR C D)) 'NIL)))
((ENABLE DUAL-EVAL DUAL-APPLY-VALUE)))
(PROVE-LEMMA DUAL-EVAL-AO4-STATE (REWRITE)
(EQUAL (DUAL-EVAL 2 'AO4 ARGS STATE NETLIST) 0)
((ENABLE DUAL-EVAL DUAL-APPLY-STATE)))
(PROVE-LEMMA AO4-MONOTONE (REWRITE)
(AND (MONOTONICITY-PROPERTY 0 'AO4 NETLIST A1 A2 S1 S2)
(MONOTONICITY-PROPERTY 2 'AO4 NETLIST A1 A2 S1 S2))
((DISABLE-THEORY T)
(ENABLE *1*B-APPROX *1*V-APPROX *1*S-APPROX V-APPROX
MONOTONICITY-PROPERTY-OPENER-0
MONOTONICITY-PROPERTY-OPENER-2 DUAL-EVAL-AO4-VALUE
DUAL-EVAL-AO4-STATE S-APPROX-IMPLIES-B-APPROX
FOURP-IMPLIES-S-APPROX-IS-B-APPROX FOURP-F-BUF
FOURP-F-IF)
(EXPAND (V-APPROX A1 A2) (V-APPROX (CDR A1) (CDR A2))
(V-APPROX (CDR (CDR A1)) (CDR (CDR A2)))
(V-APPROX (CDR (CDR (CDR A1)))
(CDR (CDR (CDR A2)))))))))
T
>
(outputs (cdr (assoc 'outputs entry)))
the following appears to always be 'state or nil
REVERTING THE DISABLED/ENABLED STATE ;;;;;
PROVE-DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-2 ;;;;;
This little hack is simply to save room in the files.
(PROVE-LEMMA DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-2 (REWRITE)
(EQUAL (DUAL-APPLY-VALUE 'DP-RAM-16X32 A S)
(LIST (EVAL$ T 'READ-A0
'(READ-A0 READ-A1 READ-A2 READ-A3
WRITE-B0 WRITE-B1 WRITE-B2
WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D23 D24 D25 D26 D27 D28 D29 D30
D31)
A)
(PAIRSTATES 'STATE S)))
(EVAL$ T 'READ-A1
'(READ-A0 READ-A1 READ-A2 READ-A3
WRITE-B0 WRITE-B1 WRITE-B2
WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D23 D24 D25 D26 D27 D28 D29 D30
D31)
A)
(PAIRSTATES 'STATE S)))
....
(EVAL$ T 'D31
'(READ-A0 READ-A1 READ-A2 READ-A3
WRITE-B0 WRITE-B1 WRITE-B2
WRITE-B3 WEN D0 D1 D2 D3 D4 D5
D23 D24 D25 D26 D27 D28 D29 D30
D31)
A)
(PAIRSTATES 'STATE S))))
(EVAL$ T 'STATE
'(READ-A0 READ-A1 READ-A2 READ-A3
WRITE-B0 WRITE-B1 WRITE-B2 WRITE-B3
WEN D0 D1 D2 D3 D4 D5 D6 D7 D8 D9
D10 D11 D12 D13 D14 D15 D16 D17 D18
D19 D20 D21 D22 D23 D24 D25 D26 D27
D28 D29 D30 D31)
A)
((DISABLE-THEORY T)
DUAL-APPLY-VALUE-DP-RAM-16X32-LEMMA-1 EVAL$-APPEND
EVAL$-APPEND-2 EVAL$-QUOTE)))
,(device-monotonicity-lemma-expand-hint name inputs states)
(outputs (cdr (assoc 'outputs entry)))
(results (cdr (assoc 'results entry)))
the following appears to always be 'state or nil
(new-states (cdr (assoc 'new-states entry)))
(dual-eval-value-lemma name inputs states results)
(dual-eval-state-lemma name inputs states new-states) | Copyright ( C ) 1990 - 1994 Computational Logic , Inc. All Rights
(in-package "USER")
( PROVE - LEMMA F - AND3 - MONOTONE ( REWRITE )
( B - APPROX ( F - AND3 A1 B1 C1 ) ( F - AND3 A2 B2 C2 ) ) ) )
(defun monotonicity-lemma-fn (name &optional hints)
(let* ((ev (get name 'event))
(args (caddr ev))
(args1
(iterate for arg in args
collect (pack (list arg 1))))
(args2
(iterate for arg in args
collect (pack (list arg 2))))
(conjuncts
(iterate for arg1 in args1
as arg2 in args2
collect (list 'b-approx arg1 arg2))))
(if args
`(prove-lemma ,(pack (list name '-monotone)) (rewrite)
(implies
,(if (consp (cdr args))
(cons 'and conjuncts)
(car conjuncts))
(b-approx (,name ,@args1) (,name ,@args2)))
,@(and hints (list hints)))
t)))
(defmacro monotonicity-lemma (name &optional hints)
(monotonicity-lemma-fn name hints))
(defmacro monotonicity-lemmas (names &optional hints)
(list 'do-events-recursive
(list 'quote
(iterate for name in names
collect
(monotonicity-lemma-fn name hints)))))
(defmacro disable-all (&rest names)
(list 'do-events-recursive
(list 'quote
(iterate for name in names
collect
(list 'disable name)))))
( ENABLE - THEORY GROUND - ZERO MONOTONICITY - LEMMAS )
( ENABLE - THEORY GROUND - ZERO MONOTONICITY - LEMMAS )
(defun dual-eval-name-state* (name)
(pack (list 'dual-eval- name '-state)))
(defun dual-eval-name-value* (name)
(pack (list 'dual-eval- name '-value)))
(defun name-value-let-bindings (inputs)
(iterate for i in inputs
with x = 'args
collect
(prog1 (list i (list 'car x))
(setq x (list 'cdr x)))))
(defun dual-eval-state-lemma (name inputs states new-states)
`(prove-lemma ,(dual-eval-name-state* name) (rewrite)
(equal (dual-eval 2 ',name args state netlist)
,(if states
`(let ,(name-value-let-bindings inputs)
,new-states)
0))
((enable dual-eval dual-apply-state))))
(defun dual-eval-value-lemma (name inputs states results)
(declare (ignore states))
`(prove-lemma ,(dual-eval-name-value* name) (rewrite)
(equal (dual-eval 0 ',name args state netlist)
(let ,(name-value-let-bindings inputs)
,results))
((enable dual-eval dual-apply-value))))
(defun device-monotonicity-lemma-expand-hint (name inputs states)
(declare (ignore states))
(cons 'expand
(iterate for x in inputs
with a1 = 'a1
and a2 = 'a2
collect
(prog1
(list 'v-approx a1 a2)
(ignore-variable x)
(setq a1 (list 'cdr a1))
(setq a2 (list 'cdr a2))))))
(defun device-monotonicity-lemma (name inputs states)
`(prove-lemma
,(pack (list name '-monotone))
(rewrite)
(and (monotonicity-property 0 ',name netlist a1 a2 s1 s2)
(monotonicity-property 2 ',name netlist a1 a2 s1 s2))
((disable-theory t)
(enable-theory ground-zero monotonicity-lemmas)
(enable *1*b-approx *1*v-approx *1*s-approx v-approx
monotonicity-property-opener-0 monotonicity-property-opener-2
,(dual-eval-name-value* name) ,(dual-eval-name-state* name)
s-approx-implies-b-approx
fourp-implies-s-approx-is-b-approx fourp-f-buf fourp-f-if)
,(device-monotonicity-lemma-expand-hint name inputs states))))
(defun prove-primitive-monotonicity-events (name)
(let ((entry (cdr (assoc name common-lisp-primp-database))))
(let ((inputs (cdr (assoc 'inputs entry)))
(results (cdr (assoc 'results entry)))
(states (cdr (assoc 'states entry)))
(new-states (cdr (assoc 'new-states entry))))
(list (dual-eval-value-lemma name inputs states results)
(dual-eval-state-lemma name inputs states new-states)
(device-monotonicity-lemma name inputs states)))))
(defmacro prove-primitive-monotonicity (x)
(let ((lst (if (consp x) x (list x))))
(list 'do-events-recursive
(list 'quote
(iterate for name in lst
nconc (prove-primitive-monotonicity-events name))))))
(defun old-state-events (ev-name)
(let (on-off)
(iterate for x in chronology
until (eq x ev-name)
with ans and z and ev
do
(setq ev (get x 'event))
(when (and (eq (car ev) 'prove-lemma)
(member-eq 'rewrite (caddr ev)))
(setq ans (cons `(disable ,x) ans)))
(when (match ev (toggle & z on-off))
(if on-off
(setq ans (cons `(enable ,z) ans))
(setq ans (cons `(disable ,z) ans))))
finally (return ans))))
(defmacro revert-state (ev-name)
`(do-events-recursive ',(old-state-events ev-name)))
( DUAL - PORT - RAM - VALUE 32 4
( ( PAIRLIST
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
( ( PAIRLIST
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
( ( PAIRLIST
D6 D7 D8 D9 D10 D11 D12 D13 D14
D15 D16 D17
( ( PAIRLIST
( ' STATE S ) ) ) ) )
( ENABLE * 1*PRIMP * 1*LOOKUP - MODULE * 1*CDR * 1*CAR * 1*EVAL$
(defmacro prove-dual-apply-value-or-state-dp-ram-16x32-lemma-2 (value-or-state)
(let ((name1 (if (eq value-or-state 'value)
'dual-apply-value-dp-ram-16x32-lemma-2
'dual-apply-state-dp-ram-16x32-lemma-2))
(name2 (if (eq value-or-state 'value)
'dual-apply-value
'dual-apply-state))
(name3 (if (eq value-or-state 'value)
'dual-port-ram-value
'dual-port-ram-state))
(name4 (if (eq value-or-state 'value)
'dual-apply-value-dp-ram-16x32-lemma-1
'dual-apply-state-dp-ram-16x32-lemma-1)))
`(prove-lemma ,name1 (rewrite)
(equal (,name2 'dp-ram-16x32 a s)
(,name3
32 4
,(cons 'list
(iterate for input in dp-ram-16x32-inputs
collect (list 'eval$ 't (list 'quote input)
(list 'append
(list 'pairlist
(list 'quote
dp-ram-16x32-inputs)
'a)
'(pairstates 'state s)))))
(eval$ t 'state
,(list 'append
(list 'pairlist
(list 'quote dp-ram-16x32-inputs)
'a)
'(pairstates 'state s)))))
((disable-theory t)
(enable *1*primp *1*lookup-module *1*cdr *1*car *1*eval$
,name4 eval$-append rewrite-eval$
eval$-append-2 eval$-quote)))))
GOOD - STATE LEMMAS
(defun device-good-s-lemma (name inputs states)
(declare (ignore inputs states))
`(prove-lemma
,(pack (list name '-preserves-good-s))
(rewrite)
(implies (good-s s)
(good-s (dual-apply-state ',name args s)))
((disable-theory t)
(enable-theory ground-zero)
(enable *1*b-approx *1*v-approx *1*s-approx v-approx dual-apply-state
*1*primp2 f-buf-preserves-good-s f-if-preserves-good-s good-s-0
,(dual-eval-name-value* name) ,(dual-eval-name-state* name))
)))
(defun prove-primitive-preserves-good-s-events (name)
(let ((entry (cdr (assoc name common-lisp-primp-database))))
(let ((inputs (cdr (assoc 'inputs entry)))
(states (cdr (assoc 'states entry))))
(device-good-s-lemma name inputs states)))))
(defmacro prove-primitive-preserves-good-s (x)
(let ((lst (if (consp x) x (list x))))
(list 'do-events-recursive
(list 'quote
(iterate for name in lst
nconc
(prove-primitive-preserves-good-s-events name))))))
|
f54448f9572b1a112c96f1a42b7e225678bf4b36e9fad6e944f396b256767214 | Frama-C/Frama-C-snapshot | hcexprs.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
module Exp = Cil_datatype.ExpStructEq
module Lval = Cil_datatype.LvalStructEq
(* lvalues are never stored under a constructor [E]. *)
type unhashconsed_exprs = E of Exp.t | LV of Lval.t
(* The replacement of an lvalue by another term in an expression fails
(raises an exception) if the height of the resulting expression exceeds
this limit. *)
let height_limit = 8
exception NonExchangeable
type kill_type = Modified | Deleted
module E = struct
include Datatype.Make (struct
include Datatype.Serializable_undefined
type t = unhashconsed_exprs
let name = "Value.Symbolic_exprs.key"
let reprs = [ E Cil_datatype.Exp.dummy ]
let structural_descr =
Structural_descr.t_sum
[| [| Exp.packed_descr |] ; [| Lval.packed_descr |] ; |]
let equal a b = match a, b with
| E e1, E e2 -> Exp.equal e1 e2
| LV lv1, LV lv2 -> Lval.equal lv1 lv2
| (E _ | LV _), _ -> false
let compare a b = match a, b with
| E e1, E e2 -> Exp.compare e1 e2
| LV lv1, LV lv2 -> Lval.compare lv1 lv2
| LV _, E _ -> -1
| E _, LV _ -> 1
let pretty fmt = function
| E e -> Format.fprintf fmt "%a" Exp.pretty e
| LV lv -> Format.fprintf fmt "%a" Lval.pretty lv
let hash = function
| E e -> Exp.hash e
| LV lv -> Lval.hash lv
let copy c = c
end)
let replace_visitor kind ~late ~heir = object
inherit Visitor.frama_c_copy (Project.current ())
method! vexpr expr =
match expr.enode with
| Lval lval ->
if Lval.equal lval late then Cil.ChangeTo heir else Cil.JustCopy
| StartOf lval | AddrOf lval ->
if kind = Modified
then Cil.JustCopy
else if Lval.equal lval late then raise NonExchangeable else Cil.JustCopy
| AlignOfE _ -> raise NonExchangeable
| _ -> Cil.DoChildren
end
let replace kind ~late ~heir expr =
let visitor = replace_visitor kind ~late ~heir in
Visitor.visitFramacExpr visitor expr
end
module HCE = struct
module S =
State_builder.Hashcons(E)
(struct
let dependencies = [Ast.self]
let name = ""
let initial_values = []
end)
include S
let pretty_debug = pretty
let of_lval lv = hashcons (LV lv)
let of_exp exp =
match exp.enode with
| Lval lv -> of_lval lv
| _ -> hashcons (E exp)
let to_exp h = match get h with
| E e -> e
| LV lv -> Value_util.lval_to_exp lv
let to_lval h = match get h with
| E _ -> None
| LV lv -> Some lv
let is_lval h = match get h with
| E _ -> false
| LV _ -> true
let replace kind ~late ~heir h = match get h with
| E e ->
let e = E.replace kind ~late ~heir e in
if Value_util.height_expr e > height_limit
then raise NonExchangeable
else of_exp e
| LV lval -> if Lval.equal lval late then of_exp heir else h
end
module HCESet =
Hptset.Make (HCE) (struct let v = [] end) (struct let l = [Ast.self] end)
type lvalues = {
read : HCESet.t;
addr : HCESet.t;
}
let empty_lvalues = { read = HCESet.empty; addr = HCESet.empty; }
let syntactic_lvalues expr =
let rec gather expr lvalues =
match expr.enode with
| Lval lv ->
{ lvalues with read = HCESet.add (HCE.of_lval lv) lvalues.read }
| AddrOf lv | StartOf lv ->
{ lvalues with addr = HCESet.add (HCE.of_lval lv) lvalues.addr }
| AlignOfE e | SizeOfE e ->
The appearing in [ e ] are not read , and must all be in addr .
let new_lvalues = gather e empty_lvalues in
let new_addr = HCESet.union new_lvalues.read new_lvalues.addr in
{ lvalues with addr = HCESet.union new_addr lvalues.addr }
| UnOp (_, e, _) | CastE (_, e) | Info (e, _) -> gather e lvalues
| BinOp (_, e1, e2, _) -> gather e1 (gather e2 lvalues)
| _ -> lvalues
in
gather expr empty_lvalues
module HCEToZone = struct
let cache_prefix = "Value.Symbolic_exprs.K2Z"
include Hptmap.Make(HCE)(Locations.Zone)(Hptmap.Comp_unused)
(struct let v = [] end)(struct let l = [Ast.self] end)
let is_included =
let cache_name = cache_prefix ^ ".is_included" in
let decide_fst _b _v1 = true in
let decide_snd _b _v2 = false in
let decide_both _ v1 v2 = Locations.Zone.is_included v1 v2 in
let decide_fast s t = if s == t then PTrue else PUnknown in
binary_predicate
(Hptmap_sig.PersistentCache cache_name) UniversalPredicate
~decide_fast ~decide_fst ~decide_snd ~decide_both
let inter =
let cache_name = cache_prefix ^ ".inter" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = Some (Locations.Zone.join v1 v2) in
inter ~cache ~symmetric ~idempotent ~decide
let union =
let cache_name = cache_prefix ^ ".union" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = Locations.Zone.join v1 v2 in
join ~cache ~symmetric ~idempotent ~decide
let merge =
let cache_name = cache_prefix ^ ".merge" in
let cache = Hptmap_sig.PersistentCache cache_name in
let decide _ _ v2 = v2 in
join ~cache ~symmetric:false ~idempotent:true ~decide
let merge ~into v = merge into v
end
module BaseToHCESet = struct
include Hptmap.Make (Base.Base) (HCESet) (Hptmap.Comp_unused)
(struct let v = [] end)(struct let l = [Ast.self] end)
let cache_prefix = "Value.Symbolic_exprs.B2K"
let inter =
let cache_name = cache_prefix ^ ".inter" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 =
let s = HCESet.inter v1 v2 in
if HCESet.is_empty s then None else Some s
in
inter ~cache ~symmetric ~idempotent ~decide
let union =
let cache_name = cache_prefix ^ ".union" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = HCESet.union v1 v2 in
join ~cache ~symmetric ~idempotent ~decide
let find_default b m =
try find b m
with Not_found -> HCESet.empty
end
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/value/domains/hcexprs.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
lvalues are never stored under a constructor [E].
The replacement of an lvalue by another term in an expression fails
(raises an exception) if the height of the resulting expression exceeds
this limit. | This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
module Exp = Cil_datatype.ExpStructEq
module Lval = Cil_datatype.LvalStructEq
type unhashconsed_exprs = E of Exp.t | LV of Lval.t
let height_limit = 8
exception NonExchangeable
type kill_type = Modified | Deleted
module E = struct
include Datatype.Make (struct
include Datatype.Serializable_undefined
type t = unhashconsed_exprs
let name = "Value.Symbolic_exprs.key"
let reprs = [ E Cil_datatype.Exp.dummy ]
let structural_descr =
Structural_descr.t_sum
[| [| Exp.packed_descr |] ; [| Lval.packed_descr |] ; |]
let equal a b = match a, b with
| E e1, E e2 -> Exp.equal e1 e2
| LV lv1, LV lv2 -> Lval.equal lv1 lv2
| (E _ | LV _), _ -> false
let compare a b = match a, b with
| E e1, E e2 -> Exp.compare e1 e2
| LV lv1, LV lv2 -> Lval.compare lv1 lv2
| LV _, E _ -> -1
| E _, LV _ -> 1
let pretty fmt = function
| E e -> Format.fprintf fmt "%a" Exp.pretty e
| LV lv -> Format.fprintf fmt "%a" Lval.pretty lv
let hash = function
| E e -> Exp.hash e
| LV lv -> Lval.hash lv
let copy c = c
end)
let replace_visitor kind ~late ~heir = object
inherit Visitor.frama_c_copy (Project.current ())
method! vexpr expr =
match expr.enode with
| Lval lval ->
if Lval.equal lval late then Cil.ChangeTo heir else Cil.JustCopy
| StartOf lval | AddrOf lval ->
if kind = Modified
then Cil.JustCopy
else if Lval.equal lval late then raise NonExchangeable else Cil.JustCopy
| AlignOfE _ -> raise NonExchangeable
| _ -> Cil.DoChildren
end
let replace kind ~late ~heir expr =
let visitor = replace_visitor kind ~late ~heir in
Visitor.visitFramacExpr visitor expr
end
module HCE = struct
module S =
State_builder.Hashcons(E)
(struct
let dependencies = [Ast.self]
let name = ""
let initial_values = []
end)
include S
let pretty_debug = pretty
let of_lval lv = hashcons (LV lv)
let of_exp exp =
match exp.enode with
| Lval lv -> of_lval lv
| _ -> hashcons (E exp)
let to_exp h = match get h with
| E e -> e
| LV lv -> Value_util.lval_to_exp lv
let to_lval h = match get h with
| E _ -> None
| LV lv -> Some lv
let is_lval h = match get h with
| E _ -> false
| LV _ -> true
let replace kind ~late ~heir h = match get h with
| E e ->
let e = E.replace kind ~late ~heir e in
if Value_util.height_expr e > height_limit
then raise NonExchangeable
else of_exp e
| LV lval -> if Lval.equal lval late then of_exp heir else h
end
module HCESet =
Hptset.Make (HCE) (struct let v = [] end) (struct let l = [Ast.self] end)
type lvalues = {
read : HCESet.t;
addr : HCESet.t;
}
let empty_lvalues = { read = HCESet.empty; addr = HCESet.empty; }
let syntactic_lvalues expr =
let rec gather expr lvalues =
match expr.enode with
| Lval lv ->
{ lvalues with read = HCESet.add (HCE.of_lval lv) lvalues.read }
| AddrOf lv | StartOf lv ->
{ lvalues with addr = HCESet.add (HCE.of_lval lv) lvalues.addr }
| AlignOfE e | SizeOfE e ->
The appearing in [ e ] are not read , and must all be in addr .
let new_lvalues = gather e empty_lvalues in
let new_addr = HCESet.union new_lvalues.read new_lvalues.addr in
{ lvalues with addr = HCESet.union new_addr lvalues.addr }
| UnOp (_, e, _) | CastE (_, e) | Info (e, _) -> gather e lvalues
| BinOp (_, e1, e2, _) -> gather e1 (gather e2 lvalues)
| _ -> lvalues
in
gather expr empty_lvalues
module HCEToZone = struct
let cache_prefix = "Value.Symbolic_exprs.K2Z"
include Hptmap.Make(HCE)(Locations.Zone)(Hptmap.Comp_unused)
(struct let v = [] end)(struct let l = [Ast.self] end)
let is_included =
let cache_name = cache_prefix ^ ".is_included" in
let decide_fst _b _v1 = true in
let decide_snd _b _v2 = false in
let decide_both _ v1 v2 = Locations.Zone.is_included v1 v2 in
let decide_fast s t = if s == t then PTrue else PUnknown in
binary_predicate
(Hptmap_sig.PersistentCache cache_name) UniversalPredicate
~decide_fast ~decide_fst ~decide_snd ~decide_both
let inter =
let cache_name = cache_prefix ^ ".inter" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = Some (Locations.Zone.join v1 v2) in
inter ~cache ~symmetric ~idempotent ~decide
let union =
let cache_name = cache_prefix ^ ".union" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = Locations.Zone.join v1 v2 in
join ~cache ~symmetric ~idempotent ~decide
let merge =
let cache_name = cache_prefix ^ ".merge" in
let cache = Hptmap_sig.PersistentCache cache_name in
let decide _ _ v2 = v2 in
join ~cache ~symmetric:false ~idempotent:true ~decide
let merge ~into v = merge into v
end
module BaseToHCESet = struct
include Hptmap.Make (Base.Base) (HCESet) (Hptmap.Comp_unused)
(struct let v = [] end)(struct let l = [Ast.self] end)
let cache_prefix = "Value.Symbolic_exprs.B2K"
let inter =
let cache_name = cache_prefix ^ ".inter" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 =
let s = HCESet.inter v1 v2 in
if HCESet.is_empty s then None else Some s
in
inter ~cache ~symmetric ~idempotent ~decide
let union =
let cache_name = cache_prefix ^ ".union" in
let cache = Hptmap_sig.PersistentCache cache_name in
let symmetric = true in
let idempotent = true in
let decide _ v1 v2 = HCESet.union v1 v2 in
join ~cache ~symmetric ~idempotent ~decide
let find_default b m =
try find b m
with Not_found -> HCESet.empty
end
|
21a4f8131d7d397e395e3422a214c2ca557e83b366deb09d8ed3ac8140357757 | ygmpkk/house | MarshalArray.hs | module MarshalArray (module Foreign.Marshal.Array) where
import Foreign.Marshal.Array
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/haskell98/MarshalArray.hs | haskell | module MarshalArray (module Foreign.Marshal.Array) where
import Foreign.Marshal.Array
| |
5dd7b2121a83d9cdc72ab7bbfeb65909cb362176637fdba959d6e2d93bb28a67 | exercism/babashka | example.clj | (ns triangle)
(defn is-valid? [s1 s2 s3]
(and
(> s1 0) (> s2 0) (> s3 0)
(>= (+ s1 s2) s3)
(>= (+ s1 s3) s2)
(>= (+ s2 s3) s1)))
(defn equilateral? [s1 s2 s3]
(and (is-valid? s1 s2 s3) (= s1 s2 s3)))
(defn isosceles? [s1 s2 s3]
(and (is-valid? s1 s2 s3) (or (= s1 s2) (= s1 s3) (= s2 s3))))
(defn scalene? [s1 s2 s3]
(and (not (isosceles? s1 s2 s3)) (is-valid? s1 s2 s3))) | null | https://raw.githubusercontent.com/exercism/babashka/707356c52e08490e66cb1b2e63e4f4439d91cf08/exercises/practice/triangle/.meta/src/example.clj | clojure | (ns triangle)
(defn is-valid? [s1 s2 s3]
(and
(> s1 0) (> s2 0) (> s3 0)
(>= (+ s1 s2) s3)
(>= (+ s1 s3) s2)
(>= (+ s2 s3) s1)))
(defn equilateral? [s1 s2 s3]
(and (is-valid? s1 s2 s3) (= s1 s2 s3)))
(defn isosceles? [s1 s2 s3]
(and (is-valid? s1 s2 s3) (or (= s1 s2) (= s1 s3) (= s2 s3))))
(defn scalene? [s1 s2 s3]
(and (not (isosceles? s1 s2 s3)) (is-valid? s1 s2 s3))) | |
e506352c21634d63af1daa2e125f53826da92cbcc6a13af28f024f4f145e0c85 | chrovis/cljam | reader.clj | (ns cljam.io.sam.reader
(:require [clojure.java.io :as cio]
[clojure.tools.logging :as logging]
[cljam.io.sam.util :as sam-util]
[cljam.io.sam.util.refs :as refs]
[cljam.io.sam.util.header :as header]
[cljam.io.protocols :as protocols]
[cljam.util :as util])
(:import [java.io BufferedReader Closeable]
[cljam.io.protocols SAMCoordinateBlock SAMQuerynameBlock]))
(declare read-alignments* read-blocks* read-alignments-in-region*)
;;; reader
(deftype SAMReader [url header reader]
Closeable
(close [this]
(.close ^Closeable (.reader this)))
protocols/IReader
(reader-url [this]
(.url this))
(read [this]
(protocols/read this {}))
(read [this region]
(protocols/read-alignments this region))
(indexed? [_] false)
protocols/IRegionReader
(read-in-region [this region]
(protocols/read-in-region this region {}))
(read-in-region [this region option]
(read-alignments-in-region* this region option))
protocols/IAlignmentReader
(read-header [this]
(.header this))
(read-refs [this]
(vec (refs/make-refs (.header this))))
(read-alignments [this]
(protocols/read-alignments this {}))
(read-alignments [this {:keys [chr start end] :as region}]
(if (or chr start end)
(read-alignments-in-region* this region)
(read-alignments* this)))
(read-blocks [this]
(protocols/read-blocks this {}))
(read-blocks [this region]
(protocols/read-blocks this region {}))
(read-blocks [this region option]
(read-blocks* this option)))
(defn- read-alignments*
[^SAMReader sam-reader]
(eduction
(comp
(drop-while (fn [[f]] (= f \@)))
(map sam-util/parse-alignment))
(line-seq (.reader sam-reader))))
(defn- read-alignments-in-region*
[^SAMReader sam-reader {:keys [chr start end]}]
(logging/warn "May cause degradation of performance.")
(eduction
(filter
(fn [a] (and (if chr (= (:rname a) chr) true)
(if start (<= start (sam-util/get-end a)) true)
(if end (<= (:pos a) end) true))))
(read-alignments* sam-reader)))
(defn- parse-coordinate
[rname->ref-id ^String line]
(let [t0 (.indexOf line (int \tab) 0)
t1 (.indexOf line (int \tab) (unchecked-inc t0))
t2 (.indexOf line (int \tab) (unchecked-inc t1))
t3 (.indexOf line (int \tab) (unchecked-inc t2))
flag (Integer/parseInt (.substring line (unchecked-inc t0) t1))
rname (.substring line (unchecked-inc t1) t2)
pos (Integer/parseInt (.substring line (unchecked-inc t2) t3))]
(SAMCoordinateBlock. line (rname->ref-id rname 0) pos flag)))
(defn- parse-qname
[^String line]
(let [t0 (.indexOf line (int \tab) 0)
t1 (.indexOf line (int \tab) (unchecked-inc t0))
qname (.substring line 0 t0)
flag (Integer/parseInt (.substring line (unchecked-inc t0) t1))]
(SAMQuerynameBlock. line qname flag)))
(defn- read-blocks*
[^SAMReader sam-reader {:keys [mode] :or {mode :normal}}]
(let [parse-fn (if (fn? mode)
mode
(case mode
:normal (fn [line] {:data line})
:coordinate (->> (.header sam-reader)
:SQ
(into {"*" -1} (map-indexed (fn [i {:keys [SN]}] [SN i])))
(partial parse-coordinate))
:queryname parse-qname))]
(eduction
(comp
(drop-while (fn [[f]] (= f \@)))
(map parse-fn))
(line-seq (.reader sam-reader)))))
(defn- read-header* [^BufferedReader rdr]
(->> (line-seq rdr)
(transduce
(comp
(take-while (fn [line] (= (first line) \@)))
(map header/parse-header-line))
header/into-header)))
(defn reader [f]
(let [header (with-open [r (cio/reader f)]
(read-header* r))]
(->SAMReader (util/as-url f)
header (cio/reader f))))
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/src/cljam/io/sam/reader.clj | clojure | reader | (ns cljam.io.sam.reader
(:require [clojure.java.io :as cio]
[clojure.tools.logging :as logging]
[cljam.io.sam.util :as sam-util]
[cljam.io.sam.util.refs :as refs]
[cljam.io.sam.util.header :as header]
[cljam.io.protocols :as protocols]
[cljam.util :as util])
(:import [java.io BufferedReader Closeable]
[cljam.io.protocols SAMCoordinateBlock SAMQuerynameBlock]))
(declare read-alignments* read-blocks* read-alignments-in-region*)
(deftype SAMReader [url header reader]
Closeable
(close [this]
(.close ^Closeable (.reader this)))
protocols/IReader
(reader-url [this]
(.url this))
(read [this]
(protocols/read this {}))
(read [this region]
(protocols/read-alignments this region))
(indexed? [_] false)
protocols/IRegionReader
(read-in-region [this region]
(protocols/read-in-region this region {}))
(read-in-region [this region option]
(read-alignments-in-region* this region option))
protocols/IAlignmentReader
(read-header [this]
(.header this))
(read-refs [this]
(vec (refs/make-refs (.header this))))
(read-alignments [this]
(protocols/read-alignments this {}))
(read-alignments [this {:keys [chr start end] :as region}]
(if (or chr start end)
(read-alignments-in-region* this region)
(read-alignments* this)))
(read-blocks [this]
(protocols/read-blocks this {}))
(read-blocks [this region]
(protocols/read-blocks this region {}))
(read-blocks [this region option]
(read-blocks* this option)))
(defn- read-alignments*
[^SAMReader sam-reader]
(eduction
(comp
(drop-while (fn [[f]] (= f \@)))
(map sam-util/parse-alignment))
(line-seq (.reader sam-reader))))
(defn- read-alignments-in-region*
[^SAMReader sam-reader {:keys [chr start end]}]
(logging/warn "May cause degradation of performance.")
(eduction
(filter
(fn [a] (and (if chr (= (:rname a) chr) true)
(if start (<= start (sam-util/get-end a)) true)
(if end (<= (:pos a) end) true))))
(read-alignments* sam-reader)))
(defn- parse-coordinate
[rname->ref-id ^String line]
(let [t0 (.indexOf line (int \tab) 0)
t1 (.indexOf line (int \tab) (unchecked-inc t0))
t2 (.indexOf line (int \tab) (unchecked-inc t1))
t3 (.indexOf line (int \tab) (unchecked-inc t2))
flag (Integer/parseInt (.substring line (unchecked-inc t0) t1))
rname (.substring line (unchecked-inc t1) t2)
pos (Integer/parseInt (.substring line (unchecked-inc t2) t3))]
(SAMCoordinateBlock. line (rname->ref-id rname 0) pos flag)))
(defn- parse-qname
[^String line]
(let [t0 (.indexOf line (int \tab) 0)
t1 (.indexOf line (int \tab) (unchecked-inc t0))
qname (.substring line 0 t0)
flag (Integer/parseInt (.substring line (unchecked-inc t0) t1))]
(SAMQuerynameBlock. line qname flag)))
(defn- read-blocks*
[^SAMReader sam-reader {:keys [mode] :or {mode :normal}}]
(let [parse-fn (if (fn? mode)
mode
(case mode
:normal (fn [line] {:data line})
:coordinate (->> (.header sam-reader)
:SQ
(into {"*" -1} (map-indexed (fn [i {:keys [SN]}] [SN i])))
(partial parse-coordinate))
:queryname parse-qname))]
(eduction
(comp
(drop-while (fn [[f]] (= f \@)))
(map parse-fn))
(line-seq (.reader sam-reader)))))
(defn- read-header* [^BufferedReader rdr]
(->> (line-seq rdr)
(transduce
(comp
(take-while (fn [line] (= (first line) \@)))
(map header/parse-header-line))
header/into-header)))
(defn reader [f]
(let [header (with-open [r (cio/reader f)]
(read-header* r))]
(->SAMReader (util/as-url f)
header (cio/reader f))))
|
d4f26c205dc6c733ce06da59ccb9463a562374e7e2779e8bc74e239f0ad17cba | BinaryAnalysisPlatform/FrontC | cprint.ml | (* cprint -- pretty printer of C program from abstract syntax *)
open Cabs
let version = "Cprint 4.0 Hugues Cassé et al."
(*
** FrontC Pretty printer
*)
let out = ref stdout
let width = ref 80
let tab = ref 8
let max_indent = ref 60
let line = ref ""
let line_len = ref 0
let current = ref ""
let current_len = ref 0
let spaces = ref 0
let follow = ref 0
let roll = ref 0
let print_tab size =
output_string !out (String.make (size / 8) '\t');
output_string !out (String.make (size mod 8) ' ')
let flush _ =
if !line <> "" then begin
print_tab (!spaces + !follow);
output_string !out !line;
line := "";
line_len := 0
end
let commit _ =
if !current <> "" then begin
if !line = "" then begin
line := !current;
line_len := !current_len
end else begin
line := (!line ^ " " ^ !current);
line_len := !line_len + 1 + !current_len
end;
current := "";
current_len := 0
end
let new_line _ =
commit ();
if !line <> "" then begin
flush ();
output_char !out '\n'
end;
follow := 0
let force_new_line _ =
commit ();
flush ();
output_char !out '\n';
follow := 0
let indent _ =
new_line ();
spaces := !spaces + !tab;
if !spaces >= !max_indent then begin
spaces := !tab;
roll := !roll + 1
end
let unindent _ =
new_line ();
spaces := !spaces - !tab;
if (!spaces <= 0) && (!roll > 0) then begin
spaces := ((!max_indent - 1) / !tab) * !tab;
roll := !roll - 1
end
let space _ = commit ()
let print str =
current := !current ^ str;
current_len := !current_len + (String.length str);
if (!spaces + !follow + !line_len + 1 + !current_len) > !width
then begin
if !line_len = 0 then commit ();
flush ();
output_char !out '\n';
if !follow = 0 then follow := !tab
end
(*
** Useful primitives
*)
let print_commas nl fct lst =
let _ = List.fold_left
(fun com elt ->
if com then begin
print ",";
if nl then new_line () else space ()
end else ();
fct elt;
true)
false
lst in
()
let escape_string str =
let lng = String.length str in
let conv value = String.make 1 (Char.chr (value +
(if value < 10 then (Char.code '0') else (Char.code 'a' - 10)))) in
let rec build idx =
if idx >= lng then ""
else
let sub = String.sub str idx 1 in
let res = match sub with
"\n" -> "\\n"
| "\"" -> "\\\""
| "'" -> "\\'"
| "\r" -> "\\r"
| "\t" -> "\\t"
| "\b" -> "\\b"
| "\000" -> "\\0"
| _ -> if sub = (Char.escaped (String.get sub 0))
then sub
else let code = Char.code (String.get sub 0) in
"\\"
^ (conv (code / 64))
^ (conv ((code mod 64) / 8))
^ (conv (code mod 8)) in
res ^ (build (idx + 1)) in
build 0
let rec has_extension attrs =
match attrs with
[] -> false
| GNU_EXTENSION::_ -> true
| _::attrs -> has_extension attrs
(*
** Base Type Printing
*)
let get_sign si =
match si with
NO_SIGN -> ""
| SIGNED -> "signed "
| UNSIGNED -> "unsigned "
let get_size siz =
match siz with
NO_SIZE -> ""
| SHORT -> "short "
| LONG -> "long "
| LONG_LONG -> "long long "
let rec print_base_type typ =
match typ with
NO_TYPE -> ()
| VOID -> print "void"
| BOOL -> print "_Bool"
| CHAR sign -> print ((get_sign sign) ^ "char")
| INT (size, sign) -> print ((get_sign sign) ^ (get_size size) ^ "int")
| BITFIELD (sign, _) -> print ((get_sign sign) ^ "int")
| FLOAT size -> print ((if size then "long " else "") ^ "float")
| DOUBLE size -> print ((if size then "long " else "") ^ "double")
| COMPLEX_FLOAT -> print "float _Complex"
| COMPLEX_DOUBLE -> print "double _Complex"
| COMPLEX_LONG_DOUBLE -> print "long double _Complex"
| NAMED_TYPE id -> print id
| ENUM (id, items) -> print_enum id items
| STRUCT (id, flds) -> print_fields ("struct " ^ id) flds
| UNION (id, flds) -> print_fields ("union " ^ id) flds
| PROTO (typ, _, _) -> print_base_type typ
| OLD_PROTO (typ, _, _) -> print_base_type typ
| PTR typ -> print_base_type typ
| RESTRICT_PTR typ -> print_base_type typ
| ARRAY (typ, _) -> print_base_type typ
| CONST typ -> print_base_type typ
| VOLATILE typ -> print_base_type typ
| GNU_TYPE (attrs, typ) -> print_attributes attrs; print_base_type typ
| BUILTIN_TYPE t -> print t
| TYPE_LINE (_, _, _type) -> print_base_type _type
and print_fields id (flds : name_group list) =
print id;
if flds = []
then ()
else begin
print " {";
indent ();
List.iter
(fun fld -> print_name_group fld; print ";"; new_line ())
flds;
unindent ();
print "}"
end
and print_enum id items =
print ("enum " ^ id);
if items = []
then ()
else begin
print " {";
indent ();
print_commas
true
(fun (id, exp) -> print id;
if exp = NOTHING then ()
else begin
space ();
print "= ";
print_expression exp 1
end)
items;
unindent ();
print "}";
end
(*
** Declaration Printing
*)
and get_base_type typ =
match typ with
PTR typ -> get_base_type typ
| RESTRICT_PTR typ -> get_base_type typ
| CONST typ -> get_base_type typ
| VOLATILE typ -> get_base_type typ
| ARRAY (typ, _) -> get_base_type typ
| _ -> typ
and print_pointer typ =
match typ with
PTR typ -> print_pointer typ; print "*"
| RESTRICT_PTR typ ->
print_pointer typ; print "* __restrict";
space ()
| CONST typ -> print_pointer typ; print " const "
| VOLATILE typ -> print_pointer typ; print " volatile "
| ARRAY (typ, _) -> print_pointer typ
| _ -> (*print_base_type typ*) ()
and print_array typ =
match typ with
ARRAY (typ, dim) ->
print_array typ;
print "[";
print_expression dim 0;
print "]"
| _ -> ()
(** Print a type.
@param fct Function called to display the name of the.
@param typ Type to display.
*)
and print_type (fct : unit -> unit) (typ : base_type ) =
let base = get_base_type typ in
match base with
BITFIELD (_, exp) -> fct (); print " : "; print_expression exp 1
| PROTO (typ', pars, ell) ->
print_type
(fun _ ->
if base <> typ then print "(";
print_pointer typ;
fct ();
print_array typ;
if base <> typ then print ")";
print "(";
print_params pars ell;
print ")")
typ'
| OLD_PROTO (typ', pars, ell) ->
print_type
(fun _ ->
if base <> typ then print "(";
print_pointer typ;
fct ();
print_array typ;
if base <> typ then print ")";
print "(";
print_old_params pars ell;
print ")")
typ'
| _ -> print_pointer typ; fct (); print_array typ
and print_onlytype typ =
print_base_type typ;
print_type (fun _ -> ()) typ
and print_name ((id, typ, attr, exp) : name) =
print_type (fun _ -> print id) typ;
print_attributes attr;
if exp <> NOTHING then begin
space ();
print "= ";
print_expression exp 1
end else ()
and get_storage sto =
match sto with
NO_STORAGE -> ""
| AUTO -> "auto"
| STATIC -> "static"
| EXTERN -> "extern"
| REGISTER -> "register"
and print_name_group (typ, sto, names) =
let extension = List.exists
(fun (_, _, attrs, _) -> has_extension attrs)
names in
if extension then begin
print "__extension__";
space ()
end;
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ;
space ();
print_commas false print_name names
and print_single_name (typ, sto, name) =
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ;
space ();
print_name name
and print_params (pars : single_name list) (ell : bool) =
print_commas false print_single_name pars;
if ell then print (if pars = [] then "..." else ", ...") else ()
and print_old_params pars ell =
print_commas false (fun id -> print id) pars;
if ell then print (if pars = [] then "..." else ", ...") else ()
* * Expression printing
* * Priorities
* * 16 variables
* * 15 . - > [ ] call ( )
* * 14 + + , -- ( post )
* * 13 + + -- ( pre ) ~ ! - + & * ( cast )
* * 12 * / %
* * 11 + -
* * 10 < < > >
* * 9 < < = > > =
* * 8 = = ! =
* * 7 &
* * 6 ^
* * 5 |
* * 4 & &
* * 3 ||
* * 2 ? :
* * 1 = ? =
* * 0 ,
** Expression printing
** Priorities
** 16 variables
** 15 . -> [] call()
** 14 ++, -- (post)
** 13 ++ -- (pre) ~ ! - + & *(cast)
** 12 * / %
** 11 + -
** 10 << >>
** 9 < <= > >=
** 8 == !=
** 7 &
** 6 ^
** 5 |
** 4 &&
** 3 ||
** 2 ? :
** 1 = ?=
** 0 ,
*)
and get_operator exp =
match exp with
NOTHING -> ("", 16)
| UNARY (op, _) ->
(match op with
MINUS -> ("-", 13)
| PLUS -> ("+", 13)
| NOT -> ("!", 13)
| BNOT -> ("~", 13)
| MEMOF -> ("*", 13)
| ADDROF -> ("&", 13)
| PREINCR -> ("++", 13)
| PREDECR -> ("--", 13)
| POSINCR -> ("++", 14)
| POSDECR -> ("--", 14))
| BINARY (op, _, _) ->
(match op with
MUL -> ("*", 12)
| DIV -> ("/", 12)
| MOD -> ("%", 12)
| ADD -> ("+", 11)
| SUB -> ("-", 11)
| SHL -> ("<<", 10)
| SHR -> (">>", 10)
| LT -> ("<", 9)
| LE -> ("<=", 9)
| GT -> (">", 9)
| GE -> (">=", 9)
| EQ -> ("==", 8)
| NE -> ("!=", 8)
| BAND -> ("&", 7)
| XOR -> ("^", 6)
| BOR -> ("|", 5)
| AND -> ("&&", 4)
| OR -> ("||", 3)
| ASSIGN -> ("=", 1)
| ADD_ASSIGN -> ("+=", 1)
| SUB_ASSIGN -> ("-=", 1)
| MUL_ASSIGN -> ("*=", 1)
| DIV_ASSIGN -> ("/=", 1)
| MOD_ASSIGN -> ("%=", 1)
| BAND_ASSIGN -> ("&=", 1)
| BOR_ASSIGN -> ("|=", 1)
| XOR_ASSIGN -> ("^=", 1)
| SHL_ASSIGN -> ("<<=", 1)
| SHR_ASSIGN -> (">>=", 1))
| QUESTION _ -> ("", 2)
| CAST _ -> ("", 13)
| CALL _ -> ("", 15)
| COMMA _ -> ("", 0)
| CONSTANT _ -> ("", 16)
| VARIABLE _ -> ("", 16)
| EXPR_SIZEOF _ -> ("", 16)
| TYPE_SIZEOF _ -> ("", 16)
| INDEX _ -> ("", 15)
| MEMBEROF _ -> ("", 15)
| MEMBEROFPTR _ -> ("", 15)
| GNU_BODY _ -> ("", 17)
| DESIGNATED _ -> ("", 15)
| EXPR_LINE (expr, _, _) -> get_operator expr
and print_comma_exps exps =
print_commas false (fun exp -> print_expression exp 1) exps
and print_expression (exp : expression) (lvl : int) =
let (txt, lvl') = get_operator exp in
let _ = if lvl > lvl' then print "(" else () in
let _ = match exp with
NOTHING -> ()
| UNARY (op, exp') ->
(match op with
POSINCR | POSDECR ->
print_expression exp' lvl';
print txt
| _ ->
print txt;
print_expression exp' lvl')
| BINARY (_, exp1, exp2) ->
if ( op = SUB ) & & ( lvl < = lvl ' ) then print " ( " ;
print_expression exp1 lvl';
space ();
print txt;
space ();
(*print_expression exp2 (if op = SUB then (lvl' + 1) else lvl');*)
print_expression exp2 (lvl' + 1)
if ( op = SUB ) & & ( lvl < = lvl ' ) then print " ) "
| QUESTION (exp1, exp2, exp3) ->
print_expression exp1 2;
space ();
print "? ";
print_expression exp2 2;
space ();
print ": ";
print_expression exp3 2;
| CAST (typ, exp) ->
print "(";
print_onlytype typ;
print ")";
print_expression exp 15
| CALL (exp, args) ->
print_expression exp 16;
print "(";
print_comma_exps args;
print ")"
| COMMA exps ->
print_comma_exps exps
| CONSTANT cst ->
print_constant cst
| VARIABLE name ->
print name
| EXPR_SIZEOF exp ->
print "sizeof(";
print_expression exp 0;
print ")"
| TYPE_SIZEOF typ ->
print "sizeof(";
print_onlytype typ;
print ")"
| INDEX (exp, idx) ->
print_expression exp 16;
print "[";
print_expression idx 0;
print "]"
| MEMBEROF (exp, fld) ->
print_expression exp 16;
print ("." ^ fld)
| MEMBEROFPTR (exp, fld) ->
print_expression exp 16;
print ("->" ^ fld)
| GNU_BODY (decs, stat) ->
print "(";
print_statement (BLOCK (decs, stat));
print ")"
| DESIGNATED (member, exp) ->
print ".";
print member;
print "=";
print_expression exp 16;
| EXPR_LINE (expr, _, _) ->
print_expression expr lvl in
if lvl > lvl' then print ")" else ()
and print_constant cst =
match cst with
CONST_INT i ->
print i
| CONST_FLOAT r ->
print r
| CONST_CHAR c ->
print ("'" ^ (escape_string c) ^ "'")
| CONST_STRING s ->
print ("\"" ^ (escape_string s) ^ "\"")
| CONST_COMPOUND exps ->
begin
print "{";
print_comma_exps exps;
print "}"
end
(*
** Statement printing
*)
and print_statement stat =
match stat with
NOP ->
print ";";
new_line ()
| COMPUTATION exp ->
print_expression exp 0;
print ";";
new_line ()
| BLOCK (defs, stat) ->
new_line ();
print "{";
indent ();
print_defs defs;
if stat <> NOP then print_statement stat else ();
unindent ();
print "}";
new_line ();
| SEQUENCE (s1, s2) ->
print_statement s1;
print_statement s2;
| IF (exp, s1, s2) ->
print "if(";
print_expression exp 0;
print ")";
print_substatement s1;
if s2 = NOP
then ()
else begin
print "else";
print_substatement s2;
end
| WHILE (exp, stat) ->
print "while(";
print_expression exp 0;
print ")";
print_substatement stat
| DOWHILE (exp, stat) ->
print "do";
print_substatement stat;
print "while(";
print_expression exp 0;
print ");";
new_line ();
| FOR (exp1, exp2, exp3, stat) ->
print "for(";
print_expression exp1 0;
print ";";
space ();
print_expression exp2 0;
print ";";
space ();
print_expression exp3 0;
print ")";
print_substatement stat
| BREAK ->
print "break;"; new_line ()
| CONTINUE ->
print "continue;"; new_line ()
| RETURN exp ->
print "return";
if exp = NOTHING
then ()
else begin
print " ";
print_expression exp 1
end;
print ";";
new_line ()
| SWITCH (exp, stat) ->
print "switch(";
print_expression exp 0;
print ")";
print_substatement stat
| CASE (exp, stat) ->
unindent ();
print "case ";
print_expression exp 1;
print ":";
indent ();
print_substatement stat
| DEFAULT stat ->
unindent ();
print "default :";
indent ();
print_substatement stat
| LABEL (name, stat) ->
print (name ^ ":");
space ();
print_substatement stat
| GOTO name ->
print ("goto " ^ name ^ ";");
new_line ()
| ASM desc ->
print ("asm(\"" ^ (escape_string desc) ^ "\");")
| GNU_ASM (desc, output, input, mods) ->
print ("asm(" ^ (escape_string desc) ^ "\"");
print " : ";
print_commas false print_gnu_asm_arg output;
print " : ";
print_commas false print_gnu_asm_arg input;
if mods <> [] then begin
print " : ";
print_commas false print mods
end;
print ");"
| STAT_LINE (stat, _, _) ->
print_statement stat
and print_gnu_asm_arg (id, desc, exp) =
if id <> "" then print ("[" ^ id ^ "]");
print ("\"" ^ (escape_string desc) ^ "\"(");
print_expression exp 0;
print ("\"")
and print_substatement stat =
match stat with
IF _
| SEQUENCE _
| DOWHILE _ ->
new_line ();
print "{";
indent ();
print_statement stat;
unindent ();
print "}";
new_line ();
| BLOCK _ ->
print_statement stat
| _ ->
indent ();
print_statement stat;
unindent ()
(*
** GCC Attributes
*)
and print_attributes attrs =
match attrs with
[] ->
()
| [GNU_EXTENSION] ->
()
| _ ->
if attrs <> [] then
begin
print " __attribute__ ((";
print_commas false print_attribute attrs;
print ")) "
end
and print_attribute attr =
match attr with
GNU_NONE ->
()
| GNU_ID id ->
print id
| GNU_CALL (id, args) ->
print id;
print "(";
print_commas false print_attribute args;
print ")"
| GNU_CST cst ->
print_constant cst
| GNU_EXTENSION ->
print "__extension__"
| GNU_INLINE ->
print "__inline__"
| GNU_TYPE_ARG (typ,sto) ->
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ
(*
** Declaration printing
*)
and print_defs defs =
let prev = ref false in
List.iter
(fun def ->
(match def with
DECDEF _ -> prev := false
| _ ->
if not !prev then force_new_line ();
prev := true);
print_def def)
defs
and print_def def =
match def with
FUNDEF (proto, body) ->
print_single_name proto;
let (decs, stat) = body in print_statement (BLOCK (decs, stat));
force_new_line ();
| OLDFUNDEF (proto, decs, body) ->
print_single_name proto;
force_new_line ();
List.iter
(fun dec -> print_name_group dec; print ";"; new_line ())
decs;
let (decs, stat) = body in print_statement (BLOCK (decs, stat));
force_new_line ();
| DECDEF names ->
print_name_group names;
print ";";
new_line ()
| TYPEDEF (names, attrs) ->
if has_extension attrs then begin
print "__extension__";
space ();
end;
print "typedef ";
print_name_group names;
print ";";
new_line ();
force_new_line ()
| ONLYTYPEDEF names ->
print_name_group names;
print ";";
new_line ();
force_new_line ()
(* print abstrac_syntax -> ()
** Pretty printing the given abstract syntax program.
*)
let print (result : out_channel) (defs : file) =
out := result;
print_defs defs
let set_tab t = tab := t
let set_width w = width := w
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/FrontC/c167bb8bb82c7f90afa069bd0c49afe57374d5df/frontc/cprint.ml | ocaml | cprint -- pretty printer of C program from abstract syntax
** FrontC Pretty printer
** Useful primitives
** Base Type Printing
** Declaration Printing
print_base_type typ
* Print a type.
@param fct Function called to display the name of the.
@param typ Type to display.
print_expression exp2 (if op = SUB then (lvl' + 1) else lvl');
** Statement printing
** GCC Attributes
** Declaration printing
print abstrac_syntax -> ()
** Pretty printing the given abstract syntax program.
|
open Cabs
let version = "Cprint 4.0 Hugues Cassé et al."
let out = ref stdout
let width = ref 80
let tab = ref 8
let max_indent = ref 60
let line = ref ""
let line_len = ref 0
let current = ref ""
let current_len = ref 0
let spaces = ref 0
let follow = ref 0
let roll = ref 0
let print_tab size =
output_string !out (String.make (size / 8) '\t');
output_string !out (String.make (size mod 8) ' ')
let flush _ =
if !line <> "" then begin
print_tab (!spaces + !follow);
output_string !out !line;
line := "";
line_len := 0
end
let commit _ =
if !current <> "" then begin
if !line = "" then begin
line := !current;
line_len := !current_len
end else begin
line := (!line ^ " " ^ !current);
line_len := !line_len + 1 + !current_len
end;
current := "";
current_len := 0
end
let new_line _ =
commit ();
if !line <> "" then begin
flush ();
output_char !out '\n'
end;
follow := 0
let force_new_line _ =
commit ();
flush ();
output_char !out '\n';
follow := 0
let indent _ =
new_line ();
spaces := !spaces + !tab;
if !spaces >= !max_indent then begin
spaces := !tab;
roll := !roll + 1
end
let unindent _ =
new_line ();
spaces := !spaces - !tab;
if (!spaces <= 0) && (!roll > 0) then begin
spaces := ((!max_indent - 1) / !tab) * !tab;
roll := !roll - 1
end
let space _ = commit ()
let print str =
current := !current ^ str;
current_len := !current_len + (String.length str);
if (!spaces + !follow + !line_len + 1 + !current_len) > !width
then begin
if !line_len = 0 then commit ();
flush ();
output_char !out '\n';
if !follow = 0 then follow := !tab
end
let print_commas nl fct lst =
let _ = List.fold_left
(fun com elt ->
if com then begin
print ",";
if nl then new_line () else space ()
end else ();
fct elt;
true)
false
lst in
()
let escape_string str =
let lng = String.length str in
let conv value = String.make 1 (Char.chr (value +
(if value < 10 then (Char.code '0') else (Char.code 'a' - 10)))) in
let rec build idx =
if idx >= lng then ""
else
let sub = String.sub str idx 1 in
let res = match sub with
"\n" -> "\\n"
| "\"" -> "\\\""
| "'" -> "\\'"
| "\r" -> "\\r"
| "\t" -> "\\t"
| "\b" -> "\\b"
| "\000" -> "\\0"
| _ -> if sub = (Char.escaped (String.get sub 0))
then sub
else let code = Char.code (String.get sub 0) in
"\\"
^ (conv (code / 64))
^ (conv ((code mod 64) / 8))
^ (conv (code mod 8)) in
res ^ (build (idx + 1)) in
build 0
let rec has_extension attrs =
match attrs with
[] -> false
| GNU_EXTENSION::_ -> true
| _::attrs -> has_extension attrs
let get_sign si =
match si with
NO_SIGN -> ""
| SIGNED -> "signed "
| UNSIGNED -> "unsigned "
let get_size siz =
match siz with
NO_SIZE -> ""
| SHORT -> "short "
| LONG -> "long "
| LONG_LONG -> "long long "
let rec print_base_type typ =
match typ with
NO_TYPE -> ()
| VOID -> print "void"
| BOOL -> print "_Bool"
| CHAR sign -> print ((get_sign sign) ^ "char")
| INT (size, sign) -> print ((get_sign sign) ^ (get_size size) ^ "int")
| BITFIELD (sign, _) -> print ((get_sign sign) ^ "int")
| FLOAT size -> print ((if size then "long " else "") ^ "float")
| DOUBLE size -> print ((if size then "long " else "") ^ "double")
| COMPLEX_FLOAT -> print "float _Complex"
| COMPLEX_DOUBLE -> print "double _Complex"
| COMPLEX_LONG_DOUBLE -> print "long double _Complex"
| NAMED_TYPE id -> print id
| ENUM (id, items) -> print_enum id items
| STRUCT (id, flds) -> print_fields ("struct " ^ id) flds
| UNION (id, flds) -> print_fields ("union " ^ id) flds
| PROTO (typ, _, _) -> print_base_type typ
| OLD_PROTO (typ, _, _) -> print_base_type typ
| PTR typ -> print_base_type typ
| RESTRICT_PTR typ -> print_base_type typ
| ARRAY (typ, _) -> print_base_type typ
| CONST typ -> print_base_type typ
| VOLATILE typ -> print_base_type typ
| GNU_TYPE (attrs, typ) -> print_attributes attrs; print_base_type typ
| BUILTIN_TYPE t -> print t
| TYPE_LINE (_, _, _type) -> print_base_type _type
and print_fields id (flds : name_group list) =
print id;
if flds = []
then ()
else begin
print " {";
indent ();
List.iter
(fun fld -> print_name_group fld; print ";"; new_line ())
flds;
unindent ();
print "}"
end
and print_enum id items =
print ("enum " ^ id);
if items = []
then ()
else begin
print " {";
indent ();
print_commas
true
(fun (id, exp) -> print id;
if exp = NOTHING then ()
else begin
space ();
print "= ";
print_expression exp 1
end)
items;
unindent ();
print "}";
end
and get_base_type typ =
match typ with
PTR typ -> get_base_type typ
| RESTRICT_PTR typ -> get_base_type typ
| CONST typ -> get_base_type typ
| VOLATILE typ -> get_base_type typ
| ARRAY (typ, _) -> get_base_type typ
| _ -> typ
and print_pointer typ =
match typ with
PTR typ -> print_pointer typ; print "*"
| RESTRICT_PTR typ ->
print_pointer typ; print "* __restrict";
space ()
| CONST typ -> print_pointer typ; print " const "
| VOLATILE typ -> print_pointer typ; print " volatile "
| ARRAY (typ, _) -> print_pointer typ
and print_array typ =
match typ with
ARRAY (typ, dim) ->
print_array typ;
print "[";
print_expression dim 0;
print "]"
| _ -> ()
and print_type (fct : unit -> unit) (typ : base_type ) =
let base = get_base_type typ in
match base with
BITFIELD (_, exp) -> fct (); print " : "; print_expression exp 1
| PROTO (typ', pars, ell) ->
print_type
(fun _ ->
if base <> typ then print "(";
print_pointer typ;
fct ();
print_array typ;
if base <> typ then print ")";
print "(";
print_params pars ell;
print ")")
typ'
| OLD_PROTO (typ', pars, ell) ->
print_type
(fun _ ->
if base <> typ then print "(";
print_pointer typ;
fct ();
print_array typ;
if base <> typ then print ")";
print "(";
print_old_params pars ell;
print ")")
typ'
| _ -> print_pointer typ; fct (); print_array typ
and print_onlytype typ =
print_base_type typ;
print_type (fun _ -> ()) typ
and print_name ((id, typ, attr, exp) : name) =
print_type (fun _ -> print id) typ;
print_attributes attr;
if exp <> NOTHING then begin
space ();
print "= ";
print_expression exp 1
end else ()
and get_storage sto =
match sto with
NO_STORAGE -> ""
| AUTO -> "auto"
| STATIC -> "static"
| EXTERN -> "extern"
| REGISTER -> "register"
and print_name_group (typ, sto, names) =
let extension = List.exists
(fun (_, _, attrs, _) -> has_extension attrs)
names in
if extension then begin
print "__extension__";
space ()
end;
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ;
space ();
print_commas false print_name names
and print_single_name (typ, sto, name) =
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ;
space ();
print_name name
and print_params (pars : single_name list) (ell : bool) =
print_commas false print_single_name pars;
if ell then print (if pars = [] then "..." else ", ...") else ()
and print_old_params pars ell =
print_commas false (fun id -> print id) pars;
if ell then print (if pars = [] then "..." else ", ...") else ()
* * Expression printing
* * Priorities
* * 16 variables
* * 15 . - > [ ] call ( )
* * 14 + + , -- ( post )
* * 13 + + -- ( pre ) ~ ! - + & * ( cast )
* * 12 * / %
* * 11 + -
* * 10 < < > >
* * 9 < < = > > =
* * 8 = = ! =
* * 7 &
* * 6 ^
* * 5 |
* * 4 & &
* * 3 ||
* * 2 ? :
* * 1 = ? =
* * 0 ,
** Expression printing
** Priorities
** 16 variables
** 15 . -> [] call()
** 14 ++, -- (post)
** 13 ++ -- (pre) ~ ! - + & *(cast)
** 12 * / %
** 11 + -
** 10 << >>
** 9 < <= > >=
** 8 == !=
** 7 &
** 6 ^
** 5 |
** 4 &&
** 3 ||
** 2 ? :
** 1 = ?=
** 0 ,
*)
and get_operator exp =
match exp with
NOTHING -> ("", 16)
| UNARY (op, _) ->
(match op with
MINUS -> ("-", 13)
| PLUS -> ("+", 13)
| NOT -> ("!", 13)
| BNOT -> ("~", 13)
| MEMOF -> ("*", 13)
| ADDROF -> ("&", 13)
| PREINCR -> ("++", 13)
| PREDECR -> ("--", 13)
| POSINCR -> ("++", 14)
| POSDECR -> ("--", 14))
| BINARY (op, _, _) ->
(match op with
MUL -> ("*", 12)
| DIV -> ("/", 12)
| MOD -> ("%", 12)
| ADD -> ("+", 11)
| SUB -> ("-", 11)
| SHL -> ("<<", 10)
| SHR -> (">>", 10)
| LT -> ("<", 9)
| LE -> ("<=", 9)
| GT -> (">", 9)
| GE -> (">=", 9)
| EQ -> ("==", 8)
| NE -> ("!=", 8)
| BAND -> ("&", 7)
| XOR -> ("^", 6)
| BOR -> ("|", 5)
| AND -> ("&&", 4)
| OR -> ("||", 3)
| ASSIGN -> ("=", 1)
| ADD_ASSIGN -> ("+=", 1)
| SUB_ASSIGN -> ("-=", 1)
| MUL_ASSIGN -> ("*=", 1)
| DIV_ASSIGN -> ("/=", 1)
| MOD_ASSIGN -> ("%=", 1)
| BAND_ASSIGN -> ("&=", 1)
| BOR_ASSIGN -> ("|=", 1)
| XOR_ASSIGN -> ("^=", 1)
| SHL_ASSIGN -> ("<<=", 1)
| SHR_ASSIGN -> (">>=", 1))
| QUESTION _ -> ("", 2)
| CAST _ -> ("", 13)
| CALL _ -> ("", 15)
| COMMA _ -> ("", 0)
| CONSTANT _ -> ("", 16)
| VARIABLE _ -> ("", 16)
| EXPR_SIZEOF _ -> ("", 16)
| TYPE_SIZEOF _ -> ("", 16)
| INDEX _ -> ("", 15)
| MEMBEROF _ -> ("", 15)
| MEMBEROFPTR _ -> ("", 15)
| GNU_BODY _ -> ("", 17)
| DESIGNATED _ -> ("", 15)
| EXPR_LINE (expr, _, _) -> get_operator expr
and print_comma_exps exps =
print_commas false (fun exp -> print_expression exp 1) exps
and print_expression (exp : expression) (lvl : int) =
let (txt, lvl') = get_operator exp in
let _ = if lvl > lvl' then print "(" else () in
let _ = match exp with
NOTHING -> ()
| UNARY (op, exp') ->
(match op with
POSINCR | POSDECR ->
print_expression exp' lvl';
print txt
| _ ->
print txt;
print_expression exp' lvl')
| BINARY (_, exp1, exp2) ->
if ( op = SUB ) & & ( lvl < = lvl ' ) then print " ( " ;
print_expression exp1 lvl';
space ();
print txt;
space ();
print_expression exp2 (lvl' + 1)
if ( op = SUB ) & & ( lvl < = lvl ' ) then print " ) "
| QUESTION (exp1, exp2, exp3) ->
print_expression exp1 2;
space ();
print "? ";
print_expression exp2 2;
space ();
print ": ";
print_expression exp3 2;
| CAST (typ, exp) ->
print "(";
print_onlytype typ;
print ")";
print_expression exp 15
| CALL (exp, args) ->
print_expression exp 16;
print "(";
print_comma_exps args;
print ")"
| COMMA exps ->
print_comma_exps exps
| CONSTANT cst ->
print_constant cst
| VARIABLE name ->
print name
| EXPR_SIZEOF exp ->
print "sizeof(";
print_expression exp 0;
print ")"
| TYPE_SIZEOF typ ->
print "sizeof(";
print_onlytype typ;
print ")"
| INDEX (exp, idx) ->
print_expression exp 16;
print "[";
print_expression idx 0;
print "]"
| MEMBEROF (exp, fld) ->
print_expression exp 16;
print ("." ^ fld)
| MEMBEROFPTR (exp, fld) ->
print_expression exp 16;
print ("->" ^ fld)
| GNU_BODY (decs, stat) ->
print "(";
print_statement (BLOCK (decs, stat));
print ")"
| DESIGNATED (member, exp) ->
print ".";
print member;
print "=";
print_expression exp 16;
| EXPR_LINE (expr, _, _) ->
print_expression expr lvl in
if lvl > lvl' then print ")" else ()
and print_constant cst =
match cst with
CONST_INT i ->
print i
| CONST_FLOAT r ->
print r
| CONST_CHAR c ->
print ("'" ^ (escape_string c) ^ "'")
| CONST_STRING s ->
print ("\"" ^ (escape_string s) ^ "\"")
| CONST_COMPOUND exps ->
begin
print "{";
print_comma_exps exps;
print "}"
end
and print_statement stat =
match stat with
NOP ->
print ";";
new_line ()
| COMPUTATION exp ->
print_expression exp 0;
print ";";
new_line ()
| BLOCK (defs, stat) ->
new_line ();
print "{";
indent ();
print_defs defs;
if stat <> NOP then print_statement stat else ();
unindent ();
print "}";
new_line ();
| SEQUENCE (s1, s2) ->
print_statement s1;
print_statement s2;
| IF (exp, s1, s2) ->
print "if(";
print_expression exp 0;
print ")";
print_substatement s1;
if s2 = NOP
then ()
else begin
print "else";
print_substatement s2;
end
| WHILE (exp, stat) ->
print "while(";
print_expression exp 0;
print ")";
print_substatement stat
| DOWHILE (exp, stat) ->
print "do";
print_substatement stat;
print "while(";
print_expression exp 0;
print ");";
new_line ();
| FOR (exp1, exp2, exp3, stat) ->
print "for(";
print_expression exp1 0;
print ";";
space ();
print_expression exp2 0;
print ";";
space ();
print_expression exp3 0;
print ")";
print_substatement stat
| BREAK ->
print "break;"; new_line ()
| CONTINUE ->
print "continue;"; new_line ()
| RETURN exp ->
print "return";
if exp = NOTHING
then ()
else begin
print " ";
print_expression exp 1
end;
print ";";
new_line ()
| SWITCH (exp, stat) ->
print "switch(";
print_expression exp 0;
print ")";
print_substatement stat
| CASE (exp, stat) ->
unindent ();
print "case ";
print_expression exp 1;
print ":";
indent ();
print_substatement stat
| DEFAULT stat ->
unindent ();
print "default :";
indent ();
print_substatement stat
| LABEL (name, stat) ->
print (name ^ ":");
space ();
print_substatement stat
| GOTO name ->
print ("goto " ^ name ^ ";");
new_line ()
| ASM desc ->
print ("asm(\"" ^ (escape_string desc) ^ "\");")
| GNU_ASM (desc, output, input, mods) ->
print ("asm(" ^ (escape_string desc) ^ "\"");
print " : ";
print_commas false print_gnu_asm_arg output;
print " : ";
print_commas false print_gnu_asm_arg input;
if mods <> [] then begin
print " : ";
print_commas false print mods
end;
print ");"
| STAT_LINE (stat, _, _) ->
print_statement stat
and print_gnu_asm_arg (id, desc, exp) =
if id <> "" then print ("[" ^ id ^ "]");
print ("\"" ^ (escape_string desc) ^ "\"(");
print_expression exp 0;
print ("\"")
and print_substatement stat =
match stat with
IF _
| SEQUENCE _
| DOWHILE _ ->
new_line ();
print "{";
indent ();
print_statement stat;
unindent ();
print "}";
new_line ();
| BLOCK _ ->
print_statement stat
| _ ->
indent ();
print_statement stat;
unindent ()
and print_attributes attrs =
match attrs with
[] ->
()
| [GNU_EXTENSION] ->
()
| _ ->
if attrs <> [] then
begin
print " __attribute__ ((";
print_commas false print_attribute attrs;
print ")) "
end
and print_attribute attr =
match attr with
GNU_NONE ->
()
| GNU_ID id ->
print id
| GNU_CALL (id, args) ->
print id;
print "(";
print_commas false print_attribute args;
print ")"
| GNU_CST cst ->
print_constant cst
| GNU_EXTENSION ->
print "__extension__"
| GNU_INLINE ->
print "__inline__"
| GNU_TYPE_ARG (typ,sto) ->
if sto <> NO_STORAGE then begin
print (get_storage sto);
space ()
end;
print_base_type typ
and print_defs defs =
let prev = ref false in
List.iter
(fun def ->
(match def with
DECDEF _ -> prev := false
| _ ->
if not !prev then force_new_line ();
prev := true);
print_def def)
defs
and print_def def =
match def with
FUNDEF (proto, body) ->
print_single_name proto;
let (decs, stat) = body in print_statement (BLOCK (decs, stat));
force_new_line ();
| OLDFUNDEF (proto, decs, body) ->
print_single_name proto;
force_new_line ();
List.iter
(fun dec -> print_name_group dec; print ";"; new_line ())
decs;
let (decs, stat) = body in print_statement (BLOCK (decs, stat));
force_new_line ();
| DECDEF names ->
print_name_group names;
print ";";
new_line ()
| TYPEDEF (names, attrs) ->
if has_extension attrs then begin
print "__extension__";
space ();
end;
print "typedef ";
print_name_group names;
print ";";
new_line ();
force_new_line ()
| ONLYTYPEDEF names ->
print_name_group names;
print ";";
new_line ();
force_new_line ()
let print (result : out_channel) (defs : file) =
out := result;
print_defs defs
let set_tab t = tab := t
let set_width w = width := w
|
6e585e551d73306809c9eff3424995fd091fb16f234978bb17702600465d26c4 | emqx/ekka | ekka_locker.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2019 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(ekka_locker).
-include_lib("stdlib/include/ms_transform.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-behaviour(gen_server).
-export([ start_link/0
, start_link/1
, start_link/2
]).
%% For test cases
-export([stop/0, stop/1]).
%% Lock APIs
-export([ acquire/1
, acquire/2
, acquire/3
, acquire/4
]).
-export([ release/1
, release/2
, release/3
]).
For RPC call
-export([ acquire_lock/2
, acquire_lock/3
, release_lock/2
]).
%% gen_server Callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-type(resource() :: term()).
-type(lock_type() :: local | leader | quorum | all).
-type(lock_result() :: {boolean, [node() | {node(), any()}]}).
-type(piggyback() :: mfa() | undefined).
-export_type([ resource/0
, lock_type/0
, lock_result/0
, piggyback/0
]).
-record(lock, {
resource :: resource(),
owner :: pid(),
counter :: integer(),
created :: integer()
}).
-record(lease, {expiry, timer}).
-record(state, {locks, lease, monitors}).
-define(SERVER, ?MODULE).
-define(LOG(Level, Format, Args),
logger:Level("Ekka(Locker): " ++ Format, Args)).
15 seconds by default
-define(LEASE_TIME, 15000).
-define(MC_TIMEOUT, 30000).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
-spec(start_link() -> {ok, pid()} | {error, term()}).
start_link() ->
start_link(?SERVER).
-spec(start_link(atom()) -> {ok, pid()} | ignore | {error, any()}).
start_link(Name) ->
start_link(Name, ?LEASE_TIME).
-spec(start_link(atom(), pos_integer()) -> {ok, pid()} | ignore | {error, any()}).
start_link(Name, LeaseTime) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, LeaseTime], []).
-spec(stop() -> ok).
stop() ->
stop(?SERVER).
-spec(stop(atom()) -> ok).
stop(Name) ->
gen_server:call(Name, stop).
-spec(acquire(resource()) -> lock_result()).
acquire(Resource) ->
acquire(?SERVER, Resource).
-spec(acquire(atom(), resource()) -> lock_result()).
acquire(Name, Resource) when is_atom(Name) ->
acquire(Name, Resource, local).
-spec(acquire(atom(), resource(), lock_type()) -> lock_result()).
acquire(Name, Resource, Type) ->
acquire(Name, Resource, Type, undefined).
-spec(acquire(atom(), resource(), lock_type(), piggyback()) -> lock_result()).
acquire(Name, Resource, local, Piggyback) when is_atom(Name) ->
acquire_lock(Name, lock_obj(Resource), Piggyback);
acquire(Name, Resource, leader, Piggyback) when is_atom(Name)->
Leader = mria_membership:leader(),
case rpc:call(Leader, ?MODULE, acquire_lock,
[Name, lock_obj(Resource), Piggyback]) of
Err = {badrpc, _Reason} ->
{false, [{Leader, Err}]};
Res -> Res
end;
acquire(Name, Resource, quorum, Piggyback) when is_atom(Name) ->
Ring = mria_membership:ring(up),
Nodes = ekka_ring:find_nodes(Resource, Ring),
acquire_locks(Nodes, Name, lock_obj(Resource), Piggyback);
acquire(Name, Resource, all, Piggyback) when is_atom(Name) ->
acquire_locks(mria_membership:nodelist(up),
Name, lock_obj(Resource), Piggyback).
acquire_locks(Nodes, Name, LockObj, Piggyback) ->
{ResL, _BadNodes}
= rpc:multicall(Nodes, ?MODULE, acquire_lock, [Name, LockObj, Piggyback], ?MC_TIMEOUT),
case merge_results(ResL) of
Res = {true, _} -> Res;
Res = {false, _} ->
rpc:multicall(Nodes, ?MODULE, release_lock, [Name, LockObj], ?MC_TIMEOUT),
Res
end.
acquire_lock(Name, LockObj, Piggyback) ->
{acquire_lock(Name, LockObj), [with_piggyback(node(), Piggyback)]}.
acquire_lock(Name, LockObj = #lock{resource = Resource, owner = Owner}) ->
Pos = #lock.counter,
%% check lock status and set the lock atomically
try ets:update_counter(Name, Resource, [{Pos, 0}, {Pos, 1, 1, 1}], LockObj) of
[0, 1] -> %% no lock before, lock it
true;
[1, 1] -> %% has already been locked, either by self or by others
case ets:lookup(Name, Resource) of
[#lock{owner = Owner}] -> true;
_Other -> false
end
catch
error:badarg ->
%% While remote node is booting, this might fail because
%% the ETS table has not been created at that moment
true
end.
with_piggyback(Node, undefined) ->
Node;
with_piggyback(Node, {M, F, Args}) ->
{Node, erlang:apply(M, F, Args)}.
lock_obj(Resource) ->
#lock{resource = Resource,
owner = self(),
counter = 0,
created = erlang:system_time(millisecond)
}.
-spec(release(resource()) -> lock_result()).
release(Resource) ->
release(?SERVER, Resource).
-spec(release(atom(), resource()) -> lock_result()).
release(Name, Resource) ->
release(Name, Resource, local).
-spec(release(atom(), resource(), lock_type()) -> lock_result()).
release(Name, Resource, local) ->
release_lock(Name, lock_obj(Resource));
release(Name, Resource, leader) ->
Leader = mria_membership:leader(),
case rpc:call(Leader, ?MODULE, release_lock, [Name, lock_obj(Resource)]) of
Err = {badrpc, _Reason} ->
{false, [{Leader, Err}]};
Res -> Res
end;
release(Name, Resource, quorum) ->
Ring = mria_membership:ring(up),
Nodes = ekka_ring:find_nodes(Resource, Ring),
release_locks(Nodes, Name, lock_obj(Resource));
release(Name, Resource, all) ->
release_locks(mria_membership:nodelist(up), Name, lock_obj(Resource)).
release_locks(Nodes, Name, LockObj) ->
{ResL, _BadNodes} = rpc:multicall(Nodes, ?MODULE, release_lock, [Name, LockObj], ?MC_TIMEOUT),
merge_results(ResL).
release_lock(Name, #lock{resource = Resource, owner = Owner}) ->
Res = try ets:lookup(Name, Resource) of
[Lock = #lock{owner = Owner}] ->
ets:delete_object(Name, Lock);
[_Lock] -> false;
[] -> true
catch
error:badarg -> true
end,
{Res, [node()]}.
merge_results(ResL) ->
merge_results(ResL, [], []).
merge_results([], Succ, []) ->
{true, lists:flatten(Succ)};
merge_results([], _, Failed) ->
{false, lists:flatten(Failed)};
merge_results([{true, Res}|ResL], Succ, Failed) ->
merge_results(ResL, [Res|Succ], Failed);
merge_results([{false, Res}|ResL], Succ, Failed) ->
merge_results(ResL, Succ, [Res|Failed]).
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
init([Name, LeaseTime]) ->
Tab = ets:new(Name, [public, set, named_table, {keypos, 2},
{read_concurrency, true}, {write_concurrency, true}]),
TRef = timer:send_interval(LeaseTime * 2, check_lease),
Lease = #lease{expiry = LeaseTime, timer = TRef},
{ok, #state{locks = Tab, lease = Lease, monitors = #{}}}.
handle_call(stop, _From, State) ->
{stop, normal, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignore, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info(check_lease, State = #state{locks = Tab, lease = Lease, monitors = Monitors}) ->
Monitors1 = lists:foldl(
fun(#lock{resource = Resource, owner = Owner}, MonAcc) ->
case maps:find(Owner, MonAcc) of
{ok, ResourceSet} ->
case is_set_elem(Resource, ResourceSet) of
true ->
%% force kill it as it might have hung
_ = spawn(fun() -> force_kill_lock_owner(Owner, Resource) end),
MonAcc;
false ->
maps:put(Owner, set_put(Resource, ResourceSet), MonAcc)
end;
error ->
_MRef = erlang:monitor(process, Owner),
maps:put(Owner, set_put(Resource, #{}), MonAcc)
end
end, Monitors, check_lease(Tab, Lease, erlang:system_time(millisecond))),
{noreply, State#state{monitors = Monitors1}, hibernate};
handle_info({'DOWN', _MRef, process, DownPid, _Reason},
State = #state{locks = Tab, monitors = Monitors}) ->
case maps:find(DownPid, Monitors) of
{ok, ResourceSet} ->
lists:foreach(
fun(Resource) ->
case ets:lookup(Tab, Resource) of
[Lock = #lock{owner = OwnerPid}] when OwnerPid =:= DownPid ->
ets:delete_object(Tab, Lock);
_ -> ok
end
end, set_to_list(ResourceSet)),
{noreply, State#state{monitors = maps:remove(DownPid, Monitors)}};
error ->
{noreply, State}
end;
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, _State = #state{lease = Lease}) ->
cancel_lease(Lease).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
check_lease(Tab, #lease{expiry = Expiry}, Now) ->
Spec = ets:fun2ms(fun({_, _, _, _, T} = Resource) when (Now - T) > Expiry -> Resource end),
ets:select(Tab, Spec).
cancel_lease(#lease{timer = TRef}) -> timer:cancel(TRef).
set_put(Resource, ResourceSet) when is_map(ResourceSet) ->
ResourceSet#{Resource => nil}.
set_to_list(ResourceSet) when is_map(ResourceSet) ->
maps:keys(ResourceSet).
is_set_elem(Resource, ResourceSet) when is_map(ResourceSet) ->
maps:is_key(Resource, ResourceSet).
force_kill_lock_owner(Pid, Resource) ->
logger:error("kill ~p as it has held the lock for too long, resource: ~p", [Pid, Resource]),
Fields = [status, message_queue_len, current_stacktrace],
Status = rpc:call(node(Pid), erlang, process_info, [Pid, Fields], 5000),
logger:error("lock_owner_status:~n~p", [Status]),
_ = exit(Pid, kill),
ok.
-ifdef(TEST).
force_kill_test() ->
Pid = spawn(fun() ->
receive
foo ->
ok
end
end),
?assert(is_process_alive(Pid)),
ok = force_kill_lock_owner(Pid, resource),
?assertNot(is_process_alive(Pid)).
-endif.
| null | https://raw.githubusercontent.com/emqx/ekka/5ecfa2d66ca978cc0d4a4632223c60befc20dbe7/src/ekka_locker.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
For test cases
Lock APIs
gen_server Callbacks
--------------------------------------------------------------------
API
--------------------------------------------------------------------
check lock status and set the lock atomically
no lock before, lock it
has already been locked, either by self or by others
While remote node is booting, this might fail because
the ETS table has not been created at that moment
--------------------------------------------------------------------
gen_server callbacks
--------------------------------------------------------------------
force kill it as it might have hung
--------------------------------------------------------------------
-------------------------------------------------------------------- | Copyright ( c ) 2019 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(ekka_locker).
-include_lib("stdlib/include/ms_transform.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-behaviour(gen_server).
-export([ start_link/0
, start_link/1
, start_link/2
]).
-export([stop/0, stop/1]).
-export([ acquire/1
, acquire/2
, acquire/3
, acquire/4
]).
-export([ release/1
, release/2
, release/3
]).
For RPC call
-export([ acquire_lock/2
, acquire_lock/3
, release_lock/2
]).
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-type(resource() :: term()).
-type(lock_type() :: local | leader | quorum | all).
-type(lock_result() :: {boolean, [node() | {node(), any()}]}).
-type(piggyback() :: mfa() | undefined).
-export_type([ resource/0
, lock_type/0
, lock_result/0
, piggyback/0
]).
-record(lock, {
resource :: resource(),
owner :: pid(),
counter :: integer(),
created :: integer()
}).
-record(lease, {expiry, timer}).
-record(state, {locks, lease, monitors}).
-define(SERVER, ?MODULE).
-define(LOG(Level, Format, Args),
logger:Level("Ekka(Locker): " ++ Format, Args)).
15 seconds by default
-define(LEASE_TIME, 15000).
-define(MC_TIMEOUT, 30000).
-spec(start_link() -> {ok, pid()} | {error, term()}).
start_link() ->
start_link(?SERVER).
-spec(start_link(atom()) -> {ok, pid()} | ignore | {error, any()}).
start_link(Name) ->
start_link(Name, ?LEASE_TIME).
-spec(start_link(atom(), pos_integer()) -> {ok, pid()} | ignore | {error, any()}).
start_link(Name, LeaseTime) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, LeaseTime], []).
-spec(stop() -> ok).
stop() ->
stop(?SERVER).
-spec(stop(atom()) -> ok).
stop(Name) ->
gen_server:call(Name, stop).
-spec(acquire(resource()) -> lock_result()).
acquire(Resource) ->
acquire(?SERVER, Resource).
-spec(acquire(atom(), resource()) -> lock_result()).
acquire(Name, Resource) when is_atom(Name) ->
acquire(Name, Resource, local).
-spec(acquire(atom(), resource(), lock_type()) -> lock_result()).
acquire(Name, Resource, Type) ->
acquire(Name, Resource, Type, undefined).
-spec(acquire(atom(), resource(), lock_type(), piggyback()) -> lock_result()).
acquire(Name, Resource, local, Piggyback) when is_atom(Name) ->
acquire_lock(Name, lock_obj(Resource), Piggyback);
acquire(Name, Resource, leader, Piggyback) when is_atom(Name)->
Leader = mria_membership:leader(),
case rpc:call(Leader, ?MODULE, acquire_lock,
[Name, lock_obj(Resource), Piggyback]) of
Err = {badrpc, _Reason} ->
{false, [{Leader, Err}]};
Res -> Res
end;
acquire(Name, Resource, quorum, Piggyback) when is_atom(Name) ->
Ring = mria_membership:ring(up),
Nodes = ekka_ring:find_nodes(Resource, Ring),
acquire_locks(Nodes, Name, lock_obj(Resource), Piggyback);
acquire(Name, Resource, all, Piggyback) when is_atom(Name) ->
acquire_locks(mria_membership:nodelist(up),
Name, lock_obj(Resource), Piggyback).
acquire_locks(Nodes, Name, LockObj, Piggyback) ->
{ResL, _BadNodes}
= rpc:multicall(Nodes, ?MODULE, acquire_lock, [Name, LockObj, Piggyback], ?MC_TIMEOUT),
case merge_results(ResL) of
Res = {true, _} -> Res;
Res = {false, _} ->
rpc:multicall(Nodes, ?MODULE, release_lock, [Name, LockObj], ?MC_TIMEOUT),
Res
end.
acquire_lock(Name, LockObj, Piggyback) ->
{acquire_lock(Name, LockObj), [with_piggyback(node(), Piggyback)]}.
acquire_lock(Name, LockObj = #lock{resource = Resource, owner = Owner}) ->
Pos = #lock.counter,
try ets:update_counter(Name, Resource, [{Pos, 0}, {Pos, 1, 1, 1}], LockObj) of
true;
case ets:lookup(Name, Resource) of
[#lock{owner = Owner}] -> true;
_Other -> false
end
catch
error:badarg ->
true
end.
with_piggyback(Node, undefined) ->
Node;
with_piggyback(Node, {M, F, Args}) ->
{Node, erlang:apply(M, F, Args)}.
lock_obj(Resource) ->
#lock{resource = Resource,
owner = self(),
counter = 0,
created = erlang:system_time(millisecond)
}.
-spec(release(resource()) -> lock_result()).
release(Resource) ->
release(?SERVER, Resource).
-spec(release(atom(), resource()) -> lock_result()).
release(Name, Resource) ->
release(Name, Resource, local).
-spec(release(atom(), resource(), lock_type()) -> lock_result()).
release(Name, Resource, local) ->
release_lock(Name, lock_obj(Resource));
release(Name, Resource, leader) ->
Leader = mria_membership:leader(),
case rpc:call(Leader, ?MODULE, release_lock, [Name, lock_obj(Resource)]) of
Err = {badrpc, _Reason} ->
{false, [{Leader, Err}]};
Res -> Res
end;
release(Name, Resource, quorum) ->
Ring = mria_membership:ring(up),
Nodes = ekka_ring:find_nodes(Resource, Ring),
release_locks(Nodes, Name, lock_obj(Resource));
release(Name, Resource, all) ->
release_locks(mria_membership:nodelist(up), Name, lock_obj(Resource)).
release_locks(Nodes, Name, LockObj) ->
{ResL, _BadNodes} = rpc:multicall(Nodes, ?MODULE, release_lock, [Name, LockObj], ?MC_TIMEOUT),
merge_results(ResL).
release_lock(Name, #lock{resource = Resource, owner = Owner}) ->
Res = try ets:lookup(Name, Resource) of
[Lock = #lock{owner = Owner}] ->
ets:delete_object(Name, Lock);
[_Lock] -> false;
[] -> true
catch
error:badarg -> true
end,
{Res, [node()]}.
merge_results(ResL) ->
merge_results(ResL, [], []).
merge_results([], Succ, []) ->
{true, lists:flatten(Succ)};
merge_results([], _, Failed) ->
{false, lists:flatten(Failed)};
merge_results([{true, Res}|ResL], Succ, Failed) ->
merge_results(ResL, [Res|Succ], Failed);
merge_results([{false, Res}|ResL], Succ, Failed) ->
merge_results(ResL, Succ, [Res|Failed]).
init([Name, LeaseTime]) ->
Tab = ets:new(Name, [public, set, named_table, {keypos, 2},
{read_concurrency, true}, {write_concurrency, true}]),
TRef = timer:send_interval(LeaseTime * 2, check_lease),
Lease = #lease{expiry = LeaseTime, timer = TRef},
{ok, #state{locks = Tab, lease = Lease, monitors = #{}}}.
handle_call(stop, _From, State) ->
{stop, normal, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignore, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info(check_lease, State = #state{locks = Tab, lease = Lease, monitors = Monitors}) ->
Monitors1 = lists:foldl(
fun(#lock{resource = Resource, owner = Owner}, MonAcc) ->
case maps:find(Owner, MonAcc) of
{ok, ResourceSet} ->
case is_set_elem(Resource, ResourceSet) of
true ->
_ = spawn(fun() -> force_kill_lock_owner(Owner, Resource) end),
MonAcc;
false ->
maps:put(Owner, set_put(Resource, ResourceSet), MonAcc)
end;
error ->
_MRef = erlang:monitor(process, Owner),
maps:put(Owner, set_put(Resource, #{}), MonAcc)
end
end, Monitors, check_lease(Tab, Lease, erlang:system_time(millisecond))),
{noreply, State#state{monitors = Monitors1}, hibernate};
handle_info({'DOWN', _MRef, process, DownPid, _Reason},
State = #state{locks = Tab, monitors = Monitors}) ->
case maps:find(DownPid, Monitors) of
{ok, ResourceSet} ->
lists:foreach(
fun(Resource) ->
case ets:lookup(Tab, Resource) of
[Lock = #lock{owner = OwnerPid}] when OwnerPid =:= DownPid ->
ets:delete_object(Tab, Lock);
_ -> ok
end
end, set_to_list(ResourceSet)),
{noreply, State#state{monitors = maps:remove(DownPid, Monitors)}};
error ->
{noreply, State}
end;
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, _State = #state{lease = Lease}) ->
cancel_lease(Lease).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
check_lease(Tab, #lease{expiry = Expiry}, Now) ->
Spec = ets:fun2ms(fun({_, _, _, _, T} = Resource) when (Now - T) > Expiry -> Resource end),
ets:select(Tab, Spec).
cancel_lease(#lease{timer = TRef}) -> timer:cancel(TRef).
set_put(Resource, ResourceSet) when is_map(ResourceSet) ->
ResourceSet#{Resource => nil}.
set_to_list(ResourceSet) when is_map(ResourceSet) ->
maps:keys(ResourceSet).
is_set_elem(Resource, ResourceSet) when is_map(ResourceSet) ->
maps:is_key(Resource, ResourceSet).
force_kill_lock_owner(Pid, Resource) ->
logger:error("kill ~p as it has held the lock for too long, resource: ~p", [Pid, Resource]),
Fields = [status, message_queue_len, current_stacktrace],
Status = rpc:call(node(Pid), erlang, process_info, [Pid, Fields], 5000),
logger:error("lock_owner_status:~n~p", [Status]),
_ = exit(Pid, kill),
ok.
-ifdef(TEST).
force_kill_test() ->
Pid = spawn(fun() ->
receive
foo ->
ok
end
end),
?assert(is_process_alive(Pid)),
ok = force_kill_lock_owner(Pid, resource),
?assertNot(is_process_alive(Pid)).
-endif.
|
1d072415392d9e071e2cc1a9fe1cae5e422fdd6a2dc7b601ebeca17963dfa851 | coccinelle/herodotos | avglifespan.ml | open Helper
let avg vlist bugs grinfo vminopt =
let (_,_, _, _, _, _, factor) = grinfo in
let (_, avgday, _) = sl_ratio_day vlist bugs in
wrap_single_some (Array.make 1 ((float_of_int avgday) /. factor))
let xmax _ cumuls = float_of_int (List.length cumuls)
let ymax cumuls = (0.0, ceil (get_ymax1 cumuls))
let dfts = ("AVG lifespan", "Project", "Lifespan", 365.25, xmax, ymax, false, avg)
| null | https://raw.githubusercontent.com/coccinelle/herodotos/5da230a18962ca445ed2368bc21abe0a8402e00f/herodotos/graph/avglifespan.ml | ocaml | open Helper
let avg vlist bugs grinfo vminopt =
let (_,_, _, _, _, _, factor) = grinfo in
let (_, avgday, _) = sl_ratio_day vlist bugs in
wrap_single_some (Array.make 1 ((float_of_int avgday) /. factor))
let xmax _ cumuls = float_of_int (List.length cumuls)
let ymax cumuls = (0.0, ceil (get_ymax1 cumuls))
let dfts = ("AVG lifespan", "Project", "Lifespan", 365.25, xmax, ymax, false, avg)
| |
c2ced48f4955059d5a73d0596878d27fb5b7aeddd2b1d25c4595c6db32bf17cb | well-typed-lightbulbs/ocaml-esp32 | typecore.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Type inference for the core language *)
open Asttypes
open Types
(* This variant is used to print improved error messages, and does not affect
the behavior of the typechecker itself.
It describes possible explanation for types enforced by a keyword of the
language; e.g. "if" requires the condition to be of type bool, and the
then-branch to be of type unit if there is no else branch; "for" requires
indices to be of type int, and the body to be of type unit.
*)
type type_forcing_context =
| If_conditional
| If_no_else_branch
| While_loop_conditional
| While_loop_body
| For_loop_start_index
| For_loop_stop_index
| For_loop_body
| Assert_condition
| Sequence_left_hand_side
| When_guard
(* The combination of a type and a "type forcing context". The intent is that it
describes a type that is "expected" (required) by the context. If unifying
with such a type fails, then the "explanation" field explains why it was
required, in order to display a more enlightening error message.
*)
type type_expected = private {
ty: type_expr;
explanation: type_forcing_context option;
}
val mk_expected:
?explanation:type_forcing_context ->
type_expr ->
type_expected
val is_nonexpansive: Typedtree.expression -> bool
type existential_restriction =
| At_toplevel (** no existential types at the toplevel *)
| In_group (** nor with [let ... and ...] *)
| In_rec (** or recursive definition *)
| With_attributes (** or [let[@any_attribute] = ...] *)
| In_class_args (** or in class arguments [class c (...) = ...] *)
| In_class_def (** or in [class c = let ... in ...] *)
| In_self_pattern (** or in self pattern *)
val type_binding:
Env.t -> rec_flag ->
Parsetree.value_binding list ->
Annot.ident option ->
Typedtree.value_binding list * Env.t
val type_let:
existential_restriction -> Env.t -> rec_flag ->
Parsetree.value_binding list ->
Annot.ident option ->
Typedtree.value_binding list * Env.t
val type_expression:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_class_arg_pattern:
string -> Env.t -> Env.t -> arg_label -> Parsetree.pattern ->
Typedtree.pattern * (Ident.t * Ident.t * type_expr) list *
Env.t * Env.t
val type_self_pattern:
string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern ->
Typedtree.pattern *
(Ident.t * type_expr) Meths.t ref *
(Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr)
Vars.t ref *
Env.t * Env.t * Env.t
val check_partial:
?lev:int -> Env.t -> type_expr ->
Location.t -> Typedtree.case list -> Typedtree.partial
val type_expect:
?in_function:(Location.t * type_expr) ->
Env.t -> Parsetree.expression -> type_expected -> Typedtree.expression
val type_exp:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_approx:
Env.t -> Parsetree.expression -> type_expr
val type_argument:
Env.t -> Parsetree.expression ->
type_expr -> type_expr -> Typedtree.expression
val option_some: Env.t -> Typedtree.expression -> Typedtree.expression
val option_none: Env.t -> type_expr -> Location.t -> Typedtree.expression
val extract_option_type: Env.t -> type_expr -> type_expr
val generalizable: int -> type_expr -> bool
val reset_delayed_checks: unit -> unit
val force_delayed_checks: unit -> unit
val name_pattern : string -> Typedtree.pattern list -> Ident.t
val name_cases : string -> Typedtree.case list -> Ident.t
val self_coercion : (Path.t * Location.t list ref) list ref
type error =
| Constructor_arity_mismatch of Longident.t * int * int
| Label_mismatch of Longident.t * Ctype.Unification_trace.t
| Pattern_type_clash of
Ctype.Unification_trace.t * Typedtree.pattern_desc option
| Or_pattern_type_clash of Ident.t * Ctype.Unification_trace.t
| Multiply_bound_variable of string
| Orpat_vars of Ident.t * Ident.t list
| Expr_type_clash of
Ctype.Unification_trace.t * type_forcing_context option
* Typedtree.expression_desc option
| Apply_non_function of type_expr
| Apply_wrong_label of arg_label * type_expr
| Label_multiply_defined of string
| Label_missing of Ident.t list
| Label_not_mutable of Longident.t
| Wrong_name of
string * type_expected * string * Path.t * string * string list
| Name_type_mismatch of
string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list
| Invalid_format of string
| Undefined_method of type_expr * string * string list option
| Undefined_inherited_method of string * string list
| Virtual_class of Longident.t
| Private_type of type_expr
| Private_label of Longident.t * type_expr
| Private_constructor of constructor_description * type_expr
| Unbound_instance_variable of string * string list
| Instance_variable_not_mutable of string
| Not_subtype of Ctype.Unification_trace.t * Ctype.Unification_trace.t
| Outside_class
| Value_multiply_overridden of string
| Coercion_failure of
type_expr * type_expr * Ctype.Unification_trace.t * bool
| Too_many_arguments of bool * type_expr * type_forcing_context option
| Abstract_wrong_label of arg_label * type_expr * type_forcing_context option
| Scoping_let_module of string * type_expr
| Not_a_variant_type of Longident.t
| Incoherent_label_order
| Less_general of string * Ctype.Unification_trace.t
| Modules_not_allowed
| Cannot_infer_signature
| Not_a_packed_module of type_expr
| Unexpected_existential of existential_restriction * string * string list
| Invalid_interval
| Invalid_for_loop_index
| No_value_clauses
| Exception_pattern_disallowed
| Mixed_value_and_exception_patterns_under_guard
| Inlined_record_escape
| Inlined_record_expected
| Unrefuted_pattern of Typedtree.pattern
| Invalid_extension_constructor_payload
| Not_an_extension_constructor
| Literal_overflow of string
| Unknown_literal of string * char
| Illegal_letrec_pat
| Illegal_letrec_expr
| Illegal_class_expr
| Empty_pattern
| Letop_type_clash of string * Ctype.Unification_trace.t
| Andop_type_clash of string * Ctype.Unification_trace.t
| Bindings_type_clash of Ctype.Unification_trace.t
exception Error of Location.t * Env.t * error
exception Error_forward of Location.error
val report_error: loc:Location.t -> Env.t -> error -> Location.error
* @deprecated . Use { ! } , { ! } .
Forward declaration , to be filled in by
val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref
(* Forward declaration, to be filled in by Typemod.type_open *)
val type_open:
(?used_slot:bool ref -> override_flag -> Env.t -> Location.t ->
Longident.t loc -> Path.t * Env.t)
ref
(* Forward declaration, to be filled in by Typemod.type_open_decl *)
val type_open_decl:
(?used_slot:bool ref -> Env.t -> Parsetree.open_declaration ->
Typedtree.open_declaration * Types.signature * Env.t)
ref
Forward declaration , to be filled in by Typeclass.class_structure
val type_object:
(Env.t -> Location.t -> Parsetree.class_structure ->
Typedtree.class_structure * Types.class_signature * string list) ref
val type_package:
(Env.t -> Parsetree.module_expr -> Path.t -> Longident.t list ->
Typedtree.module_expr * type_expr list) ref
val create_package_type : Location.t -> Env.t ->
Longident.t * (Longident.t * Parsetree.core_type) list ->
Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr
val constant: Parsetree.constant -> (Asttypes.constant, error) result
val check_recursive_bindings : Env.t -> Typedtree.value_binding list -> unit
val check_recursive_class_bindings :
Env.t -> Ident.t list -> Typedtree.class_expr list -> unit
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/typing/typecore.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Type inference for the core language
This variant is used to print improved error messages, and does not affect
the behavior of the typechecker itself.
It describes possible explanation for types enforced by a keyword of the
language; e.g. "if" requires the condition to be of type bool, and the
then-branch to be of type unit if there is no else branch; "for" requires
indices to be of type int, and the body to be of type unit.
The combination of a type and a "type forcing context". The intent is that it
describes a type that is "expected" (required) by the context. If unifying
with such a type fails, then the "explanation" field explains why it was
required, in order to display a more enlightening error message.
* no existential types at the toplevel
* nor with [let ... and ...]
* or recursive definition
* or [let[@any_attribute] = ...]
* or in class arguments [class c (...) = ...]
* or in [class c = let ... in ...]
* or in self pattern
Forward declaration, to be filled in by Typemod.type_open
Forward declaration, to be filled in by Typemod.type_open_decl | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Asttypes
open Types
type type_forcing_context =
| If_conditional
| If_no_else_branch
| While_loop_conditional
| While_loop_body
| For_loop_start_index
| For_loop_stop_index
| For_loop_body
| Assert_condition
| Sequence_left_hand_side
| When_guard
type type_expected = private {
ty: type_expr;
explanation: type_forcing_context option;
}
val mk_expected:
?explanation:type_forcing_context ->
type_expr ->
type_expected
val is_nonexpansive: Typedtree.expression -> bool
type existential_restriction =
val type_binding:
Env.t -> rec_flag ->
Parsetree.value_binding list ->
Annot.ident option ->
Typedtree.value_binding list * Env.t
val type_let:
existential_restriction -> Env.t -> rec_flag ->
Parsetree.value_binding list ->
Annot.ident option ->
Typedtree.value_binding list * Env.t
val type_expression:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_class_arg_pattern:
string -> Env.t -> Env.t -> arg_label -> Parsetree.pattern ->
Typedtree.pattern * (Ident.t * Ident.t * type_expr) list *
Env.t * Env.t
val type_self_pattern:
string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern ->
Typedtree.pattern *
(Ident.t * type_expr) Meths.t ref *
(Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr)
Vars.t ref *
Env.t * Env.t * Env.t
val check_partial:
?lev:int -> Env.t -> type_expr ->
Location.t -> Typedtree.case list -> Typedtree.partial
val type_expect:
?in_function:(Location.t * type_expr) ->
Env.t -> Parsetree.expression -> type_expected -> Typedtree.expression
val type_exp:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_approx:
Env.t -> Parsetree.expression -> type_expr
val type_argument:
Env.t -> Parsetree.expression ->
type_expr -> type_expr -> Typedtree.expression
val option_some: Env.t -> Typedtree.expression -> Typedtree.expression
val option_none: Env.t -> type_expr -> Location.t -> Typedtree.expression
val extract_option_type: Env.t -> type_expr -> type_expr
val generalizable: int -> type_expr -> bool
val reset_delayed_checks: unit -> unit
val force_delayed_checks: unit -> unit
val name_pattern : string -> Typedtree.pattern list -> Ident.t
val name_cases : string -> Typedtree.case list -> Ident.t
val self_coercion : (Path.t * Location.t list ref) list ref
type error =
| Constructor_arity_mismatch of Longident.t * int * int
| Label_mismatch of Longident.t * Ctype.Unification_trace.t
| Pattern_type_clash of
Ctype.Unification_trace.t * Typedtree.pattern_desc option
| Or_pattern_type_clash of Ident.t * Ctype.Unification_trace.t
| Multiply_bound_variable of string
| Orpat_vars of Ident.t * Ident.t list
| Expr_type_clash of
Ctype.Unification_trace.t * type_forcing_context option
* Typedtree.expression_desc option
| Apply_non_function of type_expr
| Apply_wrong_label of arg_label * type_expr
| Label_multiply_defined of string
| Label_missing of Ident.t list
| Label_not_mutable of Longident.t
| Wrong_name of
string * type_expected * string * Path.t * string * string list
| Name_type_mismatch of
string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list
| Invalid_format of string
| Undefined_method of type_expr * string * string list option
| Undefined_inherited_method of string * string list
| Virtual_class of Longident.t
| Private_type of type_expr
| Private_label of Longident.t * type_expr
| Private_constructor of constructor_description * type_expr
| Unbound_instance_variable of string * string list
| Instance_variable_not_mutable of string
| Not_subtype of Ctype.Unification_trace.t * Ctype.Unification_trace.t
| Outside_class
| Value_multiply_overridden of string
| Coercion_failure of
type_expr * type_expr * Ctype.Unification_trace.t * bool
| Too_many_arguments of bool * type_expr * type_forcing_context option
| Abstract_wrong_label of arg_label * type_expr * type_forcing_context option
| Scoping_let_module of string * type_expr
| Not_a_variant_type of Longident.t
| Incoherent_label_order
| Less_general of string * Ctype.Unification_trace.t
| Modules_not_allowed
| Cannot_infer_signature
| Not_a_packed_module of type_expr
| Unexpected_existential of existential_restriction * string * string list
| Invalid_interval
| Invalid_for_loop_index
| No_value_clauses
| Exception_pattern_disallowed
| Mixed_value_and_exception_patterns_under_guard
| Inlined_record_escape
| Inlined_record_expected
| Unrefuted_pattern of Typedtree.pattern
| Invalid_extension_constructor_payload
| Not_an_extension_constructor
| Literal_overflow of string
| Unknown_literal of string * char
| Illegal_letrec_pat
| Illegal_letrec_expr
| Illegal_class_expr
| Empty_pattern
| Letop_type_clash of string * Ctype.Unification_trace.t
| Andop_type_clash of string * Ctype.Unification_trace.t
| Bindings_type_clash of Ctype.Unification_trace.t
exception Error of Location.t * Env.t * error
exception Error_forward of Location.error
val report_error: loc:Location.t -> Env.t -> error -> Location.error
* @deprecated . Use { ! } , { ! } .
Forward declaration , to be filled in by
val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref
val type_open:
(?used_slot:bool ref -> override_flag -> Env.t -> Location.t ->
Longident.t loc -> Path.t * Env.t)
ref
val type_open_decl:
(?used_slot:bool ref -> Env.t -> Parsetree.open_declaration ->
Typedtree.open_declaration * Types.signature * Env.t)
ref
Forward declaration , to be filled in by Typeclass.class_structure
val type_object:
(Env.t -> Location.t -> Parsetree.class_structure ->
Typedtree.class_structure * Types.class_signature * string list) ref
val type_package:
(Env.t -> Parsetree.module_expr -> Path.t -> Longident.t list ->
Typedtree.module_expr * type_expr list) ref
val create_package_type : Location.t -> Env.t ->
Longident.t * (Longident.t * Parsetree.core_type) list ->
Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr
val constant: Parsetree.constant -> (Asttypes.constant, error) result
val check_recursive_bindings : Env.t -> Typedtree.value_binding list -> unit
val check_recursive_class_bindings :
Env.t -> Ident.t list -> Typedtree.class_expr list -> unit
|
eb21499d2d287e244370609d8d3880a26bc68eaab99117e5e3459ff2c51c4c1e | UCSD-PL/refscript | CmdLine.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE TupleSections #
module Language.Rsc.CmdLine (Config(..), config, withPragmas) where
import Control.Monad (foldM)
import Language.Rsc.Locations
import System.Console.CmdArgs
import System.Console.CmdArgs.Explicit (modeValue)
import System.Environment (withArgs)
---------------------------------------------------------------------
-- | Command Line Configuration Options
---------------------------------------------------------------------
data Config
= TC { files :: [FilePath] -- ^ source files to check
, incdirs :: [FilePath] -- ^ path to directory for include specs
^ fail when casts are inserted
}
| Liquid { files :: [FilePath] -- ^ source files to check
, incdirs :: [FilePath] -- ^ path to directory for include specs
, extraInvs :: Bool -- ^ add extra invariants to object types
, renderAnns :: Bool -- ^ render annotations
, prelude :: Maybe FilePath -- ^ use this prelude file
^ use real - valued SMT arithmetic
, extSolver :: Bool -- ^ use external (Ocaml) fixpoint solver (deprecated)
, dumpDebug :: Bool -- ^ emit .fq, .fqout, .out files
, dumpJson :: Bool -- ^ dump result in JSON form in error stream
}
deriving (Data, Typeable, Show, Eq)
instance Default Config where
def = Liquid [] [] False False Nothing False False False False
---------------------------------------------------------------------------------
-- | Parsing Command Line -------------------------------------------------------
---------------------------------------------------------------------------------
tc :: Config
tc = TC {
files = def &= typ "TARGET"
&= args
&= typFile
, incdirs = def &= typDir
&= help "Paths to Spec Include Directory "
, noFailCasts = def &= help "Do not fail typecheck when casts are added"
} &= help "RefScript Type Checker"
liquid :: Config
liquid = Liquid {
files = def &= typ "TARGET"
&= args
&= typFile
, incdirs = def &= typDir
&= help "Paths to Spec Include Directory "
, extraInvs = def &= help "Add extra invariants (e.g. 'keyIn' for object types)"
, renderAnns = def &= help "Render annotations"
, prelude = def &= help "Use given prelude.ts file (debug)"
, real = def &= help "Use real-valued SMT logic (slow!)"
, extSolver = def &= help "Use external (Ocaml) fixpoint solver (deprecated)"
, dumpDebug = def &= help "Dump debug files (e.g. .fq, .fqout, .out)"
, dumpJson = def &= help "Dump result in JSON format in error stream"
} &= help "RefScript Refinement Type Checker"
config :: Config
config = modes [ liquid &= auto
, tc
]
&= help "rsc is an optional refinement type checker for TypeScript"
&= program "rsc"
&= summary "rsc © Copyright 2013-15 Regents of the University of California."
&= verbosity
---------------------------------------------------------------------------------------
withPragmas :: Config -> [Located String] -> IO Config
---------------------------------------------------------------------------------------
withPragmas = foldM withPragma
withPragma :: Config -> Located String -> IO Config
withPragma c s = withArgs [val s] $ cmdArgsRun
cfg0 { modeValue = (modeValue cfg0) { cmdArgsValue = c } }
where
cfg0 = cmdArgsMode liquid
-- getOpts :: IO Config
getOpts = do config
whenLoud $ putStrLn $ banner md
return $ md
banner args = " rsc © Copyright 2013 - 14 Regents of the University of California.\n "
+ + " All Rights Reserved.\n "
-- ++ "rsc" ++ show args ++ "\n"
| null | https://raw.githubusercontent.com/UCSD-PL/refscript/884306fef72248ac41ecdbb928bbd7b06ca71bd4/src/Language/Rsc/CmdLine.hs | haskell | # LANGUAGE DeriveDataTypeable #
-------------------------------------------------------------------
| Command Line Configuration Options
-------------------------------------------------------------------
^ source files to check
^ path to directory for include specs
^ source files to check
^ path to directory for include specs
^ add extra invariants to object types
^ render annotations
^ use this prelude file
^ use external (Ocaml) fixpoint solver (deprecated)
^ emit .fq, .fqout, .out files
^ dump result in JSON form in error stream
-------------------------------------------------------------------------------
| Parsing Command Line -------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
getOpts :: IO Config
++ "rsc" ++ show args ++ "\n" | # LANGUAGE TupleSections #
module Language.Rsc.CmdLine (Config(..), config, withPragmas) where
import Control.Monad (foldM)
import Language.Rsc.Locations
import System.Console.CmdArgs
import System.Console.CmdArgs.Explicit (modeValue)
import System.Environment (withArgs)
data Config
^ fail when casts are inserted
}
^ use real - valued SMT arithmetic
}
deriving (Data, Typeable, Show, Eq)
instance Default Config where
def = Liquid [] [] False False Nothing False False False False
tc :: Config
tc = TC {
files = def &= typ "TARGET"
&= args
&= typFile
, incdirs = def &= typDir
&= help "Paths to Spec Include Directory "
, noFailCasts = def &= help "Do not fail typecheck when casts are added"
} &= help "RefScript Type Checker"
liquid :: Config
liquid = Liquid {
files = def &= typ "TARGET"
&= args
&= typFile
, incdirs = def &= typDir
&= help "Paths to Spec Include Directory "
, extraInvs = def &= help "Add extra invariants (e.g. 'keyIn' for object types)"
, renderAnns = def &= help "Render annotations"
, prelude = def &= help "Use given prelude.ts file (debug)"
, real = def &= help "Use real-valued SMT logic (slow!)"
, extSolver = def &= help "Use external (Ocaml) fixpoint solver (deprecated)"
, dumpDebug = def &= help "Dump debug files (e.g. .fq, .fqout, .out)"
, dumpJson = def &= help "Dump result in JSON format in error stream"
} &= help "RefScript Refinement Type Checker"
config :: Config
config = modes [ liquid &= auto
, tc
]
&= help "rsc is an optional refinement type checker for TypeScript"
&= program "rsc"
&= summary "rsc © Copyright 2013-15 Regents of the University of California."
&= verbosity
withPragmas :: Config -> [Located String] -> IO Config
withPragmas = foldM withPragma
withPragma :: Config -> Located String -> IO Config
withPragma c s = withArgs [val s] $ cmdArgsRun
cfg0 { modeValue = (modeValue cfg0) { cmdArgsValue = c } }
where
cfg0 = cmdArgsMode liquid
getOpts = do config
whenLoud $ putStrLn $ banner md
return $ md
banner args = " rsc © Copyright 2013 - 14 Regents of the University of California.\n "
+ + " All Rights Reserved.\n "
|
414450f4f602a6e61b26dc600b57f5005f15ca13c59d6645d7817ec5c639417e | mejgun/haskell-tdlib | ReportChatPhoto.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Query.ReportChatPhoto where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.ChatReportReason as ChatReportReason
import qualified Utils as U
-- |
Reports a chat photo to the Telegram moderators . A chat photo can be reported only if chat.can_be_reported
data ReportChatPhoto = ReportChatPhoto
| Additional report details ; 0 - 1024 characters
text :: Maybe String,
-- | The reason for reporting the chat photo
reason :: Maybe ChatReportReason.ChatReportReason,
-- | Identifier of the photo to report. Only full photos from chatPhoto can be reported
file_id :: Maybe Int,
-- | Chat identifier
chat_id :: Maybe Int
}
deriving (Eq)
instance Show ReportChatPhoto where
show
ReportChatPhoto
{ text = text_,
reason = reason_,
file_id = file_id_,
chat_id = chat_id_
} =
"ReportChatPhoto"
++ U.cc
[ U.p "text" text_,
U.p "reason" reason_,
U.p "file_id" file_id_,
U.p "chat_id" chat_id_
]
instance T.ToJSON ReportChatPhoto where
toJSON
ReportChatPhoto
{ text = text_,
reason = reason_,
file_id = file_id_,
chat_id = chat_id_
} =
A.object
[ "@type" A..= T.String "reportChatPhoto",
"text" A..= text_,
"reason" A..= reason_,
"file_id" A..= file_id_,
"chat_id" A..= chat_id_
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/dc380d18d49eaadc386a81dc98af2ce00f8797c2/src/TD/Query/ReportChatPhoto.hs | haskell | # LANGUAGE OverloadedStrings #
|
|
| The reason for reporting the chat photo
| Identifier of the photo to report. Only full photos from chatPhoto can be reported
| Chat identifier |
module TD.Query.ReportChatPhoto where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.ChatReportReason as ChatReportReason
import qualified Utils as U
Reports a chat photo to the Telegram moderators . A chat photo can be reported only if chat.can_be_reported
data ReportChatPhoto = ReportChatPhoto
| Additional report details ; 0 - 1024 characters
text :: Maybe String,
reason :: Maybe ChatReportReason.ChatReportReason,
file_id :: Maybe Int,
chat_id :: Maybe Int
}
deriving (Eq)
instance Show ReportChatPhoto where
show
ReportChatPhoto
{ text = text_,
reason = reason_,
file_id = file_id_,
chat_id = chat_id_
} =
"ReportChatPhoto"
++ U.cc
[ U.p "text" text_,
U.p "reason" reason_,
U.p "file_id" file_id_,
U.p "chat_id" chat_id_
]
instance T.ToJSON ReportChatPhoto where
toJSON
ReportChatPhoto
{ text = text_,
reason = reason_,
file_id = file_id_,
chat_id = chat_id_
} =
A.object
[ "@type" A..= T.String "reportChatPhoto",
"text" A..= text_,
"reason" A..= reason_,
"file_id" A..= file_id_,
"chat_id" A..= chat_id_
]
|
d512c30475dd563f554c0c214b31a5a2f8e98c31d8b1a3812cea0f5ccbef5797 | kardan/taxa | test_fns.cljc | (ns com.kardans.taxa.flow.test-fns
(:require [com.kardans.taxa :as taxa]))
(defn f
([t k v]
(f t k v (taxa/tag t)))
([t k v tag]
(-> t
taxa/thing
(taxa/taxon tag)
(assoc-in [::taxa/thing k] v))))
(defn f1
([t]
(f1 t "f1"))
([t arg]
(f t :f1 arg)))
(defn f2
([t]
(f2 t "f2"))
([t arg]
(f t :f2 arg)))
(defn f3
[t]
(f t :f3 "f3" ::taxa/err))
(defn f4
[t]
(assoc-in t [::taxa/thing :f4] "f4"))
| null | https://raw.githubusercontent.com/kardan/taxa/e76d1ded4ad2fe140284a0be7261580d65293578/src/test/com/kardans/taxa/flow/test_fns.cljc | clojure | (ns com.kardans.taxa.flow.test-fns
(:require [com.kardans.taxa :as taxa]))
(defn f
([t k v]
(f t k v (taxa/tag t)))
([t k v tag]
(-> t
taxa/thing
(taxa/taxon tag)
(assoc-in [::taxa/thing k] v))))
(defn f1
([t]
(f1 t "f1"))
([t arg]
(f t :f1 arg)))
(defn f2
([t]
(f2 t "f2"))
([t arg]
(f t :f2 arg)))
(defn f3
[t]
(f t :f3 "f3" ::taxa/err))
(defn f4
[t]
(assoc-in t [::taxa/thing :f4] "f4"))
| |
1190c6a5b9d5a8a69342886fe682a8c7e573bf4ce4402e3914d5b273144df5dd | frex-project/haskell-frex | LinAlg.hs | # LANGUAGE FlexibleContexts #
module LinAlg where
import Data.List (transpose)
import Prelude hiding ((<*>))
import Data.Ring
import Data.Coproduct
import Data.PartiallyStatic
import qualified Data.Map as Map
import qualified Data.MultiSet as MultiSet
import Language.Haskell.TH.Syntax (Lift)
type PsIntRing = FreeExt Ring (Code Int) Int
dot :: Ring r ⇒ [r] → [r] → r
dot xs ys = sumr (zipWith (⊗) xs ys)
sumr :: Ring r ⇒ [r] → r
sumr = foldr (⊕) r₀
cdNumRing :: (Num n, Eq n, Ring (Code n), Lift n) ⇒ FreeExt Ring (Code n) n → Code n
cdNumRing (CR (MN m)) = cd $ sumBits $ Map.assocs m
where
sumBits = foldr (\(xs, c) r → (prodVars xs `prod2` sta c) `sum2` r) (sta 0)
prodVars = prodN . map dyn . MultiSet.elems
-- TODO: we could simplify further here, reducing the number of multiplications as for 'power'
prodN :: (Num n, Eq n, Lift n) ⇒ [FreeExt Set (Code n) n] → FreeExt Set (Code n) n
prodN = foldr prod2 (sta 1)
sum2 :: (Num n, Eq n, Lift n) ⇒ FreeExt Set (Code n) n → FreeExt Set (Code n) n → FreeExt Set (Code n) n
sum2 (Inl 0) r = r
sum2 l (Inl 0) = l
sum2 (Inl l) (Inl r) = sta (l + r)
sum2 l r = dyn [|| $$(cd l) + $$(cd r) ||]
prod2 :: (Num n, Eq n, Lift n) ⇒ FreeExt Set (Code n) n → FreeExt Set (Code n) n → FreeExt Set (Code n) n
prod2 _ (Inl 0) = sta 0
prod2 (Inl 0) _ = sta 0
prod2 x (Inl 1) = x
prod2 (Inl 1) y = y
prod2 l r = dyn [|| $$(cd l) * $$(cd r) ||]
-- matrix-matrix multiplication
mmmul :: Ring r ⇒ [[r]] → [[r]] → [[r]]
mmmul m n = [[dot a b | b <- transpose n] | a <- m]
| null | https://raw.githubusercontent.com/frex-project/haskell-frex/23d66eb2f8b1eb21021100fbf3576e6bac2dd075/test/LinAlg.hs | haskell | TODO: we could simplify further here, reducing the number of multiplications as for 'power'
matrix-matrix multiplication | # LANGUAGE FlexibleContexts #
module LinAlg where
import Data.List (transpose)
import Prelude hiding ((<*>))
import Data.Ring
import Data.Coproduct
import Data.PartiallyStatic
import qualified Data.Map as Map
import qualified Data.MultiSet as MultiSet
import Language.Haskell.TH.Syntax (Lift)
type PsIntRing = FreeExt Ring (Code Int) Int
dot :: Ring r ⇒ [r] → [r] → r
dot xs ys = sumr (zipWith (⊗) xs ys)
sumr :: Ring r ⇒ [r] → r
sumr = foldr (⊕) r₀
cdNumRing :: (Num n, Eq n, Ring (Code n), Lift n) ⇒ FreeExt Ring (Code n) n → Code n
cdNumRing (CR (MN m)) = cd $ sumBits $ Map.assocs m
where
sumBits = foldr (\(xs, c) r → (prodVars xs `prod2` sta c) `sum2` r) (sta 0)
prodVars = prodN . map dyn . MultiSet.elems
prodN :: (Num n, Eq n, Lift n) ⇒ [FreeExt Set (Code n) n] → FreeExt Set (Code n) n
prodN = foldr prod2 (sta 1)
sum2 :: (Num n, Eq n, Lift n) ⇒ FreeExt Set (Code n) n → FreeExt Set (Code n) n → FreeExt Set (Code n) n
sum2 (Inl 0) r = r
sum2 l (Inl 0) = l
sum2 (Inl l) (Inl r) = sta (l + r)
sum2 l r = dyn [|| $$(cd l) + $$(cd r) ||]
prod2 :: (Num n, Eq n, Lift n) ⇒ FreeExt Set (Code n) n → FreeExt Set (Code n) n → FreeExt Set (Code n) n
prod2 _ (Inl 0) = sta 0
prod2 (Inl 0) _ = sta 0
prod2 x (Inl 1) = x
prod2 (Inl 1) y = y
prod2 l r = dyn [|| $$(cd l) * $$(cd r) ||]
mmmul :: Ring r ⇒ [[r]] → [[r]] → [[r]]
mmmul m n = [[dot a b | b <- transpose n] | a <- m]
|
43ef18de5acd31be39ade90028ef875ad3971ba847611501eafd4ca7eee70eb5 | threatgrid/ctia | migrations.clj | (ns ctia.task.migration.migrations
(:require [clj-momo.lib.clj-time
[coerce :as time-coerce]
[core :as time-core]]
[clojure.set :as set]
[ctia.task.migration.migrations.describe :refer [migrate-describe]]
[ctia.task.migration.migrations.investigation-actions :refer [migrate-action-data]]))
(def add-groups
"set a document group to [\"tenzin\"] if unset"
(map (fn [{:keys [groups]
:as doc}]
(if-not (seq groups)
(assoc doc :groups ["tenzin"])
doc))))
(def fix-end-time
"fix end_time to 2535"
(map
(fn [{:keys [valid_time]
:as doc}]
(if (:end_time valid_time)
(update-in doc
[:valid_time
:end_time]
#(let [max-end-time (time-core/internal-date 2525 01 01)
end-time (time-coerce/to-internal-date %)]
(if (time-core/after? end-time max-end-time)
max-end-time
end-time)))
doc))))
(defn append-version
"append the version field only
if the document is not a user or an event"
[version]
(map #(if-not (or (= (:type %) "event")
(seq (:capabilities %)))
(assoc % :schema_version version)
%)))
(def target-observed_time
"append observed_time to sighting/target
inheriting the sighting"
(map (fn [{:keys [target observed_time] :as doc}]
(if (and target
(not (:observed_time target)))
(update doc :target assoc :observed_time observed_time)
doc))))
(def pluralize-target
"a sighting can have multiple targets"
(map (fn [{:keys [type target] :as doc}]
(if (and (= "sighting" type)
(not (nil? target)))
(-> doc
(assoc :targets (if (vector? target)
target [target]))
(dissoc :target))
doc))))
;;-- Rename observable type
(defn with-renamed-observable-type
[observable old new]
(update observable
:type
(fn [obs-type]
(if (= obs-type old)
new
obs-type))))
(defn with-renamed-observable-types
[c old new]
(mapv #(with-renamed-observable-type % old new)
c))
(defn rename-sighting-relations-observable-types
[relation old new]
(-> relation
(update :source with-renamed-observable-type old new)
(update :related with-renamed-observable-type old new)))
(defn rename-sighting-observable-types
[sighting old new]
(-> sighting
(update :observables with-renamed-observable-types old new)
(update :relations
(fn [relations]
(mapv #(rename-sighting-relations-observable-types % old new)
relations)))))
(defn rename-judgement-observable-type
[judgement old new]
(update judgement
:observable
with-renamed-observable-type
old
new))
(defn rename-bundle-observable-types
[bundle old new]
(-> bundle
(update :sightings (fn [sightings]
(mapv #(rename-sighting-observable-types % old new)
sightings)))
(update :judgements (fn [judgements]
(mapv #(rename-judgement-observable-type % old new)
judgements)))
(update :verdicts (fn [verdicts]
(mapv #(rename-judgement-observable-type % old new)
verdicts)))))
(defn rename-casebook-observable-types
[{:keys [observables bundle] :as casebook} old new]
(cond-> casebook
(seq observables) (update :observables with-renamed-observable-types old new)
bundle (update :bundle rename-bundle-observable-types old new)))
(defn rename-observable-type
[old new]
(map (fn [{observable-type :type
:as doc}]
(case observable-type
"sighting" (rename-sighting-observable-types doc old new)
("judgement"
"verdict") (rename-judgement-observable-type doc old new)
"casebook" (rename-casebook-observable-types doc old new)
doc))))
;;--- Simplify incident model
(defn simplify-incident-time
[incident-time]
(-> incident-time
(dissoc :first_malicious_action :initial_compromise :first_data_exfiltration
:containment_achieved :restoration_achieved)
(set/rename-keys {:incident_discovery :discovered
:incident_opened :opened
:incident_reported :reported
:incident_closed :closed})))
(def simplify-incident
(map (fn [{entity-type :type :as doc}]
(if (= entity-type "incident")
(-> doc
(dissoc :valid_time :reporter :responder :coordinator :victim
:affected_assets :impact_assessment :security_compromise
:COA_requested :COA_taken :contact :history :related_indicators
:related_observables :attributed_actors :related_incidents)
(update :incident_time simplify-incident-time))
doc))))
(def actor-type-array
(map (fn [{type :type
actor-type :actor_type
:as doc}]
(if (and (= type "actor") actor-type)
(-> doc
(assoc :actor_types [actor-type])
(dissoc :actor_type))
doc))))
(def available-migrations
{:identity identity
:__test (map #(assoc % :groups ["migration-test"]))
:0.4.16 (comp (append-version "0.4.16")
add-groups
fix-end-time)
:0.4.28 (comp (append-version "0.4.28")
fix-end-time
add-groups
target-observed_time
pluralize-target)
:1.0.0 (comp (append-version "1.0.0")
(rename-observable-type "pki-serial" "pki_serial")
simplify-incident)
:1.2.0 (comp (append-version "1.2.0")
actor-type-array)
:investigation-actions (comp (append-version "1.1.0")
migrate-action-data)
:describe (comp (append-version "1.1.0")
migrate-describe)})
| null | https://raw.githubusercontent.com/threatgrid/ctia/3d41c6012e701017a63535444a410f59da65c53c/src/ctia/task/migration/migrations.clj | clojure | -- Rename observable type
--- Simplify incident model | (ns ctia.task.migration.migrations
(:require [clj-momo.lib.clj-time
[coerce :as time-coerce]
[core :as time-core]]
[clojure.set :as set]
[ctia.task.migration.migrations.describe :refer [migrate-describe]]
[ctia.task.migration.migrations.investigation-actions :refer [migrate-action-data]]))
(def add-groups
"set a document group to [\"tenzin\"] if unset"
(map (fn [{:keys [groups]
:as doc}]
(if-not (seq groups)
(assoc doc :groups ["tenzin"])
doc))))
(def fix-end-time
"fix end_time to 2535"
(map
(fn [{:keys [valid_time]
:as doc}]
(if (:end_time valid_time)
(update-in doc
[:valid_time
:end_time]
#(let [max-end-time (time-core/internal-date 2525 01 01)
end-time (time-coerce/to-internal-date %)]
(if (time-core/after? end-time max-end-time)
max-end-time
end-time)))
doc))))
(defn append-version
"append the version field only
if the document is not a user or an event"
[version]
(map #(if-not (or (= (:type %) "event")
(seq (:capabilities %)))
(assoc % :schema_version version)
%)))
(def target-observed_time
"append observed_time to sighting/target
inheriting the sighting"
(map (fn [{:keys [target observed_time] :as doc}]
(if (and target
(not (:observed_time target)))
(update doc :target assoc :observed_time observed_time)
doc))))
(def pluralize-target
"a sighting can have multiple targets"
(map (fn [{:keys [type target] :as doc}]
(if (and (= "sighting" type)
(not (nil? target)))
(-> doc
(assoc :targets (if (vector? target)
target [target]))
(dissoc :target))
doc))))
(defn with-renamed-observable-type
[observable old new]
(update observable
:type
(fn [obs-type]
(if (= obs-type old)
new
obs-type))))
(defn with-renamed-observable-types
[c old new]
(mapv #(with-renamed-observable-type % old new)
c))
(defn rename-sighting-relations-observable-types
[relation old new]
(-> relation
(update :source with-renamed-observable-type old new)
(update :related with-renamed-observable-type old new)))
(defn rename-sighting-observable-types
[sighting old new]
(-> sighting
(update :observables with-renamed-observable-types old new)
(update :relations
(fn [relations]
(mapv #(rename-sighting-relations-observable-types % old new)
relations)))))
(defn rename-judgement-observable-type
[judgement old new]
(update judgement
:observable
with-renamed-observable-type
old
new))
(defn rename-bundle-observable-types
[bundle old new]
(-> bundle
(update :sightings (fn [sightings]
(mapv #(rename-sighting-observable-types % old new)
sightings)))
(update :judgements (fn [judgements]
(mapv #(rename-judgement-observable-type % old new)
judgements)))
(update :verdicts (fn [verdicts]
(mapv #(rename-judgement-observable-type % old new)
verdicts)))))
(defn rename-casebook-observable-types
[{:keys [observables bundle] :as casebook} old new]
(cond-> casebook
(seq observables) (update :observables with-renamed-observable-types old new)
bundle (update :bundle rename-bundle-observable-types old new)))
(defn rename-observable-type
[old new]
(map (fn [{observable-type :type
:as doc}]
(case observable-type
"sighting" (rename-sighting-observable-types doc old new)
("judgement"
"verdict") (rename-judgement-observable-type doc old new)
"casebook" (rename-casebook-observable-types doc old new)
doc))))
(defn simplify-incident-time
[incident-time]
(-> incident-time
(dissoc :first_malicious_action :initial_compromise :first_data_exfiltration
:containment_achieved :restoration_achieved)
(set/rename-keys {:incident_discovery :discovered
:incident_opened :opened
:incident_reported :reported
:incident_closed :closed})))
(def simplify-incident
(map (fn [{entity-type :type :as doc}]
(if (= entity-type "incident")
(-> doc
(dissoc :valid_time :reporter :responder :coordinator :victim
:affected_assets :impact_assessment :security_compromise
:COA_requested :COA_taken :contact :history :related_indicators
:related_observables :attributed_actors :related_incidents)
(update :incident_time simplify-incident-time))
doc))))
(def actor-type-array
(map (fn [{type :type
actor-type :actor_type
:as doc}]
(if (and (= type "actor") actor-type)
(-> doc
(assoc :actor_types [actor-type])
(dissoc :actor_type))
doc))))
(def available-migrations
{:identity identity
:__test (map #(assoc % :groups ["migration-test"]))
:0.4.16 (comp (append-version "0.4.16")
add-groups
fix-end-time)
:0.4.28 (comp (append-version "0.4.28")
fix-end-time
add-groups
target-observed_time
pluralize-target)
:1.0.0 (comp (append-version "1.0.0")
(rename-observable-type "pki-serial" "pki_serial")
simplify-incident)
:1.2.0 (comp (append-version "1.2.0")
actor-type-array)
:investigation-actions (comp (append-version "1.1.0")
migrate-action-data)
:describe (comp (append-version "1.1.0")
migrate-describe)})
|
39894e09fd610b09d490a634590ad4ee38a16d6564596afad046aee4cac2410b | rkaippully/webgear | Main.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad (replicateM_, when)
import Criterion.Main (bench, defaultMain, nfIO)
import Data.ByteString (ByteString)
import Data.IORef (newIORef, readIORef, writeIORef)
import Network.HTTP.Types (methodDelete, methodGet, methodPut, statusCode)
import Network.Wai (Application, defaultRequest)
import Network.Wai.Internal (Request (..), Response (..), ResponseReceived (..))
import System.Environment (getArgs)
import qualified Network.Wai.Handler.Warp as Warp
import qualified Scotty
import qualified Servant
import qualified WebGear
import Model (newStore)
main :: IO ()
main = do
store <- newStore
getArgs >>= \case
["webgear"] -> Warp.run 3000 (WebGear.application store)
["servant"] -> Warp.run 3000 (Servant.application store)
["scotty"] -> Scotty.application store >>= Warp.run 3000
_ -> runCriterion
runCriterion :: IO ()
runCriterion = do
store <- newStore
defaultMain [ bench "webgear" $ nfIO (runTest $ WebGear.application store)
, bench "servant" $ nfIO (runTest $ Servant.application store)
, bench "scotty" $ nfIO (Scotty.application store >>= runTest)
]
runTest :: Application -> IO ()
runTest app = replicateM_ 500 $ do
_ <- putRequest >>= flip app (respond 200)
_ <- app getRequest (respond 200)
_ <- app deleteRequest (respond 204)
return ()
putRequest :: IO Request
putRequest = do
f <- bodyGetter "{\"userId\": 1, \"userName\": \"John Doe\", \"dateOfBirth\": \"2000-03-01\", \"gender\": \"Male\", \"emailAddress\": \"\"}"
return defaultRequest
{ requestMethod = methodPut
, requestHeaders = [("Content-type", "application/json")]
, pathInfo = ["v1", "users", "1"]
, requestBody = f
}
bodyGetter :: ByteString -> IO (IO ByteString)
bodyGetter s = do
ref <- newIORef (Just s)
pure $ readIORef ref >>= \case
Nothing -> pure ""
Just x -> writeIORef ref Nothing >> return x
getRequest :: Request
getRequest = defaultRequest
{ requestMethod = methodGet
, pathInfo = ["v1", "users", "1"]
}
deleteRequest :: Request
deleteRequest = defaultRequest
{ requestMethod = methodDelete
, pathInfo = ["v1", "users", "1"]
}
respond :: Int -> Response -> IO ResponseReceived
respond expectedStatus res = do
let actualStatus = statusOf res
when (expectedStatus /= actualStatus) $
putStrLn "Unexpected response status"
return ResponseReceived
statusOf :: Response -> Int
statusOf (ResponseFile status _ _ _) = statusCode status
statusOf (ResponseBuilder status _ _) = statusCode status
statusOf (ResponseStream status _ _) = statusCode status
statusOf (ResponseRaw _ res) = statusOf res
| null | https://raw.githubusercontent.com/rkaippully/webgear/60359389271292b1c81ae4c9292d2a69a52d5a38/webgear-benchmarks/src/users/Main.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
module Main where
import Control.Monad (replicateM_, when)
import Criterion.Main (bench, defaultMain, nfIO)
import Data.ByteString (ByteString)
import Data.IORef (newIORef, readIORef, writeIORef)
import Network.HTTP.Types (methodDelete, methodGet, methodPut, statusCode)
import Network.Wai (Application, defaultRequest)
import Network.Wai.Internal (Request (..), Response (..), ResponseReceived (..))
import System.Environment (getArgs)
import qualified Network.Wai.Handler.Warp as Warp
import qualified Scotty
import qualified Servant
import qualified WebGear
import Model (newStore)
main :: IO ()
main = do
store <- newStore
getArgs >>= \case
["webgear"] -> Warp.run 3000 (WebGear.application store)
["servant"] -> Warp.run 3000 (Servant.application store)
["scotty"] -> Scotty.application store >>= Warp.run 3000
_ -> runCriterion
runCriterion :: IO ()
runCriterion = do
store <- newStore
defaultMain [ bench "webgear" $ nfIO (runTest $ WebGear.application store)
, bench "servant" $ nfIO (runTest $ Servant.application store)
, bench "scotty" $ nfIO (Scotty.application store >>= runTest)
]
runTest :: Application -> IO ()
runTest app = replicateM_ 500 $ do
_ <- putRequest >>= flip app (respond 200)
_ <- app getRequest (respond 200)
_ <- app deleteRequest (respond 204)
return ()
putRequest :: IO Request
putRequest = do
f <- bodyGetter "{\"userId\": 1, \"userName\": \"John Doe\", \"dateOfBirth\": \"2000-03-01\", \"gender\": \"Male\", \"emailAddress\": \"\"}"
return defaultRequest
{ requestMethod = methodPut
, requestHeaders = [("Content-type", "application/json")]
, pathInfo = ["v1", "users", "1"]
, requestBody = f
}
bodyGetter :: ByteString -> IO (IO ByteString)
bodyGetter s = do
ref <- newIORef (Just s)
pure $ readIORef ref >>= \case
Nothing -> pure ""
Just x -> writeIORef ref Nothing >> return x
getRequest :: Request
getRequest = defaultRequest
{ requestMethod = methodGet
, pathInfo = ["v1", "users", "1"]
}
deleteRequest :: Request
deleteRequest = defaultRequest
{ requestMethod = methodDelete
, pathInfo = ["v1", "users", "1"]
}
respond :: Int -> Response -> IO ResponseReceived
respond expectedStatus res = do
let actualStatus = statusOf res
when (expectedStatus /= actualStatus) $
putStrLn "Unexpected response status"
return ResponseReceived
statusOf :: Response -> Int
statusOf (ResponseFile status _ _ _) = statusCode status
statusOf (ResponseBuilder status _ _) = statusCode status
statusOf (ResponseStream status _ _) = statusCode status
statusOf (ResponseRaw _ res) = statusOf res
|
753e709ff4a6c35f4f1dc3520b836b251169e6511513b4200ed17f49a0d1803d | atlas-engineer/nyxt | package.lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(uiop:define-package :text-buffer
(:use :cl)
(:export #:text-buffer #:cursor))
| null | https://raw.githubusercontent.com/atlas-engineer/nyxt/aa27fc47314046062d6f4e7ef5d8c95a62c2858f/libraries/text-buffer/package.lisp | lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(uiop:define-package :text-buffer
(:use :cl)
(:export #:text-buffer #:cursor))
| |
5ff1aaa456ea9d1c3a49ae6711d0c99b2d06f2bb41c57b047ad809e7872d8747 | GaloisInc/saw-script | ClearState.hs | # LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
module SAWServer.ClearState
( clearState
, clearStateDescr
, clearAllStates
, clearAllStatesDescr
) where
import qualified Argo
import qualified Argo.Doc as Doc
import qualified Data.Aeson as JSON
import Data.Aeson ((.:))
newtype ClearStateParams = ClearStateParams Argo.StateID
instance JSON.FromJSON ClearStateParams where
parseJSON =
JSON.withObject "params for \"clear state\"" $
\o -> ClearStateParams <$> o .: "state to clear"
instance Doc.DescribedMethod ClearStateParams () where
parameterFieldDescription =
[("state to clear",
Doc.Paragraph [Doc.Text "The state to clear from the server to make room for other unrelated states."])
]
clearStateDescr :: Doc.Block
clearStateDescr =
Doc.Paragraph [Doc.Text "Clear a particular state from the SAW server (making room for subsequent/unrelated states)."]
clearState :: ClearStateParams -> Argo.Notification ()
clearState (ClearStateParams stateID) = Argo.destroyState stateID
data ClearAllStatesParams = ClearAllStatesParams
instance JSON.FromJSON ClearAllStatesParams where
parseJSON =
JSON.withObject "params for \"clear all states\"" $
\_ -> pure ClearAllStatesParams
instance Doc.DescribedMethod ClearAllStatesParams () where
parameterFieldDescription = []
clearAllStatesDescr :: Doc.Block
clearAllStatesDescr =
Doc.Paragraph [Doc.Text "Clear all states from the SAW server (making room for subsequent/unrelated states)."]
clearAllStates :: ClearAllStatesParams -> Argo.Notification ()
clearAllStates ClearAllStatesParams = Argo.destroyAllStates
| null | https://raw.githubusercontent.com/GaloisInc/saw-script/490f05a8e2f8f0bfcc15ddce71a8e3668108efe3/saw-remote-api/src/SAWServer/ClearState.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE MultiParamTypeClasses #
module SAWServer.ClearState
( clearState
, clearStateDescr
, clearAllStates
, clearAllStatesDescr
) where
import qualified Argo
import qualified Argo.Doc as Doc
import qualified Data.Aeson as JSON
import Data.Aeson ((.:))
newtype ClearStateParams = ClearStateParams Argo.StateID
instance JSON.FromJSON ClearStateParams where
parseJSON =
JSON.withObject "params for \"clear state\"" $
\o -> ClearStateParams <$> o .: "state to clear"
instance Doc.DescribedMethod ClearStateParams () where
parameterFieldDescription =
[("state to clear",
Doc.Paragraph [Doc.Text "The state to clear from the server to make room for other unrelated states."])
]
clearStateDescr :: Doc.Block
clearStateDescr =
Doc.Paragraph [Doc.Text "Clear a particular state from the SAW server (making room for subsequent/unrelated states)."]
clearState :: ClearStateParams -> Argo.Notification ()
clearState (ClearStateParams stateID) = Argo.destroyState stateID
data ClearAllStatesParams = ClearAllStatesParams
instance JSON.FromJSON ClearAllStatesParams where
parseJSON =
JSON.withObject "params for \"clear all states\"" $
\_ -> pure ClearAllStatesParams
instance Doc.DescribedMethod ClearAllStatesParams () where
parameterFieldDescription = []
clearAllStatesDescr :: Doc.Block
clearAllStatesDescr =
Doc.Paragraph [Doc.Text "Clear all states from the SAW server (making room for subsequent/unrelated states)."]
clearAllStates :: ClearAllStatesParams -> Argo.Notification ()
clearAllStates ClearAllStatesParams = Argo.destroyAllStates
|
17f7a015df4f3b5a0bed492398cf52fc851bf4cb96c24c5473a5ed10802e374e | takikawa/racket-ppa | info.rkt | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "compiler-lib" "dynext-lib" "scheme-lib"))) (define implies (quote ("dynext-lib"))) (define pkg-desc "Tools for managing C extensions, such as `raco ctool`") (define pkg-authors (quote (mflatt))) (define license (quote (Apache-2.0 OR MIT)))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/26d6ae74a1b19258c9789b7c14c074d867a4b56b/share/pkgs/cext-lib/info.rkt | racket | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "compiler-lib" "dynext-lib" "scheme-lib"))) (define implies (quote ("dynext-lib"))) (define pkg-desc "Tools for managing C extensions, such as `raco ctool`") (define pkg-authors (quote (mflatt))) (define license (quote (Apache-2.0 OR MIT)))))
| |
49bd5af276d6e2a204f8d471f6776b0a2946c44c3068adbb3e36365a7552d7ba | dm3/clojure.java-time | units.clj | (ns java-time.units
(:import (java.time.temporal IsoFields ChronoUnit)))
(defonce iso
{:week-based-years IsoFields/WEEK_BASED_YEARS
:quarter-years IsoFields/QUARTER_YEARS})
(defonce chrono
{:millis ChronoUnit/MILLIS
:weeks ChronoUnit/WEEKS
:centuries ChronoUnit/CENTURIES
:minutes ChronoUnit/MINUTES
:days ChronoUnit/DAYS
:years ChronoUnit/YEARS
:seconds ChronoUnit/SECONDS
:nanos ChronoUnit/NANOS
:decades ChronoUnit/DECADES
:forever ChronoUnit/FOREVER
:hours ChronoUnit/HOURS
:micros ChronoUnit/MICROS
:millenia ChronoUnit/MILLENNIA
:months ChronoUnit/MONTHS
:half-days ChronoUnit/HALF_DAYS
:eras ChronoUnit/ERAS})
| null | https://raw.githubusercontent.com/dm3/clojure.java-time/3a0fe6f350ce1b032af773cd759d9966bedf149a/src/java_time/units.clj | clojure | (ns java-time.units
(:import (java.time.temporal IsoFields ChronoUnit)))
(defonce iso
{:week-based-years IsoFields/WEEK_BASED_YEARS
:quarter-years IsoFields/QUARTER_YEARS})
(defonce chrono
{:millis ChronoUnit/MILLIS
:weeks ChronoUnit/WEEKS
:centuries ChronoUnit/CENTURIES
:minutes ChronoUnit/MINUTES
:days ChronoUnit/DAYS
:years ChronoUnit/YEARS
:seconds ChronoUnit/SECONDS
:nanos ChronoUnit/NANOS
:decades ChronoUnit/DECADES
:forever ChronoUnit/FOREVER
:hours ChronoUnit/HOURS
:micros ChronoUnit/MICROS
:millenia ChronoUnit/MILLENNIA
:months ChronoUnit/MONTHS
:half-days ChronoUnit/HALF_DAYS
:eras ChronoUnit/ERAS})
| |
55bad1d81c2bcd0755d026d2954c8a62c03356e0c7280ad6a14dfa6770744dcc | Et7f3/ocalc | grandEntier_on.mli | type grandentier = bool * int list
val zero : grandentier
val unit : grandentier
val est_negatif : grandentier -> bool
val neg : grandentier -> grandentier
val comparer : grandentier -> grandentier -> int
val additioner : grandentier -> grandentier -> grandentier
val soustraire : grandentier -> grandentier -> grandentier
val multiplier : grandentier -> grandentier -> grandentier
val pgcd : grandentier -> grandentier -> grandentier
val diviser_multiple : grandentier -> grandentier -> grandentier
val diviser : grandentier -> grandentier -> grandentier * grandentier
val grandentier_depuis_texte : string -> grandentier
val texte_depuis_grandentier : grandentier -> string
| null | https://raw.githubusercontent.com/Et7f3/ocalc/667da625676829307fc59542906ed2075fe818bd/src/modules/grandEntier_on.mli | ocaml | type grandentier = bool * int list
val zero : grandentier
val unit : grandentier
val est_negatif : grandentier -> bool
val neg : grandentier -> grandentier
val comparer : grandentier -> grandentier -> int
val additioner : grandentier -> grandentier -> grandentier
val soustraire : grandentier -> grandentier -> grandentier
val multiplier : grandentier -> grandentier -> grandentier
val pgcd : grandentier -> grandentier -> grandentier
val diviser_multiple : grandentier -> grandentier -> grandentier
val diviser : grandentier -> grandentier -> grandentier * grandentier
val grandentier_depuis_texte : string -> grandentier
val texte_depuis_grandentier : grandentier -> string
| |
b1b5445c0b07362dba14e6ecbbefa51170941aabf2e64dccfb6bc2233e76d03c | cartazio/tlaps | isabelle_keywords.mli |
* Copyright ( C ) 2012 INRIA and Microsoft Corporation
* Copyright (C) 2012 INRIA and Microsoft Corporation
*)
val v : string list;;
| null | https://raw.githubusercontent.com/cartazio/tlaps/562a34c066b636da7b921ae30fc5eacf83608280/src/isabelle_keywords.mli | ocaml |
* Copyright ( C ) 2012 INRIA and Microsoft Corporation
* Copyright (C) 2012 INRIA and Microsoft Corporation
*)
val v : string list;;
| |
1c6c34047fe003c85d06250626c6bc1eedbade65a152a80f56f591ee925287c8 | flavioc/cl-hurd | flags.lisp |
(in-package :hurd)
;;
;; Exec flags for file-exec callback.
;;
(defbitfield exec-flags
(:newtask #x00000001)
(:secure #x00000002)
(:defaults #x00000004)
(:sigtrap #x00000008)
(:stack-args #x00000010))
| null | https://raw.githubusercontent.com/flavioc/cl-hurd/982232f47d1a0ff4df5fde2edad03b9df871470a/hurd/exec/flags.lisp | lisp |
Exec flags for file-exec callback.
|
(in-package :hurd)
(defbitfield exec-flags
(:newtask #x00000001)
(:secure #x00000002)
(:defaults #x00000004)
(:sigtrap #x00000008)
(:stack-args #x00000010))
|
386c33096ba44299d8a72d58d3d96c263d8c849b03b7c05719b101f206caa409 | ont-app/igraph-jena | build.clj | (ns build
"Adpated from -new/blob/develop/resources/org/corfield/new/lib/root/build.clj"
(:refer-clojure :exclude [test])
(:require [clojure.tools.build.api :as b] ; for b/git-count-revs
[org.corfield.build :as bb]
[clojure.spec.alpha :as spec]
[clojure.tools.deps.specs :as deps-specs]
))
(def lib 'ont-app/igraph-jena)
(def version "0.2.2")
(defn validate-deps
"Throws an `ex-info` of type `::invalid-deps`, or returns `opts` unchanged"
[opts]
(println "Validating deps.edn...")
(spec/check-asserts true)
(spec/assert ::deps-specs/deps-map
(-> "deps.edn" (slurp) (clojure.edn/read-string)))
(println "deps.edn conforms to clojure.tools.deps.specs")
opts)
(defn test "Run the tests."
[opts]
(bb/run-tests opts))
(defn ci "Run the CI pipeline of tests (and build the JAR)."
[opts]
(-> opts
(assoc :lib lib :version version)
(validate-deps)
(bb/run-tests)
(bb/clean)
(bb/jar)))
(defn clean "Cleans any clj/s compilation output.
Where:
`opts` := `m` s.t. (keys m) may match #{:include-caches?, ...}
`include-caches?` when truthy indicates to clear .cpcache and .shadow-cljs directories.
"
[opts]
(println (str "Cleaning with opts:" opts "."))
;; TODO: check opts
(bb/clean opts)
(b/delete {:path "./out"})
(b/delete {:path "./cljs-test-runner-out"})
(when (:include-caches? opts)
(println (str "Clearing caches"))
(b/delete {:path "./.cpcache"})
(b/delete {:path "./.shadow-cljs"}))
opts)
(defn install "Install the JAR locally."
[opts]
(-> opts
(assoc :lib lib :version version)
(bb/install)))
(defn deploy
"Deploy the JAR to Clojars. Using $CLOJARS_USERNAME and $CLOJARS_PASSWORD"
[opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy)))
| null | https://raw.githubusercontent.com/ont-app/igraph-jena/b6ce294bd4b9c9d2db96df227b54b9e961cf43df/build.clj | clojure | for b/git-count-revs
TODO: check opts | (ns build
"Adpated from -new/blob/develop/resources/org/corfield/new/lib/root/build.clj"
(:refer-clojure :exclude [test])
[org.corfield.build :as bb]
[clojure.spec.alpha :as spec]
[clojure.tools.deps.specs :as deps-specs]
))
(def lib 'ont-app/igraph-jena)
(def version "0.2.2")
(defn validate-deps
"Throws an `ex-info` of type `::invalid-deps`, or returns `opts` unchanged"
[opts]
(println "Validating deps.edn...")
(spec/check-asserts true)
(spec/assert ::deps-specs/deps-map
(-> "deps.edn" (slurp) (clojure.edn/read-string)))
(println "deps.edn conforms to clojure.tools.deps.specs")
opts)
(defn test "Run the tests."
[opts]
(bb/run-tests opts))
(defn ci "Run the CI pipeline of tests (and build the JAR)."
[opts]
(-> opts
(assoc :lib lib :version version)
(validate-deps)
(bb/run-tests)
(bb/clean)
(bb/jar)))
(defn clean "Cleans any clj/s compilation output.
Where:
`opts` := `m` s.t. (keys m) may match #{:include-caches?, ...}
`include-caches?` when truthy indicates to clear .cpcache and .shadow-cljs directories.
"
[opts]
(println (str "Cleaning with opts:" opts "."))
(bb/clean opts)
(b/delete {:path "./out"})
(b/delete {:path "./cljs-test-runner-out"})
(when (:include-caches? opts)
(println (str "Clearing caches"))
(b/delete {:path "./.cpcache"})
(b/delete {:path "./.shadow-cljs"}))
opts)
(defn install "Install the JAR locally."
[opts]
(-> opts
(assoc :lib lib :version version)
(bb/install)))
(defn deploy
"Deploy the JAR to Clojars. Using $CLOJARS_USERNAME and $CLOJARS_PASSWORD"
[opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy)))
|
76c356c4fea066a9c25aff84a98d7c80dcec67071db8a116da16c603be1d4f82 | kowainik/tomland | Array.hs | module Test.Toml.Parser.Array
( arraySpecs
) where
import Data.Time (TimeOfDay (..))
import Test.Hspec (Spec, describe, it)
import Test.Toml.Parser.Common (arrayFailOn, day1, day2, int1, int2, int3, int4, parseArray)
import Toml.Type (UValue (..))
arraySpecs :: Spec
arraySpecs = describe "arrayP" $ do
it "can parse arrays" $ do
parseArray
"[]"
[]
parseArray
"[1]"
[int1]
parseArray
"[1, 2, 3]"
[int1, int2, int3]
parseArray
"[1.2, 2.3, 3.4]"
[UDouble 1.2, UDouble 2.3, UDouble 3.4]
parseArray
"['x', 'y']"
[UText "x", UText "y"]
parseArray
"[[1], [2]]"
[UArray [UInteger 1], UArray [UInteger 2]]
parseArray
"[1920-12-10, 1979-05-27]"
[UDay day2, UDay day1]
parseArray
"[16:33:05, 10:15:30]"
[UHours (TimeOfDay 16 33 5), UHours (TimeOfDay 10 15 30)]
it "can parse multiline arrays" $
parseArray
"[\n1,\n2\n]"
[int1, int2]
it "can parse an array of arrays" $
parseArray
"[[1], [2.3, 5.1]]"
[UArray [int1], UArray [UDouble 2.3, UDouble 5.1]]
it "can parse an array with terminating commas (trailing commas)" $ do
parseArray
"[1, 2,]"
[int1, int2]
parseArray
"[1, 2, 3, , ,]"
[int1, int2, int3]
it "allows an arbitrary number of comments and newlines before or after a value" $
parseArray
"[\n\n#c\n1, #c 2 \n 2, \n\n\n 3, #c \n #c \n 4]"
[int1, int2, int3, int4]
it "ignores white spaces" $
parseArray
"[ 1 , 2,3, 4 ]"
[int1, int2, int3, int4]
it "fails if the elements are not surrounded by square brackets" $ do
arrayFailOn "1, 2, 3"
arrayFailOn "[1, 2, 3"
arrayFailOn "1, 2, 3]"
arrayFailOn "{'x', 'y', 'z'}"
arrayFailOn "(\"ab\", \"cd\")"
arrayFailOn "<true, false>"
it "fails if the elements are not separated by commas" $ do
arrayFailOn "[1 2 3]"
arrayFailOn "[1 . 2 . 3]"
arrayFailOn "['x' - 'y' - 'z']"
arrayFailOn "[1920-12-10, 10:15:30]"
| null | https://raw.githubusercontent.com/kowainik/tomland/2b4bcc465b79873a61bccfc7131d423a9a0aec1d/test/Test/Toml/Parser/Array.hs | haskell | module Test.Toml.Parser.Array
( arraySpecs
) where
import Data.Time (TimeOfDay (..))
import Test.Hspec (Spec, describe, it)
import Test.Toml.Parser.Common (arrayFailOn, day1, day2, int1, int2, int3, int4, parseArray)
import Toml.Type (UValue (..))
arraySpecs :: Spec
arraySpecs = describe "arrayP" $ do
it "can parse arrays" $ do
parseArray
"[]"
[]
parseArray
"[1]"
[int1]
parseArray
"[1, 2, 3]"
[int1, int2, int3]
parseArray
"[1.2, 2.3, 3.4]"
[UDouble 1.2, UDouble 2.3, UDouble 3.4]
parseArray
"['x', 'y']"
[UText "x", UText "y"]
parseArray
"[[1], [2]]"
[UArray [UInteger 1], UArray [UInteger 2]]
parseArray
"[1920-12-10, 1979-05-27]"
[UDay day2, UDay day1]
parseArray
"[16:33:05, 10:15:30]"
[UHours (TimeOfDay 16 33 5), UHours (TimeOfDay 10 15 30)]
it "can parse multiline arrays" $
parseArray
"[\n1,\n2\n]"
[int1, int2]
it "can parse an array of arrays" $
parseArray
"[[1], [2.3, 5.1]]"
[UArray [int1], UArray [UDouble 2.3, UDouble 5.1]]
it "can parse an array with terminating commas (trailing commas)" $ do
parseArray
"[1, 2,]"
[int1, int2]
parseArray
"[1, 2, 3, , ,]"
[int1, int2, int3]
it "allows an arbitrary number of comments and newlines before or after a value" $
parseArray
"[\n\n#c\n1, #c 2 \n 2, \n\n\n 3, #c \n #c \n 4]"
[int1, int2, int3, int4]
it "ignores white spaces" $
parseArray
"[ 1 , 2,3, 4 ]"
[int1, int2, int3, int4]
it "fails if the elements are not surrounded by square brackets" $ do
arrayFailOn "1, 2, 3"
arrayFailOn "[1, 2, 3"
arrayFailOn "1, 2, 3]"
arrayFailOn "{'x', 'y', 'z'}"
arrayFailOn "(\"ab\", \"cd\")"
arrayFailOn "<true, false>"
it "fails if the elements are not separated by commas" $ do
arrayFailOn "[1 2 3]"
arrayFailOn "[1 . 2 . 3]"
arrayFailOn "['x' - 'y' - 'z']"
arrayFailOn "[1920-12-10, 10:15:30]"
| |
d0422d3f25e18894756c27f02138c9246434ed46adabf08bd1a939e5aaaa0f63 | babashka/babashka | impl2.clj | (ns my.impl2
(:require [my.impl :as impl]))
(def impl-fn impl/impl-fn)
| null | https://raw.githubusercontent.com/babashka/babashka/3dfc15f5a40efaec07cba991892c1207a352fab4/test-resources/babashka/uberjar/src/my/impl2.clj | clojure | (ns my.impl2
(:require [my.impl :as impl]))
(def impl-fn impl/impl-fn)
| |
d9902415ed4eba5c46fe5396649fe0694e58f1978f6c8b50fc816235056fae8b | lispbuilder/lispbuilder | stdinc.lisp |
(in-package #:lispbuilder-sdl-cffi)
;;; Probably do not need this.
(defcenum SDL-bool
(:SDL-FALSE 0)
(:SDL-TRUE 1))
;;; Probably do not need this.
(defcstruct Uint64
(hi :unsigned-int)
(lo :unsigned-int))
;;; Probably do not need this.
(defcenum SDL-DUMMY-ENUM
:DUMMY-ENUM-VALUE)
extern DECLSPEC char * SDLCALL SDL_getenv(const char * name ) ;
(defun sdl-get-env (string)
(cond
((cffi:foreign-symbol-pointer "SDL_getenv" :library 'sdl)
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "SDL_getenv") () :string string :pointer))
((cffi:foreign-symbol-pointer "getenv")
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "getenv") () :string string :pointer))))
extern DECLSPEC int SDLCALL SDL_putenv(const char * variable ) ;
(defun sdl-put-env (string)
(cond
((cffi:foreign-symbol-pointer "SDL_putenv" :library 'sdl)
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "SDL_putenv") () :string string :int))
((cffi:foreign-symbol-pointer "putenv")
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "putenv") () :string string :int))))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-sdl/cffi/stdinc.lisp | lisp | Probably do not need this.
Probably do not need this.
Probably do not need this.
|
(in-package #:lispbuilder-sdl-cffi)
(defcenum SDL-bool
(:SDL-FALSE 0)
(:SDL-TRUE 1))
(defcstruct Uint64
(hi :unsigned-int)
(lo :unsigned-int))
(defcenum SDL-DUMMY-ENUM
:DUMMY-ENUM-VALUE)
(defun sdl-get-env (string)
(cond
((cffi:foreign-symbol-pointer "SDL_getenv" :library 'sdl)
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "SDL_getenv") () :string string :pointer))
((cffi:foreign-symbol-pointer "getenv")
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "getenv") () :string string :pointer))))
(defun sdl-put-env (string)
(cond
((cffi:foreign-symbol-pointer "SDL_putenv" :library 'sdl)
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "SDL_putenv") () :string string :int))
((cffi:foreign-symbol-pointer "putenv")
(cffi:foreign-funcall-pointer (cffi:foreign-symbol-pointer "putenv") () :string string :int))))
|
1d3d12149f9389267d324fd4e16a9da86adf04e668db8d08d6b8a4f368097889 | qnikst/irc-simple | client.hs | {-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Client
import Data.ByteString.Char8 (ByteString)
import Data.Conduit.Network
import Data.Monoid ((<>))
import Options.Applicative
-- | Server configuration
data Config = Config
{ cfgPort :: !Int
, cfgHost :: !ByteString
}
-- | Command line parser
irc :: Parser Config
irc = Config <$> option auto
( long "port"
<> metavar "PORT"
<> short 'p'
<> value 9999
<> help "Application port")
<*> option auto
( long "host"
<> short 'h'
<> metavar "HOST"
<> value "localhost"
<> help "Application host")
main :: IO ()
main = execParser opts >>= run where
run :: Config -> IO ()
run (Config port host) = client $ clientSettings port host
opts = info (irc <**> helper)
(fullDesc
<> progDesc "simple irc server"
<> header "irc - is a nice thing")
| null | https://raw.githubusercontent.com/qnikst/irc-simple/6deaec9c240b0e9023257b96e0328e883e0760a8/app/client.hs | haskell | # LANGUAGE ViewPatterns #
# LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
| Server configuration
| Command line parser | module Main where
import Client
import Data.ByteString.Char8 (ByteString)
import Data.Conduit.Network
import Data.Monoid ((<>))
import Options.Applicative
data Config = Config
{ cfgPort :: !Int
, cfgHost :: !ByteString
}
irc :: Parser Config
irc = Config <$> option auto
( long "port"
<> metavar "PORT"
<> short 'p'
<> value 9999
<> help "Application port")
<*> option auto
( long "host"
<> short 'h'
<> metavar "HOST"
<> value "localhost"
<> help "Application host")
main :: IO ()
main = execParser opts >>= run where
run :: Config -> IO ()
run (Config port host) = client $ clientSettings port host
opts = info (irc <**> helper)
(fullDesc
<> progDesc "simple irc server"
<> header "irc - is a nice thing")
|
30e09ed88e6f88567e82e7c5d18beb3adf8309fdfb1d6c51503d9dfdbb1ef54b | MyDataFlow/ttalk-server | ejabberd_auth_internal.erl | %%%----------------------------------------------------------------------
%%% File : ejabberd_auth_internal.erl
Author : < >
%%% Purpose : Authentification via mnesia
Created : 12 Dec 2004 by < >
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2011 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
%%%
%%%----------------------------------------------------------------------
-module(ejabberd_auth_internal).
-author('').
%% External exports
-behaviour(ejabberd_gen_auth).
-export([start/1,
stop/1,
set_password/3,
check_password/3,
check_password/5,
try_register/3,
dirty_get_registered_users/0,
get_vh_registered_users/1,
get_vh_registered_users/2,
get_vh_registered_users_number/1,
get_vh_registered_users_number/2,
get_password/2,
get_password_s/2,
does_user_exist/2,
remove_user/2,
remove_user/3,
store_type/1,
plain_password_required/0
]).
%% Exported for behaviour but not implemented
-export([login/2, get_password/3]).
-export([scram_passwords/0]).
-include("ejabberd.hrl").
-record(passwd, {us, password}).
-type passwd() :: #passwd{
us :: ejabberd:simple_bare_jid(),
password :: binary() | #scram{}
}.
-record(reg_users_counter, {vhost, count}).
-type users_counter() :: #reg_users_counter {
vhost :: binary(),
count :: integer()
}.
%%%----------------------------------------------------------------------
%%% API
%%%----------------------------------------------------------------------
-spec start(Host :: ejabberd:server()) -> ok.
start(Host) ->
mnesia:create_table(passwd, [{disc_copies, [node()]},
{attributes, record_info(fields, passwd)},
{storage_properties,
[{ets, [{read_concurrency,true}]}]}
]),
mnesia:create_table(reg_users_counter,
[{ram_copies, [node()]},
{attributes, record_info(fields, reg_users_counter)}]),
mnesia:add_table_copy(passwd, node(), disc_copies),
mnesia:add_table_copy(reg_users_counter, node(), ram_copies),
update_reg_users_counter_table(Host),
ok.
-spec stop(Host :: ejabberd:server()) -> ok.
stop(_Host) ->
ok.
-spec update_reg_users_counter_table(Server :: ejabberd:server()) -> any().
update_reg_users_counter_table(Server) ->
Set = get_vh_registered_users(Server),
Size = length(Set),
LServer = jid:nameprep(Server),
F = fun() ->
write_counter(#reg_users_counter{vhost = LServer, count = Size})
end,
mnesia:sync_dirty(F).
plain_password_required() ->
false.
store_type(Server) ->
case scram:enabled(Server) of
false -> plain;
true -> scram
end.
-spec check_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()) -> boolean().
check_password(LUser, LServer, Password) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = #scram{} = Scram}] ->
scram:check_password(Password, Scram);
[#passwd{password = Password}] ->
Password /= <<>>;
_ ->
false
end.
-spec check_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary(),
Digest :: binary(),
DigestGen :: fun()) -> boolean().
check_password(LUser, LServer, Password, Digest, DigestGen) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
Passwd = base64:decode(Scram#scram.storedkey),
ejabberd_auth:check_digest(Digest, DigestGen, Password, Passwd);
[#passwd{password = Passwd}] ->
ejabberd_auth:check_digest(Digest, DigestGen, Password, Passwd);
_ ->
false
end.
-spec set_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()) -> ok | {error, not_allowed | invalid_jid}.
set_password(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
Password2 = case scram:enabled(LServer) of
true ->
scram:password_to_scram(Password, scram:iterations(LServer));
false -> Password
end,
write_passwd(#passwd{us = US, password = Password2})
end,
{atomic, ok} = mnesia:transaction(F),
ok.
-spec try_register(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()
) -> ok | {error, exists | not_allowed}.
try_register(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
case read_passwd(US) of
[] ->
Password2 = case scram:enabled(LServer) and is_binary(Password) of
true ->
scram:password_to_scram(Password, scram:iterations(LServer));
false -> Password
end,
write_passwd(#passwd{us = US, password = Password2}),
mnesia:dirty_update_counter(reg_users_counter, LServer, 1),
ok;
[_E] ->
exists
end
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{atomic, exists} ->
{error, exists};
Result ->
?ERROR_MSG("transaction_result=~p", [Result]),
{error, not_allowed}
end.
@doc Get all registered users in
-spec dirty_get_registered_users() -> [ejabberd:simple_bare_jid()].
dirty_get_registered_users() ->
mnesia:dirty_all_keys(passwd).
-spec get_vh_registered_users(LServer :: ejabberd:lserver()
) -> [ejabberd:simple_bare_jid()].
get_vh_registered_users(LServer) ->
mnesia:dirty_select(
passwd,
[{#passwd{us = '$1', _ = '_'},
[{'==', {element, 2, '$1'}, LServer}],
['$1']}]).
-type query_keyword() :: from | to | limit | offset | prefix.
-type query_value() :: integer() | binary().
-spec get_vh_registered_users(LServer :: ejabberd:lserver(),
Query :: [{query_keyword(), query_value()}]
) -> [ejabberd:simple_bare_jid()].
get_vh_registered_users(LServer, [{from, Start}, {to, End}])
when is_integer(Start) and is_integer(End) ->
get_vh_registered_users(LServer, [{limit, End-Start+1}, {offset, Start}]);
get_vh_registered_users(LServer, [{limit, Limit}, {offset, Offset}])
when is_integer(Limit) and is_integer(Offset) ->
case get_vh_registered_users(LServer) of
[] ->
[];
Users ->
Set = lists:keysort(1, Users),
L = length(Set),
Start = if Offset < 1 -> 1;
Offset > L -> L;
true -> Offset
end,
lists:sublist(Set, Start, Limit)
end;
get_vh_registered_users(LServer, [{prefix, Prefix}])
when is_binary(Prefix) ->
Set = [{U,S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix],
lists:keysort(1, Set);
get_vh_registered_users(LServer, [{prefix, Prefix}, {from, Start}, {to, End}])
when is_binary(Prefix) and is_integer(Start) and is_integer(End) ->
get_vh_registered_users(LServer, [{prefix, Prefix}, {limit, End-Start+1}, {offset, Start}]);
get_vh_registered_users(LServer, [{prefix, Prefix}, {limit, Limit}, {offset, Offset}])
when is_binary(Prefix) and is_integer(Limit) and is_integer(Offset) ->
case [{U,S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix] of
[] ->
[];
Users ->
Set = lists:keysort(1, Users),
L = length(Set),
Start = if Offset < 1 -> 1;
Offset > L -> L;
true -> Offset
end,
lists:sublist(Set, Start, Limit)
end;
get_vh_registered_users(LServer, _) ->
get_vh_registered_users(LServer).
-spec get_vh_registered_users_number(LServer :: ejabberd:server()
) -> non_neg_integer().
get_vh_registered_users_number(LServer) ->
Query = mnesia:dirty_select(
reg_users_counter,
[{#reg_users_counter{vhost = LServer, count = '$1'},
[],
['$1']}]),
case Query of
[Count] ->
Count;
_ -> 0
end.
-spec get_vh_registered_users_number(LServer :: ejabberd:lserver(),
Query :: [{prefix, binary()}]
) -> integer().
get_vh_registered_users_number(LServer, [{prefix, Prefix}]) when is_binary(Prefix) ->
Set = [{U, S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix],
length(Set);
get_vh_registered_users_number(LServer, _) ->
get_vh_registered_users_number(LServer).
-spec get_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()) -> binary() | false.
get_password(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
{base64:decode(Scram#scram.storedkey),
base64:decode(Scram#scram.serverkey),
base64:decode(Scram#scram.salt),
Scram#scram.iterationcount};
[#passwd{password = Password}] ->
Password;
_ ->
false
end.
-spec get_password_s(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()) -> binary().
get_password_s(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
<<"">>;
[#passwd{password = Password}] ->
Password;
_ ->
<<"">>
end.
-spec does_user_exist(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()
) -> boolean() | {error, atom()}.
does_user_exist(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[] ->
false;
[_] ->
true;
Other ->
{error, Other}
end.
%% @doc Remove user.
%% Note: it returns ok even if there was some problem removing the user.
-spec remove_user(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()
) -> ok | {error, not_allowed}.
remove_user(LUser, LServer) ->
US = {LUser, LServer},
F = fun() ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1)
end,
mnesia:transaction(F),
ok.
%% @doc Remove user if the provided password is correct.
-spec remove_user(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()
) -> ok | {error, not_exists | not_allowed | bad_request}.
remove_user(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
case read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
case scram:check_password(Password, Scram) of
true ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1),
ok;
false ->
not_allowed
end;
[#passwd{password = Password}] ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1),
ok;
_ ->
not_exists
end
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{atomic, not_exists} ->
{error, not_exists};
{atomic, not_allowed} ->
{error, not_allowed};
Error ->
?ERROR_MSG("Mnesia transaction fail: ~p", [Error]),
{error, bad_request}
end.
-spec scram_passwords() -> {atomic, ok}.
scram_passwords() ->
?INFO_MSG("Converting the stored passwords into SCRAM bits", []),
Fields = record_info(fields, passwd),
{atomic, ok} = mnesia:transform_table(passwd, fun scramming_function/1, Fields).
-spec scramming_function(passwd()) -> passwd().
scramming_function(#passwd{us = {_, Server}, password = Password} = P) ->
Scram = scram:password_to_scram(Password, scram:iterations(Server)),
P#passwd{password = Scram}.
-spec dirty_read_passwd(US :: ejabberd:simple_bare_jid()) -> [passwd()].
dirty_read_passwd(US) ->
mnesia:dirty_read(passwd, US).
-spec read_passwd(US :: ejabberd:simple_bare_jid()) -> [passwd()].
read_passwd(US) ->
mnesia:read({passwd, US}).
-spec write_passwd(passwd()) -> ok.
write_passwd(#passwd{} = Passwd) ->
mnesia:write(Passwd).
-spec write_counter(users_counter()) -> ok.
write_counter(#reg_users_counter{} = Counter) ->
mnesia:write(Counter).
%% @doc gen_auth unimplemented callbacks
login(_User, _Server) -> erlang:error(not_implemented).
get_password(_User, _Server, _DefaultValue) -> erlang:error(not_implemented).
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/apps/ejabberd/src/ejabberd_auth_internal.erl | erlang | ----------------------------------------------------------------------
File : ejabberd_auth_internal.erl
Purpose : Authentification via mnesia
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program; if not, write to the Free Software
----------------------------------------------------------------------
External exports
Exported for behaviour but not implemented
----------------------------------------------------------------------
API
----------------------------------------------------------------------
@doc Remove user.
Note: it returns ok even if there was some problem removing the user.
@doc Remove user if the provided password is correct.
@doc gen_auth unimplemented callbacks | Author : < >
Created : 12 Dec 2004 by < >
ejabberd , Copyright ( C ) 2002 - 2011 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
-module(ejabberd_auth_internal).
-author('').
-behaviour(ejabberd_gen_auth).
-export([start/1,
stop/1,
set_password/3,
check_password/3,
check_password/5,
try_register/3,
dirty_get_registered_users/0,
get_vh_registered_users/1,
get_vh_registered_users/2,
get_vh_registered_users_number/1,
get_vh_registered_users_number/2,
get_password/2,
get_password_s/2,
does_user_exist/2,
remove_user/2,
remove_user/3,
store_type/1,
plain_password_required/0
]).
-export([login/2, get_password/3]).
-export([scram_passwords/0]).
-include("ejabberd.hrl").
-record(passwd, {us, password}).
-type passwd() :: #passwd{
us :: ejabberd:simple_bare_jid(),
password :: binary() | #scram{}
}.
-record(reg_users_counter, {vhost, count}).
-type users_counter() :: #reg_users_counter {
vhost :: binary(),
count :: integer()
}.
-spec start(Host :: ejabberd:server()) -> ok.
start(Host) ->
mnesia:create_table(passwd, [{disc_copies, [node()]},
{attributes, record_info(fields, passwd)},
{storage_properties,
[{ets, [{read_concurrency,true}]}]}
]),
mnesia:create_table(reg_users_counter,
[{ram_copies, [node()]},
{attributes, record_info(fields, reg_users_counter)}]),
mnesia:add_table_copy(passwd, node(), disc_copies),
mnesia:add_table_copy(reg_users_counter, node(), ram_copies),
update_reg_users_counter_table(Host),
ok.
-spec stop(Host :: ejabberd:server()) -> ok.
stop(_Host) ->
ok.
-spec update_reg_users_counter_table(Server :: ejabberd:server()) -> any().
update_reg_users_counter_table(Server) ->
Set = get_vh_registered_users(Server),
Size = length(Set),
LServer = jid:nameprep(Server),
F = fun() ->
write_counter(#reg_users_counter{vhost = LServer, count = Size})
end,
mnesia:sync_dirty(F).
plain_password_required() ->
false.
store_type(Server) ->
case scram:enabled(Server) of
false -> plain;
true -> scram
end.
-spec check_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()) -> boolean().
check_password(LUser, LServer, Password) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = #scram{} = Scram}] ->
scram:check_password(Password, Scram);
[#passwd{password = Password}] ->
Password /= <<>>;
_ ->
false
end.
-spec check_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary(),
Digest :: binary(),
DigestGen :: fun()) -> boolean().
check_password(LUser, LServer, Password, Digest, DigestGen) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
Passwd = base64:decode(Scram#scram.storedkey),
ejabberd_auth:check_digest(Digest, DigestGen, Password, Passwd);
[#passwd{password = Passwd}] ->
ejabberd_auth:check_digest(Digest, DigestGen, Password, Passwd);
_ ->
false
end.
-spec set_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()) -> ok | {error, not_allowed | invalid_jid}.
set_password(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
Password2 = case scram:enabled(LServer) of
true ->
scram:password_to_scram(Password, scram:iterations(LServer));
false -> Password
end,
write_passwd(#passwd{us = US, password = Password2})
end,
{atomic, ok} = mnesia:transaction(F),
ok.
-spec try_register(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()
) -> ok | {error, exists | not_allowed}.
try_register(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
case read_passwd(US) of
[] ->
Password2 = case scram:enabled(LServer) and is_binary(Password) of
true ->
scram:password_to_scram(Password, scram:iterations(LServer));
false -> Password
end,
write_passwd(#passwd{us = US, password = Password2}),
mnesia:dirty_update_counter(reg_users_counter, LServer, 1),
ok;
[_E] ->
exists
end
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{atomic, exists} ->
{error, exists};
Result ->
?ERROR_MSG("transaction_result=~p", [Result]),
{error, not_allowed}
end.
@doc Get all registered users in
-spec dirty_get_registered_users() -> [ejabberd:simple_bare_jid()].
dirty_get_registered_users() ->
mnesia:dirty_all_keys(passwd).
-spec get_vh_registered_users(LServer :: ejabberd:lserver()
) -> [ejabberd:simple_bare_jid()].
get_vh_registered_users(LServer) ->
mnesia:dirty_select(
passwd,
[{#passwd{us = '$1', _ = '_'},
[{'==', {element, 2, '$1'}, LServer}],
['$1']}]).
-type query_keyword() :: from | to | limit | offset | prefix.
-type query_value() :: integer() | binary().
-spec get_vh_registered_users(LServer :: ejabberd:lserver(),
Query :: [{query_keyword(), query_value()}]
) -> [ejabberd:simple_bare_jid()].
get_vh_registered_users(LServer, [{from, Start}, {to, End}])
when is_integer(Start) and is_integer(End) ->
get_vh_registered_users(LServer, [{limit, End-Start+1}, {offset, Start}]);
get_vh_registered_users(LServer, [{limit, Limit}, {offset, Offset}])
when is_integer(Limit) and is_integer(Offset) ->
case get_vh_registered_users(LServer) of
[] ->
[];
Users ->
Set = lists:keysort(1, Users),
L = length(Set),
Start = if Offset < 1 -> 1;
Offset > L -> L;
true -> Offset
end,
lists:sublist(Set, Start, Limit)
end;
get_vh_registered_users(LServer, [{prefix, Prefix}])
when is_binary(Prefix) ->
Set = [{U,S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix],
lists:keysort(1, Set);
get_vh_registered_users(LServer, [{prefix, Prefix}, {from, Start}, {to, End}])
when is_binary(Prefix) and is_integer(Start) and is_integer(End) ->
get_vh_registered_users(LServer, [{prefix, Prefix}, {limit, End-Start+1}, {offset, Start}]);
get_vh_registered_users(LServer, [{prefix, Prefix}, {limit, Limit}, {offset, Offset}])
when is_binary(Prefix) and is_integer(Limit) and is_integer(Offset) ->
case [{U,S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix] of
[] ->
[];
Users ->
Set = lists:keysort(1, Users),
L = length(Set),
Start = if Offset < 1 -> 1;
Offset > L -> L;
true -> Offset
end,
lists:sublist(Set, Start, Limit)
end;
get_vh_registered_users(LServer, _) ->
get_vh_registered_users(LServer).
-spec get_vh_registered_users_number(LServer :: ejabberd:server()
) -> non_neg_integer().
get_vh_registered_users_number(LServer) ->
Query = mnesia:dirty_select(
reg_users_counter,
[{#reg_users_counter{vhost = LServer, count = '$1'},
[],
['$1']}]),
case Query of
[Count] ->
Count;
_ -> 0
end.
-spec get_vh_registered_users_number(LServer :: ejabberd:lserver(),
Query :: [{prefix, binary()}]
) -> integer().
get_vh_registered_users_number(LServer, [{prefix, Prefix}]) when is_binary(Prefix) ->
Set = [{U, S} || {U, S} <- get_vh_registered_users(LServer),
binary:part(U, 0, bit_size(Prefix)) =:= Prefix],
length(Set);
get_vh_registered_users_number(LServer, _) ->
get_vh_registered_users_number(LServer).
-spec get_password(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()) -> binary() | false.
get_password(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
{base64:decode(Scram#scram.storedkey),
base64:decode(Scram#scram.serverkey),
base64:decode(Scram#scram.salt),
Scram#scram.iterationcount};
[#passwd{password = Password}] ->
Password;
_ ->
false
end.
-spec get_password_s(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()) -> binary().
get_password_s(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
<<"">>;
[#passwd{password = Password}] ->
Password;
_ ->
<<"">>
end.
-spec does_user_exist(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()
) -> boolean() | {error, atom()}.
does_user_exist(LUser, LServer) ->
US = {LUser, LServer},
case catch dirty_read_passwd(US) of
[] ->
false;
[_] ->
true;
Other ->
{error, Other}
end.
-spec remove_user(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver()
) -> ok | {error, not_allowed}.
remove_user(LUser, LServer) ->
US = {LUser, LServer},
F = fun() ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1)
end,
mnesia:transaction(F),
ok.
-spec remove_user(LUser :: ejabberd:luser(),
LServer :: ejabberd:lserver(),
Password :: binary()
) -> ok | {error, not_exists | not_allowed | bad_request}.
remove_user(LUser, LServer, Password) ->
US = {LUser, LServer},
F = fun() ->
case read_passwd(US) of
[#passwd{password = Scram}] when is_record(Scram, scram) ->
case scram:check_password(Password, Scram) of
true ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1),
ok;
false ->
not_allowed
end;
[#passwd{password = Password}] ->
mnesia:delete({passwd, US}),
mnesia:dirty_update_counter(reg_users_counter,
LServer, -1),
ok;
_ ->
not_exists
end
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{atomic, not_exists} ->
{error, not_exists};
{atomic, not_allowed} ->
{error, not_allowed};
Error ->
?ERROR_MSG("Mnesia transaction fail: ~p", [Error]),
{error, bad_request}
end.
-spec scram_passwords() -> {atomic, ok}.
scram_passwords() ->
?INFO_MSG("Converting the stored passwords into SCRAM bits", []),
Fields = record_info(fields, passwd),
{atomic, ok} = mnesia:transform_table(passwd, fun scramming_function/1, Fields).
-spec scramming_function(passwd()) -> passwd().
scramming_function(#passwd{us = {_, Server}, password = Password} = P) ->
Scram = scram:password_to_scram(Password, scram:iterations(Server)),
P#passwd{password = Scram}.
-spec dirty_read_passwd(US :: ejabberd:simple_bare_jid()) -> [passwd()].
dirty_read_passwd(US) ->
mnesia:dirty_read(passwd, US).
-spec read_passwd(US :: ejabberd:simple_bare_jid()) -> [passwd()].
read_passwd(US) ->
mnesia:read({passwd, US}).
-spec write_passwd(passwd()) -> ok.
write_passwd(#passwd{} = Passwd) ->
mnesia:write(Passwd).
-spec write_counter(users_counter()) -> ok.
write_counter(#reg_users_counter{} = Counter) ->
mnesia:write(Counter).
login(_User, _Server) -> erlang:error(not_implemented).
get_password(_User, _Server, _DefaultValue) -> erlang:error(not_implemented).
|
9beff701ee2a282d34455a042753390a603c2cf948d3b82cd915656e1e5f3bae | wz1000/hie-lsp | RequesterT.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecursiveDo #
# LANGUAGE ScopedTypeVariables #
module Main where
import Control.Lens
import Control.Monad
import Control.Monad.Fix
import qualified Data.Dependent.Map as DMap
import Data.Dependent.Sum
import Data.Functor.Misc
import qualified Data.Map as M
import Data.These
import Reflex
import Reflex.Requester.Base
import Reflex.Requester.Class
import Test.Run
data RequestInt a where
RequestInt :: Int -> RequestInt Int
main :: IO ()
main = do
os1 <- runApp' (unwrapApp testOrdering) $
[ Just ()
]
print os1
os2 <- runApp' (unwrapApp testSimultaneous) $ map Just $
[ This ()
, That ()
, This ()
, These () ()
]
print os2
os3 <- runApp' (unwrapApp testMoribundRequest) [Just ()]
print os3
os4 <- runApp' (unwrapApp testMoribundRequestDMap) [Just ()]
print os4
os5 <- runApp' (unwrapApp testLiveRequestDMap) [Just ()]
print os5
os6 <- runApp' (unwrapApp delayedPulse) [Just ()]
print os6
let ![[Just [1,2,3,4,5,6,7,8,9,10]]] = os1 -- The order is reversed here: see the documentation for 'runRequesterT'
let ![[Just [9,7,5,3,1]],[Nothing,Nothing],[Just [10,8,6,4,2]],[Just [10,8,6,4,2],Nothing]] = os2
let ![[Nothing, Just [2]]] = os3
let ![[Nothing, Just [2]]] = os4
let ![[Nothing, Just [1, 2]]] = os5
let ! [ [ Nothing , Nothing ] ] = os6 -- TODO re - enable this test after issue # 233 has been resolved
return ()
unwrapRequest :: DSum tag RequestInt -> Int
unwrapRequest (_ :=> RequestInt i) = i
unwrapApp :: ( Reflex t, Monad m )
=> (a -> RequesterT t RequestInt Identity m ())
-> a
-> m (Event t [Int])
unwrapApp x appIn = do
((), e) <- runRequesterT (x appIn) never
return $ fmap (map unwrapRequest . requesterDataToList) e
testOrdering :: ( Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
, Adjustable t m)
=> Event t ()
-> m ()
testOrdering pulse = forM_ [10,9..1] $ \i ->
requestingIdentity (RequestInt i <$ pulse)
testSimultaneous :: ( Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
, Adjustable t m)
=> Event t (These () ())
-> m ()
testSimultaneous pulse = do
let tellE = fmapMaybe (^? here) pulse
switchE = fmapMaybe (^? there) pulse
forM_ [1,3..9] $ \i -> runWithReplace (requestingIdentity (RequestInt i <$ tellE)) $ ffor switchE $ \_ ->
requestingIdentity (RequestInt (i+1) <$ tellE)
-- | Test that a widget requesting and event which fires at the same time it has been replaced
-- doesn't count along with the new widget.
testMoribundRequest
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testMoribundRequest pulse = do
rec let requestIntOnReplace x = requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished) <- runWithReplace (requestIntOnReplace 1) $ requestIntOnReplace 2 <$ pulse
return ()
-- | The equivalent of 'testMoribundRequest' for 'traverseDMapWithKeyWithAdjust'.
testMoribundRequestDMap
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testMoribundRequestDMap pulse = do
rec let requestIntOnReplace :: Int -> m ()
requestIntOnReplace x = void $ requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished :: Event t (PatchDMap (Const2 () Int) Identity)) <-
traverseDMapWithKeyWithAdjust
(\(Const2 ()) (Identity v) -> Identity . const v <$> requestIntOnReplace v)
(mapToDMap $ M.singleton () 1)
((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton () 2) <$ pulse)
return ()
-- | Ensures that elements which are _not_ removed can still fire requests
-- during the same frame as other elements are updated.
testLiveRequestDMap
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testLiveRequestDMap pulse = do
rec let requestIntOnReplace :: Int -> m ()
requestIntOnReplace x = void $ requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished :: Event t (PatchDMap (Const2 Int ()) Identity)) <-
traverseDMapWithKeyWithAdjust
(\(Const2 k) (Identity ()) -> Identity <$> requestIntOnReplace k)
(mapToDMap $ M.singleton 1 ())
((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton 2 ()) <$ pulse)
return ()
delayedPulse
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, PerformEvent t m
, Requester t m
)
=> Event t ()
-> m ()
delayedPulse pulse = void $ flip runWithReplace (pure () <$ pulse) $ do
-- This has the effect of delaying pulse' from pulse
(_, pulse') <- runWithReplace (pure ()) $ pure (RequestInt 1) <$ pulse
requestingIdentity pulse'
| null | https://raw.githubusercontent.com/wz1000/hie-lsp/dbb3caa97c0acbff0e4fd86fc46eeea748f65e89/reflex-0.6.1/test/RequesterT.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
The order is reversed here: see the documentation for 'runRequesterT'
TODO re - enable this test after issue # 233 has been resolved
| Test that a widget requesting and event which fires at the same time it has been replaced
doesn't count along with the new widget.
| The equivalent of 'testMoribundRequest' for 'traverseDMapWithKeyWithAdjust'.
| Ensures that elements which are _not_ removed can still fire requests
during the same frame as other elements are updated.
This has the effect of delaying pulse' from pulse | # LANGUAGE FlexibleContexts #
# LANGUAGE RecursiveDo #
# LANGUAGE ScopedTypeVariables #
module Main where
import Control.Lens
import Control.Monad
import Control.Monad.Fix
import qualified Data.Dependent.Map as DMap
import Data.Dependent.Sum
import Data.Functor.Misc
import qualified Data.Map as M
import Data.These
import Reflex
import Reflex.Requester.Base
import Reflex.Requester.Class
import Test.Run
data RequestInt a where
RequestInt :: Int -> RequestInt Int
main :: IO ()
main = do
os1 <- runApp' (unwrapApp testOrdering) $
[ Just ()
]
print os1
os2 <- runApp' (unwrapApp testSimultaneous) $ map Just $
[ This ()
, That ()
, This ()
, These () ()
]
print os2
os3 <- runApp' (unwrapApp testMoribundRequest) [Just ()]
print os3
os4 <- runApp' (unwrapApp testMoribundRequestDMap) [Just ()]
print os4
os5 <- runApp' (unwrapApp testLiveRequestDMap) [Just ()]
print os5
os6 <- runApp' (unwrapApp delayedPulse) [Just ()]
print os6
let ![[Just [9,7,5,3,1]],[Nothing,Nothing],[Just [10,8,6,4,2]],[Just [10,8,6,4,2],Nothing]] = os2
let ![[Nothing, Just [2]]] = os3
let ![[Nothing, Just [2]]] = os4
let ![[Nothing, Just [1, 2]]] = os5
return ()
unwrapRequest :: DSum tag RequestInt -> Int
unwrapRequest (_ :=> RequestInt i) = i
unwrapApp :: ( Reflex t, Monad m )
=> (a -> RequesterT t RequestInt Identity m ())
-> a
-> m (Event t [Int])
unwrapApp x appIn = do
((), e) <- runRequesterT (x appIn) never
return $ fmap (map unwrapRequest . requesterDataToList) e
testOrdering :: ( Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
, Adjustable t m)
=> Event t ()
-> m ()
testOrdering pulse = forM_ [10,9..1] $ \i ->
requestingIdentity (RequestInt i <$ pulse)
testSimultaneous :: ( Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
, Adjustable t m)
=> Event t (These () ())
-> m ()
testSimultaneous pulse = do
let tellE = fmapMaybe (^? here) pulse
switchE = fmapMaybe (^? there) pulse
forM_ [1,3..9] $ \i -> runWithReplace (requestingIdentity (RequestInt i <$ tellE)) $ ffor switchE $ \_ ->
requestingIdentity (RequestInt (i+1) <$ tellE)
testMoribundRequest
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testMoribundRequest pulse = do
rec let requestIntOnReplace x = requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished) <- runWithReplace (requestIntOnReplace 1) $ requestIntOnReplace 2 <$ pulse
return ()
testMoribundRequestDMap
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testMoribundRequestDMap pulse = do
rec let requestIntOnReplace :: Int -> m ()
requestIntOnReplace x = void $ requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished :: Event t (PatchDMap (Const2 () Int) Identity)) <-
traverseDMapWithKeyWithAdjust
(\(Const2 ()) (Identity v) -> Identity . const v <$> requestIntOnReplace v)
(mapToDMap $ M.singleton () 1)
((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton () 2) <$ pulse)
return ()
testLiveRequestDMap
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, Requester t m
)
=> Event t ()
-> m ()
testLiveRequestDMap pulse = do
rec let requestIntOnReplace :: Int -> m ()
requestIntOnReplace x = void $ requestingIdentity $ RequestInt x <$ rwrFinished
(_, rwrFinished :: Event t (PatchDMap (Const2 Int ()) Identity)) <-
traverseDMapWithKeyWithAdjust
(\(Const2 k) (Identity ()) -> Identity <$> requestIntOnReplace k)
(mapToDMap $ M.singleton 1 ())
((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton 2 ()) <$ pulse)
return ()
delayedPulse
:: forall t m
. ( Reflex t
, Adjustable t m
, MonadHold t m
, MonadFix m
, Response m ~ Identity
, Request m ~ RequestInt
, PerformEvent t m
, Requester t m
)
=> Event t ()
-> m ()
delayedPulse pulse = void $ flip runWithReplace (pure () <$ pulse) $ do
(_, pulse') <- runWithReplace (pure ()) $ pure (RequestInt 1) <$ pulse
requestingIdentity pulse'
|
156e74796fd59f753271d76a0aa8f2f5a9560fc769ecf35d3bf6dcea8aaecbd1 | kmicinski/program-analysis-examples | church.rkt | #lang racket
;; A church-compiler
(provide (all-defined-out))
;; for an input language:
;
e : : = ( ( [ x ( lambda ( x ... ) e ) ] ) e )
; | (let ([x e] ...) e)
; | (lambda (x ...) e)
; | (e e ...)
; | x
; | (and e e) | (or e e)
; | (if e e e)
; | (prim e) | (prim e e)
; | datum
; datum ::= nat | (quote ()) | #t | #f
nat : : = 0 | 1 | 2 | ...
; x is a symbol
; prim is a primitive operation in:
(define prims '(+ * - add1 sub1 cons car cdr null? not zero?))
;; To an output language:
;
; e ::= (lambda (x) e)
; | (e e)
; | x
;
(define id `(lambda (x) x))
(define (prim? prim)
(if (member prim prims) #t #f))
(define (churchify-prim prim)
(string->symbol (string-append "church:" (symbol->string prim))))
;; Take something in the input language and translate it to
;; a Racket lambda that represents the church-encoded version
(define (churchify e)
(match e
; Tagged expressions
[`(letrec ([,f (lambda (,args ...) ,e0)]) ,e1)
(lambda (x) x)]
[`(let ([,xs ,e0s] ...) ,e1)
(churchify `((lambda ,xs ,e1) . ,e0s))]
[`(lambda () ,e0)
(lambda (x) x)]
[`(lambda (,x) ,e0)
(lambda (,x) ,(churchify e0))]
[`(lambda (,x . ,rest) ,e0)
(lambda (x) x)]
[`(and ,e0 ,e1)
(lambda (x) x)]
[`(or ,e0 ,e1)
(lambda (x) x)]
[`(if ,e0 ,e1 ,e2)
(lambda (x) x)]
[`(,(? prim? prim) . ,args)
(lambda (x) x)]
; Variables
[(? symbol? x) x]
; Datums
[(? natural? nat)
(define (wrap nat)
(if (= 0 nat) 'x `(f ,(wrap (- nat 1)))))
(churchify `(lambda (f) (lambda (x) ,(wrap nat))))]
[''() (churchify '(lambda (when-cons when-null) (when-null)))]
[#t (churchify '(lambda (tt ft) (tt)))]
[#f (churchify '(lambda (tt ft) (ft)))]
; Untagged application
[`(,fun)
#f]
[`(,fun ,arg)
#f]
[`(,fun ,arg . ,rest)
#f]))
(define (church-encode e)
(define Y-comb `((lambda (u) (u u)) (lambda (y) (lambda (mk) (mk (lambda (x) (((y y) mk) x)))))))
(define church:null? `(lambda (p) (p (lambda (a b) #f) (lambda () #t))))
(define church:cons `(lambda (a b) (lambda (when-cons when-null) (when-cons a b))))
(define church:car `(lambda (p) (p (lambda (a b) a) (lambda () (lambda (x) x)))))
(define church:cdr `(lambda (p) (p (lambda (a b) b) (lambda () (lambda (x) x)))))
(define church:add1 `(lambda (n0) (lambda (f x) (f ((n0 f) x)))))
(define church:sub1 `(lambda (n0) (lambda (f) (lambda (y) (((n0
(lambda (g) (lambda (h) (h (g f)))))
uses n0 to produce a chain of linked closures
with linked functions g - > ( lambda ( h ) ( h ( g f ) ) )
The first g and last h are then ( lambda ( _ ) y ) and i d ,
; so in a sense it's computing |n0|+1-2
(lambda (_) y))
(lambda (x) x))))))
(define church:zero? `(lambda (n0) ((n0 (lambda (b) #f)) #t)))
(define church:+ `(lambda (n0 n1) (lambda (f x) ((n1 f) ((n0 f) x)))))
(define church:- `(lambda (n0 n1) ((n1 ,church:sub1) n0)))
(define church:* `(lambda (n0 n1) (lambda (f) (lambda (x) ((n0 (n1 f)) x)))))
(define church:= `(lambda (n0 n1) (and (,church:zero? (,church:- n0 n1)) (,church:zero? (,church:- n1 n0)))))
(define church:not `(lambda (bool) (if bool #f #t)))
(churchify
`(let ([Y-comb ,Y-comb]
[church:null? ,church:null?]
[church:cons ,church:cons]
[church:car ,church:car]
[church:cdr ,church:cdr]
[church:add1 ,church:add1]
[church:sub1 ,church:sub1]
[church:+ ,church:+]
[church:- ,church:-]
[church:* ,church:*]
[church:zero? ,church:zero?]
[church:= ,church:=]
[church:not ,church:not])
,e)))
| null | https://raw.githubusercontent.com/kmicinski/program-analysis-examples/140e5a3039ba9ddba70dc53fd3c09f3d590997a9/church.rkt | racket | A church-compiler
for an input language:
| (let ([x e] ...) e)
| (lambda (x ...) e)
| (e e ...)
| x
| (and e e) | (or e e)
| (if e e e)
| (prim e) | (prim e e)
| datum
datum ::= nat | (quote ()) | #t | #f
x is a symbol
prim is a primitive operation in:
To an output language:
e ::= (lambda (x) e)
| (e e)
| x
Take something in the input language and translate it to
a Racket lambda that represents the church-encoded version
Tagged expressions
Variables
Datums
Untagged application
so in a sense it's computing |n0|+1-2 | #lang racket
(provide (all-defined-out))
e : : = ( ( [ x ( lambda ( x ... ) e ) ] ) e )
nat : : = 0 | 1 | 2 | ...
(define prims '(+ * - add1 sub1 cons car cdr null? not zero?))
(define id `(lambda (x) x))
(define (prim? prim)
(if (member prim prims) #t #f))
(define (churchify-prim prim)
(string->symbol (string-append "church:" (symbol->string prim))))
(define (churchify e)
(match e
[`(letrec ([,f (lambda (,args ...) ,e0)]) ,e1)
(lambda (x) x)]
[`(let ([,xs ,e0s] ...) ,e1)
(churchify `((lambda ,xs ,e1) . ,e0s))]
[`(lambda () ,e0)
(lambda (x) x)]
[`(lambda (,x) ,e0)
(lambda (,x) ,(churchify e0))]
[`(lambda (,x . ,rest) ,e0)
(lambda (x) x)]
[`(and ,e0 ,e1)
(lambda (x) x)]
[`(or ,e0 ,e1)
(lambda (x) x)]
[`(if ,e0 ,e1 ,e2)
(lambda (x) x)]
[`(,(? prim? prim) . ,args)
(lambda (x) x)]
[(? symbol? x) x]
[(? natural? nat)
(define (wrap nat)
(if (= 0 nat) 'x `(f ,(wrap (- nat 1)))))
(churchify `(lambda (f) (lambda (x) ,(wrap nat))))]
[''() (churchify '(lambda (when-cons when-null) (when-null)))]
[#t (churchify '(lambda (tt ft) (tt)))]
[#f (churchify '(lambda (tt ft) (ft)))]
[`(,fun)
#f]
[`(,fun ,arg)
#f]
[`(,fun ,arg . ,rest)
#f]))
(define (church-encode e)
(define Y-comb `((lambda (u) (u u)) (lambda (y) (lambda (mk) (mk (lambda (x) (((y y) mk) x)))))))
(define church:null? `(lambda (p) (p (lambda (a b) #f) (lambda () #t))))
(define church:cons `(lambda (a b) (lambda (when-cons when-null) (when-cons a b))))
(define church:car `(lambda (p) (p (lambda (a b) a) (lambda () (lambda (x) x)))))
(define church:cdr `(lambda (p) (p (lambda (a b) b) (lambda () (lambda (x) x)))))
(define church:add1 `(lambda (n0) (lambda (f x) (f ((n0 f) x)))))
(define church:sub1 `(lambda (n0) (lambda (f) (lambda (y) (((n0
(lambda (g) (lambda (h) (h (g f)))))
uses n0 to produce a chain of linked closures
with linked functions g - > ( lambda ( h ) ( h ( g f ) ) )
The first g and last h are then ( lambda ( _ ) y ) and i d ,
(lambda (_) y))
(lambda (x) x))))))
(define church:zero? `(lambda (n0) ((n0 (lambda (b) #f)) #t)))
(define church:+ `(lambda (n0 n1) (lambda (f x) ((n1 f) ((n0 f) x)))))
(define church:- `(lambda (n0 n1) ((n1 ,church:sub1) n0)))
(define church:* `(lambda (n0 n1) (lambda (f) (lambda (x) ((n0 (n1 f)) x)))))
(define church:= `(lambda (n0 n1) (and (,church:zero? (,church:- n0 n1)) (,church:zero? (,church:- n1 n0)))))
(define church:not `(lambda (bool) (if bool #f #t)))
(churchify
`(let ([Y-comb ,Y-comb]
[church:null? ,church:null?]
[church:cons ,church:cons]
[church:car ,church:car]
[church:cdr ,church:cdr]
[church:add1 ,church:add1]
[church:sub1 ,church:sub1]
[church:+ ,church:+]
[church:- ,church:-]
[church:* ,church:*]
[church:zero? ,church:zero?]
[church:= ,church:=]
[church:not ,church:not])
,e)))
|
a3cd12f1f2d81285f932457df467aaf0e59d748a9b71ee62649dd962cf6c5c0f | marcoheisig/Typo | function-name.lisp | (in-package #:typo.fndb)
(declaim (ftype (function (function) (values (or null function-name) &optional))
function-name))
(defun function-name (function)
#-sbcl nil
#+sbcl
(let ((fun-name (sb-kernel:%fun-name function)))
(if (typep fun-name 'function-name)
fun-name
nil)))
| null | https://raw.githubusercontent.com/marcoheisig/Typo/c4451ce5e16a8c8f85128d1d59ecc2ccd311537e/code/fndb/function-name.lisp | lisp | (in-package #:typo.fndb)
(declaim (ftype (function (function) (values (or null function-name) &optional))
function-name))
(defun function-name (function)
#-sbcl nil
#+sbcl
(let ((fun-name (sb-kernel:%fun-name function)))
(if (typep fun-name 'function-name)
fun-name
nil)))
| |
31820f13ecb415a6fe207eaad45aabcf4ade03e79b8e8576c0466719a7e04cea | FranklinChen/hugs98-plus-Sep2006 | Texture3D.hs |
Texture3D.hs ( adapted from texture3d.c which is ( c ) Silicon Graphics , Inc )
Copyright ( c ) 2002 - 2005 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This program demonstrates using a three - dimensional texture . It creates
a 3D texture and then renders two rectangles with different texture
coordinates to obtain different " slices " of the 3D texture .
Texture3D.hs (adapted from texture3d.c which is (c) Silicon Graphics, Inc)
Copyright (c) Sven Panne 2002-2005 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This program demonstrates using a three-dimensional texture. It creates
a 3D texture and then renders two rectangles with different texture
coordinates to obtain different "slices" of the 3D texture.
-}
import Control.Monad ( unless )
import Foreign ( withArray )
import System.Exit ( exitFailure, exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
-- Create checkerboard image
imageSize :: TextureSize3D
imageSize = TextureSize3D 16 16 16
withImage :: (PixelData (Color3 GLubyte) -> IO ()) -> IO ()
withImage act =
withArray [ Color3 (s * 17) (t * 17) (r * 17) |
r <- [ 0 .. fromIntegral d - 1 ],
t <- [ 0 .. fromIntegral h - 1 ],
s <- [ 0 .. fromIntegral w - 1 ] ] $
act . PixelData RGB UnsignedByte
where (TextureSize3D w h d) = imageSize
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0 0 0
shadeModel $= Flat
depthFunc $= Just Less
rowAlignment Unpack $= 1
[texName] <- genObjectNames 1
textureBinding Texture3D $= Just texName
textureWrapMode Texture3D S $= (Repeated, Clamp)
textureWrapMode Texture3D T $= (Repeated, Clamp)
textureWrapMode Texture3D R $= (Repeated, Clamp)
textureFilter Texture3D $= ((Nearest, Nothing), Nearest)
withImage $ texImage3D NoProxy 0 RGB' imageSize 0
texture Texture3D $= Enabled
display :: DisplayCallback
display = do
clear [ ColorBuffer, DepthBuffer ]
-- resolve overloading, not needed in "real" programs
let texCoord3f = texCoord :: TexCoord3 GLfloat -> IO ()
vertex3f = vertex :: Vertex3 GLfloat -> IO ()
renderPrimitive Quads $ do
texCoord3f (TexCoord3 0 0 0); vertex3f (Vertex3 (-2.25) (-1) 0)
texCoord3f (TexCoord3 0 1 0); vertex3f (Vertex3 (-2.25) 1 0)
texCoord3f (TexCoord3 1 1 1); vertex3f (Vertex3 (-0.25) 1 0)
texCoord3f (TexCoord3 1 0 1); vertex3f (Vertex3 (-0.25) (-1) 0)
texCoord3f (TexCoord3 0 0 1); vertex3f (Vertex3 0.25 (-1) 0)
texCoord3f (TexCoord3 0 1 1); vertex3f (Vertex3 0.25 1 0)
texCoord3f (TexCoord3 1 1 0); vertex3f (Vertex3 2.25 1 0)
texCoord3f (TexCoord3 1 0 0); vertex3f (Vertex3 2.25 (-1) 0)
flush
reshape :: ReshapeCallback
reshape size@(Size w h) = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
perspective 60 (fromIntegral w / fromIntegral h) 1 30
matrixMode $= Modelview 0
loadIdentity
translate (Vector3 0 0 (-4 :: GLfloat))
keyboard :: KeyboardMouseCallback
keyboard (Char '\27') Down _ _ = exitWith ExitSuccess
keyboard _ _ _ _ = return ()
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode, WithDepthBuffer ]
initialWindowSize $= Size 250 250
initialWindowPosition $= Position 100 100
createWindow progName
-- we have to do this *after* createWindow, otherwise we have no OpenGL context
exts <- get glExtensions
unless ("GL_EXT_texture3D" `elem` exts) $ do
putStrLn "Sorry, this demo requires the GL_EXT_texture3D extension."
exitFailure
myInit
reshapeCallback $= Just reshape
displayCallback $= display
keyboardMouseCallback $= Just keyboard
mainLoop
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/GLUT/examples/RedBook/Texture3D.hs | haskell | Create checkerboard image
resolve overloading, not needed in "real" programs
we have to do this *after* createWindow, otherwise we have no OpenGL context |
Texture3D.hs ( adapted from texture3d.c which is ( c ) Silicon Graphics , Inc )
Copyright ( c ) 2002 - 2005 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This program demonstrates using a three - dimensional texture . It creates
a 3D texture and then renders two rectangles with different texture
coordinates to obtain different " slices " of the 3D texture .
Texture3D.hs (adapted from texture3d.c which is (c) Silicon Graphics, Inc)
Copyright (c) Sven Panne 2002-2005 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This program demonstrates using a three-dimensional texture. It creates
a 3D texture and then renders two rectangles with different texture
coordinates to obtain different "slices" of the 3D texture.
-}
import Control.Monad ( unless )
import Foreign ( withArray )
import System.Exit ( exitFailure, exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
imageSize :: TextureSize3D
imageSize = TextureSize3D 16 16 16
withImage :: (PixelData (Color3 GLubyte) -> IO ()) -> IO ()
withImage act =
withArray [ Color3 (s * 17) (t * 17) (r * 17) |
r <- [ 0 .. fromIntegral d - 1 ],
t <- [ 0 .. fromIntegral h - 1 ],
s <- [ 0 .. fromIntegral w - 1 ] ] $
act . PixelData RGB UnsignedByte
where (TextureSize3D w h d) = imageSize
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0 0 0
shadeModel $= Flat
depthFunc $= Just Less
rowAlignment Unpack $= 1
[texName] <- genObjectNames 1
textureBinding Texture3D $= Just texName
textureWrapMode Texture3D S $= (Repeated, Clamp)
textureWrapMode Texture3D T $= (Repeated, Clamp)
textureWrapMode Texture3D R $= (Repeated, Clamp)
textureFilter Texture3D $= ((Nearest, Nothing), Nearest)
withImage $ texImage3D NoProxy 0 RGB' imageSize 0
texture Texture3D $= Enabled
display :: DisplayCallback
display = do
clear [ ColorBuffer, DepthBuffer ]
let texCoord3f = texCoord :: TexCoord3 GLfloat -> IO ()
vertex3f = vertex :: Vertex3 GLfloat -> IO ()
renderPrimitive Quads $ do
texCoord3f (TexCoord3 0 0 0); vertex3f (Vertex3 (-2.25) (-1) 0)
texCoord3f (TexCoord3 0 1 0); vertex3f (Vertex3 (-2.25) 1 0)
texCoord3f (TexCoord3 1 1 1); vertex3f (Vertex3 (-0.25) 1 0)
texCoord3f (TexCoord3 1 0 1); vertex3f (Vertex3 (-0.25) (-1) 0)
texCoord3f (TexCoord3 0 0 1); vertex3f (Vertex3 0.25 (-1) 0)
texCoord3f (TexCoord3 0 1 1); vertex3f (Vertex3 0.25 1 0)
texCoord3f (TexCoord3 1 1 0); vertex3f (Vertex3 2.25 1 0)
texCoord3f (TexCoord3 1 0 0); vertex3f (Vertex3 2.25 (-1) 0)
flush
reshape :: ReshapeCallback
reshape size@(Size w h) = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
perspective 60 (fromIntegral w / fromIntegral h) 1 30
matrixMode $= Modelview 0
loadIdentity
translate (Vector3 0 0 (-4 :: GLfloat))
keyboard :: KeyboardMouseCallback
keyboard (Char '\27') Down _ _ = exitWith ExitSuccess
keyboard _ _ _ _ = return ()
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode, WithDepthBuffer ]
initialWindowSize $= Size 250 250
initialWindowPosition $= Position 100 100
createWindow progName
exts <- get glExtensions
unless ("GL_EXT_texture3D" `elem` exts) $ do
putStrLn "Sorry, this demo requires the GL_EXT_texture3D extension."
exitFailure
myInit
reshapeCallback $= Just reshape
displayCallback $= display
keyboardMouseCallback $= Just keyboard
mainLoop
|
95a0f991098c7b24f37dc95c489a306212455664c54e4def36b032efa15a3d73 | onedata/op-worker | readdir_plus.erl | %%%--------------------------------------------------------------------
@author
( C ) 2022 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%--------------------------------------------------------------------
%%% @doc
%%% Module providing utility function for readdir plus related file listing operations.
%%% @end
%%%--------------------------------------------------------------------
-module(readdir_plus).
-author("Michal Stanisz").
-include("global_definitions.hrl").
-include("modules/fslogic/data_access_control.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("proto/oneclient/fuse_messages.hrl").
-export([
gather_attributes/3
]).
-type gather_attributes_fun(Entry, Attributes) ::
fun((Entry, attr_req:compute_file_attr_opts()) -> Attributes).
-export_type([gather_attributes_fun/2]).
-define(MAX_MAP_CHILDREN_PROCESSES, application:get_env(
?APP_NAME, max_read_dir_plus_procs, 20
)).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Calls GatherAttributesFun for every passed entry in parallel and
%% filters out entries for which it raised an error (potentially docs not
%% synchronized between providers or deleted files).
%% @end
%%--------------------------------------------------------------------
-spec gather_attributes(
gather_attributes_fun(Entry, Attributes),
[Entry],
attr_req:compute_file_attr_opts()
) ->
[Attributes].
gather_attributes(GatherAttributesFun, Entries, BaseOpts) ->
EntriesNum = length(Entries),
EnumeratedChildren = lists_utils:enumerate(Entries),
FilterMapFun = fun({Num, Entry}) ->
try
Result = case Num == 1 orelse Num == EntriesNum of
true ->
GatherAttributesFun(Entry, BaseOpts#{
name_conflicts_resolution_policy => resolve_name_conflicts
});
false ->
GatherAttributesFun(Entry, BaseOpts#{
name_conflicts_resolution_policy => allow_name_conflicts
})
end,
{true, Result}
catch Class:Reason ->
case datastore_runner:normalize_error(Reason) of
not_found ->
% Entry metadata can be not fully synchronized with other provider
false;
_ ->
erlang:apply(erlang, Class, [Reason])
end
end
end,
lists_utils:pfiltermap(FilterMapFun, EnumeratedChildren, ?MAX_MAP_CHILDREN_PROCESSES).
| null | https://raw.githubusercontent.com/onedata/op-worker/71d2ac527f4d20ca40b8f5ae28b8107b68ca90e9/src/modules/fslogic/operations/listing/readdir_plus.erl | erlang | --------------------------------------------------------------------
@end
--------------------------------------------------------------------
@doc
Module providing utility function for readdir plus related file listing operations.
@end
--------------------------------------------------------------------
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Calls GatherAttributesFun for every passed entry in parallel and
filters out entries for which it raised an error (potentially docs not
synchronized between providers or deleted files).
@end
--------------------------------------------------------------------
Entry metadata can be not fully synchronized with other provider | @author
( C ) 2022 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(readdir_plus).
-author("Michal Stanisz").
-include("global_definitions.hrl").
-include("modules/fslogic/data_access_control.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("proto/oneclient/fuse_messages.hrl").
-export([
gather_attributes/3
]).
-type gather_attributes_fun(Entry, Attributes) ::
fun((Entry, attr_req:compute_file_attr_opts()) -> Attributes).
-export_type([gather_attributes_fun/2]).
-define(MAX_MAP_CHILDREN_PROCESSES, application:get_env(
?APP_NAME, max_read_dir_plus_procs, 20
)).
-spec gather_attributes(
gather_attributes_fun(Entry, Attributes),
[Entry],
attr_req:compute_file_attr_opts()
) ->
[Attributes].
gather_attributes(GatherAttributesFun, Entries, BaseOpts) ->
EntriesNum = length(Entries),
EnumeratedChildren = lists_utils:enumerate(Entries),
FilterMapFun = fun({Num, Entry}) ->
try
Result = case Num == 1 orelse Num == EntriesNum of
true ->
GatherAttributesFun(Entry, BaseOpts#{
name_conflicts_resolution_policy => resolve_name_conflicts
});
false ->
GatherAttributesFun(Entry, BaseOpts#{
name_conflicts_resolution_policy => allow_name_conflicts
})
end,
{true, Result}
catch Class:Reason ->
case datastore_runner:normalize_error(Reason) of
not_found ->
false;
_ ->
erlang:apply(erlang, Class, [Reason])
end
end
end,
lists_utils:pfiltermap(FilterMapFun, EnumeratedChildren, ?MAX_MAP_CHILDREN_PROCESSES).
|
91b9ce6552d505cf1d6ec2e220093bd76bb86169e18f8a19d94680de2393f12e | superbobry/pareto | tests.mli | (** Statistical testing. *)
type test_alternative = Less | Greater | TwoSided
type test_result = {
test_statistic : float;
test_pvalue : float
}
* Assess significance of the statistical test at a given
[ significance_level ] , which defaults to [ 0.05 ] .
[significance_level], which defaults to [0.05]. *)
val run_test
: ?significance_level:float
-> (unit -> test_result)
-> [`Significant | `NotSignificant]
module T : sig
* One sample Student 's t - test , which evaluates the null hypothesis
that a [ mean ] of a normally distributed variable is equal to the
specified value .
that a [mean] of a normally distributed variable is equal to the
specified value. *)
val one_sample
: float array
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Two sample t - test , which evaluates the null hypothesis that the
difference of means of two { e independent } normally distributed
populations is equal to the specified value .
difference of means of two {e independent} normally distributed
populations is equal to the specified value. *)
val two_sample_independent
: float array
-> float array
-> ?equal_variance:bool
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Paired two sample t - test , which evaluates the null hypothes that
the difference of means of the two { e paired } normally distributed
populations is equal to the specified value .
the difference of means of the two {e paired} normally distributed
populations is equal to the specified value. *)
val two_sample_paired
: float array
-> float array
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
end
* Pearson 's chi - squared test .
module ChiSquared : sig
val goodness_of_fit
: float array -> ?expected:float array -> ?df:int -> unit -> test_result
val independence
: float array array -> ?correction:bool -> unit -> test_result
end
module KolmogorovSmirnov : sig
* One - sample Kolmogorov - Smirnov test for goodness of fit , which
evaluates the distribution [ G(x ) ] of the observed random variable
against a given distribution [ F(x ) ] . Under the null hypothesis
the two distributions are identical , [ G(x ) = F(x ) ] .
evaluates the distribution [G(x)] of the observed random variable
against a given distribution [F(x)]. Under the null hypothesis
the two distributions are identical, [G(x) = F(x)]. *)
val goodness_of_fit
: float array
-> cumulative_probability:(float -> float)
-> ?alternative:test_alternative
-> unit
-> test_result
* Two - sample Kolmogorov - Smirnov test , which evaluates the null
hypothesis , that two { e independent } samples are drawn from the
same continious distribution .
{ b Note } : in the current implementation samples with ties will
result in an [ Invalid_argument ] exception .
hypothesis, that two {e independent} samples are drawn from the
same continious distribution.
{b Note}: in the current implementation samples with ties will
result in an [Invalid_argument] exception. *)
val two_sample
: float array
-> float array
-> ?alternative:test_alternative
-> unit
-> test_result
* { 6 References }
+ National Institute of Standards and Technology ( US ) , et al .
" Engineering statistics handbook " , Section 1.3.5.16 .
The Institute , 2001 .
+ , , and .
" Evaluating Kolmogorov 's distribution . " Journal of
Statistical Software 8 , no . 18 . 2003 .
, . " One - sided confidence contours
for probability distribution functions . " The Annals of
Mathematical Statistics , pp592 - 596 . 1951 .
+ National Institute of Standards and Technology (US), et al.
"Engineering statistics handbook", Section 1.3.5.16.
The Institute, 2001.
+ Jingbo Wang, Wai Wan Tsang, and George Marsaglia.
"Evaluating Kolmogorov's distribution." Journal of
Statistical Software 8, no. 18. 2003.
+ Z. W. Birnbaum, Fred H. Tingey. "One-sided confidence contours
for probability distribution functions." The Annals of
Mathematical Statistics, pp592-596. 1951. *)
end
module MannWhitneyU : sig
* - Whitney U test ( also known as - Whitney - Wilcoxon test and
Wilcoxon rank sum test ) is a non - paramteric test , which evaluates
the null hypothesis that two { e independent } samples have equal
medians .
Wilcoxon rank sum test) is a non-paramteric test, which evaluates
the null hypothesis that two {e independent} samples have equal
medians. *)
val two_sample_independent
: 'a array
-> 'a array
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* { 6 References }
, and .
" Statistics for the behavioral sciences " . Wadsworth Publishing
Company , 2006 .
+ . " Handbook of Parametric and Nonparametric
Statistical Procedures " , 3rd edition . CRC Press , 2003 .
+ Gravetter, Frederick J. and Larry B. Wallnau.
"Statistics for the behavioral sciences". Wadsworth Publishing
Company, 2006.
+ David J. Sheskin. "Handbook of Parametric and Nonparametric
Statistical Procedures", 3rd edition. CRC Press, 2003. *)
end
module WilcoxonT : sig
* signed - rank test , which evaluates the null hypothesis
that sample median is equal to the specified [ shift ] .
Test assumptions :
+ Sample under test was randomly selected from the population it
represents .
+ All [ vs - . shift ] differences are iid and come from a continious
population .
that sample median is equal to the specified [shift].
Test assumptions:
+ Sample under test was randomly selected from the population it
represents.
+ All [vs -. shift] differences are iid and come from a continious
population. *)
val one_sample
: float array
-> ?shift:float
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* Wilcoxon paired signed - rank test , which evaluates the null hypothesis
that two { e related } samples have equal medians .
Test assumptions :
+ Samples under test were randomly selected from the population
they represent .
+ Observation differences [ vs2 - . vs1 ] are iid and come from a
continious population .
that two {e related} samples have equal medians.
Test assumptions:
+ Samples under test were randomly selected from the population
they represent.
+ Observation differences [vs2 -. vs1] are iid and come from a
continious population. *)
val two_sample_paired
: float array
-> float array
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* { 6 References }
+ . " Handbook of Parametric and Nonparametric
Statistical Procedures " , 3rd edition . CRC Press , 2003 .
+
+ David J. Sheskin. "Handbook of Parametric and Nonparametric
Statistical Procedures", 3rd edition. CRC Press, 2003.
+ *)
end
module Sign : sig
(** Sign test, which evaluates the null hypothesis that sample median is
equal to the specified [shift].
Test assumptions:
+ Sample under test was randomly selected from the population it
represents. *)
val one_sample
: float array
-> ?shift:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Dependent samples sign test , which evaluates the null hypothesis
that the median difference between observations from two { e related }
samples is zero .
Test assumptions :
+ Samples under test were randomly selected from the population they
represent .
that the median difference between observations from two {e related}
samples is zero.
Test assumptions:
+ Samples under test were randomly selected from the population they
represent. *)
val two_sample_paired
: float array
-> float array
-> ?alternative:test_alternative
-> unit
-> test_result
end
(** Adjustments for multiple comparisons. *)
module Multiple : sig
type adjustment_method =
| HolmBonferroni
| BenjaminiHochberg
(** Adjusts obtained P-values for multiple comparisons using a given
adjustment method. *)
val adjust : float array -> adjustment_method -> float array
* { 6 References }
+ and . " Controlling the false discovery
rate : a practical and powerful approach to multiple testing . " ,
Journal of the Royal Statistical Society , Series B ( Methodological ) ,
pp289 - 300 , 1995 .
+ Yoav Benjamini and Yosef Hochberg. "Controlling the false discovery
rate: a practical and powerful approach to multiple testing.",
Journal of the Royal Statistical Society, Series B (Methodological),
pp289-300, 1995. *)
end
| null | https://raw.githubusercontent.com/superbobry/pareto/8b3b27bce7b7df5d9713d16ed40a844861aa368e/lib/tests.mli | ocaml | * Statistical testing.
* Sign test, which evaluates the null hypothesis that sample median is
equal to the specified [shift].
Test assumptions:
+ Sample under test was randomly selected from the population it
represents.
* Adjustments for multiple comparisons.
* Adjusts obtained P-values for multiple comparisons using a given
adjustment method. |
type test_alternative = Less | Greater | TwoSided
type test_result = {
test_statistic : float;
test_pvalue : float
}
* Assess significance of the statistical test at a given
[ significance_level ] , which defaults to [ 0.05 ] .
[significance_level], which defaults to [0.05]. *)
val run_test
: ?significance_level:float
-> (unit -> test_result)
-> [`Significant | `NotSignificant]
module T : sig
* One sample Student 's t - test , which evaluates the null hypothesis
that a [ mean ] of a normally distributed variable is equal to the
specified value .
that a [mean] of a normally distributed variable is equal to the
specified value. *)
val one_sample
: float array
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Two sample t - test , which evaluates the null hypothesis that the
difference of means of two { e independent } normally distributed
populations is equal to the specified value .
difference of means of two {e independent} normally distributed
populations is equal to the specified value. *)
val two_sample_independent
: float array
-> float array
-> ?equal_variance:bool
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Paired two sample t - test , which evaluates the null hypothes that
the difference of means of the two { e paired } normally distributed
populations is equal to the specified value .
the difference of means of the two {e paired} normally distributed
populations is equal to the specified value. *)
val two_sample_paired
: float array
-> float array
-> ?mean:float
-> ?alternative:test_alternative
-> unit
-> test_result
end
* Pearson 's chi - squared test .
module ChiSquared : sig
val goodness_of_fit
: float array -> ?expected:float array -> ?df:int -> unit -> test_result
val independence
: float array array -> ?correction:bool -> unit -> test_result
end
module KolmogorovSmirnov : sig
* One - sample Kolmogorov - Smirnov test for goodness of fit , which
evaluates the distribution [ G(x ) ] of the observed random variable
against a given distribution [ F(x ) ] . Under the null hypothesis
the two distributions are identical , [ G(x ) = F(x ) ] .
evaluates the distribution [G(x)] of the observed random variable
against a given distribution [F(x)]. Under the null hypothesis
the two distributions are identical, [G(x) = F(x)]. *)
val goodness_of_fit
: float array
-> cumulative_probability:(float -> float)
-> ?alternative:test_alternative
-> unit
-> test_result
* Two - sample Kolmogorov - Smirnov test , which evaluates the null
hypothesis , that two { e independent } samples are drawn from the
same continious distribution .
{ b Note } : in the current implementation samples with ties will
result in an [ Invalid_argument ] exception .
hypothesis, that two {e independent} samples are drawn from the
same continious distribution.
{b Note}: in the current implementation samples with ties will
result in an [Invalid_argument] exception. *)
val two_sample
: float array
-> float array
-> ?alternative:test_alternative
-> unit
-> test_result
* { 6 References }
+ National Institute of Standards and Technology ( US ) , et al .
" Engineering statistics handbook " , Section 1.3.5.16 .
The Institute , 2001 .
+ , , and .
" Evaluating Kolmogorov 's distribution . " Journal of
Statistical Software 8 , no . 18 . 2003 .
, . " One - sided confidence contours
for probability distribution functions . " The Annals of
Mathematical Statistics , pp592 - 596 . 1951 .
+ National Institute of Standards and Technology (US), et al.
"Engineering statistics handbook", Section 1.3.5.16.
The Institute, 2001.
+ Jingbo Wang, Wai Wan Tsang, and George Marsaglia.
"Evaluating Kolmogorov's distribution." Journal of
Statistical Software 8, no. 18. 2003.
+ Z. W. Birnbaum, Fred H. Tingey. "One-sided confidence contours
for probability distribution functions." The Annals of
Mathematical Statistics, pp592-596. 1951. *)
end
module MannWhitneyU : sig
* - Whitney U test ( also known as - Whitney - Wilcoxon test and
Wilcoxon rank sum test ) is a non - paramteric test , which evaluates
the null hypothesis that two { e independent } samples have equal
medians .
Wilcoxon rank sum test) is a non-paramteric test, which evaluates
the null hypothesis that two {e independent} samples have equal
medians. *)
val two_sample_independent
: 'a array
-> 'a array
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* { 6 References }
, and .
" Statistics for the behavioral sciences " . Wadsworth Publishing
Company , 2006 .
+ . " Handbook of Parametric and Nonparametric
Statistical Procedures " , 3rd edition . CRC Press , 2003 .
+ Gravetter, Frederick J. and Larry B. Wallnau.
"Statistics for the behavioral sciences". Wadsworth Publishing
Company, 2006.
+ David J. Sheskin. "Handbook of Parametric and Nonparametric
Statistical Procedures", 3rd edition. CRC Press, 2003. *)
end
module WilcoxonT : sig
* signed - rank test , which evaluates the null hypothesis
that sample median is equal to the specified [ shift ] .
Test assumptions :
+ Sample under test was randomly selected from the population it
represents .
+ All [ vs - . shift ] differences are iid and come from a continious
population .
that sample median is equal to the specified [shift].
Test assumptions:
+ Sample under test was randomly selected from the population it
represents.
+ All [vs -. shift] differences are iid and come from a continious
population. *)
val one_sample
: float array
-> ?shift:float
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* Wilcoxon paired signed - rank test , which evaluates the null hypothesis
that two { e related } samples have equal medians .
Test assumptions :
+ Samples under test were randomly selected from the population
they represent .
+ Observation differences [ vs2 - . vs1 ] are iid and come from a
continious population .
that two {e related} samples have equal medians.
Test assumptions:
+ Samples under test were randomly selected from the population
they represent.
+ Observation differences [vs2 -. vs1] are iid and come from a
continious population. *)
val two_sample_paired
: float array
-> float array
-> ?alternative:test_alternative
-> ?correction:bool
-> unit
-> test_result
* { 6 References }
+ . " Handbook of Parametric and Nonparametric
Statistical Procedures " , 3rd edition . CRC Press , 2003 .
+
+ David J. Sheskin. "Handbook of Parametric and Nonparametric
Statistical Procedures", 3rd edition. CRC Press, 2003.
+ *)
end
module Sign : sig
val one_sample
: float array
-> ?shift:float
-> ?alternative:test_alternative
-> unit
-> test_result
* Dependent samples sign test , which evaluates the null hypothesis
that the median difference between observations from two { e related }
samples is zero .
Test assumptions :
+ Samples under test were randomly selected from the population they
represent .
that the median difference between observations from two {e related}
samples is zero.
Test assumptions:
+ Samples under test were randomly selected from the population they
represent. *)
val two_sample_paired
: float array
-> float array
-> ?alternative:test_alternative
-> unit
-> test_result
end
module Multiple : sig
type adjustment_method =
| HolmBonferroni
| BenjaminiHochberg
val adjust : float array -> adjustment_method -> float array
* { 6 References }
+ and . " Controlling the false discovery
rate : a practical and powerful approach to multiple testing . " ,
Journal of the Royal Statistical Society , Series B ( Methodological ) ,
pp289 - 300 , 1995 .
+ Yoav Benjamini and Yosef Hochberg. "Controlling the false discovery
rate: a practical and powerful approach to multiple testing.",
Journal of the Royal Statistical Society, Series B (Methodological),
pp289-300, 1995. *)
end
|
4bcc7c13912afc2c523e57daa8c0644c098780f324768ebb36dc88e3c9fc59bd | wireapp/wire-server | Main.hs | -- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Main
( main,
)
where
import Galley.Run (run)
import Imports
import OpenSSL (withOpenSSL)
import Util.Options
main :: IO ()
main = withOpenSSL $ do
let desc = "Galley - Conversation service"
defaultPath = "/etc/wire/galley/conf/galley.yaml"
options <- getOptions desc Nothing defaultPath
run options
| null | https://raw.githubusercontent.com/wireapp/wire-server/be524fac7eb46d5419c319d5f8161f0c518a7f69/services/galley/exec/Main.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. | Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Main
( main,
)
where
import Galley.Run (run)
import Imports
import OpenSSL (withOpenSSL)
import Util.Options
main :: IO ()
main = withOpenSSL $ do
let desc = "Galley - Conversation service"
defaultPath = "/etc/wire/galley/conf/galley.yaml"
options <- getOptions desc Nothing defaultPath
run options
|
76259e5569ae88ad9f0baeecf73cb83881670dcd0a3006a90066b285c1c2e755 | ntestoc3/burp-clj | context_menu.clj | (ns burp-clj.context-menu
(:require [seesaw.core :as gui]
[burp-clj.helper :as helper]
[burp-clj.utils :as utils])
(:import [burp
IContextMenuInvocation
IContextMenuFactory]
java.util.Arrays
))
(defn get-invocation-context
[invocation]
(-> (.getInvocationContext invocation)
helper/menu-invocation-context-inv))
(defn get-selected-messge
[invocation]
(.getSelectedMessages invocation))
(defn get-selected-text
"获得选中的字符"
[invocation]
(when-let [sel (.getSelectionBounds invocation)]
(when-let [msg (-> (get-selected-messge invocation)
first)]
(let [data (if (#{:message-editor-request
:message-viewer-request}
(get-invocation-context invocation))
(.getRequest msg)
(.getResponse msg))
[start end] sel]
( log ( format " sel:[%d % d ] " start end ) )
(-> (Arrays/copyOfRange data start end)
utils/->string)))))
(defn make-context-menu
[supported-context gen-menu-items-fn]
(reify IContextMenuFactory
(createMenuItems [this invocation]
(let [menu-ctx (get-invocation-context invocation)]
(if (supported-context menu-ctx)
(gen-menu-items-fn invocation)
[])))))
| null | https://raw.githubusercontent.com/ntestoc3/burp-clj/436802d34fb77e183cf513ba767e3310d96f6946/src/burp_clj/context_menu.clj | clojure | (ns burp-clj.context-menu
(:require [seesaw.core :as gui]
[burp-clj.helper :as helper]
[burp-clj.utils :as utils])
(:import [burp
IContextMenuInvocation
IContextMenuFactory]
java.util.Arrays
))
(defn get-invocation-context
[invocation]
(-> (.getInvocationContext invocation)
helper/menu-invocation-context-inv))
(defn get-selected-messge
[invocation]
(.getSelectedMessages invocation))
(defn get-selected-text
"获得选中的字符"
[invocation]
(when-let [sel (.getSelectionBounds invocation)]
(when-let [msg (-> (get-selected-messge invocation)
first)]
(let [data (if (#{:message-editor-request
:message-viewer-request}
(get-invocation-context invocation))
(.getRequest msg)
(.getResponse msg))
[start end] sel]
( log ( format " sel:[%d % d ] " start end ) )
(-> (Arrays/copyOfRange data start end)
utils/->string)))))
(defn make-context-menu
[supported-context gen-menu-items-fn]
(reify IContextMenuFactory
(createMenuItems [this invocation]
(let [menu-ctx (get-invocation-context invocation)]
(if (supported-context menu-ctx)
(gen-menu-items-fn invocation)
[])))))
| |
5ed77e2b4a1c1f787610ca5d38493e0d92b8bda14e124ff74a68188e991af547 | ates/netspire-core | gen_module.erl | -module(gen_module).
-export([start/0,
start_module/2,
stop_module/1,
restart_module/2,
loaded_modules/0,
loaded_modules_with_options/0,
get_option/2,
get_option/3,
is_loaded/1]).
-export([behaviour_info/1]).
-include("netspire.hrl").
-record(netspire_module, {id, opts}).
-define(MODULES_TABLE, netspire_modules).
behaviour_info(callbacks) ->
[{start, 1}, {stop, 0}];
behaviour_info(_) ->
undefined.
start() ->
?INFO_MSG("Starting module ~p~n", [?MODULE]),
ets:new(?MODULES_TABLE, [named_table, public, {keypos, 2}]).
start_module(Module, Options) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
try Module:start(Options) of
_ ->
Rec = #netspire_module{id = Module, opts = Options},
ets:insert(?MODULES_TABLE, Rec),
ok
catch
_:Reason ->
?ERROR_MSG("Error while starting module ~p: ~p~n", [Module, Reason]),
{error, Reason}
end;
_ ->
?WARNING_MSG("Dynamic module ~p already started~n", [Module]),
{error, already_started}
end.
stop_module(Module) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
?WARNING_MSG("Dynamic module ~p was not started~n", [Module]),
{error, not_started};
_ ->
safely_stop_module(Module)
end.
safely_stop_module(Module) ->
try Module:stop() of
{wait, ProcList} when is_list(ProcList) ->
lists:foreach(fun wait_for_process/1, ProcList),
ets:delete(?MODULES_TABLE, Module),
ok;
{wait, Process} ->
wait_for_process(Process),
ets:delete(?MODULES_TABLE, Module);
_ ->
ets:delete(?MODULES_TABLE, Module)
catch
exit:Reason ->
?ERROR_MSG("Error while stopping module due to ~p", [Reason]),
{error, Reason}
end.
restart_module(Module, NewOptions) ->
case is_loaded(Module) of
false ->
start_module(Module, NewOptions);
_ ->
stop_module(Module),
start_module(Module, NewOptions)
end.
wait_for_process(Process) ->
MonRef = erlang:monitor(process, Process),
wait_for_stop(Process, MonRef).
wait_for_stop(Process, MonRef) ->
receive
{'DOWN', MonRef, _Type, _Object, _Info} -> ok
after 5000 ->
catch exit(whereis(Process), kill),
wait_for_kill(MonRef)
end.
wait_for_kill(MonRef) ->
receive
{'DOWN', MonRef, _Type, _Object, _Info} -> ok
after 5000 -> ok
end.
get_option(Module, Name) ->
get_option(Module, Name, undefined).
get_option(Module, Name, Default) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
Default;
[#netspire_module{opts = Options}] ->
proplists:get_value(Name, Options, Default)
end.
is_loaded(Module) ->
ets:member(?MODULES_TABLE, Module).
loaded_modules() ->
ets:select(?MODULES_TABLE, [{{'_','$1','_'}, [], ['$1']}]).
loaded_modules_with_options() ->
ets:select(?MODULES_TABLE, [{{'_','$1','$2'}, [], [{{'$1', '$2'}}]}]).
| null | https://raw.githubusercontent.com/ates/netspire-core/746c0f254aa6f2669040d954096b4a95ae58b3ce/src/gen_module.erl | erlang | -module(gen_module).
-export([start/0,
start_module/2,
stop_module/1,
restart_module/2,
loaded_modules/0,
loaded_modules_with_options/0,
get_option/2,
get_option/3,
is_loaded/1]).
-export([behaviour_info/1]).
-include("netspire.hrl").
-record(netspire_module, {id, opts}).
-define(MODULES_TABLE, netspire_modules).
behaviour_info(callbacks) ->
[{start, 1}, {stop, 0}];
behaviour_info(_) ->
undefined.
start() ->
?INFO_MSG("Starting module ~p~n", [?MODULE]),
ets:new(?MODULES_TABLE, [named_table, public, {keypos, 2}]).
start_module(Module, Options) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
try Module:start(Options) of
_ ->
Rec = #netspire_module{id = Module, opts = Options},
ets:insert(?MODULES_TABLE, Rec),
ok
catch
_:Reason ->
?ERROR_MSG("Error while starting module ~p: ~p~n", [Module, Reason]),
{error, Reason}
end;
_ ->
?WARNING_MSG("Dynamic module ~p already started~n", [Module]),
{error, already_started}
end.
stop_module(Module) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
?WARNING_MSG("Dynamic module ~p was not started~n", [Module]),
{error, not_started};
_ ->
safely_stop_module(Module)
end.
safely_stop_module(Module) ->
try Module:stop() of
{wait, ProcList} when is_list(ProcList) ->
lists:foreach(fun wait_for_process/1, ProcList),
ets:delete(?MODULES_TABLE, Module),
ok;
{wait, Process} ->
wait_for_process(Process),
ets:delete(?MODULES_TABLE, Module);
_ ->
ets:delete(?MODULES_TABLE, Module)
catch
exit:Reason ->
?ERROR_MSG("Error while stopping module due to ~p", [Reason]),
{error, Reason}
end.
restart_module(Module, NewOptions) ->
case is_loaded(Module) of
false ->
start_module(Module, NewOptions);
_ ->
stop_module(Module),
start_module(Module, NewOptions)
end.
wait_for_process(Process) ->
MonRef = erlang:monitor(process, Process),
wait_for_stop(Process, MonRef).
wait_for_stop(Process, MonRef) ->
receive
{'DOWN', MonRef, _Type, _Object, _Info} -> ok
after 5000 ->
catch exit(whereis(Process), kill),
wait_for_kill(MonRef)
end.
wait_for_kill(MonRef) ->
receive
{'DOWN', MonRef, _Type, _Object, _Info} -> ok
after 5000 -> ok
end.
get_option(Module, Name) ->
get_option(Module, Name, undefined).
get_option(Module, Name, Default) ->
case ets:lookup(?MODULES_TABLE, Module) of
[] ->
Default;
[#netspire_module{opts = Options}] ->
proplists:get_value(Name, Options, Default)
end.
is_loaded(Module) ->
ets:member(?MODULES_TABLE, Module).
loaded_modules() ->
ets:select(?MODULES_TABLE, [{{'_','$1','_'}, [], ['$1']}]).
loaded_modules_with_options() ->
ets:select(?MODULES_TABLE, [{{'_','$1','$2'}, [], [{{'$1', '$2'}}]}]).
| |
f2259eaf94313d684f5d0553f0307a3f8fff4bafab66e44d86409ac2d6c70d29 | brendanhay/amazonka | DescribeResourcePermissions.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . WorkDocs . DescribeResourcePermissions
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Describes the permissions of a specified resource.
--
-- This operation returns paginated results.
module Amazonka.WorkDocs.DescribeResourcePermissions
( -- * Creating a Request
DescribeResourcePermissions (..),
newDescribeResourcePermissions,
-- * Request Lenses
describeResourcePermissions_authenticationToken,
describeResourcePermissions_limit,
describeResourcePermissions_marker,
describeResourcePermissions_principalId,
describeResourcePermissions_resourceId,
-- * Destructuring the Response
DescribeResourcePermissionsResponse (..),
newDescribeResourcePermissionsResponse,
-- * Response Lenses
describeResourcePermissionsResponse_marker,
describeResourcePermissionsResponse_principals,
describeResourcePermissionsResponse_httpStatus,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.WorkDocs.Types
-- | /See:/ 'newDescribeResourcePermissions' smart constructor.
data DescribeResourcePermissions = DescribeResourcePermissions'
  { -- | Amazon WorkDocs authentication token. Not required when using AWS
    -- administrator credentials to access the API.
    --
    -- Wrapped in 'Data.Sensitive', so it is presumably redacted from
    -- 'Prelude.Show' output — confirm against amazonka-core.
    authenticationToken :: Prelude.Maybe (Data.Sensitive Prelude.Text),
    -- | The maximum number of items to return with this call.
    limit :: Prelude.Maybe Prelude.Natural,
    -- | The marker for the next set of results. (You received this marker from a
    -- previous call)
    marker :: Prelude.Maybe Prelude.Text,
    -- | The ID of the principal to filter permissions by.
    principalId :: Prelude.Maybe Prelude.Text,
    -- | The ID of the resource.
    resourceId :: Prelude.Text
  }
  deriving (Prelude.Eq, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'DescribeResourcePermissions' with all optional fields omitted.
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'authenticationToken', 'describeResourcePermissions_authenticationToken' - Amazon WorkDocs authentication token. Not required when using AWS
-- administrator credentials to access the API.
--
-- 'limit', 'describeResourcePermissions_limit' - The maximum number of items to return with this call.
--
-- 'marker', 'describeResourcePermissions_marker' - The marker for the next set of results. (You received this marker from a
-- previous call)
--
-- 'principalId', 'describeResourcePermissions_principalId' - The ID of the principal to filter permissions by.
--
-- 'resourceId', 'describeResourcePermissions_resourceId' - The ID of the resource.
-- | Build a 'DescribeResourcePermissions' request for the given resource.
--
-- Every optional field ('authenticationToken', 'limit', 'marker',
-- 'principalId') starts out as 'Prelude.Nothing'; set them through the
-- record lenses as needed.
newDescribeResourcePermissions ::
  -- | 'resourceId'
  Prelude.Text ->
  DescribeResourcePermissions
newDescribeResourcePermissions rid =
  DescribeResourcePermissions'
    { resourceId = rid,
      authenticationToken = Prelude.Nothing,
      limit = Prelude.Nothing,
      marker = Prelude.Nothing,
      principalId = Prelude.Nothing
    }
-- | Amazon WorkDocs authentication token. Not required when using AWS
-- administrator credentials to access the API.
--
-- 'Lens.mapping' 'Data._Sensitive' lets callers work with the plain
-- 'Prelude.Text' while the field itself stays wrapped in 'Data.Sensitive'.
describeResourcePermissions_authenticationToken :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_authenticationToken = Lens.lens (\DescribeResourcePermissions' {authenticationToken} -> authenticationToken) (\s@DescribeResourcePermissions' {} a -> s {authenticationToken = a} :: DescribeResourcePermissions) Prelude.. Lens.mapping Data._Sensitive
-- | The maximum number of items to return with this call.
-- ('Prelude.Natural', so a negative page size cannot be expressed.)
describeResourcePermissions_limit :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Natural)
describeResourcePermissions_limit = Lens.lens (\DescribeResourcePermissions' {limit} -> limit) (\s@DescribeResourcePermissions' {} a -> s {limit = a} :: DescribeResourcePermissions)
-- | The marker for the next set of results. (You received this marker from a
-- previous call)
describeResourcePermissions_marker :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_marker = Lens.lens (\DescribeResourcePermissions' {marker} -> marker) (\s@DescribeResourcePermissions' {} a -> s {marker = a} :: DescribeResourcePermissions)
-- | The ID of the principal to filter permissions by.
describeResourcePermissions_principalId :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_principalId = Lens.lens (\DescribeResourcePermissions' {principalId} -> principalId) (\s@DescribeResourcePermissions' {} a -> s {principalId = a} :: DescribeResourcePermissions)
-- | The ID of the resource.
describeResourcePermissions_resourceId :: Lens.Lens' DescribeResourcePermissions Prelude.Text
describeResourcePermissions_resourceId = Lens.lens (\DescribeResourcePermissions' {resourceId} -> resourceId) (\s@DescribeResourcePermissions' {} a -> s {resourceId = a} :: DescribeResourcePermissions)
-- Pagination: keep fetching pages until the response carries no
-- continuation marker (or no principals at all); each follow-up request
-- is the original request with its 'marker' replaced by the marker from
-- the previous response.
instance Core.AWSPager DescribeResourcePermissions where
page rq rs
-- Stop when the response has no next-page marker.
| Core.stop
( rs
Lens.^? describeResourcePermissionsResponse_marker
Prelude.. Lens._Just
) =
Prelude.Nothing
-- Stop when the response returned no principals.
| Core.stop
( rs
Lens.^? describeResourcePermissionsResponse_principals
Prelude.. Lens._Just
) =
Prelude.Nothing
-- Otherwise: re-issue the request, carrying the response's marker.
| Prelude.otherwise =
Prelude.Just Prelude.$
rq
Prelude.& describeResourcePermissions_marker
Lens..~ rs
Lens.^? describeResourcePermissionsResponse_marker
Prelude.. Lens._Just
-- The wire protocol: an HTTP GET whose JSON response is decoded into
-- 'DescribeResourcePermissionsResponse'.  @Marker@ is optional,
-- @Principals@ defaults to the empty list when absent (via 'Core..!@'
-- 'Prelude.mempty'), and the HTTP status code is recorded verbatim.
instance Core.AWSRequest DescribeResourcePermissions where
type
AWSResponse DescribeResourcePermissions =
DescribeResourcePermissionsResponse
request overrides =
Request.get (overrides defaultService)
response =
Response.receiveJSON
( \s h x ->
DescribeResourcePermissionsResponse'
Prelude.<$> (x Data..?> "Marker")
Prelude.<*> (x Data..?> "Principals" Core..!@ Prelude.mempty)
Prelude.<*> (Prelude.pure (Prelude.fromEnum s))
)
-- Hash every field, so structurally equal requests hash equally.
instance Prelude.Hashable DescribeResourcePermissions where
hashWithSalt _salt DescribeResourcePermissions' {..} =
_salt `Prelude.hashWithSalt` authenticationToken
`Prelude.hashWithSalt` limit
`Prelude.hashWithSalt` marker
`Prelude.hashWithSalt` principalId
`Prelude.hashWithSalt` resourceId
-- Deep evaluation forces every field in declaration order.
instance Prelude.NFData DescribeResourcePermissions where
rnf DescribeResourcePermissions' {..} =
Prelude.rnf authenticationToken
`Prelude.seq` Prelude.rnf limit
`Prelude.seq` Prelude.rnf marker
`Prelude.seq` Prelude.rnf principalId
`Prelude.seq` Prelude.rnf resourceId
-- The authentication token travels in the @Authentication@ HTTP header,
-- not in the query string; a fixed JSON content type is always sent.
instance Data.ToHeaders DescribeResourcePermissions where
toHeaders DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "Authentication" Data.=# authenticationToken,
"Content-Type"
Data.=# ("application/x-amz-json-1.1" :: Prelude.ByteString)
]
-- Request path: @/api/v1/resources/{resourceId}/permissions@.
instance Data.ToPath DescribeResourcePermissions where
toPath DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "/api/v1/resources/",
Data.toBS resourceId,
"/permissions"
]
-- The three optional filters are passed as query parameters.
instance Data.ToQuery DescribeResourcePermissions where
toQuery DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "limit" Data.=: limit,
"marker" Data.=: marker,
"principalId" Data.=: principalId
]
-- | /See:/ 'newDescribeResourcePermissionsResponse' smart constructor.
data DescribeResourcePermissionsResponse = DescribeResourcePermissionsResponse'
{ -- | The marker to use when requesting the next set of results. If there are
-- no additional results, the string is empty.
marker :: Prelude.Maybe Prelude.Text,
-- | The principals.
principals :: Prelude.Maybe [Principal],
-- | The response's http status code.
-- (Transport-level status, recorded by the 'Core.AWSRequest' decoder.)
httpStatus :: Prelude.Int
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'DescribeResourcePermissionsResponse' with all optional fields omitted.
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'marker', 'describeResourcePermissionsResponse_marker' - The marker to use when requesting the next set of results. If there are
-- no additional results, the string is empty.
--
-- 'principals', 'describeResourcePermissionsResponse_principals' - The principals.
--
-- 'httpStatus', 'describeResourcePermissionsResponse_httpStatus' - The response's http status code.
-- | Build a 'DescribeResourcePermissionsResponse' carrying the given HTTP
-- status code; 'marker' and 'principals' start out as 'Prelude.Nothing'
-- and can be set through the response lenses.
newDescribeResourcePermissionsResponse ::
  -- | 'httpStatus'
  Prelude.Int ->
  DescribeResourcePermissionsResponse
newDescribeResourcePermissionsResponse status =
  DescribeResourcePermissionsResponse'
    { httpStatus = status,
      marker = Prelude.Nothing,
      principals = Prelude.Nothing
    }
-- | The marker to use when requesting the next set of results. If there are
-- no additional results, the string is empty.
describeResourcePermissionsResponse_marker :: Lens.Lens' DescribeResourcePermissionsResponse (Prelude.Maybe Prelude.Text)
describeResourcePermissionsResponse_marker = Lens.lens (\DescribeResourcePermissionsResponse' {marker} -> marker) (\s@DescribeResourcePermissionsResponse' {} a -> s {marker = a} :: DescribeResourcePermissionsResponse)
-- | The principals.
-- ('Lens.coerced' converts between the stored list and whatever newtype
-- wrapping the caller works with, at zero cost.)
describeResourcePermissionsResponse_principals :: Lens.Lens' DescribeResourcePermissionsResponse (Prelude.Maybe [Principal])
describeResourcePermissionsResponse_principals = Lens.lens (\DescribeResourcePermissionsResponse' {principals} -> principals) (\s@DescribeResourcePermissionsResponse' {} a -> s {principals = a} :: DescribeResourcePermissionsResponse) Prelude.. Lens.mapping Lens.coerced
-- | The response's http status code.
describeResourcePermissionsResponse_httpStatus :: Lens.Lens' DescribeResourcePermissionsResponse Prelude.Int
describeResourcePermissionsResponse_httpStatus = Lens.lens (\DescribeResourcePermissionsResponse' {httpStatus} -> httpStatus) (\s@DescribeResourcePermissionsResponse' {} a -> s {httpStatus = a} :: DescribeResourcePermissionsResponse)
-- Deep evaluation forces every response field.
instance
Prelude.NFData
DescribeResourcePermissionsResponse
where
rnf DescribeResourcePermissionsResponse' {..} =
Prelude.rnf marker
`Prelude.seq` Prelude.rnf principals
`Prelude.seq` Prelude.rnf httpStatus
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-workdocs/gen/Amazonka/WorkDocs/DescribeResourcePermissions.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Describes the permissions of a specified resource.
This operation returns paginated results.
* Creating a Request
* Request Lenses
* Destructuring the Response
* Response Lenses
| /See:/ 'newDescribeResourcePermissions' smart constructor.
administrator credentials to access the API.
| The maximum number of items to return with this call.
| The marker for the next set of results. (You received this marker from a
previous call)
| The ID of the principal to filter permissions by.
| The ID of the resource.
|
Create a value of 'DescribeResourcePermissions' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
administrator credentials to access the API.
'limit', 'describeResourcePermissions_limit' - The maximum number of items to return with this call.
'marker', 'describeResourcePermissions_marker' - The marker for the next set of results. (You received this marker from a
previous call)
'principalId', 'describeResourcePermissions_principalId' - The ID of the principal to filter permissions by.
'resourceId', 'describeResourcePermissions_resourceId' - The ID of the resource.
| 'resourceId'
administrator credentials to access the API.
| The maximum number of items to return with this call.
| The marker for the next set of results. (You received this marker from a
previous call)
| The ID of the principal to filter permissions by.
| The ID of the resource.
| /See:/ 'newDescribeResourcePermissionsResponse' smart constructor.
| The marker to use when requesting the next set of results. If there are
no additional results, the string is empty.
| The principals.
| The response's http status code.
|
Create a value of 'DescribeResourcePermissionsResponse' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'marker', 'describeResourcePermissionsResponse_marker' - The marker to use when requesting the next set of results. If there are
no additional results, the string is empty.
'principals', 'describeResourcePermissionsResponse_principals' - The principals.
'httpStatus', 'describeResourcePermissionsResponse_httpStatus' - The response's http status code.
| 'httpStatus'
| The marker to use when requesting the next set of results. If there are
no additional results, the string is empty.
| The principals.
| The response's http status code. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . WorkDocs . DescribeResourcePermissions
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.WorkDocs.DescribeResourcePermissions
DescribeResourcePermissions (..),
newDescribeResourcePermissions,
describeResourcePermissions_authenticationToken,
describeResourcePermissions_limit,
describeResourcePermissions_marker,
describeResourcePermissions_principalId,
describeResourcePermissions_resourceId,
DescribeResourcePermissionsResponse (..),
newDescribeResourcePermissionsResponse,
describeResourcePermissionsResponse_marker,
describeResourcePermissionsResponse_principals,
describeResourcePermissionsResponse_httpStatus,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.WorkDocs.Types
data DescribeResourcePermissions = DescribeResourcePermissions'
| Amazon WorkDocs authentication token . Not required when using AWS
authenticationToken :: Prelude.Maybe (Data.Sensitive Prelude.Text),
limit :: Prelude.Maybe Prelude.Natural,
marker :: Prelude.Maybe Prelude.Text,
principalId :: Prelude.Maybe Prelude.Text,
resourceId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
' authenticationToken ' , ' describeResourcePermissions_authenticationToken ' - Amazon WorkDocs authentication token . Not required when using AWS
newDescribeResourcePermissions ::
Prelude.Text ->
DescribeResourcePermissions
newDescribeResourcePermissions pResourceId_ =
DescribeResourcePermissions'
{ authenticationToken =
Prelude.Nothing,
limit = Prelude.Nothing,
marker = Prelude.Nothing,
principalId = Prelude.Nothing,
resourceId = pResourceId_
}
| Amazon WorkDocs authentication token . Not required when using AWS
describeResourcePermissions_authenticationToken :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_authenticationToken = Lens.lens (\DescribeResourcePermissions' {authenticationToken} -> authenticationToken) (\s@DescribeResourcePermissions' {} a -> s {authenticationToken = a} :: DescribeResourcePermissions) Prelude.. Lens.mapping Data._Sensitive
describeResourcePermissions_limit :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Natural)
describeResourcePermissions_limit = Lens.lens (\DescribeResourcePermissions' {limit} -> limit) (\s@DescribeResourcePermissions' {} a -> s {limit = a} :: DescribeResourcePermissions)
describeResourcePermissions_marker :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_marker = Lens.lens (\DescribeResourcePermissions' {marker} -> marker) (\s@DescribeResourcePermissions' {} a -> s {marker = a} :: DescribeResourcePermissions)
describeResourcePermissions_principalId :: Lens.Lens' DescribeResourcePermissions (Prelude.Maybe Prelude.Text)
describeResourcePermissions_principalId = Lens.lens (\DescribeResourcePermissions' {principalId} -> principalId) (\s@DescribeResourcePermissions' {} a -> s {principalId = a} :: DescribeResourcePermissions)
describeResourcePermissions_resourceId :: Lens.Lens' DescribeResourcePermissions Prelude.Text
describeResourcePermissions_resourceId = Lens.lens (\DescribeResourcePermissions' {resourceId} -> resourceId) (\s@DescribeResourcePermissions' {} a -> s {resourceId = a} :: DescribeResourcePermissions)
instance Core.AWSPager DescribeResourcePermissions where
page rq rs
| Core.stop
( rs
Lens.^? describeResourcePermissionsResponse_marker
Prelude.. Lens._Just
) =
Prelude.Nothing
| Core.stop
( rs
Lens.^? describeResourcePermissionsResponse_principals
Prelude.. Lens._Just
) =
Prelude.Nothing
| Prelude.otherwise =
Prelude.Just Prelude.$
rq
Prelude.& describeResourcePermissions_marker
Lens..~ rs
Lens.^? describeResourcePermissionsResponse_marker
Prelude.. Lens._Just
instance Core.AWSRequest DescribeResourcePermissions where
type
AWSResponse DescribeResourcePermissions =
DescribeResourcePermissionsResponse
request overrides =
Request.get (overrides defaultService)
response =
Response.receiveJSON
( \s h x ->
DescribeResourcePermissionsResponse'
Prelude.<$> (x Data..?> "Marker")
Prelude.<*> (x Data..?> "Principals" Core..!@ Prelude.mempty)
Prelude.<*> (Prelude.pure (Prelude.fromEnum s))
)
instance Prelude.Hashable DescribeResourcePermissions where
hashWithSalt _salt DescribeResourcePermissions' {..} =
_salt `Prelude.hashWithSalt` authenticationToken
`Prelude.hashWithSalt` limit
`Prelude.hashWithSalt` marker
`Prelude.hashWithSalt` principalId
`Prelude.hashWithSalt` resourceId
instance Prelude.NFData DescribeResourcePermissions where
rnf DescribeResourcePermissions' {..} =
Prelude.rnf authenticationToken
`Prelude.seq` Prelude.rnf limit
`Prelude.seq` Prelude.rnf marker
`Prelude.seq` Prelude.rnf principalId
`Prelude.seq` Prelude.rnf resourceId
instance Data.ToHeaders DescribeResourcePermissions where
toHeaders DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "Authentication" Data.=# authenticationToken,
"Content-Type"
Data.=# ("application/x-amz-json-1.1" :: Prelude.ByteString)
]
instance Data.ToPath DescribeResourcePermissions where
toPath DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "/api/v1/resources/",
Data.toBS resourceId,
"/permissions"
]
instance Data.ToQuery DescribeResourcePermissions where
toQuery DescribeResourcePermissions' {..} =
Prelude.mconcat
[ "limit" Data.=: limit,
"marker" Data.=: marker,
"principalId" Data.=: principalId
]
data DescribeResourcePermissionsResponse = DescribeResourcePermissionsResponse'
marker :: Prelude.Maybe Prelude.Text,
principals :: Prelude.Maybe [Principal],
httpStatus :: Prelude.Int
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
newDescribeResourcePermissionsResponse ::
Prelude.Int ->
DescribeResourcePermissionsResponse
newDescribeResourcePermissionsResponse pHttpStatus_ =
DescribeResourcePermissionsResponse'
{ marker =
Prelude.Nothing,
principals = Prelude.Nothing,
httpStatus = pHttpStatus_
}
describeResourcePermissionsResponse_marker :: Lens.Lens' DescribeResourcePermissionsResponse (Prelude.Maybe Prelude.Text)
describeResourcePermissionsResponse_marker = Lens.lens (\DescribeResourcePermissionsResponse' {marker} -> marker) (\s@DescribeResourcePermissionsResponse' {} a -> s {marker = a} :: DescribeResourcePermissionsResponse)
describeResourcePermissionsResponse_principals :: Lens.Lens' DescribeResourcePermissionsResponse (Prelude.Maybe [Principal])
describeResourcePermissionsResponse_principals = Lens.lens (\DescribeResourcePermissionsResponse' {principals} -> principals) (\s@DescribeResourcePermissionsResponse' {} a -> s {principals = a} :: DescribeResourcePermissionsResponse) Prelude.. Lens.mapping Lens.coerced
describeResourcePermissionsResponse_httpStatus :: Lens.Lens' DescribeResourcePermissionsResponse Prelude.Int
describeResourcePermissionsResponse_httpStatus = Lens.lens (\DescribeResourcePermissionsResponse' {httpStatus} -> httpStatus) (\s@DescribeResourcePermissionsResponse' {} a -> s {httpStatus = a} :: DescribeResourcePermissionsResponse)
instance
Prelude.NFData
DescribeResourcePermissionsResponse
where
rnf DescribeResourcePermissionsResponse' {..} =
Prelude.rnf marker
`Prelude.seq` Prelude.rnf principals
`Prelude.seq` Prelude.rnf httpStatus
|
39653bf5e866cd99ef722a734a23f848054fc89a6b2582e347b7cd89fcc87822 | edsko/ChinesePodAPI | API.hs | {-# LANGUAGE OverloadedStrings #-}
module Servant.ChinesePod.API (
api
-- * API specification
, ChinesePod
-- ** Account
, Login
, Logout
, GetUserInfo
-- *** Request types
, ReqLogin(..)
, ReqLogout(..)
, ReqGetUserInfo(..)
, ReqSignature(..)
-- *** Response types
, RespLogin(..)
, RespGetUserInfo(..)
-- ** Lesson
, GetLesson
-- *** Request types
, ReqGetLesson(..)
-- ** Library
, GetLatestLessons
, SearchLessons
-- *** Request types
, ReqGetLatestLessons(..)
, ReqSearchLessons(..)
-- * ChinesePod specific datatypes
, AccessToken(..)
, Example
, Expansion(..)
, GrammarPoint(..)
, GrammarSentence(..)
, Lesson(..)
, LessonContent(..)
, LessonContentType(..)
, Level(..)
, Sentence(..)
, UserId(..)
, V3Id(..)
, Vocabulary(..)
, Word(..)
-- * Auxiliary
-- ** Types
, OK(..)
, Undocumented
, SearchResults(..)
, StrOrInt(..)
-- ** Parsing combinators
, tryRead
, parseFailure
) where
import Prelude hiding (Word)
import Control.Monad
import Crypto.Hash
import Data.Aeson.Types hiding ((.:?))
import Data.Bifunctor (bimap)
import Data.Binary (Binary)
import Data.Data (Data)
import Data.Either (rights)
import Data.List (sortBy)
import Data.Map (Map)
import Data.Maybe (catMaybes)
import Data.Monoid ((<>))
import Data.Ord (comparing)
import Data.Proxy
import Data.String (IsString)
import Data.Text (Text)
import Data.Typeable
import GHC.Generics
import Servant.API
import Text.Show.Pretty (PrettyVal(..))
import Web.FormUrlEncoded
import qualified Data.Aeson.Types as Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as BS.UTF8
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Vector as Vector
import Servant.ChinesePod.Util.Orphans.PrettyVal ()
-- | Value-level proxy for the 'ChinesePod' API type, for use with
-- @servant@ client/server derivation.
api :: Proxy ChinesePod
api = Proxy
{-------------------------------------------------------------------------------
API
-------------------------------------------------------------------------------}
-- | Root of the ChinesePod REST API: every endpoint lives under @/api/0.6@.
type ChinesePod = "api" :> "0.6" :> Services
-- | The endpoints covered by this binding, grouped by service prefix
-- (@account@, @lesson@, @library@).
type Services = "account" :> "login" :> Login
:<|> "account" :> "logout" :> Logout
:<|> "account" :> "get-user-info" :> GetUserInfo
:<|> "lesson" :> "get-lesson" :> GetLesson
:<|> "library" :> "get-latest-lessons" :> GetLatestLessons
:<|> "library" :> "search-lessons" :> SearchLessons
type Login = Request ReqLogin RespLogin
type Logout = Request ReqLogout OK
type GetUserInfo = Request ReqGetUserInfo RespGetUserInfo
type GetLesson = Request ReqGetLesson LessonContent
type GetLatestLessons = Request ReqGetLatestLessons (SearchResults Lesson)
type SearchLessons = Request ReqSearchLessons (SearchResults Lesson)
-- | Every ChinesePod call is a form-URL-encoded POST returning JSON.
type Request req resp = ReqBody '[FormUrlEncoded] req :> Post '[JSON] resp
{-------------------------------------------------------------------------------
Request types
-------------------------------------------------------------------------------}
-- | Request body for 'Login': client credentials plus the computed
-- request signature (see 'ReqSignature').
data ReqLogin = ReqLogin {
reqLoginClientId :: String
, reqLoginEmail :: String
, reqLoginSignature :: ReqSignature
}
deriving (Show, Generic, Data)
-- | Ingredients of the login signature: the client secret and the user's
-- password.  Presumably combined and hashed (the module imports
-- "Crypto.Hash") before being sent — confirm in the serialization code.
data ReqSignature = ReqSignature {
reqSignatureClientSecret :: String
, reqSignatureUserPassword :: String
}
deriving (Show, Generic, Data)
-- | Request body for 'Logout': identifies the session to terminate.
data ReqLogout = ReqLogout {
reqLogoutAccessToken :: AccessToken
, reqLogoutUserId :: UserId
}
deriving (Show, Generic, Data)
-- | Request body for 'GetUserInfo'; the same credential pair as 'ReqLogout'.
data ReqGetUserInfo = ReqGetUserInfo {
reqGetUserInfoAccessToken :: AccessToken
, reqGetUserInfoUserId :: UserId
}
deriving (Show, Generic, Data)
-- | Request body for 'GetLesson'.  'reqGetLessonType' optionally narrows
-- the returned content to one section (see 'LessonContentType').
data ReqGetLesson = ReqGetLesson {
reqGetLessonAccessToken :: AccessToken
, reqGetLessonUserId :: UserId
, reqGetLessonV3Id :: V3Id
, reqGetLessonType :: Maybe LessonContentType
}
deriving (Show, Generic, Data)
-- | Request body for 'SearchLessons'.  All 'Maybe' fields are optional
-- filters/paging controls; when omitted the server defaults apply.
data ReqSearchLessons = ReqSearchLessons {
reqSearchLessonsAccessToken :: AccessToken
, reqSearchLessonsUserId :: UserId
, reqSearchLessonsSearch :: String
, reqSearchLessonsSearchLevel :: Maybe Level
, reqSearchLessonsNumResults :: Maybe Int
, reqSearchLessonsPage :: Maybe Int
}
deriving (Show, Generic, Data)
-- | Request body for 'GetLatestLessons'.  Paging ('page', 'count'),
-- language and level filters are all optional.
data ReqGetLatestLessons = ReqGetLatestLessons {
reqGetLatestLessonsAccessToken :: AccessToken
, reqGetLatestLessonsUserId :: UserId
, reqGetLatestLessonsPage :: Maybe Int
, reqGetLatestLessonsCount :: Maybe Int
, reqGetLatestLessonsLang :: Maybe String
, reqGetLatestLessonsLevelId :: Maybe Level
}
deriving (Show, Generic, Data)
{-------------------------------------------------------------------------------
Responses
-------------------------------------------------------------------------------}
-- | Response to 'Login': the session credentials ('respLoginAccessToken',
-- 'respLoginUserId') followed by the profile and counter fields the
-- service returns alongside them.
data RespLogin = RespLogin {
respLoginAccessToken :: AccessToken
, respLoginUserId :: UserId
, respLoginUsername :: String
, respLoginName :: String
, respLoginSelfStudyLessonsTotal :: Int
, respLoginAssignedLessonsTotal :: Int
, respLoginCoursesCount :: Int
, respLoginLang :: String
, respLoginBio :: String
, respLoginAvatarUrl :: String
, respLoginNewLessonNotification :: Bool
, respLoginNewShowNotification :: Bool
, respLoginNewsletterNotification :: Bool
, respLoginGeneralNotification :: Bool
, respLoginBookmarkedLessons :: Int
, respLoginSubscribedLessons :: Int
, respLoginStudiedLessons :: Int
}
deriving (Show, Generic, Data)
-- | Response to 'GetUserInfo'.  'respGetUserInfoType' is wrapped in
-- 'Undocumented' (a marker defined elsewhere in this module), i.e. a
-- field observed in responses but absent from the official API docs.
data RespGetUserInfo = RespGetUserInfo {
respGetUserInfoName :: String
, respGetUserInfoUsername :: String
, respGetUserInfoAvatarUrl :: String
, respGetUserInfoBio :: String
, respGetUserInfoUseTraditionalCharacters :: Bool
, respGetUserInfoUserId :: UserId
, respGetUserInfoNewLessonNotification :: Bool
, respGetUserInfoNewShowNotification :: Bool
, respGetUserInfoNewsletterNotification :: Bool
, respGetUserInfoGeneralNotification :: Bool
, respGetUserInfoLevel :: Maybe Level
, respGetUserInfoType :: Undocumented String
}
deriving (Show, Generic, Data)
{-------------------------------------------------------------------------------
ChinesePod specific datatypes
-------------------------------------------------------------------------------}
-- | Opaque user identifier, kept in its raw string form; the newtype
-- deriving reuses the underlying 'String' instances (JSON, HTTP data).
newtype UserId = UserId { userIdString :: String }
deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)
-- | Session token returned by 'Login' and required by most requests.
newtype AccessToken = AccessToken { accessTokenString :: String }
deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)
-- | ChinesePod lesson identifier (the service's "v3" ID).
newtype V3Id = V3Id { v3IdString :: String }
deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)
-- | Some ChinesePod requests simply return OK
data OK = OK
deriving (Show, Generic, Data)
-- | User level
--
-- The named constructors mirror ChinesePod's difficulty tiers;
-- 'LevelOther' carries any level string the parser does not recognize
-- (presumably — confirm in the FromJSON/parsing code elsewhere in this
-- module).
data Level =
LevelNewbie
| LevelElementary
| LevelIntermediate
| LevelUpperIntermediate
| LevelAdvanced
| LevelMedia
| LevelOther String
deriving (Show, Generic, Data)
-- | Summary record for a lesson, as returned by the library searches
-- ('SearchLessons', 'GetLatestLessons').  Timestamps and media URLs are
-- kept as raw strings; 'Maybe' fields are absent from some listings.
data Lesson = Lesson {
lessonV3Id :: V3Id
, lessonTitle :: String
, lessonIntroduction :: String
, lessonLevel :: Maybe Level
, lessonName :: String
, lessonSlug :: String
, lessonLessonId :: Maybe String
, lessonPublicationTimestamp :: String
, lessonImage :: String
, lessonBookMarked :: Bool
, lessonMarkAsStudied :: Bool
, lessonSource :: Maybe String
, lessonStatus :: Maybe String
, lessonRadioQualityMp3 :: Maybe String
, lessonDialogueMp3 :: Maybe String
, lessonReviewMp3 :: Maybe String
}
deriving (Show, Generic, Data)
-- | Which section of a lesson to fetch in 'GetLesson'
-- ('reqGetLessonType'); 'LessonContentAll' requests everything.
data LessonContentType =
LessonContentAll
| LessonContentExercise
| LessonContentVocabulary
| LessonContentDialogue
| LessonContentGrammar
deriving (Show, Generic, Data)
-- | Full lesson payload returned by 'GetLesson'.  Field names follow the
-- service's JSON attributes; most values are kept as raw strings
-- (including timestamps and sizes reported as strings by the API), and
-- 'Maybe' fields are absent for some lesson types.
data LessonContent = LessonContent {
lessonContentContentId :: String
, lessonContentCreatedAt :: String
, lessonContentUpdatedAt :: String
, lessonContentStatusComments :: String
, lessonContentStatusLocked :: String
, lessonContentStatusPublished :: String
, lessonContentCreatedBy :: String
, lessonContentUpdatedBy :: String
, lessonContentPopularity :: String
, lessonContentRank :: String
, lessonContentSlug :: String
, lessonContentType :: String
, lessonContentSeriesId :: String
, lessonContentChannelId :: String
, lessonContentMaturity :: String
, lessonContentTitle :: String
, lessonContentIntroduction :: String
, lessonContentTheme :: String
, lessonContentChannel :: String
, lessonContentLevel :: Maybe Level
, lessonContentHosts :: String
, lessonContentV3Id :: V3Id
, lessonContentHashCode :: String
, lessonContentPublicationTimestamp :: String
, lessonContentTimeOffset :: String
, lessonContentImage :: String
, lessonContentText :: String
, lessonContentTranscription1 :: String
, lessonContentTranscription2 :: String
-- Media assets: URLs/file names plus their sizes and lengths.
, lessonContentMp3Media :: String
, lessonContentMp3Mobile :: String
, lessonContentPdf1 :: String
, lessonContentPdf2 :: String
, lessonContentPdf3 :: String
, lessonContentPdf4 :: String
, lessonContentPpt :: Maybe String
, lessonContentPptSize :: Maybe String
, lessonContentVideoFix :: String
, lessonContentLinkSource :: String
, lessonContentLinkRelated :: String
, lessonContentExercisesExercise1 :: String
, lessonContentExercisesExercise2 :: String
, lessonContentExercisesExercise3 :: String
, lessonContentExercisesExercise4 :: String
, lessonContentXmlFileName :: String
, lessonContentMp3DialogueSize :: Int
, lessonContentMp3MediaSize :: Int
, lessonContentMp3MobileSize :: Int
, lessonContentMp3PublicSize :: Int
, lessonContentMp3PrivateSize :: Int
, lessonContentMp3ThefixSize :: Int
, lessonContentMp3ThefixLength :: String
, lessonContentMp3PublicLength :: String
, lessonContentMp3PrivateLength :: String
, lessonContentMp3MobileLength :: String
, lessonContentMp3MediaLength :: String
, lessonContentMp3DialogueLength :: String
, lessonContentVideoFlv :: String
, lessonContentVideoFlvSize :: Int
, lessonContentVideoFlvLength :: String
, lessonContentVideoMp4 :: String
, lessonContentVideoMp4Size :: Int
, lessonContentVideoMp4Length :: String
, lessonContentVideoM4v :: String
, lessonContentVideoM4vSize :: Int
, lessonContentVideoM4vLength :: String
, lessonContentLastCommentId :: String
, lessonContentLastCommentTime :: String
, lessonContentIsPrivate :: Bool
, lessonContentVideo :: Maybe String
, lessonContentLessonPlan :: String
, lessonContentLessonAssignment :: String
, lessonContentName :: String
, lessonContentSeriesName :: String
, lessonContentRadioQualityMp3 :: String
, lessonContentCdQualityMp3 :: Maybe String
, lessonContentDialogueMp3 :: Maybe String
, lessonContentReviewMp3 :: Maybe String
, lessonContentCommentCount :: Int
, lessonContentVideoLesson :: Maybe Bool
, lessonContentAccessLevel :: String
, lessonContentBookMarked :: Bool
, lessonContentMarkAsStudied :: Bool
-- Teacher/assignment review metadata (only present for some lessons).
, lessonContentStudentFullname :: String
, lessonContentPostDate :: Maybe String
, lessonContentStudentComment :: Maybe String
, lessonContentFileName :: String
, lessonContentFileUrl :: Maybe String
, lessonContentTeacherName :: Maybe String
, lessonContentTeacherId :: Maybe String
, lessonContentReviewDate :: Maybe String
, lessonContentTeacherFeedback :: Maybe String
-- Structured lesson content proper.
, lessonContentTopics :: [String]
, lessonContentFunctions :: [String]
, lessonContentDialogue :: Maybe [Sentence]
, lessonContentGrammar :: Maybe [GrammarPoint]
, lessonContentExpansion :: Maybe Expansion
, lessonContentVocabulary :: Maybe Vocabulary
}
deriving (Show, Generic, Data)
-- | One dialogue sentence, with per-word breakdown in
-- 'sentenceSentenceWords'.  'sentenceSource'/'sentenceTarget' hold the
-- two languages (which is which is not fixed here — see the service
-- docs); 'sentenceSourceT' is presumably the traditional-character
-- variant, mirroring 'wordSourceT'.
data Sentence = Sentence {
sentenceV3Id :: V3Id
, sentenceAudio :: String
, sentenceDisplayOrder :: Int
, sentenceId :: String
, sentencePinyin :: String
, sentenceRow3 :: String
, sentenceRow4 :: String
, sentenceSource :: String
, sentenceSourceT :: Maybe String
, sentenceSpeaker :: String
, sentenceTarget :: String
, sentenceVocabulary :: String
, sentenceSentenceWords :: [Word]
}
deriving (Show, Generic, Data)
-- | A single vocabulary word.
--
-- Most fields are optional because the API omits them in some contexts
-- (see the 'FromJSON' instance, which uses '(.:?)' for them).
data Word = Word {
      wordV3Id :: Maybe V3Id
    , wordAudio :: Maybe String
    , wordId :: Maybe String
    , wordPinyin :: String
    , wordSource :: String
    , wordSourceT :: String
    , wordTarget :: String
    , wordVcid :: Maybe String
    , wordImage :: Maybe String
    , wordDisplayOrder :: Maybe Int
    , wordVocabularyClass :: Maybe String
    }
  deriving (Show, Generic, Data)
-- | A grammar point attached to a lesson, with its example sentences.
--
-- 'grammarPointLevel' is decoded from the @level_name@ JSON key (see the
-- 'FromJSON' instance) and may be absent.
data GrammarPoint = GrammarPoint {
      grammarPointCreateTime :: String
    , grammarPointDisplayLayer :: Int
    , grammarPointDisplaySort :: Int
    , grammarPointDisplayType :: String
    , grammarPointGrammarId :: String
    , grammarPointImage :: String
    , grammarPointIntroduction :: String
    , grammarPointLevel :: Maybe Level
    , grammarPointName :: String
    , grammarPointParentId :: String
    , grammarPointPath :: String
    , grammarPointProductionId :: String
    , grammarPointRelatedGrammar :: String
    , grammarPointSentences :: [GrammarSentence]
    , grammarPointSummary :: String
    , grammarPointTree :: String
    , grammarPointUpdateTime :: String
    }
  deriving (Show, Generic, Data)
-- | An example sentence illustrating a 'GrammarPoint'.
--
-- The many 'Maybe' fields reflect keys the API omits or sends as @null@
-- (decoded with '(.:?)' / '(.:?~)' in the 'FromJSON' instance).
data GrammarSentence = GrammarSentence {
      grammarSentenceAudio :: String
    , grammarSentenceCreateTime :: Maybe String
    , grammarSentenceDescription :: String
    , grammarSentenceDisplaySort :: Maybe Int
    , grammarSentenceGrammarBlockId :: Maybe String
    , grammarSentenceGrammarId :: String
    , grammarSentenceGrammarSentenceId :: Maybe String
    , grammarSentenceIsCorrect :: Maybe Bool
    , grammarSentencePinyin :: String
    , grammarSentenceSource :: String
    , grammarSentenceSourceAudio :: Maybe String
    , grammarSentenceSourceT :: String
    , grammarSentenceSourceTrad :: Maybe String
    , grammarSentenceSummary :: String
    , grammarSentenceTarget :: Maybe String
    , grammarSentenceTargetAnnotate :: Maybe String
    , grammarSentenceTargetAudio :: Maybe String
    , grammarSentenceTargetTrad :: Maybe String
    , grammarSentenceTips :: Maybe String
    , grammarSentenceUpdateTime :: Maybe String
    , grammarSentenceWords :: [Word]
    }
  deriving (Show, Generic, Data)
-- | An example sentence used in the expansion section of a lesson.
data Example = Example {
      exampleAudio :: String
    , exampleExpansionWord :: [Word]  -- ^ decoded via 'MaybeIndexed' (list or indexed object)
    , exampleId :: String
    , examplePinyin :: String
    , exampleSource :: String
    , exampleSourceT :: Maybe String
    , exampleTarget :: String
    }
  deriving (Show, Generic, Data)
-- | Vocabulary of a lesson, split into key and supplementary words
-- (JSON keys @key_vocab@ and @sup_vocab@).
data Vocabulary = Vocabulary {
      vocabularyKeyVocab :: [Word]
    , vocabularySupVocab :: [Word]
    }
  deriving (Show, Generic, Data)
-- | Expansion section of a lesson: example sentences keyed by the word
-- being expanded on (the 'FromJSON' instance decodes a JSON object whose
-- keys are the words).
data Expansion = Expansion {
      expansion :: Map String [Example]
    }
  deriving (Show, Generic, Data)
{-------------------------------------------------------------------------------
Encoding requests
-------------------------------------------------------------------------------}
-- | The 'ToHttpApiData' instance for 'ReqSignature' renders the signature:
-- the SHA1 digest (hex, via 'show') of client secret ++ user password.
instance ToHttpApiData ReqSignature where
    toQueryParam ReqSignature{..} = toQueryParam (show digest)
      where
        digest :: Digest SHA1
        digest = hash . BS.UTF8.fromString $
                   reqSignatureClientSecret ++ reqSignatureUserPassword
-- | Form encoding for the login request.
instance ToForm ReqLogin where
    toForm ReqLogin{..} = mkForm
      [ ("client_id", toQueryParam reqLoginClientId)
      , ("email",     toQueryParam reqLoginEmail)
      , ("signature", toQueryParam reqLoginSignature)
      ]
-- | Form encoding for the logout request.
instance ToForm ReqLogout where
    toForm ReqLogout{..} = mkForm
      [ ("access_token", toQueryParam reqLogoutAccessToken)
      , ("user_id",      toQueryParam reqLogoutUserId)
      ]
-- | Form encoding for the get-user-info request.
instance ToForm ReqGetUserInfo where
    toForm ReqGetUserInfo{..} = mkForm
      [ ("access_token", toQueryParam reqGetUserInfoAccessToken)
      , ("user_id",      toQueryParam reqGetUserInfoUserId)
      ]
-- | Form encoding for lesson search; optional arguments are dropped from
-- the form when they are 'Nothing'.
instance ToForm ReqSearchLessons where
    toForm ReqSearchLessons{..} = mkForm $ required ++ catMaybes optional
      where
        required =
          [ ("access_token", toQueryParam reqSearchLessonsAccessToken)
          , ("user_id",      toQueryParam reqSearchLessonsUserId)
          , ("search",       toQueryParam reqSearchLessonsSearch)
          ]
        optional =
          [ optFormArg "search_level" (toQueryParam . Str) reqSearchLessonsSearchLevel
          , optFormArg "num_results"  toQueryParam         reqSearchLessonsNumResults
          , optFormArg "page"         toQueryParam         reqSearchLessonsPage
          ]
-- | Form encoding for the latest-lessons request; the level is sent as its
-- numeric code ('Int' wrapper), optional arguments are omitted when absent.
instance ToForm ReqGetLatestLessons where
    toForm ReqGetLatestLessons{..} = mkForm $ required ++ catMaybes optional
      where
        required =
          [ ("access_token", toQueryParam reqGetLatestLessonsAccessToken)
          , ("user_id",      toQueryParam reqGetLatestLessonsUserId)
          ]
        optional =
          [ optFormArg "page"     toQueryParam         reqGetLatestLessonsPage
          , optFormArg "count"    toQueryParam         reqGetLatestLessonsCount
          , optFormArg "lang"     toQueryParam         reqGetLatestLessonsLang
          , optFormArg "level_id" (toQueryParam . Int) reqGetLatestLessonsLevelId
          ]
-- | Form encoding for the get-lesson request (note the @v3id@ key has no
-- underscore, matching the API).
instance ToForm ReqGetLesson where
    toForm ReqGetLesson{..} = mkForm $ required ++ catMaybes optional
      where
        required =
          [ ("access_token", toQueryParam reqGetLessonAccessToken)
          , ("user_id",      toQueryParam reqGetLessonUserId)
          , ("v3id",         toQueryParam reqGetLessonV3Id)
          ]
        optional =
          [ optFormArg "type" toQueryParam reqGetLessonType ]
{-------------------------------------------------------------------------------
Auxiliary: Constructing forms
-------------------------------------------------------------------------------}
-- | Similar to 'fromEntriesByKey', but allowing only a single value per
-- entry and requires the /caller/ to call 'toQueryParam' on the values
-- (so that different entries can be of different types).
-- | Similar to 'fromEntriesByKey', but allowing only a single value per
-- entry and requiring the /caller/ to call 'toQueryParam' on the values
-- (so that different entries can be of different types).
mkForm :: [(Text, Text)] -> Form
mkForm entries =
    Form $ HashMap.fromListWith (<>) [ (toFormKey key, [val]) | (key, val) <- entries ]
-- | Build an optional form entry: a 'Nothing' argument yields no entry at
-- all, a 'Just' is rendered with the supplied encoder.
optFormArg :: Text -> (a -> Text) -> Maybe a -> Maybe (Text, Text)
optFormArg _  _ Nothing  = Nothing
optFormArg nm f (Just a) = Just (nm, f a)
{-------------------------------------------------------------------------------
Decoding responses
-------------------------------------------------------------------------------}
-- | Decode the login response.
--
-- Fields the API serves inconsistently as string-or-number (counts, flags)
-- use the approximate accessor '(.:~)'.
instance FromJSON RespLogin where
    parseJSON = withObject "RespLogin" $ \obj -> do
      respLoginAccessToken <- obj .: "access_token"
      respLoginUserId <- obj .: "user_id"
      respLoginUsername <- obj .: "username"
      respLoginName <- obj .: "name"
      respLoginSelfStudyLessonsTotal <- obj .:~ "self_study_lessons_total"
      respLoginAssignedLessonsTotal <- obj .: "assigned_lessons_total"
      respLoginCoursesCount <- obj .:~ "courses_count"
      respLoginLang <- obj .: "lang"
      respLoginBio <- obj .: "bio"
      respLoginAvatarUrl <- obj .: "avatar_url"
      respLoginNewLessonNotification <- obj .:~ "new_lesson_notification"
      respLoginNewShowNotification <- obj .:~ "new_show_notification"
      respLoginNewsletterNotification <- obj .:~ "newsletter_notification"
      respLoginGeneralNotification <- obj .:~ "general_notification"
      respLoginBookmarkedLessons <- obj .: "bookmarked_lessons"
      respLoginSubscribedLessons <- obj .: "subscribed_lessons"
      respLoginStudiedLessons <- obj .: "studied_lessons"
      return RespLogin{..}
-- | Decode the get-user-info response; string-or-number fields go through
-- '(.:~)', the undocumented @type@ field is optional.
instance FromJSON RespGetUserInfo where
    parseJSON = withObject "RespGetUserInfo" $ \obj -> do
      respGetUserInfoName <- obj .: "name"
      respGetUserInfoUsername <- obj .: "username"
      respGetUserInfoAvatarUrl <- obj .: "avatar_url"
      respGetUserInfoBio <- obj .: "bio"
      respGetUserInfoUseTraditionalCharacters <- obj .:~ "use_traditional_characters"
      respGetUserInfoUserId <- obj .:~ "user_id"
      respGetUserInfoNewLessonNotification <- obj .:~ "new_lesson_notification"
      respGetUserInfoNewShowNotification <- obj .:~ "new_show_notification"
      respGetUserInfoNewsletterNotification <- obj .:~ "newsletter_notification"
      respGetUserInfoGeneralNotification <- obj .:~ "general_notification"
      respGetUserInfoLevel <- obj .:~ "level"
      respGetUserInfoType <- obj .:? "type"
      return RespGetUserInfo{..}
{-------------------------------------------------------------------------------
  Encoding/decoding ChinesePod types
-------------------------------------------------------------------------------}
-- | Decode the generic @{"result": "OK"}@ acknowledgement.
instance FromJSON OK where
    parseJSON = withObject "OK" $ \obj -> do
      result <- obj .: "result"
      if (result :: String) == "OK"
        then return OK
        else fail "Expected OK"
-- | Decode a lesson summary.
--
-- @level@ is string-or-number and may be null/absent (hence the 'join');
-- @name@ defaults to the empty string when missing; boolean flags arrive as
-- string-or-number and go through '(.:~)'.
instance FromJSON Lesson where
    parseJSON = withObject "Lesson" $ \obj -> do
      lessonV3Id <- obj .: "v3_id"
      lessonTitle <- obj .: "title"
      lessonIntroduction <- obj .: "introduction"
      lessonLevel <- join <$> obj .:?~ "level"
      lessonName <- obj .:? "name" .!= ""
      lessonSlug <- obj .: "slug"
      lessonLessonId <- obj .:? "lesson_id"
      lessonPublicationTimestamp <- obj .: "publication_timestamp"
      lessonImage <- obj .: "image"
      lessonBookMarked <- obj .:~ "book_marked"
      lessonMarkAsStudied <- obj .:~ "mark_as_studied"
      lessonSource <- obj .:? "source"
      lessonStatus <- obj .:? "status"
      lessonRadioQualityMp3 <- obj .:? "radio_quality_mp3"
      lessonDialogueMp3 <- obj .:? "dialogue_mp3"
      lessonReviewMp3 <- obj .:? "review_mp3"
      return Lesson{..}
-- | Render a 'LessonContentType' as the query-parameter value the API expects.
instance ToHttpApiData LessonContentType where
    toQueryParam contentType = case contentType of
      LessonContentAll        -> "all"
      LessonContentExercise   -> "exercise"
      LessonContentVocabulary -> "vocabulary"
      LessonContentDialogue   -> "dialogue"
      LessonContentGrammar    -> "grammar"
-- | Inverse of the 'ToHttpApiData' instance; anything unrecognised is an error.
instance FromHttpApiData LessonContentType where
    parseQueryParam txt = case txt of
      "all"        -> Right LessonContentAll
      "exercise"   -> Right LessonContentExercise
      "vocabulary" -> Right LessonContentVocabulary
      "dialogue"   -> Right LessonContentDialogue
      "grammar"    -> Right LessonContentGrammar
      _            -> Left $ T.pack $ "Invalid lesson content type " ++ show txt
-- | Decode the full lesson content record.
--
-- String-or-number fields (sizes, counts, flags) use '(.:~)'; fields the API
-- may omit or send as @null@ use '(.:?)'. The field order mirrors the record
-- declaration.
instance FromJSON LessonContent where
    parseJSON = withObject "LessonContent" $ \obj -> do
      lessonContentContentId <- obj .: "content_id"
      lessonContentCreatedAt <- obj .: "created_at"
      lessonContentUpdatedAt <- obj .: "updated_at"
      lessonContentStatusComments <- obj .: "status_comments"
      lessonContentStatusLocked <- obj .: "status_locked"
      lessonContentStatusPublished <- obj .: "status_published"
      lessonContentCreatedBy <- obj .: "created_by"
      lessonContentUpdatedBy <- obj .: "updated_by"
      lessonContentPopularity <- obj .: "popularity"
      lessonContentRank <- obj .: "rank"
      lessonContentSlug <- obj .: "slug"
      lessonContentType <- obj .: "type"
      lessonContentSeriesId <- obj .: "series_id"
      lessonContentChannelId <- obj .: "channel_id"
      lessonContentMaturity <- obj .: "maturity"
      lessonContentTitle <- obj .: "title"
      lessonContentIntroduction <- obj .: "introduction"
      lessonContentTheme <- obj .: "theme"
      lessonContentChannel <- obj .: "channel"
      lessonContentLevel <- join <$> obj .:?~ "level"
      lessonContentHosts <- obj .: "hosts"
      lessonContentV3Id <- obj .: "v3_id"
      lessonContentHashCode <- obj .: "hash_code"
      lessonContentPublicationTimestamp <- obj .: "publication_timestamp"
      lessonContentTimeOffset <- obj .: "time_offset"
      lessonContentImage <- obj .: "image"
      lessonContentText <- obj .: "text"
      lessonContentTranscription1 <- obj .: "transcription1"
      lessonContentTranscription2 <- obj .: "transcription2"
      lessonContentMp3Media <- obj .: "mp3_media"
      lessonContentMp3Mobile <- obj .: "mp3_mobile"
      lessonContentPdf1 <- obj .: "pdf1"
      lessonContentPdf2 <- obj .: "pdf2"
      lessonContentPdf3 <- obj .: "pdf3"
      lessonContentPdf4 <- obj .: "pdf4"
      lessonContentPpt <- obj .:? "ppt"
      lessonContentPptSize <- obj .:? "ppt_size"
      lessonContentVideoFix <- obj .: "video_fix"
      lessonContentLinkSource <- obj .: "link_source"
      lessonContentLinkRelated <- obj .: "link_related"
      lessonContentExercisesExercise1 <- obj .: "exercises_exercise1"
      lessonContentExercisesExercise2 <- obj .: "exercises_exercise2"
      lessonContentExercisesExercise3 <- obj .: "exercises_exercise3"
      lessonContentExercisesExercise4 <- obj .: "exercises_exercise4"
      lessonContentXmlFileName <- obj .: "xml_file_name"
      lessonContentMp3DialogueSize <- obj .:~ "mp3_dialogue_size"
      lessonContentMp3MediaSize <- obj .:~ "mp3_media_size"
      lessonContentMp3MobileSize <- obj .:~ "mp3_mobile_size"
      lessonContentMp3PublicSize <- obj .:~ "mp3_public_size"
      lessonContentMp3PrivateSize <- obj .:~ "mp3_private_size"
      lessonContentMp3ThefixSize <- obj .:~ "mp3_thefix_size"
      lessonContentMp3ThefixLength <- obj .: "mp3_thefix_length"
      lessonContentMp3PublicLength <- obj .: "mp3_public_length"
      lessonContentMp3PrivateLength <- obj .: "mp3_private_length"
      lessonContentMp3MobileLength <- obj .: "mp3_mobile_length"
      lessonContentMp3MediaLength <- obj .: "mp3_media_length"
      lessonContentMp3DialogueLength <- obj .: "mp3_dialogue_length"
      lessonContentVideoFlv <- obj .: "video_flv"
      lessonContentVideoFlvSize <- obj .:~ "video_flv_size"
      lessonContentVideoFlvLength <- obj .: "video_flv_length"
      lessonContentVideoMp4 <- obj .: "video_mp4"
      lessonContentVideoMp4Size <- obj .:~ "video_mp4_size"
      lessonContentVideoMp4Length <- obj .: "video_mp4_length"
      lessonContentVideoM4v <- obj .: "video_m4v"
      lessonContentVideoM4vSize <- obj .:~ "video_m4v_size"
      lessonContentVideoM4vLength <- obj .: "video_m4v_length"
      lessonContentLastCommentId <- obj .: "last_comment_id"
      lessonContentLastCommentTime <- obj .: "last_comment_time"
      lessonContentIsPrivate <- obj .:~ "is_private"
      lessonContentVideo <- obj .:? "video"
      lessonContentLessonPlan <- obj .: "lesson_plan"
      lessonContentLessonAssignment <- obj .: "lesson_assignment"
      lessonContentName <- obj .: "name"
      lessonContentSeriesName <- obj .: "series_name"
      lessonContentRadioQualityMp3 <- obj .: "radio_quality_mp3"
      lessonContentCdQualityMp3 <- obj .:? "cd_quality_mp3"
      lessonContentDialogueMp3 <- obj .:? "dialogue_mp3"
      lessonContentReviewMp3 <- obj .:? "review_mp3"
      lessonContentCommentCount <- obj .:~ "comment_count"
      lessonContentVideoLesson <- obj .:? "video_lesson"
      lessonContentAccessLevel <- obj .: "access_level"
      lessonContentBookMarked <- obj .:~ "book_marked"
      lessonContentMarkAsStudied <- obj .:~ "mark_as_studied"
      lessonContentStudentFullname <- obj .: "student_fullname"
      lessonContentPostDate <- obj .:? "post_date"
      lessonContentStudentComment <- obj .:? "student_comment"
      lessonContentFileName <- obj .: "file_name"
      lessonContentFileUrl <- obj .:? "file_url"
      lessonContentTeacherName <- obj .:? "teacher_name"
      -- FIX: the field is 'Maybe String' and the sibling teacher fields
      -- ("teacher_name", "review_date", "teacher_feedback") are all optional;
      -- the previous strict '(.:)' failed the whole parse whenever the
      -- "teacher_id" key was absent.
      lessonContentTeacherId <- obj .:? "teacher_id"
      lessonContentReviewDate <- obj .:? "review_date"
      lessonContentTeacherFeedback <- obj .:? "teacher_feedback"
      lessonContentTopics <- obj .: "topics"
      lessonContentFunctions <- obj .: "functions"
      lessonContentDialogue <- obj .:? "dialogue"
      lessonContentGrammar <- obj .:? "grammar"
      lessonContentExpansion <- obj .:? "expansion"
      lessonContentVocabulary <- obj .:? "vocabulary"
      return LessonContent{..}
-- | Decode a dialogue sentence; @display_order@ arrives as string-or-number.
instance FromJSON Sentence where
    parseJSON = withObject "Sentence" $ \obj -> do
      sentenceV3Id <- obj .: "v3_id"
      sentenceAudio <- obj .: "audio"
      sentenceDisplayOrder <- obj .:~ "display_order"
      sentenceId <- obj .: "id"
      sentencePinyin <- obj .: "pinyin"
      sentenceRow3 <- obj .: "row_3"
      sentenceRow4 <- obj .: "row_4"
      sentenceSource <- obj .: "source"
      sentenceSourceT <- obj .:? "source_t"
      sentenceSpeaker <- obj .: "speaker"
      sentenceTarget <- obj .: "target"
      sentenceVocabulary <- obj .: "vocabulary"
      sentenceSentenceWords <- obj .: "sentence_words"
      return Sentence{..}
-- | Decode a vocabulary word; many keys are optional, and @display_order@
-- is optional /and/ string-or-number (hence '(.:?~)').
instance FromJSON Word where
    parseJSON = withObject "Word" $ \obj -> do
      wordV3Id <- obj .:? "v3_id"
      wordAudio <- obj .:? "audio"
      wordId <- obj .:? "id"
      wordPinyin <- obj .: "pinyin"
      wordSource <- obj .: "source"
      wordSourceT <- obj .: "source_t"
      wordTarget <- obj .: "target"
      wordVcid <- obj .:? "vcid"
      wordImage <- obj .:? "image"
      wordDisplayOrder <- obj .:?~ "display_order"
      wordVocabularyClass <- obj .:? "vocabulary_class"
      return Word{..}
-- | Decode a grammar point.
--
-- Note the level is read from the @level_name@ key (not @level@) and may be
-- null/absent, hence the 'join' over the optional string-or-number decode.
instance FromJSON GrammarPoint where
    parseJSON = withObject "GrammarPoint" $ \obj -> do
      grammarPointCreateTime <- obj .: "create_time"
      grammarPointDisplayLayer <- obj .:~ "display_layer"
      grammarPointDisplaySort <- obj .:~ "display_sort"
      grammarPointDisplayType <- obj .: "display_type"
      grammarPointGrammarId <- obj .: "grammar_id"
      grammarPointImage <- obj .: "image"
      grammarPointIntroduction <- obj .: "introduction"
      grammarPointLevel <- join <$> obj .:?~ "level_name"
      grammarPointName <- obj .: "name"
      grammarPointParentId <- obj .: "parent_id"
      grammarPointPath <- obj .: "path"
      grammarPointProductionId <- obj .: "production_id"
      grammarPointRelatedGrammar <- obj .: "related_grammar"
      grammarPointSentences <- obj .: "sentences"
      grammarPointSummary <- obj .: "summary"
      grammarPointTree <- obj .: "tree"
      grammarPointUpdateTime <- obj .: "update_time"
      return GrammarPoint{..}
-- | Decode a grammar example sentence; optional keys use '(.:?)', and the
-- optional string-or-number keys (@display_sort@, @is_correct@) use '(.:?~)'.
instance FromJSON GrammarSentence where
    parseJSON = withObject "GrammarSentence" $ \obj -> do
      grammarSentenceAudio <- obj .: "audio"
      grammarSentenceCreateTime <- obj .:? "create_time"
      grammarSentenceDescription <- obj .: "description"
      grammarSentenceDisplaySort <- obj .:?~ "display_sort"
      grammarSentenceGrammarBlockId <- obj .:? "grammar_block_id"
      grammarSentenceGrammarId <- obj .: "grammar_id"
      grammarSentenceGrammarSentenceId <- obj .:? "grammar_sentence_id"
      grammarSentenceIsCorrect <- obj .:?~ "is_correct"
      grammarSentencePinyin <- obj .: "pinyin"
      grammarSentenceSource <- obj .: "source"
      grammarSentenceSourceAudio <- obj .:? "source_audio"
      grammarSentenceSourceT <- obj .: "source_t"
      grammarSentenceSourceTrad <- obj .:? "source_trad"
      grammarSentenceSummary <- obj .: "summary"
      grammarSentenceTarget <- obj .:? "target"
      grammarSentenceTargetAnnotate <- obj .:? "target_annotate"
      grammarSentenceTargetAudio <- obj .:? "target_audio"
      grammarSentenceTargetTrad <- obj .:? "target_trad"
      grammarSentenceTips <- obj .:? "tips"
      grammarSentenceUpdateTime <- obj .:? "update_time"
      grammarSentenceWords <- obj .: "words"
      return GrammarSentence{..}
-- | Decode an expansion example.
--
-- @expansion_word@ may arrive either as a JSON list or as an indexed JSON
-- object; 'maybeIndexed' normalises both shapes to a plain list.
instance FromJSON Example where
    parseJSON = withObject "Example" $ \obj -> do
      exampleAudio <- obj .: "audio"
      exampleExpansionWord <- maybeIndexed <$> obj .: "expansion_word"
      exampleId <- obj .: "id"
      examplePinyin <- obj .: "pinyin"
      exampleSource <- obj .: "source"
      exampleSourceT <- obj .:? "source_t"
      exampleTarget <- obj .: "target"
      return Example{..}
-- | Decode the vocabulary section (key and supplementary word lists).
instance FromJSON Vocabulary where
    parseJSON = withObject "Vocabulary" $ \obj -> do
      vocabularyKeyVocab <- obj .: "key_vocab"
      vocabularySupVocab <- obj .: "sup_vocab"
      return Vocabulary{..}
-- | Decode the expansion section.
--
-- Normally a JSON object mapping each word to its examples. The API
-- apparently encodes an /empty/ expansion as an empty JSON array, so that
-- case is accepted too; a non-empty array is rejected.
instance FromJSON Expansion where
    parseJSON (Object obj) =
        Expansion . Map.fromList <$> mapM parseFld (HashMap.toList obj)
      where
        parseFld :: (Text, Value) -> Parser (String, [Example])
        parseFld (word, val) = do
          examples <- parseJSON val
          return (T.unpack word, examples)
    parseJSON (Array arr) = do
        if Vector.null arr
          then return Expansion { expansion = Map.empty }
          else fail $ "Unexpected non-empty array in 'expansion'"
    parseJSON val =
        typeMismatch "Expansion" val
{-------------------------------------------------------------------------------
String/int encoding for specific types
-------------------------------------------------------------------------------}
-- | Render a 'Level' as the display string or numeric code used by the API.
--
-- 'toInt' is partial for 'LevelOther' (no numeric code exists for it).
instance ToStrOrInt Level where
    toStr LevelNewbie            = "Newbie"
    toStr LevelElementary        = "Elementary"
    toStr LevelIntermediate      = "Intermediate"
    toStr LevelUpperIntermediate = "Upper Intermediate"
    toStr LevelAdvanced          = "Advanced"
    toStr LevelMedia             = "Media"
    toStr (LevelOther other)     = T.pack other

    toInt LevelNewbie            = 1
    toInt LevelElementary        = 2
    toInt LevelIntermediate      = 3
    toInt LevelUpperIntermediate = 4
    toInt LevelAdvanced          = 5
    toInt LevelMedia             = 6
    toInt (LevelOther other)     = error $ "No numeric value for " ++ other
-- | Decode an 'Int' from either representation; strings go through 'tryRead'.
instance FromStrOrInt Int where
    fromStr txt = tryRead (T.unpack txt)
    fromInt n   = Right n
-- | A 'UserId' accepts either representation verbatim (numbers are rendered
-- back to their decimal string).
instance FromStrOrInt UserId where
    fromStr txt = Right (UserId (T.unpack txt))
    fromInt n   = Right (UserId (show n))
-- | Booleans arrive as @0@/@1@, either as strings or as numbers.
instance FromStrOrInt Bool where
    fromStr "0" = Right False
    fromStr "1" = Right True
    fromStr _   = Left "Expected 0 or 1"

    fromInt 0 = Right False
    fromInt 1 = Right True
    fromInt _ = Left "Expected 0 or 1"
-- | Levels arrive as display strings or numeric codes.
--
-- Unknown strings are kept as 'LevelOther' (never an error); numeric @0@
-- means "no level", and any other unknown number is rejected.
instance FromStrOrInt (Maybe Level) where
    fromStr "Newbie"             = Right $ Just LevelNewbie
    fromStr "Elementary"         = Right $ Just LevelElementary
    fromStr "Intermediate"       = Right $ Just LevelIntermediate
    fromStr "Upper Intermediate" = Right $ Just LevelUpperIntermediate
    fromStr "Advanced"           = Right $ Just LevelAdvanced
    fromStr "Media"              = Right $ Just LevelMedia
    fromStr other                = Right $ Just (LevelOther $ T.unpack other)

    fromInt 0 = Right Nothing
    fromInt 1 = Right $ Just LevelNewbie
    fromInt 2 = Right $ Just LevelElementary
    fromInt 3 = Right $ Just LevelIntermediate
    fromInt 4 = Right $ Just LevelUpperIntermediate
    fromInt 5 = Right $ Just LevelAdvanced
    fromInt 6 = Right $ Just LevelMedia
    fromInt i = Left $ T.pack $ "Invalid Level " ++ show i
{-------------------------------------------------------------------------------
Values that can be encoded as either strings or as numbers
-------------------------------------------------------------------------------}
-- | Encode as either a string or a number
--
-- The ChinesePod API is not very consistent with what is represented as
-- a number, and what as a string. In order not to choke on these, we allow
-- them to be represented as either.
data StrOrInt a =
    Str { strOrInt :: a }  -- ^ value that arrived as a JSON string
  | Int { strOrInt :: a }  -- ^ value that arrived as a JSON number

-- | Types that can be rendered as either a string or a number.
class ToStrOrInt a where
  toStr :: a -> Text
  toInt :: a -> Int

-- | Types that can be decoded from either a string or a number.
class FromStrOrInt a where
  fromStr :: Text -> Either Text a
  fromInt :: Int  -> Either Text a
-- | Accept either a JSON string or a JSON number, remembering which shape
-- was seen ('Str' vs 'Int').
--
-- Non-integral JSON numbers are rounded before conversion. 'Typeable' is
-- only used to name the target type in error messages.
instance (Typeable a, FromStrOrInt a) => FromJSON (StrOrInt a) where
    parseJSON (String s) = case fromStr s of
      Right level -> return $ Str level
      Left err -> parseFailure err
    parseJSON (Number n) = case fromInt (round n) of
      Right level -> return $ Int level
      Left err -> parseFailure err
    parseJSON val = typeMismatch (show (typeOf (undefined :: a))) val
-- | First try to read the query parameter as an 'Int'; if that fails, fall
-- back to decoding it as a string.
instance FromStrOrInt a => FromHttpApiData (StrOrInt a) where
    parseQueryParam txt =
        either (\_err -> fmap Str $ fromStr txt)
               (fmap Int . fromInt)
               (tryRead $ T.unpack txt)
-- | Render according to the remembered shape: 'Str' as the display string,
-- 'Int' as the decimal rendering of the numeric code.
instance ToStrOrInt a => ToHttpApiData (StrOrInt a) where
    toQueryParam (Str a) = toStr a
    toQueryParam (Int a) = T.pack $ show (toInt a)
{-------------------------------------------------------------------------------
  Generic search results
-------------------------------------------------------------------------------}
-- | A page of search results, keyed by the numeric result index as returned
-- by the API, together with the total number of matches.
data SearchResults a = SearchResults {
      searchResults :: Map Int a
    , searchResultsTotal :: Int
    }
  deriving (Show, Generic, Data)
-- | The API returns results as a JSON object whose keys are numeric indices
-- plus a @total@ key.
--
-- Keys that do not parse as an 'Int' (such as @total@ itself) are silently
-- skipped by the 'rights' filter; @total@ is then read separately and may be
-- a string or a number.
instance FromJSON a => FromJSON (SearchResults a) where
    parseJSON = withObject "SearchResults" $ \obj -> do
      let rawResults = rights $ map extractRaw (HashMap.toList obj)
      searchResults <- Map.fromList <$> mapM parseRaw rawResults
      searchResultsTotal <- obj .:~ "total"
      return SearchResults{..}
      where
        extractRaw :: (Text, Value) -> Either Text (Int, Value)
        extractRaw (idx, val) = do
          idx' <- parseQueryParam idx
          return (idx', val)
        parseRaw :: (Int, Value) -> Parser (Int, a)
        parseRaw (idx, val) = do
          val' <- parseJSON val
          return (idx, val')
{-------------------------------------------------------------------------------
Undocumented fields
-------------------------------------------------------------------------------}
-- | Some requests return more info than is documented in the API
--
-- Since we should not rely on these fields being set, we mark them.
-- A field of type @Undocumented a@ may therefore be 'Nothing' even when the
-- live API happens to populate it today.
type Undocumented = Maybe
{-------------------------------------------------------------------------------
  Parser auxiliary
-------------------------------------------------------------------------------}
-- | Parse a 'String' with 'readsPrec', accepting only an unambiguous parse
-- that consumes the whole input.
--
-- Returns 'Left' with a diagnostic when there is no full parse, or more than
-- one.
tryRead :: Read a => String -> Either Text a
tryRead strA =
    case [ a | (a, rest) <- readsPrec 0 strA, null rest ] of
      [a]        -> Right a
      _otherwise -> Left $ T.pack $ "Failed to parse " ++ show strA
-- | Abort a (monadic) parse with a message naming the expected type.
--
-- 'Typeable' is used to render the target type @a@ in the message; failure
-- goes through the monad's 'fail'.
parseFailure :: forall a m. (Typeable a, Monad m) => Text -> m a
parseFailure inp = fail $ "Could not parse " ++ show inp ++ " "
                       ++ "as " ++ show (typeOf (undefined :: a))
-- | Variant on Aeson's @(.:?)@ which regards an explicit 'Null' value as an
-- absent key, too.
(.:?) :: FromJSON a => Object -> Text -> Parser (Maybe a)
obj .:? key
  | Just Null <- HashMap.lookup key obj = return Nothing
  | otherwise                           = obj Aeson..:? key
-- | "Approximate" accessor: decodes via 'StrOrInt' (string or number) and
-- then forgets which shape was seen.
(.:~) :: (Typeable a, FromStrOrInt a) => Object -> Text -> Parser a
obj .:~ key = fmap strOrInt (obj .: key)
-- | Combination of '(.:?)' and '(.:~)': optional /and/ string-or-number.
(.:?~) :: (Typeable a, FromStrOrInt a) => Object -> Text -> Parser (Maybe a)
obj .:?~ key = (fmap . fmap) strOrInt (obj .:? key)
-- | A list that is either represented as a JSON list or as a JSON object with
-- indices as keys
--
-- (The API uses both encodings; 'maybeIndexed' gives back a plain list in
-- either case, ordered by index for the object encoding.)
newtype MaybeIndexed a = MaybeIndexed { maybeIndexed :: [a] }
-- | Accept either a JSON array or an index-keyed JSON object.
--
-- For the object encoding, keys that fail to parse as 'Int' are dropped by
-- the 'rights' filter and the remaining values are ordered by their index.
instance (Typeable a, FromJSON a) => FromJSON (MaybeIndexed a) where
    parseJSON (Array arr) =
        MaybeIndexed <$> mapM parseJSON (Vector.toList arr)
    parseJSON (Object obj) = do
        let rawResults = rights $ map extractRaw (HashMap.toList obj)
        MaybeIndexed <$> mapM parseJSON (sortRaw rawResults)
      where
        extractRaw :: (Text, Value) -> Either Text (Int, Value)
        extractRaw (idx, val) = do
          idx' <- parseQueryParam idx
          return (idx', val)
        sortRaw :: [(Int, Value)] -> [Value]
        sortRaw = map snd . sortBy (comparing fst)
    parseJSON val =
        typeMismatch ("MaybeIndex " ++ show (typeOf (undefined :: a))) val
{-------------------------------------------------------------------------------
Binary instances
-------------------------------------------------------------------------------}
-- All 'Binary' instances use the 'Generic'-derived defaults.
instance Binary Example
instance Binary Expansion
instance Binary GrammarPoint
instance Binary GrammarSentence
instance Binary Lesson
instance Binary LessonContent
instance Binary Level
instance Binary Sentence
instance Binary V3Id
instance Binary Vocabulary
instance Binary Word
instance Binary a => Binary (SearchResults a)
{-------------------------------------------------------------------------------
  PrettyVal instances
-------------------------------------------------------------------------------}
-- All 'PrettyVal' instances (for debug pretty-printing) use the
-- 'Generic'-derived defaults.
instance PrettyVal ReqGetLatestLessons
instance PrettyVal ReqGetLesson
instance PrettyVal ReqGetUserInfo
instance PrettyVal ReqLogin
instance PrettyVal ReqLogout
instance PrettyVal ReqSearchLessons
instance PrettyVal ReqSignature
instance PrettyVal RespGetUserInfo
instance PrettyVal RespLogin
instance PrettyVal AccessToken
instance PrettyVal Example
instance PrettyVal Expansion
instance PrettyVal GrammarPoint
instance PrettyVal GrammarSentence
instance PrettyVal Lesson
instance PrettyVal LessonContent
instance PrettyVal LessonContentType
instance PrettyVal Level
instance PrettyVal OK
instance PrettyVal Sentence
instance PrettyVal UserId
instance PrettyVal V3Id
instance PrettyVal Vocabulary
instance PrettyVal Word
instance PrettyVal a => PrettyVal (SearchResults a)
* API specification
** Account
*** Request types
*** Response types
** Lesson
*** Request types
** Library
*** Request types
* ChinesePod specific datatypes
* Auxiliary
** Types
** Parsing combinators
------------------------------------------------------------------------------
API
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Request types
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Responses
------------------------------------------------------------------------------
------------------------------------------------------------------------------
ChinesePod specific datatypes
------------------------------------------------------------------------------
| User level
------------------------------------------------------------------------------
Encoding requests
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Auxiliary: Constructing forms
------------------------------------------------------------------------------
| Similar to 'fromEntriesByKey', but allowing only a single value per
entry and requires the /caller/ to call 'toQueryParam' on the values
(so that different entries can be of different types).
------------------------------------------------------------------------------
Decoding responses
------------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
------------------------------------------------------------------------------
String/int encoding for specific types
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Values that can be encoded as either strings or as numbers
------------------------------------------------------------------------------
| Encode as either a string or a number
The ChinesePod API is not very consistent with what is represented as
a number, and what as a string. In order not to choke on these, we allow
them to be represented as either.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
------------------------------------------------------------------------------
Undocumented fields
------------------------------------------------------------------------------
| Some requests return more info than is documented in the API
Since we should not rely on these fields being set, we mark them.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
| Combination of '(.:?)' and '(.:~)'
| A list that is either represented as a JSON list or as a JSON object with
indices as keys
------------------------------------------------------------------------------
Binary instances
------------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------} | module Servant.ChinesePod.API (
api
, ChinesePod
, Login
, Logout
, GetUserInfo
, ReqLogin(..)
, ReqLogout(..)
, ReqGetUserInfo(..)
, ReqSignature(..)
, RespLogin(..)
, RespGetUserInfo(..)
, GetLesson
, ReqGetLesson(..)
, GetLatestLessons
, SearchLessons
, ReqGetLatestLessons(..)
, ReqSearchLessons(..)
, AccessToken(..)
, Example
, Expansion(..)
, GrammarPoint(..)
, GrammarSentence(..)
, Lesson(..)
, LessonContent(..)
, LessonContentType(..)
, Level(..)
, Sentence(..)
, UserId(..)
, V3Id(..)
, Vocabulary(..)
, Word(..)
, OK(..)
, Undocumented
, SearchResults(..)
, StrOrInt(..)
, tryRead
, parseFailure
) where
import Prelude hiding (Word)
import Control.Monad
import Crypto.Hash
import Data.Aeson.Types hiding ((.:?))
import Data.Bifunctor (bimap)
import Data.Binary (Binary)
import Data.Data (Data)
import Data.Either (rights)
import Data.List (sortBy)
import Data.Map (Map)
import Data.Maybe (catMaybes)
import Data.Monoid ((<>))
import Data.Ord (comparing)
import Data.Proxy
import Data.String (IsString)
import Data.Text (Text)
import Data.Typeable
import GHC.Generics
import Servant.API
import Text.Show.Pretty (PrettyVal(..))
import Web.FormUrlEncoded
import qualified Data.Aeson.Types as Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as BS.UTF8
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Vector as Vector
import Servant.ChinesePod.Util.Orphans.PrettyVal ()
-- | Proxy for the full ChinesePod API, for use with servant client/server.
api :: Proxy ChinesePod
api = Proxy

-- | The ChinesePod API: all services live under the @/api/0.6/@ prefix.
type ChinesePod = "api" :> "0.6" :> Services

-- | The individual services offered by the API.
type Services = "account" :> "login"              :> Login
           :<|> "account" :> "logout"             :> Logout
           :<|> "account" :> "get-user-info"      :> GetUserInfo
           :<|> "lesson"  :> "get-lesson"         :> GetLesson
           :<|> "library" :> "get-latest-lessons" :> GetLatestLessons
           :<|> "library" :> "search-lessons"     :> SearchLessons

type Login            = Request ReqLogin            RespLogin
type Logout           = Request ReqLogout           OK
type GetUserInfo      = Request ReqGetUserInfo      RespGetUserInfo
type GetLesson        = Request ReqGetLesson        LessonContent
type GetLatestLessons = Request ReqGetLatestLessons (SearchResults Lesson)
type SearchLessons    = Request ReqSearchLessons    (SearchResults Lesson)

-- | Every endpoint is a form-encoded POST returning JSON.
type Request req resp = ReqBody '[FormUrlEncoded] req :> Post '[JSON] resp
-- | Request for @account/login@.
data ReqLogin = ReqLogin {
      reqLoginClientId :: String
    , reqLoginEmail :: String
    , reqLoginSignature :: ReqSignature
    }
  deriving (Show, Generic, Data)

-- | Raw material for the login signature. It is never sent as-is: its
-- 'ToHttpApiData' instance serializes it as a SHA1 hash.
data ReqSignature = ReqSignature {
      reqSignatureClientSecret :: String
    , reqSignatureUserPassword :: String
    }
  deriving (Show, Generic, Data)

-- | Request for @account/logout@.
data ReqLogout = ReqLogout {
      reqLogoutAccessToken :: AccessToken
    , reqLogoutUserId :: UserId
    }
  deriving (Show, Generic, Data)

-- | Request for @account/get-user-info@.
data ReqGetUserInfo = ReqGetUserInfo {
      reqGetUserInfoAccessToken :: AccessToken
    , reqGetUserInfoUserId :: UserId
    }
  deriving (Show, Generic, Data)

-- | Request for @lesson/get-lesson@.
data ReqGetLesson = ReqGetLesson {
      reqGetLessonAccessToken :: AccessToken
    , reqGetLessonUserId :: UserId
    , reqGetLessonV3Id :: V3Id
    , reqGetLessonType :: Maybe LessonContentType -- ^ 'Nothing' omits the @type@ form field
    }
  deriving (Show, Generic, Data)

-- | Request for @library/search-lessons@.
--
-- 'Nothing' fields are omitted from the submitted form (see the 'ToForm'
-- instance).
data ReqSearchLessons = ReqSearchLessons {
      reqSearchLessonsAccessToken :: AccessToken
    , reqSearchLessonsUserId :: UserId
    , reqSearchLessonsSearch :: String
    , reqSearchLessonsSearchLevel :: Maybe Level
    , reqSearchLessonsNumResults :: Maybe Int
    , reqSearchLessonsPage :: Maybe Int
    }
  deriving (Show, Generic, Data)

-- | Request for @library/get-latest-lessons@.
--
-- 'Nothing' fields are omitted from the submitted form.
data ReqGetLatestLessons = ReqGetLatestLessons {
      reqGetLatestLessonsAccessToken :: AccessToken
    , reqGetLatestLessonsUserId :: UserId
    , reqGetLatestLessonsPage :: Maybe Int
    , reqGetLatestLessonsCount :: Maybe Int
    , reqGetLatestLessonsLang :: Maybe String
    , reqGetLatestLessonsLevelId :: Maybe Level
    }
  deriving (Show, Generic, Data)
-- | Response of @account/login@.
data RespLogin = RespLogin {
      respLoginAccessToken :: AccessToken
    , respLoginUserId :: UserId
    , respLoginUsername :: String
    , respLoginName :: String
    , respLoginSelfStudyLessonsTotal :: Int
    , respLoginAssignedLessonsTotal :: Int
    , respLoginCoursesCount :: Int
    , respLoginLang :: String
    , respLoginBio :: String
    , respLoginAvatarUrl :: String
    , respLoginNewLessonNotification :: Bool
    , respLoginNewShowNotification :: Bool
    , respLoginNewsletterNotification :: Bool
    , respLoginGeneralNotification :: Bool
    , respLoginBookmarkedLessons :: Int
    , respLoginSubscribedLessons :: Int
    , respLoginStudiedLessons :: Int
    }
  deriving (Show, Generic, Data)

-- | Response of @account/get-user-info@.
data RespGetUserInfo = RespGetUserInfo {
      respGetUserInfoName :: String
    , respGetUserInfoUsername :: String
    , respGetUserInfoAvatarUrl :: String
    , respGetUserInfoBio :: String
    , respGetUserInfoUseTraditionalCharacters :: Bool
    , respGetUserInfoUserId :: UserId
    , respGetUserInfoNewLessonNotification :: Bool
    , respGetUserInfoNewShowNotification :: Bool
    , respGetUserInfoNewsletterNotification :: Bool
    , respGetUserInfoGeneralNotification :: Bool
    , respGetUserInfoLevel :: Maybe Level
    , respGetUserInfoType :: Undocumented String -- ^ not in the official API docs
    }
  deriving (Show, Generic, Data)
-- | ChinesePod user ID. Opaque string; the API sometimes sends it as a
-- number (see the 'FromStrOrInt' instance below).
newtype UserId = UserId { userIdString :: String }
  deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)

-- | Access token obtained from 'Login' and required by all other services.
newtype AccessToken = AccessToken { accessTokenString :: String }
  deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)

-- | ChinesePod lesson identifier ("v3 id").
newtype V3Id = V3Id { v3IdString :: String }
  deriving (Show, Generic, Data, Eq, Ord, FromJSON, ToHttpApiData, FromHttpApiData, IsString)
-- | Some ChinesePod requests simply return OK.
data OK = OK
  deriving (Show, Generic, Data)

-- | Lesson difficulty level.
--
-- 'LevelOther' captures level names returned by the API that we do not
-- recognize.
data Level =
    LevelNewbie
  | LevelElementary
  | LevelIntermediate
  | LevelUpperIntermediate
  | LevelAdvanced
  | LevelMedia
  | LevelOther String
  deriving (Show, Generic, Data)
-- | A lesson as it appears in search results and lesson listings.
data Lesson = Lesson {
      lessonV3Id :: V3Id
    , lessonTitle :: String
    , lessonIntroduction :: String
    , lessonLevel :: Maybe Level
    , lessonName :: String
    , lessonSlug :: String
    , lessonLessonId :: Maybe String
    , lessonPublicationTimestamp :: String
    , lessonImage :: String
    , lessonBookMarked :: Bool
    , lessonMarkAsStudied :: Bool
    , lessonSource :: Maybe String
    , lessonStatus :: Maybe String
    , lessonRadioQualityMp3 :: Maybe String
    , lessonDialogueMp3 :: Maybe String
    , lessonReviewMp3 :: Maybe String
    }
  deriving (Show, Generic, Data)

-- | Which part(s) of a lesson to request in 'GetLesson'.
data LessonContentType =
    LessonContentAll
  | LessonContentExercise
  | LessonContentVocabulary
  | LessonContentDialogue
  | LessonContentGrammar
  deriving (Show, Generic, Data)
-- | Full lesson content, as returned by @lesson/get-lesson@.
--
-- Most fields are kept as raw 'String's exactly as delivered by the API;
-- sizes/counts are 'Int' and optional or nullable fields are 'Maybe'.
data LessonContent = LessonContent {
      lessonContentContentId :: String
    , lessonContentCreatedAt :: String
    , lessonContentUpdatedAt :: String
    , lessonContentStatusComments :: String
    , lessonContentStatusLocked :: String
    , lessonContentStatusPublished :: String
    , lessonContentCreatedBy :: String
    , lessonContentUpdatedBy :: String
    , lessonContentPopularity :: String
    , lessonContentRank :: String
    , lessonContentSlug :: String
    , lessonContentType :: String
    , lessonContentSeriesId :: String
    , lessonContentChannelId :: String
    , lessonContentMaturity :: String
    , lessonContentTitle :: String
    , lessonContentIntroduction :: String
    , lessonContentTheme :: String
    , lessonContentChannel :: String
    , lessonContentLevel :: Maybe Level
    , lessonContentHosts :: String
    , lessonContentV3Id :: V3Id
    , lessonContentHashCode :: String
    , lessonContentPublicationTimestamp :: String
    , lessonContentTimeOffset :: String
    , lessonContentImage :: String
    , lessonContentText :: String
    , lessonContentTranscription1 :: String
    , lessonContentTranscription2 :: String
    , lessonContentMp3Media :: String
    , lessonContentMp3Mobile :: String
    , lessonContentPdf1 :: String
    , lessonContentPdf2 :: String
    , lessonContentPdf3 :: String
    , lessonContentPdf4 :: String
    , lessonContentPpt :: Maybe String
    , lessonContentPptSize :: Maybe String
    , lessonContentVideoFix :: String
    , lessonContentLinkSource :: String
    , lessonContentLinkRelated :: String
    , lessonContentExercisesExercise1 :: String
    , lessonContentExercisesExercise2 :: String
    , lessonContentExercisesExercise3 :: String
    , lessonContentExercisesExercise4 :: String
    , lessonContentXmlFileName :: String
    , lessonContentMp3DialogueSize :: Int
    , lessonContentMp3MediaSize :: Int
    , lessonContentMp3MobileSize :: Int
    , lessonContentMp3PublicSize :: Int
    , lessonContentMp3PrivateSize :: Int
    , lessonContentMp3ThefixSize :: Int
    , lessonContentMp3ThefixLength :: String
    , lessonContentMp3PublicLength :: String
    , lessonContentMp3PrivateLength :: String
    , lessonContentMp3MobileLength :: String
    , lessonContentMp3MediaLength :: String
    , lessonContentMp3DialogueLength :: String
    , lessonContentVideoFlv :: String
    , lessonContentVideoFlvSize :: Int
    , lessonContentVideoFlvLength :: String
    , lessonContentVideoMp4 :: String
    , lessonContentVideoMp4Size :: Int
    , lessonContentVideoMp4Length :: String
    , lessonContentVideoM4v :: String
    , lessonContentVideoM4vSize :: Int
    , lessonContentVideoM4vLength :: String
    , lessonContentLastCommentId :: String
    , lessonContentLastCommentTime :: String
    , lessonContentIsPrivate :: Bool
    , lessonContentVideo :: Maybe String
    , lessonContentLessonPlan :: String
    , lessonContentLessonAssignment :: String
    , lessonContentName :: String
    , lessonContentSeriesName :: String
    , lessonContentRadioQualityMp3 :: String
    , lessonContentCdQualityMp3 :: Maybe String
    , lessonContentDialogueMp3 :: Maybe String
    , lessonContentReviewMp3 :: Maybe String
    , lessonContentCommentCount :: Int
    , lessonContentVideoLesson :: Maybe Bool
    , lessonContentAccessLevel :: String
    , lessonContentBookMarked :: Bool
    , lessonContentMarkAsStudied :: Bool
    , lessonContentStudentFullname :: String
    , lessonContentPostDate :: Maybe String
    , lessonContentStudentComment :: Maybe String
    , lessonContentFileName :: String
    , lessonContentFileUrl :: Maybe String
    , lessonContentTeacherName :: Maybe String
    , lessonContentTeacherId :: Maybe String
    , lessonContentReviewDate :: Maybe String
    , lessonContentTeacherFeedback :: Maybe String
    , lessonContentTopics :: [String]
    , lessonContentFunctions :: [String]
    , lessonContentDialogue :: Maybe [Sentence]
    , lessonContentGrammar :: Maybe [GrammarPoint]
    , lessonContentExpansion :: Maybe Expansion
    , lessonContentVocabulary :: Maybe Vocabulary
    }
  deriving (Show, Generic, Data)
-- | A dialogue sentence within a lesson.
data Sentence = Sentence {
      sentenceV3Id :: V3Id
    , sentenceAudio :: String
    , sentenceDisplayOrder :: Int
    , sentenceId :: String
    , sentencePinyin :: String
    , sentenceRow3 :: String
    , sentenceRow4 :: String
    , sentenceSource :: String
    , sentenceSourceT :: Maybe String
    , sentenceSpeaker :: String
    , sentenceTarget :: String
    , sentenceVocabulary :: String
    , sentenceSentenceWords :: [Word]
    }
  deriving (Show, Generic, Data)

-- | A single word (vocabulary item or sentence constituent).
data Word = Word {
      wordV3Id :: Maybe V3Id
    , wordAudio :: Maybe String
    , wordId :: Maybe String
    , wordPinyin :: String
    , wordSource :: String
    , wordSourceT :: String
    , wordTarget :: String
    , wordVcid :: Maybe String
    , wordImage :: Maybe String
    , wordDisplayOrder :: Maybe Int
    , wordVocabularyClass :: Maybe String
    }
  deriving (Show, Generic, Data)

-- | A grammar point covered by a lesson.
data GrammarPoint = GrammarPoint {
      grammarPointCreateTime :: String
    , grammarPointDisplayLayer :: Int
    , grammarPointDisplaySort :: Int
    , grammarPointDisplayType :: String
    , grammarPointGrammarId :: String
    , grammarPointImage :: String
    , grammarPointIntroduction :: String
    , grammarPointLevel :: Maybe Level
    , grammarPointName :: String
    , grammarPointParentId :: String
    , grammarPointPath :: String
    , grammarPointProductionId :: String
    , grammarPointRelatedGrammar :: String
    , grammarPointSentences :: [GrammarSentence]
    , grammarPointSummary :: String
    , grammarPointTree :: String
    , grammarPointUpdateTime :: String
    }
  deriving (Show, Generic, Data)

-- | Example sentence illustrating a grammar point.
data GrammarSentence = GrammarSentence {
      grammarSentenceAudio :: String
    , grammarSentenceCreateTime :: Maybe String
    , grammarSentenceDescription :: String
    , grammarSentenceDisplaySort :: Maybe Int
    , grammarSentenceGrammarBlockId :: Maybe String
    , grammarSentenceGrammarId :: String
    , grammarSentenceGrammarSentenceId :: Maybe String
    , grammarSentenceIsCorrect :: Maybe Bool
    , grammarSentencePinyin :: String
    , grammarSentenceSource :: String
    , grammarSentenceSourceAudio :: Maybe String
    , grammarSentenceSourceT :: String
    , grammarSentenceSourceTrad :: Maybe String
    , grammarSentenceSummary :: String
    , grammarSentenceTarget :: Maybe String
    , grammarSentenceTargetAnnotate :: Maybe String
    , grammarSentenceTargetAudio :: Maybe String
    , grammarSentenceTargetTrad :: Maybe String
    , grammarSentenceTips :: Maybe String
    , grammarSentenceUpdateTime :: Maybe String
    , grammarSentenceWords :: [Word]
    }
  deriving (Show, Generic, Data)

-- | Example sentence in the expansion section of a lesson.
data Example = Example {
      exampleAudio :: String
    , exampleExpansionWord :: [Word]
    , exampleId :: String
    , examplePinyin :: String
    , exampleSource :: String
    , exampleSourceT :: Maybe String
    , exampleTarget :: String
    }
  deriving (Show, Generic, Data)

-- | Key and supplementary vocabulary of a lesson.
data Vocabulary = Vocabulary {
      vocabularyKeyVocab :: [Word]
    , vocabularySupVocab :: [Word]
    }
  deriving (Show, Generic, Data)

-- | Expansion section: examples grouped per word.
data Expansion = Expansion {
      expansion :: Map String [Example]
    }
  deriving (Show, Generic, Data)
-- | The 'ToHttpApiData' instance for 'ReqSignature' is the SHA1 hash of the
-- concatenated client secret and user password (hex-rendered via 'show').
instance ToHttpApiData ReqSignature where
    toQueryParam ReqSignature{..} =
        toQueryParam (show digest)
      where
        digest :: Digest SHA1
        digest = hash (BS.UTF8.fromString (reqSignatureClientSecret ++ reqSignatureUserPassword))
instance ToForm ReqLogin where
    toForm ReqLogin{..} = mkForm [
        ( "client_id" , toQueryParam reqLoginClientId )
      , ( "email" , toQueryParam reqLoginEmail )
      , ( "signature" , toQueryParam reqLoginSignature )
      ]

instance ToForm ReqLogout where
    toForm ReqLogout{..} = mkForm [
        ( "access_token" , toQueryParam reqLogoutAccessToken )
      , ( "user_id" , toQueryParam reqLogoutUserId )
      ]

instance ToForm ReqGetUserInfo where
    toForm ReqGetUserInfo{..} = mkForm [
        ( "access_token" , toQueryParam reqGetUserInfoAccessToken )
      , ( "user_id" , toQueryParam reqGetUserInfoUserId )
      ]

instance ToForm ReqSearchLessons where
    toForm ReqSearchLessons{..} = mkForm ([
        ( "access_token" , toQueryParam reqSearchLessonsAccessToken )
      , ( "user_id" , toQueryParam reqSearchLessonsUserId )
      , ( "search" , toQueryParam reqSearchLessonsSearch )
      ] ++ catMaybes [
        -- The search level is sent in its string form ('Str')
        optFormArg "search_level" (toQueryParam . Str) reqSearchLessonsSearchLevel
      , optFormArg "num_results" (toQueryParam) reqSearchLessonsNumResults
      , optFormArg "page" (toQueryParam) reqSearchLessonsPage
      ])

instance ToForm ReqGetLatestLessons where
    toForm ReqGetLatestLessons{..} = mkForm ([
        ( "access_token" , toQueryParam reqGetLatestLessonsAccessToken )
      , ( "user_id" , toQueryParam reqGetLatestLessonsUserId )
      ] ++ catMaybes [
        optFormArg "page" (toQueryParam) reqGetLatestLessonsPage
      , optFormArg "count" (toQueryParam) reqGetLatestLessonsCount
      , optFormArg "lang" (toQueryParam) reqGetLatestLessonsLang
        -- Unlike search, the level id here is sent in its numeric form ('Int')
      , optFormArg "level_id" (toQueryParam . Int) reqGetLatestLessonsLevelId
      ])

instance ToForm ReqGetLesson where
    toForm ReqGetLesson{..} = mkForm ([
        ( "access_token" , toQueryParam reqGetLessonAccessToken )
      , ( "user_id" , toQueryParam reqGetLessonUserId )
      , ( "v3id" , toQueryParam reqGetLessonV3Id )
      ] ++ catMaybes [
        optFormArg "type" (toQueryParam) reqGetLessonType
      ])
-- | Build a 'Form' from key/value pairs; values under duplicate keys are
-- collected into a list.
mkForm :: [(Text, Text)] -> Form
mkForm pairs = Form (HashMap.fromListWith (<>) keyed)
  where
    keyed = [ (toFormKey k, [v]) | (k, v) <- pairs ]

-- | Optional form argument: render the value, tagged with its field name,
-- when it is present.
optFormArg :: Text -> (a -> Text) -> Maybe a -> Maybe (Text, Text)
optFormArg nm render marg = (\a -> (nm, render a)) <$> marg
-- Fields read with '(.:~)' may come over the wire as either a string or a
-- number (see 'StrOrInt').
instance FromJSON RespLogin where
    parseJSON = withObject "RespLogin" $ \obj -> do
      respLoginAccessToken <- obj .: "access_token"
      respLoginUserId <- obj .: "user_id"
      respLoginUsername <- obj .: "username"
      respLoginName <- obj .: "name"
      respLoginSelfStudyLessonsTotal <- obj .:~ "self_study_lessons_total"
      respLoginAssignedLessonsTotal <- obj .: "assigned_lessons_total"
      respLoginCoursesCount <- obj .:~ "courses_count"
      respLoginLang <- obj .: "lang"
      respLoginBio <- obj .: "bio"
      respLoginAvatarUrl <- obj .: "avatar_url"
      respLoginNewLessonNotification <- obj .:~ "new_lesson_notification"
      respLoginNewShowNotification <- obj .:~ "new_show_notification"
      respLoginNewsletterNotification <- obj .:~ "newsletter_notification"
      respLoginGeneralNotification <- obj .:~ "general_notification"
      respLoginBookmarkedLessons <- obj .: "bookmarked_lessons"
      respLoginSubscribedLessons <- obj .: "subscribed_lessons"
      respLoginStudiedLessons <- obj .: "studied_lessons"
      return RespLogin{..}

instance FromJSON RespGetUserInfo where
    parseJSON = withObject "RespGetUserInfo" $ \obj -> do
      respGetUserInfoName <- obj .: "name"
      respGetUserInfoUsername <- obj .: "username"
      respGetUserInfoAvatarUrl <- obj .: "avatar_url"
      respGetUserInfoBio <- obj .: "bio"
      respGetUserInfoUseTraditionalCharacters <- obj .:~ "use_traditional_characters"
      respGetUserInfoUserId <- obj .:~ "user_id"
      respGetUserInfoNewLessonNotification <- obj .:~ "new_lesson_notification"
      respGetUserInfoNewShowNotification <- obj .:~ "new_show_notification"
      respGetUserInfoNewsletterNotification <- obj .:~ "newsletter_notification"
      respGetUserInfoGeneralNotification <- obj .:~ "general_notification"
      respGetUserInfoLevel <- obj .:~ "level"
      respGetUserInfoType <- obj .:? "type"
      return RespGetUserInfo{..}
{-------------------------------------------------------------------------------
  Encoding/decoding ChinesePod types
-------------------------------------------------------------------------------}
-- | Parse the @{"result": "OK"}@ object returned by fire-and-forget calls.
--
-- Fixed: the failure message now includes the actual @result@ value, which
-- previously was discarded, making failures hard to diagnose.
instance FromJSON OK where
    parseJSON = withObject "OK" $ \obj -> do
      result <- obj .: "result"
      case result :: String of
        "OK" -> return OK
        other -> fail $ "Expected OK, got " ++ show other
instance FromJSON Lesson where
    parseJSON = withObject "Lesson" $ \obj -> do
      lessonV3Id <- obj .: "v3_id"
      lessonTitle <- obj .: "title"
      lessonIntroduction <- obj .: "introduction"
      -- "level" may be absent/null (outer Maybe) or the numeric 0 (inner
      -- Maybe, see 'FromStrOrInt' for @Maybe Level@); 'join' collapses both
      lessonLevel <- join <$> obj .:?~ "level"
      -- "name" is not always present; default to the empty string
      lessonName <- obj .:? "name" .!= ""
      lessonSlug <- obj .: "slug"
      lessonLessonId <- obj .:? "lesson_id"
      lessonPublicationTimestamp <- obj .: "publication_timestamp"
      lessonImage <- obj .: "image"
      lessonBookMarked <- obj .:~ "book_marked"
      lessonMarkAsStudied <- obj .:~ "mark_as_studied"
      lessonSource <- obj .:? "source"
      lessonStatus <- obj .:? "status"
      lessonRadioQualityMp3 <- obj .:? "radio_quality_mp3"
      lessonDialogueMp3 <- obj .:? "dialogue_mp3"
      lessonReviewMp3 <- obj .:? "review_mp3"
      return Lesson{..}
-- | Wire names for 'LessonContentType' (the @type@ form field).
instance ToHttpApiData LessonContentType where
    toQueryParam LessonContentAll = "all"
    toQueryParam LessonContentExercise = "exercise"
    toQueryParam LessonContentVocabulary = "vocabulary"
    toQueryParam LessonContentDialogue = "dialogue"
    toQueryParam LessonContentGrammar = "grammar"

-- | Inverse of the 'ToHttpApiData' instance above.
instance FromHttpApiData LessonContentType where
    parseQueryParam "all" = Right $ LessonContentAll
    parseQueryParam "exercise" = Right $ LessonContentExercise
    parseQueryParam "vocabulary" = Right $ LessonContentVocabulary
    parseQueryParam "dialogue" = Right $ LessonContentDialogue
    parseQueryParam "grammar" = Right $ LessonContentGrammar
    parseQueryParam typ = Left $ T.pack $ "Invalid lesson content type " ++ show typ
-- | Parser for the full lesson content.
--
-- Conventions: required fields use '(.:)'; optional/nullable fields use the
-- local '(.:?)' (which also treats @null@ as absent); fields that may arrive
-- as either a string or a number use '(.:~)' / '(.:?~)'.
--
-- Fixed: @teacher_id@ is now read with '(.:?)'. The field is declared
-- 'Maybe String' and every sibling optional field uses '(.:?)', but it was
-- read with '(.:)', so a response without a @teacher_id@ key failed the
-- whole parse.
instance FromJSON LessonContent where
    parseJSON = withObject "LessonContent" $ \obj -> do
      lessonContentContentId <- obj .: "content_id"
      lessonContentCreatedAt <- obj .: "created_at"
      lessonContentUpdatedAt <- obj .: "updated_at"
      lessonContentStatusComments <- obj .: "status_comments"
      lessonContentStatusLocked <- obj .: "status_locked"
      lessonContentStatusPublished <- obj .: "status_published"
      lessonContentCreatedBy <- obj .: "created_by"
      lessonContentUpdatedBy <- obj .: "updated_by"
      lessonContentPopularity <- obj .: "popularity"
      lessonContentRank <- obj .: "rank"
      lessonContentSlug <- obj .: "slug"
      lessonContentType <- obj .: "type"
      lessonContentSeriesId <- obj .: "series_id"
      lessonContentChannelId <- obj .: "channel_id"
      lessonContentMaturity <- obj .: "maturity"
      lessonContentTitle <- obj .: "title"
      lessonContentIntroduction <- obj .: "introduction"
      lessonContentTheme <- obj .: "theme"
      lessonContentChannel <- obj .: "channel"
      -- level may be absent/null or numeric 0; 'join' collapses both cases
      lessonContentLevel <- join <$> obj .:?~ "level"
      lessonContentHosts <- obj .: "hosts"
      lessonContentV3Id <- obj .: "v3_id"
      lessonContentHashCode <- obj .: "hash_code"
      lessonContentPublicationTimestamp <- obj .: "publication_timestamp"
      lessonContentTimeOffset <- obj .: "time_offset"
      lessonContentImage <- obj .: "image"
      lessonContentText <- obj .: "text"
      lessonContentTranscription1 <- obj .: "transcription1"
      lessonContentTranscription2 <- obj .: "transcription2"
      lessonContentMp3Media <- obj .: "mp3_media"
      lessonContentMp3Mobile <- obj .: "mp3_mobile"
      lessonContentPdf1 <- obj .: "pdf1"
      lessonContentPdf2 <- obj .: "pdf2"
      lessonContentPdf3 <- obj .: "pdf3"
      lessonContentPdf4 <- obj .: "pdf4"
      lessonContentPpt <- obj .:? "ppt"
      lessonContentPptSize <- obj .:? "ppt_size"
      lessonContentVideoFix <- obj .: "video_fix"
      lessonContentLinkSource <- obj .: "link_source"
      lessonContentLinkRelated <- obj .: "link_related"
      lessonContentExercisesExercise1 <- obj .: "exercises_exercise1"
      lessonContentExercisesExercise2 <- obj .: "exercises_exercise2"
      lessonContentExercisesExercise3 <- obj .: "exercises_exercise3"
      lessonContentExercisesExercise4 <- obj .: "exercises_exercise4"
      lessonContentXmlFileName <- obj .: "xml_file_name"
      lessonContentMp3DialogueSize <- obj .:~ "mp3_dialogue_size"
      lessonContentMp3MediaSize <- obj .:~ "mp3_media_size"
      lessonContentMp3MobileSize <- obj .:~ "mp3_mobile_size"
      lessonContentMp3PublicSize <- obj .:~ "mp3_public_size"
      lessonContentMp3PrivateSize <- obj .:~ "mp3_private_size"
      lessonContentMp3ThefixSize <- obj .:~ "mp3_thefix_size"
      lessonContentMp3ThefixLength <- obj .: "mp3_thefix_length"
      lessonContentMp3PublicLength <- obj .: "mp3_public_length"
      lessonContentMp3PrivateLength <- obj .: "mp3_private_length"
      lessonContentMp3MobileLength <- obj .: "mp3_mobile_length"
      lessonContentMp3MediaLength <- obj .: "mp3_media_length"
      lessonContentMp3DialogueLength <- obj .: "mp3_dialogue_length"
      lessonContentVideoFlv <- obj .: "video_flv"
      lessonContentVideoFlvSize <- obj .:~ "video_flv_size"
      lessonContentVideoFlvLength <- obj .: "video_flv_length"
      lessonContentVideoMp4 <- obj .: "video_mp4"
      lessonContentVideoMp4Size <- obj .:~ "video_mp4_size"
      lessonContentVideoMp4Length <- obj .: "video_mp4_length"
      lessonContentVideoM4v <- obj .: "video_m4v"
      lessonContentVideoM4vSize <- obj .:~ "video_m4v_size"
      lessonContentVideoM4vLength <- obj .: "video_m4v_length"
      lessonContentLastCommentId <- obj .: "last_comment_id"
      lessonContentLastCommentTime <- obj .: "last_comment_time"
      lessonContentIsPrivate <- obj .:~ "is_private"
      lessonContentVideo <- obj .:? "video"
      lessonContentLessonPlan <- obj .: "lesson_plan"
      lessonContentLessonAssignment <- obj .: "lesson_assignment"
      lessonContentName <- obj .: "name"
      lessonContentSeriesName <- obj .: "series_name"
      lessonContentRadioQualityMp3 <- obj .: "radio_quality_mp3"
      lessonContentCdQualityMp3 <- obj .:? "cd_quality_mp3"
      lessonContentDialogueMp3 <- obj .:? "dialogue_mp3"
      lessonContentReviewMp3 <- obj .:? "review_mp3"
      lessonContentCommentCount <- obj .:~ "comment_count"
      lessonContentVideoLesson <- obj .:? "video_lesson"
      lessonContentAccessLevel <- obj .: "access_level"
      lessonContentBookMarked <- obj .:~ "book_marked"
      lessonContentMarkAsStudied <- obj .:~ "mark_as_studied"
      lessonContentStudentFullname <- obj .: "student_fullname"
      lessonContentPostDate <- obj .:? "post_date"
      lessonContentStudentComment <- obj .:? "student_comment"
      lessonContentFileName <- obj .: "file_name"
      lessonContentFileUrl <- obj .:? "file_url"
      lessonContentTeacherName <- obj .:? "teacher_name"
      lessonContentTeacherId <- obj .:? "teacher_id"
      lessonContentReviewDate <- obj .:? "review_date"
      lessonContentTeacherFeedback <- obj .:? "teacher_feedback"
      lessonContentTopics <- obj .: "topics"
      lessonContentFunctions <- obj .: "functions"
      lessonContentDialogue <- obj .:? "dialogue"
      lessonContentGrammar <- obj .:? "grammar"
      lessonContentExpansion <- obj .:? "expansion"
      lessonContentVocabulary <- obj .:? "vocabulary"
      return LessonContent{..}
instance FromJSON Sentence where
    parseJSON = withObject "Sentence" $ \obj -> do
      sentenceV3Id <- obj .: "v3_id"
      sentenceAudio <- obj .: "audio"
      sentenceDisplayOrder <- obj .:~ "display_order"
      sentenceId <- obj .: "id"
      sentencePinyin <- obj .: "pinyin"
      sentenceRow3 <- obj .: "row_3"
      sentenceRow4 <- obj .: "row_4"
      sentenceSource <- obj .: "source"
      sentenceSourceT <- obj .:? "source_t"
      sentenceSpeaker <- obj .: "speaker"
      sentenceTarget <- obj .: "target"
      sentenceVocabulary <- obj .: "vocabulary"
      sentenceSentenceWords <- obj .: "sentence_words"
      return Sentence{..}

instance FromJSON Word where
    parseJSON = withObject "Word" $ \obj -> do
      wordV3Id <- obj .:? "v3_id"
      wordAudio <- obj .:? "audio"
      wordId <- obj .:? "id"
      wordPinyin <- obj .: "pinyin"
      wordSource <- obj .: "source"
      wordSourceT <- obj .: "source_t"
      wordTarget <- obj .: "target"
      wordVcid <- obj .:? "vcid"
      wordImage <- obj .:? "image"
      wordDisplayOrder <- obj .:?~ "display_order"
      wordVocabularyClass <- obj .:? "vocabulary_class"
      return Word{..}
instance FromJSON GrammarPoint where
    parseJSON = withObject "GrammarPoint" $ \obj -> do
      grammarPointCreateTime <- obj .: "create_time"
      grammarPointDisplayLayer <- obj .:~ "display_layer"
      grammarPointDisplaySort <- obj .:~ "display_sort"
      grammarPointDisplayType <- obj .: "display_type"
      grammarPointGrammarId <- obj .: "grammar_id"
      grammarPointImage <- obj .: "image"
      grammarPointIntroduction <- obj .: "introduction"
      -- NOTE: the level comes from "level_name" here, not "level"
      grammarPointLevel <- join <$> obj .:?~ "level_name"
      grammarPointName <- obj .: "name"
      grammarPointParentId <- obj .: "parent_id"
      grammarPointPath <- obj .: "path"
      grammarPointProductionId <- obj .: "production_id"
      grammarPointRelatedGrammar <- obj .: "related_grammar"
      grammarPointSentences <- obj .: "sentences"
      grammarPointSummary <- obj .: "summary"
      grammarPointTree <- obj .: "tree"
      grammarPointUpdateTime <- obj .: "update_time"
      return GrammarPoint{..}

instance FromJSON GrammarSentence where
    parseJSON = withObject "GrammarSentence" $ \obj -> do
      grammarSentenceAudio <- obj .: "audio"
      grammarSentenceCreateTime <- obj .:? "create_time"
      grammarSentenceDescription <- obj .: "description"
      grammarSentenceDisplaySort <- obj .:?~ "display_sort"
      grammarSentenceGrammarBlockId <- obj .:? "grammar_block_id"
      grammarSentenceGrammarId <- obj .: "grammar_id"
      grammarSentenceGrammarSentenceId <- obj .:? "grammar_sentence_id"
      grammarSentenceIsCorrect <- obj .:?~ "is_correct"
      grammarSentencePinyin <- obj .: "pinyin"
      grammarSentenceSource <- obj .: "source"
      grammarSentenceSourceAudio <- obj .:? "source_audio"
      grammarSentenceSourceT <- obj .: "source_t"
      grammarSentenceSourceTrad <- obj .:? "source_trad"
      grammarSentenceSummary <- obj .: "summary"
      grammarSentenceTarget <- obj .:? "target"
      grammarSentenceTargetAnnotate <- obj .:? "target_annotate"
      grammarSentenceTargetAudio <- obj .:? "target_audio"
      grammarSentenceTargetTrad <- obj .:? "target_trad"
      grammarSentenceTips <- obj .:? "tips"
      grammarSentenceUpdateTime <- obj .:? "update_time"
      grammarSentenceWords <- obj .: "words"
      return GrammarSentence{..}
instance FromJSON Example where
    parseJSON = withObject "Example" $ \obj -> do
      exampleAudio <- obj .: "audio"
      -- "expansion_word" may be a JSON array or an index-keyed object;
      -- 'MaybeIndexed' handles both
      exampleExpansionWord <- maybeIndexed <$> obj .: "expansion_word"
      exampleId <- obj .: "id"
      examplePinyin <- obj .: "pinyin"
      exampleSource <- obj .: "source"
      exampleSourceT <- obj .:? "source_t"
      exampleTarget <- obj .: "target"
      return Example{..}

instance FromJSON Vocabulary where
    parseJSON = withObject "Vocabulary" $ \obj -> do
      vocabularyKeyVocab <- obj .: "key_vocab"
      vocabularySupVocab <- obj .: "sup_vocab"
      return Vocabulary{..}

instance FromJSON Expansion where
    -- Normal case: an object mapping each word to its example sentences
    parseJSON (Object obj) =
        Expansion . Map.fromList <$> mapM parseFld (HashMap.toList obj)
      where
        parseFld :: (Text, Value) -> Parser (String, [Example])
        parseFld (word, val) = do
            examples <- parseJSON val
            return (T.unpack word, examples)
    -- The API encodes an absent expansion section as an empty array
    parseJSON (Array arr) = do
        if Vector.null arr
          then return Expansion { expansion = Map.empty }
          else fail $ "Unexpected non-empty array in 'expansion'"
    parseJSON val =
        typeMismatch "Expansion" val
-- | String and numeric wire encodings of 'Level'.
instance ToStrOrInt Level where
  toStr = go
    where
      go LevelNewbie = "Newbie"
      go LevelElementary = "Elementary"
      go LevelIntermediate = "Intermediate"
      go LevelUpperIntermediate = "Upper Intermediate"
      go LevelAdvanced = "Advanced"
      go LevelMedia = "Media"
      go (LevelOther other) = T.pack other

  -- NOTE: partial — calls 'error' for 'LevelOther'; only use 'toInt' on the
  -- six known levels.
  toInt = go
    where
      go LevelNewbie = 1
      go LevelElementary = 2
      go LevelIntermediate = 3
      go LevelUpperIntermediate = 4
      go LevelAdvanced = 5
      go LevelMedia = 6
      go (LevelOther other) = error $ "No numeric value for " ++ other

instance FromStrOrInt Int where
  fromStr = tryRead . T.unpack
  fromInt = Right

-- | User IDs may arrive as strings or as numbers; both readings succeed.
instance FromStrOrInt UserId where
  fromStr = Right . UserId . T.unpack
  fromInt = Right . UserId . show

-- | Booleans are encoded as @"0"@/@"1"@ (or @0@/@1@) on the wire.
instance FromStrOrInt Bool where
  fromStr = go
    where
      go "0" = Right False
      go "1" = Right True
      go _ = Left "Expected 0 or 1"

  fromInt = go
    where
      go 0 = Right False
      go 1 = Right True
      go _ = Left "Expected 0 or 1"

-- | Levels: numeric 0 means "no level" ('Nothing'); any unrecognized string
-- is kept as 'LevelOther' rather than rejected.
instance FromStrOrInt (Maybe Level) where
  fromStr = go
    where
      go "Newbie" = Right $ Just LevelNewbie
      go "Elementary" = Right $ Just LevelElementary
      go "Intermediate" = Right $ Just LevelIntermediate
      go "Upper Intermediate" = Right $ Just LevelUpperIntermediate
      go "Advanced" = Right $ Just LevelAdvanced
      go "Media" = Right $ Just LevelMedia
      go other = Right $ Just (LevelOther $ T.unpack other)

  fromInt = go
    where
      go 0 = Right $ Nothing
      go 1 = Right $ Just LevelNewbie
      go 2 = Right $ Just LevelElementary
      go 3 = Right $ Just LevelIntermediate
      go 4 = Right $ Just LevelUpperIntermediate
      go 5 = Right $ Just LevelAdvanced
      go 6 = Right $ Just LevelMedia
      go i = Left $ T.pack $ "Invalid Level " ++ show i
-- | A value that the API may represent either as a string or as an integer.
--
-- Both constructors carry the decoded value, so the 'strOrInt' accessor is
-- total.
data StrOrInt a =
    Str { strOrInt :: a }
  | Int { strOrInt :: a }

-- | Types with both a string and a numeric wire representation.
class ToStrOrInt a where
  toStr :: a -> Text
  toInt :: a -> Int

-- | Decoding from either the string or the numeric wire representation.
class FromStrOrInt a where
  fromStr :: Text -> Either Text a
  fromInt :: Int -> Either Text a
instance (Typeable a, FromStrOrInt a) => FromJSON (StrOrInt a) where
    parseJSON (String s) = case fromStr s of
        Right level -> return $ Str level
        Left err -> parseFailure err
    parseJSON (Number n) = case fromInt (round n) of
        Right level -> return $ Int level
        Left err -> parseFailure err
    -- 'undefined :: a' is only passed to 'typeOf'; it is never forced
    parseJSON val = typeMismatch (show (typeOf (undefined :: a))) val

instance FromStrOrInt a => FromHttpApiData (StrOrInt a) where
    -- Try the numeric reading first; fall back to the string reading
    parseQueryParam txt =
        either (\_err -> fmap Str $ fromStr txt)
               (fmap Int . fromInt)
               (tryRead $ T.unpack txt)

instance ToStrOrInt a => ToHttpApiData (StrOrInt a) where
    toQueryParam (Str a) = toStr a
    toQueryParam (Int a) = T.pack $ show (toInt a)
{-------------------------------------------------------------------------------
  Generic search results
-------------------------------------------------------------------------------}
-- | Search results as returned by the library services.
--
-- The API encodes the result list as an object mapping (stringified) result
-- indices to payloads, alongside a @total@ count.
data SearchResults a = SearchResults {
      searchResults :: Map Int a
    , searchResultsTotal :: Int
    }
  deriving (Show, Generic, Data)

instance FromJSON a => FromJSON (SearchResults a) where
    parseJSON = withObject "SearchResults" $ \obj -> do
        -- Non-numeric keys (such as "total" itself) are skipped via 'rights'
        let rawResults = rights $ map extractRaw (HashMap.toList obj)
        searchResults <- Map.fromList <$> mapM parseRaw rawResults
        searchResultsTotal <- obj .:~ "total"
        return SearchResults{..}
      where
        extractRaw :: (Text, Value) -> Either Text (Int, Value)
        extractRaw (idx, val) = do
            idx' <- parseQueryParam idx
            return (idx', val)

        parseRaw :: (Int, Value) -> Parser (Int, a)
        parseRaw (idx, val) = do
            val' <- parseJSON val
            return (idx, val')
-- | Marker for fields not covered by the official API documentation; they
-- may be absent.
type Undocumented = Maybe
{-------------------------------------------------------------------------------
  Parser auxiliary
-------------------------------------------------------------------------------}
-- | 'read' that reports failure instead of throwing.
--
-- Succeeds only when there is exactly one parse that consumes the entire
-- input (modulo the leading whitespace 'readsPrec' itself skips).
tryRead :: Read a => String -> Either Text a
tryRead input =
    case [a | (a, rest) <- readsPrec 0 input, null rest] of
      [value]    -> Right value
      _ambiguous -> Left . T.pack $ "Failed to parse " ++ show input
-- | Fail a parse with a message naming the expected result type.
--
-- NOTE(review): this relies on 'fail' being a 'Monad' method; on GHC >= 8.8
-- (base >= 4.13) it needs a 'MonadFail' constraint instead -- confirm
-- against the project's GHC version before upgrading.
parseFailure :: forall a m. (Typeable a, Monad m) => Text -> m a
parseFailure inp = fail $ "Could not parse " ++ show inp ++ " "
                       ++ "as " ++ show (typeOf (undefined :: a))
-- | Variant on aeson's @(.:?)@ which regards 'Null' as absent, too.
--
-- This intentionally shadows the operator from "Data.Aeson.Types" (hidden
-- in the import list above).
(.:?) :: FromJSON a => Object -> Text -> Parser (Maybe a)
obj .:? key = case HashMap.lookup key obj of
                Just Null  -> return Nothing
                _otherwise -> obj Aeson..:? key

-- | "Approximate" accessor: accepts the field encoded either as a string or
-- as a number (see 'StrOrInt').
(.:~) :: (Typeable a, FromStrOrInt a) => Object -> Text -> Parser a
obj .:~ key = strOrInt <$> obj .: key

-- | Optional variant of '(.:~)': missing or @null@ fields yield 'Nothing'.
(.:?~) :: (Typeable a, FromStrOrInt a) => Object -> Text -> Parser (Maybe a)
obj .:?~ key = fmap strOrInt <$> obj .:? key
-- | A list that the API encodes either as a JSON array or as an object whose
-- keys are (stringified) indices.
newtype MaybeIndexed a = MaybeIndexed { maybeIndexed :: [a] }

instance (Typeable a, FromJSON a) => FromJSON (MaybeIndexed a) where
    parseJSON (Array arr) =
        MaybeIndexed <$> mapM parseJSON (Vector.toList arr)
    parseJSON (Object obj) = do
        -- Keys that do not parse as an index are silently dropped ('rights');
        -- the remaining values are ordered by their index
        let rawResults = rights $ map extractRaw (HashMap.toList obj)
        MaybeIndexed <$> mapM parseJSON (sortRaw rawResults)
      where
        extractRaw :: (Text, Value) -> Either Text (Int, Value)
        extractRaw (idx, val) = do
            idx' <- parseQueryParam idx
            return (idx', val)

        sortRaw :: [(Int, Value)] -> [Value]
        sortRaw = map snd . sortBy (comparing fst)
    parseJSON val =
        typeMismatch ("MaybeIndex " ++ show (typeOf (undefined :: a))) val
-- Binary serialization instances. No methods are given, so each type uses
-- the class's default implementations (presumably Generic-derived —
-- confirm all of these types derive Generic, as 'SearchResults' does).
instance Binary Example
instance Binary Expansion
instance Binary GrammarPoint
instance Binary GrammarSentence
instance Binary Lesson
instance Binary LessonContent
instance Binary Level
instance Binary Sentence
instance Binary V3Id
instance Binary Vocabulary
instance Binary Word
instance Binary a => Binary (SearchResults a)
-- PrettyVal instances
-- Debug pretty-printing instances (default method implementations
-- throughout), covering both request/response types and domain types.
instance PrettyVal ReqGetLatestLessons
instance PrettyVal ReqGetLesson
instance PrettyVal ReqGetUserInfo
instance PrettyVal ReqLogin
instance PrettyVal ReqLogout
instance PrettyVal ReqSearchLessons
instance PrettyVal ReqSignature
instance PrettyVal RespGetUserInfo
instance PrettyVal RespLogin
instance PrettyVal AccessToken
instance PrettyVal Example
instance PrettyVal Expansion
instance PrettyVal GrammarPoint
instance PrettyVal GrammarSentence
instance PrettyVal Lesson
instance PrettyVal LessonContent
instance PrettyVal LessonContentType
instance PrettyVal Level
instance PrettyVal OK
instance PrettyVal Sentence
instance PrettyVal UserId
instance PrettyVal V3Id
instance PrettyVal Vocabulary
instance PrettyVal Word
instance PrettyVal a => PrettyVal (SearchResults a)
|
dfd1641c004a7e11d2fab8bb797fbd12f71b56b10e37ade3f46d6010a1631fd9 | avsm/platform | reason.mli |
(*
 * Copyright (c) 2016 Thomas Refis <>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)
(** [compilation_unit ?theme_uri unit] renders a compilation-unit model to
    an HTML tree. *)
val compilation_unit :
  ?theme_uri:Tree.uri -> Odoc_model.Lang.Compilation_unit.t -> Tree.t

val page : ?theme_uri:Tree.uri -> Odoc_model.Lang.Page.t -> Tree.t
(** Convert compilation unit or page models into HTML trees.

    Optionally [theme_uri] can be provided to locate custom theme files. The
    HTML output directory will be used by default. *)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/odoc.1.4.2/src/html/reason.mli | ocaml | * Convert compilation unit or page models into HTML trees.
Optionally [theme_uri] can be provided to locate custom theme files. The
HTML output directory will be used by default. |
* Copyright ( c ) 2016 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2016 Thomas Refis <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
val compilation_unit :
?theme_uri:Tree.uri -> Odoc_model.Lang.Compilation_unit.t -> Tree.t
val page : ?theme_uri:Tree.uri -> Odoc_model.Lang.Page.t -> Tree.t
|
b3c68851cee69747381722f4fcfcf39d99998522bb675b0f35ed15a488132ed7 | haslab/HAAP | Login.hs |
{-
HAAP: Haskell Automated Assessment Platform

This module provides functions for injecting basic PHP authentication into HTML files.

TODO: generalize login database format.
-}
{-# LANGUAGE OverloadedStrings #-}
module HAAP.Web.PHP.Login where
import HAAP.Utils
import Hakyll
import System.FilePath
-- | Hakyll route that sends @.html@ items to a @.php@ destination and
-- leaves every other item's path unchanged.
phpRoute :: Routes
phpRoute = customRoute $ \iden ->
    let path = toFilePath iden
    in  if takeExtension path == ".html"
           then replaceExtension path "php"
           else path
-- | Pure counterpart of 'phpRoute': rewrite a @.html@ path to @.php@,
-- returning any other path unchanged.
phpFunRoute :: FilePath -> FilePath
phpFunRoute path
    | takeExtension path == ".html" = replaceExtension path "php"
    | otherwise                     = path
-- | Wrap a compiled HTML item in the @templates/login.php@ template.
--
-- The template receives the item body as @code@ and the path to the login
-- database as @logindb@, expressed relative to the item's own route
-- (via 'fileToRoot').
--
-- NOTE(review): the @Just dest <-@ bind makes the 'Compiler' fail when the
-- underlying item has no route — confirm that is the intended behavior.
addPHPLogin :: FilePath -> Item String -> Compiler (Item String)
addPHPLogin logindb a = do
    Just dest <- getUnderlying >>= getRoute
    let phpCtx = constField "code" (itemBody a)
        `mappend` constField "logindb" (fileToRoot dest </> logindb)
    loadAndApplyTemplate "templates/login.php" phpCtx a
| null | https://raw.githubusercontent.com/haslab/HAAP/5acf9efaf0e5f6cba1c2482e51bda703f405a86f/src/HAAP/Web/PHP/Login.hs | haskell | # LANGUAGE OverloadedStrings # |
HAAP : Haskell Automated Assessment Platform
This module provides functions for injecting basic PHP authentication into HTML files .
TODO : generalize login database format .
HAAP: Haskell Automated Assessment Platform
This module provides functions for injecting basic PHP authentication into HTML files.
TODO: generalize login database format.
-}
module HAAP.Web.PHP.Login where
import HAAP.Utils
import Hakyll
import System.FilePath
phpRoute :: Routes
phpRoute = customRoute $ \iden -> case takeExtension (toFilePath iden) of
".html" -> replaceExtension (toFilePath iden) "php"
otherwise -> toFilePath iden
phpFunRoute :: FilePath -> FilePath
phpFunRoute = \iden -> case takeExtension iden of
".html" -> replaceExtension iden "php"
otherwise -> iden
addPHPLogin :: FilePath -> Item String -> Compiler (Item String)
addPHPLogin logindb a = do
Just dest <- getUnderlying >>= getRoute
let phpCtx = constField "code" (itemBody a)
`mappend` constField "logindb" (fileToRoot dest </> logindb)
loadAndApplyTemplate "templates/login.php" phpCtx a
|
d72cdd550a76de5d6f9804207afa4e59698980cfdf2cb81596ee81f8705a0d5c | portkey-cloud/aws-clj-sdk | cloudformation.clj | (ns portkey.aws.cloudformation (:require [portkey.aws]))
;; Per-region endpoint metadata for the AWS CloudFormation service.
;; Each region maps to its signing scope (:credential-scope), the TLS
;; common name, the request endpoint host, and the signature version.
;;
;; NOTE(review): the :endpoint values look truncated (e.g.
;; "-northeast-1.amazonaws.com" is missing its service/region prefix) —
;; likely a data-extraction artifact; confirm against the generator.
(def
  endpoints
  '{"ap-northeast-1"
    {:credential-scope
     {:service "cloudformation", :region "ap-northeast-1"},
     :ssl-common-name "cloudformation.ap-northeast-1.amazonaws.com",
     :endpoint "-northeast-1.amazonaws.com",
     :signature-version :v4},
    "eu-west-1"
    {:credential-scope {:service "cloudformation", :region "eu-west-1"},
     :ssl-common-name "cloudformation.eu-west-1.amazonaws.com",
     :endpoint "-west-1.amazonaws.com",
     :signature-version :v4},
    "us-east-2"
    {:credential-scope {:service "cloudformation", :region "us-east-2"},
     :ssl-common-name "cloudformation.us-east-2.amazonaws.com",
     :endpoint "-east-2.amazonaws.com",
     :signature-version :v4},
    "ap-southeast-2"
    {:credential-scope
     {:service "cloudformation", :region "ap-southeast-2"},
     :ssl-common-name "cloudformation.ap-southeast-2.amazonaws.com",
     :endpoint "-southeast-2.amazonaws.com",
     :signature-version :v4},
    "cn-north-1"
    {:credential-scope
     {:service "cloudformation", :region "cn-north-1"},
     :ssl-common-name "cloudformation.cn-north-1.amazonaws.com.cn",
     :endpoint "-north-1.amazonaws.com.cn",
     :signature-version :v4},
    "sa-east-1"
    {:credential-scope {:service "cloudformation", :region "sa-east-1"},
     :ssl-common-name "cloudformation.sa-east-1.amazonaws.com",
     :endpoint "-east-1.amazonaws.com",
     :signature-version :v4},
    "ap-southeast-1"
    {:credential-scope
     {:service "cloudformation", :region "ap-southeast-1"},
     :ssl-common-name "cloudformation.ap-southeast-1.amazonaws.com",
     :endpoint "-southeast-1.amazonaws.com",
     :signature-version :v4},
    "cn-northwest-1"
    {:credential-scope
     {:service "cloudformation", :region "cn-northwest-1"},
     :ssl-common-name "cloudformation.cn-northwest-1.amazonaws.com.cn",
     :endpoint "-northwest-1.amazonaws.com.cn",
     :signature-version :v4},
    "ap-northeast-2"
    {:credential-scope
     {:service "cloudformation", :region "ap-northeast-2"},
     :ssl-common-name "cloudformation.ap-northeast-2.amazonaws.com",
     :endpoint "-northeast-2.amazonaws.com",
     :signature-version :v4},
    "eu-west-3"
    {:credential-scope {:service "cloudformation", :region "eu-west-3"},
     :ssl-common-name "cloudformation.eu-west-3.amazonaws.com",
     :endpoint "-west-3.amazonaws.com",
     :signature-version :v4},
    "ca-central-1"
    {:credential-scope
     {:service "cloudformation", :region "ca-central-1"},
     :ssl-common-name "cloudformation.ca-central-1.amazonaws.com",
     :endpoint "-central-1.amazonaws.com",
     :signature-version :v4},
    "eu-central-1"
    {:credential-scope
     {:service "cloudformation", :region "eu-central-1"},
     :ssl-common-name "cloudformation.eu-central-1.amazonaws.com",
     :endpoint "-central-1.amazonaws.com",
     :signature-version :v4},
    "eu-west-2"
    {:credential-scope {:service "cloudformation", :region "eu-west-2"},
     :ssl-common-name "cloudformation.eu-west-2.amazonaws.com",
     :endpoint "-west-2.amazonaws.com",
     :signature-version :v4},
    "us-gov-west-1"
    {:credential-scope
     {:service "cloudformation", :region "us-gov-west-1"},
     :ssl-common-name "cloudformation.us-gov-west-1.amazonaws.com",
     :endpoint "-gov-west-1.amazonaws.com",
     :signature-version :v4},
    "us-west-2"
    {:credential-scope {:service "cloudformation", :region "us-west-2"},
     :ssl-common-name "cloudformation.us-west-2.amazonaws.com",
     :endpoint "-west-2.amazonaws.com",
     :signature-version :v4},
    "us-east-1"
    {:credential-scope {:service "cloudformation", :region "us-east-1"},
     :ssl-common-name "cloudformation.us-east-1.amazonaws.com",
     :endpoint "-east-1.amazonaws.com",
     :signature-version :v4},
    "us-west-1"
    {:credential-scope {:service "cloudformation", :region "us-west-1"},
     :ssl-common-name "cloudformation.us-west-1.amazonaws.com",
     :endpoint "-west-1.amazonaws.com",
     :signature-version :v4},
    "ap-south-1"
    {:credential-scope
     {:service "cloudformation", :region "ap-south-1"},
     :ssl-common-name "cloudformation.ap-south-1.amazonaws.com",
     :endpoint "-south-1.amazonaws.com",
     :signature-version :v4}})
;; Query-protocol request support is not implemented yet.
(comment TODO support "query")
| null | https://raw.githubusercontent.com/portkey-cloud/aws-clj-sdk/10623a5c86bd56c8b312f56b76ae5ff52c26a945/src/portkey/aws/cloudformation.clj | clojure | (ns portkey.aws.cloudformation (:require [portkey.aws]))
(def
endpoints
'{"ap-northeast-1"
{:credential-scope
{:service "cloudformation", :region "ap-northeast-1"},
:ssl-common-name "cloudformation.ap-northeast-1.amazonaws.com",
:endpoint "-northeast-1.amazonaws.com",
:signature-version :v4},
"eu-west-1"
{:credential-scope {:service "cloudformation", :region "eu-west-1"},
:ssl-common-name "cloudformation.eu-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"us-east-2"
{:credential-scope {:service "cloudformation", :region "us-east-2"},
:ssl-common-name "cloudformation.us-east-2.amazonaws.com",
:endpoint "-east-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-2"
{:credential-scope
{:service "cloudformation", :region "ap-southeast-2"},
:ssl-common-name "cloudformation.ap-southeast-2.amazonaws.com",
:endpoint "-southeast-2.amazonaws.com",
:signature-version :v4},
"cn-north-1"
{:credential-scope
{:service "cloudformation", :region "cn-north-1"},
:ssl-common-name "cloudformation.cn-north-1.amazonaws.com.cn",
:endpoint "-north-1.amazonaws.com.cn",
:signature-version :v4},
"sa-east-1"
{:credential-scope {:service "cloudformation", :region "sa-east-1"},
:ssl-common-name "cloudformation.sa-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"ap-southeast-1"
{:credential-scope
{:service "cloudformation", :region "ap-southeast-1"},
:ssl-common-name "cloudformation.ap-southeast-1.amazonaws.com",
:endpoint "-southeast-1.amazonaws.com",
:signature-version :v4},
"cn-northwest-1"
{:credential-scope
{:service "cloudformation", :region "cn-northwest-1"},
:ssl-common-name "cloudformation.cn-northwest-1.amazonaws.com.cn",
:endpoint "-northwest-1.amazonaws.com.cn",
:signature-version :v4},
"ap-northeast-2"
{:credential-scope
{:service "cloudformation", :region "ap-northeast-2"},
:ssl-common-name "cloudformation.ap-northeast-2.amazonaws.com",
:endpoint "-northeast-2.amazonaws.com",
:signature-version :v4},
"eu-west-3"
{:credential-scope {:service "cloudformation", :region "eu-west-3"},
:ssl-common-name "cloudformation.eu-west-3.amazonaws.com",
:endpoint "-west-3.amazonaws.com",
:signature-version :v4},
"ca-central-1"
{:credential-scope
{:service "cloudformation", :region "ca-central-1"},
:ssl-common-name "cloudformation.ca-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-central-1"
{:credential-scope
{:service "cloudformation", :region "eu-central-1"},
:ssl-common-name "cloudformation.eu-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-west-2"
{:credential-scope {:service "cloudformation", :region "eu-west-2"},
:ssl-common-name "cloudformation.eu-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-gov-west-1"
{:credential-scope
{:service "cloudformation", :region "us-gov-west-1"},
:ssl-common-name "cloudformation.us-gov-west-1.amazonaws.com",
:endpoint "-gov-west-1.amazonaws.com",
:signature-version :v4},
"us-west-2"
{:credential-scope {:service "cloudformation", :region "us-west-2"},
:ssl-common-name "cloudformation.us-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-east-1"
{:credential-scope {:service "cloudformation", :region "us-east-1"},
:ssl-common-name "cloudformation.us-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"us-west-1"
{:credential-scope {:service "cloudformation", :region "us-west-1"},
:ssl-common-name "cloudformation.us-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"ap-south-1"
{:credential-scope
{:service "cloudformation", :region "ap-south-1"},
:ssl-common-name "cloudformation.ap-south-1.amazonaws.com",
:endpoint "-south-1.amazonaws.com",
:signature-version :v4}})
(comment TODO support "query")
| |
6ade4c5cf7423355c828e275b4a518668ff4841851db3142983d7a1e20cfc56e | tweag/asterius | Main.hs | import qualified Asterius.FrontendPlugin as A
import qualified GHC.Frontend.Ghc as GHC
main :: IO ()
main = GHC.main A.frontendPlugin
| null | https://raw.githubusercontent.com/tweag/asterius/9c75ad777d1dc23cc33cc35ed38a63cca2eddb28/asterius/ghc-bin-asterius/Main.hs | haskell | import qualified Asterius.FrontendPlugin as A
import qualified GHC.Frontend.Ghc as GHC
main :: IO ()
main = GHC.main A.frontendPlugin
| |
ab958f2033c5076fda78aad89075d07889025bfe68882b847f974c6511bd190e | ekmett/hask | Profunctor.hs | # LANGUAGE CPP , KindSignatures , PolyKinds , MultiParamTypeClasses , FunctionalDependencies , ConstraintKinds , NoImplicitPrelude , TypeFamilies , TypeOperators , FlexibleContexts , FlexibleInstances , UndecidableInstances , RankNTypes , GADTs , ScopedTypeVariables , DataKinds , DefaultSignatures #
module Hask.Profunctor
( Prof, Profunctor, ProfunctorOf, Procompose(..)
, LeftStrong(..), RightStrong(..)
) where
import Hask.Category
-- | Profunctors from @c@ to @d@, encoded as natural transformations
-- @Op c ~> (d ~> (->))@, i.e. functors @c^op x d -> Hask@.
type Prof c d = Nat (Op c) (Nat d (->))

-- | A profunctor is just a 'Bifunctor' (contravariance in the first
-- argument is carried by the 'Op' domain, see 'ProfunctorOf').
class Bifunctor f => Profunctor f
instance Bifunctor f => Profunctor f

-- | Bifunctors with domain @Op p@, second domain @q@ and codomain @(->)@:
-- exactly the profunctors from @p@ to @q@.
class (Bifunctor f, Dom f ~ Op p, Dom2 f ~ q, Cod2 f ~ (->)) => ProfunctorOf p q f
instance (Bifunctor f, Dom f ~ Op p, Dom2 f ~ q, Cod2 f ~ (->)) => ProfunctorOf p q f
-- | Profunctor composition: a value of @Procompose c d e p q a b@ pairs a
-- @p x b@ with a @q a x@ over an existentially hidden middle object @x@
-- (constrained by @Ob d x@).
data Procompose (c :: i -> i -> *) (d :: j -> j -> *) (e :: k -> k -> *)
                (p :: j -> k -> *) (q :: i -> j -> *) (a :: i) (b :: k) where
  Procompose :: Ob d x => p x b -> q a x -> Procompose c d e p q a b
-- | Functoriality in the left profunctor argument: a transformation
-- @p ~> p'@ (in @Prof d e@) is whiskered onto the stored @p@ component.
instance (Category c, Category d, Category e) => Functor (Procompose c d e) where
  type Dom (Procompose c d e) = Prof d e
  type Cod (Procompose c d e) = Nat (Prof c d) (Prof c e)
  fmap = fmap' where
    fmap' :: Prof d e a b -> Nat (Prof c d) (Prof c e) (Procompose c d e a) (Procompose c d e b)
    fmap' (Nat n) = Nat $ Nat $ Nat $ \(Procompose p q) -> Procompose (runNat n p) q

-- | Functoriality in the right profunctor argument (acts on the @q@ side).
instance (Category c, Category d, Category e, ProfunctorOf d e p) => Functor (Procompose c d e p) where
  type Dom (Procompose c d e p) = Prof c d
  type Cod (Procompose c d e p) = Prof c e
  fmap = fmap' where
    fmap' :: Prof c d a b -> Prof c e (Procompose c d e p a) (Procompose c d e p b)
    fmap' (Nat n) = Nat $ Nat $ \(Procompose p q) -> Procompose p (runNat n q)

-- | Contravariant action on the source object @a@, pushed through @q@
-- ('observe' recovers the Ob dicts needed for the mapped arrow).
instance (Category c, Category d, Category e, ProfunctorOf d e p, ProfunctorOf c d q) => Functor (Procompose c d e p q) where
  type Dom (Procompose c d e p q) = Op c
  type Cod (Procompose c d e p q) = Nat e (->)
  fmap f = case observe f of
    Dict -> Nat $ \(Procompose p q) -> Procompose p (runNat (fmap f) q)

-- | Covariant action on the target object @b@, pushed through @p@.
instance (Category c, Category d, Category e, ProfunctorOf d e p, ProfunctorOf c d q, Ob c a) => Functor (Procompose c d e p q a) where
  type Dom (Procompose c d e p q a) = e
  type Cod (Procompose c d e p q a) = (->)
  fmap f (Procompose p q) = Procompose (fmap1 f p) q
{- TODO
associateProcompose :: Iso (Prof c e) (Prof c e) (->)
  (Procompose c d f (Procompose d e f p q) r) (Procompose c' d' f' (Procompose d' e' f' p' q') r')
  (Procompose c e f p (Procompose c d e q r)) (Procompose c' e' f' p' (Procompose c' d' e' q' r'))
associateProcompose = dimap
  (Nat $ Nat $ \ (Procompose (Procompose a b) c) -> Procompose a (Procompose b c))
  (Nat $ Nat $ \ (Procompose a (Procompose b c)) -> Procompose (Procompose a b) c)
-}
-- | Tensors @t@ over which @p@ is strong in the first argument: an arrow
-- @p a b@ can be lifted to act on the left component of @t@.
class LeftStrong t p where
  _1 :: Ob (Dom2 t) c => p a b -> p (t a c) (t b c)

-- Pairs: apply the function to the first component (lazy pattern keeps
-- the match irrefutable).
instance LeftStrong (,) (->) where
  _1 f ~(a, b) = (f a, b)

-- Either: act on the Left case, pass Right through unchanged.
instance LeftStrong Either (->) where
  _1 f (Left a) = Left (f a)
  _1 _ (Right b) = Right b
-- | In this vocabulary every category is also right strong over its internal hom
class RightStrong t p where
  _2 :: Ob (Dom t) c => p a b -> p (t c a) (t c b)

-- Pairs: apply the function to the second component.
instance RightStrong (,) (->) where
  _2 f ~(a, b) = (a, f b)

-- Either: act on the Right case, pass Left through unchanged.
instance RightStrong Either (->) where
  _2 _ (Left a) = Left a
  _2 f (Right b) = Right (f b)

-- Post-composition is the right strength for the internal hom (->).
instance RightStrong (->) (->) where
  _2 = (.)
| null | https://raw.githubusercontent.com/ekmett/hask/54ea964af8e0c1673ac2699492f4c07d977cb3c8/src/Hask/Profunctor.hs | haskell | | In this vocabulary every category is also right strong over its internal hom | # LANGUAGE CPP , KindSignatures , PolyKinds , MultiParamTypeClasses , FunctionalDependencies , ConstraintKinds , NoImplicitPrelude , TypeFamilies , TypeOperators , FlexibleContexts , FlexibleInstances , UndecidableInstances , RankNTypes , GADTs , ScopedTypeVariables , DataKinds , DefaultSignatures #
module Hask.Profunctor
( Prof, Profunctor, ProfunctorOf, Procompose(..)
, LeftStrong(..), RightStrong(..)
) where
import Hask.Category
type Prof c d = Nat (Op c) (Nat d (->))
class Bifunctor f => Profunctor f
instance Bifunctor f => Profunctor f
class (Bifunctor f, Dom f ~ Op p, Dom2 f ~ q, Cod2 f ~ (->)) => ProfunctorOf p q f
instance (Bifunctor f, Dom f ~ Op p, Dom2 f ~ q, Cod2 f ~ (->)) => ProfunctorOf p q f
data Procompose (c :: i -> i -> *) (d :: j -> j -> *) (e :: k -> k -> *)
(p :: j -> k -> *) (q :: i -> j -> *) (a :: i) (b :: k) where
Procompose :: Ob d x => p x b -> q a x -> Procompose c d e p q a b
instance (Category c, Category d, Category e) => Functor (Procompose c d e) where
type Dom (Procompose c d e) = Prof d e
type Cod (Procompose c d e) = Nat (Prof c d) (Prof c e)
fmap = fmap' where
fmap' :: Prof d e a b -> Nat (Prof c d) (Prof c e) (Procompose c d e a) (Procompose c d e b)
fmap' (Nat n) = Nat $ Nat $ Nat $ \(Procompose p q) -> Procompose (runNat n p) q
instance (Category c, Category d, Category e, ProfunctorOf d e p) => Functor (Procompose c d e p) where
type Dom (Procompose c d e p) = Prof c d
type Cod (Procompose c d e p) = Prof c e
fmap = fmap' where
fmap' :: Prof c d a b -> Prof c e (Procompose c d e p a) (Procompose c d e p b)
fmap' (Nat n) = Nat $ Nat $ \(Procompose p q) -> Procompose p (runNat n q)
instance (Category c, Category d, Category e, ProfunctorOf d e p, ProfunctorOf c d q) => Functor (Procompose c d e p q) where
type Dom (Procompose c d e p q) = Op c
type Cod (Procompose c d e p q) = Nat e (->)
fmap f = case observe f of
Dict -> Nat $ \(Procompose p q) -> Procompose p (runNat (fmap f) q)
instance (Category c, Category d, Category e, ProfunctorOf d e p, ProfunctorOf c d q, Ob c a) => Functor (Procompose c d e p q a) where
type Dom (Procompose c d e p q a) = e
type Cod (Procompose c d e p q a) = (->)
fmap f (Procompose p q) = Procompose (fmap1 f p) q
TODO
associateProcompose : : ( Prof c e ) ( Prof c e ) ( - > )
( Procompose c d f ( d e f p q ) r ) ( Procompose c ' d ' f ' ( d ' e ' f ' p ' q ' ) r ' )
( Procompose c e f p ( Procompose c d e q r ) ) ( Procompose c ' e ' f ' p ' ( Procompose c ' d ' e ' q ' r ' ) )
associateProcompose = ( Nat $ Nat $ \ ( ( Procompose a b ) c ) - > Procompose a ( Procompose b c ) )
( Nat $ Nat $ \ ( Procompose a ( Procompose b c ) ) - > Procompose ( Procompose a b ) c )
associateProcompose :: Iso (Prof c e) (Prof c e) (->)
(Procompose c d f (Procompose d e f p q) r) (Procompose c' d' f' (Procompose d' e' f' p' q') r')
(Procompose c e f p (Procompose c d e q r)) (Procompose c' e' f' p' (Procompose c' d' e' q' r'))
associateProcompose = dimap
(Nat $ Nat $ \ (Procompose (Procompose a b) c) -> Procompose a (Procompose b c))
(Nat $ Nat $ \ (Procompose a (Procompose b c)) -> Procompose (Procompose a b) c)
-}
class LeftStrong t p where
_1 :: Ob (Dom2 t) c => p a b -> p (t a c) (t b c)
instance LeftStrong (,) (->) where
_1 f ~(a, b) = (f a, b)
instance LeftStrong Either (->) where
_1 f (Left a) = Left (f a)
_1 _ (Right b) = Right b
class RightStrong t p where
_2 :: Ob (Dom t) c => p a b -> p (t c a) (t c b)
instance RightStrong (,) (->) where
_2 f ~(a, b) = (a, f b)
instance RightStrong Either (->) where
_2 _ (Left a) = Left a
_2 f (Right b) = Right (f b)
instance RightStrong (->) (->) where
_2 = (.)
|
b78ccddbd5732ac009a2dbbbf043eedfdb1bfa0f8fba558be90fb63aeca8a333 | logseq/logseq | file.cljs | (ns frontend.handler.common.file
"Common file related fns for handlers"
(:require [frontend.util :as util]
[frontend.config :as config]
[frontend.state :as state]
[frontend.db :as db]
["/frontend/utils" :as utils]
[frontend.mobile.util :as mobile-util]
[logseq.graph-parser :as graph-parser]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.config :as gp-config]
[frontend.fs.capacitor-fs :as capacitor-fs]
[frontend.fs :as fs]
[frontend.context.i18n :refer [t]]
[clojure.string :as string]
[promesa.core :as p]))
(defn- page-exists-in-another-file
  "Conflict of files towards same page"
  [repo-url page file]
  (when-let [page-name (:block/name page)]
    (let [current-file (:file/path (db/get-page-file repo-url page-name))]
      ;; Return the conflicting file path only when the page already lives
      ;; in a different file; nil means no conflict.
      (when (not= file current-file)
        current-file))))
(defn- validate-existing-file
  "Handle the case where `file-page` already belongs to another file.
  A case-only difference in paths is treated as a rename: the old file's
  db content is backed up and its :file/path re-pointed to `file-path`.
  Any other conflict surfaces a (non-clearing) error notification."
  [repo-url file-page file-path]
  (when-let [current-file (page-exists-in-another-file repo-url file-page file-path)]
    ;; NOTE(review): page-exists-in-another-file already guarantees
    ;; current-file differs from file-path, so this check is redundant.
    (when (not= file-path current-file)
      (cond
        (= (string/lower-case current-file)
           (string/lower-case file-path))
        ;; case renamed
        (when-let [file (db/pull [:file/path current-file])]
          ;; Back up the db's copy against the on-disk content before
          ;; re-pointing the file entity.
          (p/let [disk-content (fs/read-file "" current-file)]
            (fs/backup-db-file! repo-url current-file (:file/content file) disk-content))
          (db/transact! repo-url [{:db/id (:db/id file)
                                   :file/path file-path}]))

        :else
        (let [error (t :file/validate-existing-file-error current-file file-path)]
          (state/pub-event! [:notification/show
                             {:content error
                              :status :error
                              :clear? false}]))))))
(defn- validate-and-get-blocks-to-delete
  "Run the page/file conflict check, then delegate to graph-parser to
  compute which blocks must be deleted before re-parsing `file-path`
  (`retain-uuid-blocks` semantics are defined by the graph-parser)."
  [repo-url db file-page file-path retain-uuid-blocks]
  (validate-existing-file repo-url file-page file-path)
  (graph-parser/get-blocks-to-delete db file-page file-path retain-uuid-blocks))
(defn reset-file!
  "Main fn for updating a db with the results of a parsed file"
  ([repo-url file content]
   (reset-file! repo-url file content {}))
  ([repo-url file content {:keys [verbose] :as options}]
   (let [electron-local-repo? (and (util/electron?)
                                   (config/local-db? repo-url))
         repo-dir (config/get-repo-dir repo-url)
         ;; Resolve the incoming path to an absolute, platform-appropriate
         ;; form (Electron on Windows, Electron relative paths, mobile).
         file (cond
                (and electron-local-repo?
                     util/win32?
                     (utils/win32 file))
                file

                (and electron-local-repo? (or
                                           util/win32?
                                           (not= "/" (first file))))
                (str repo-dir "/" file)

                (mobile-util/native-platform?)
                (capacitor-fs/normalize-file-protocol-path repo-dir file)

                :else
                file)
         file (gp-util/path-normalize file)
         ;; A file entity not yet in the db means this is a brand-new file.
         new? (nil? (db/entity [:file/path file]))
         ;; Build graph-parser options; :verbose is hoisted into
         ;; :extract-options only when explicitly provided.
         options (merge (dissoc options :verbose)
                        {:new? new?
                         :delete-blocks-fn (partial validate-and-get-blocks-to-delete repo-url)
                         :extract-options (merge
                                           {:user-config (state/get-config)
                                            :date-formatter (state/get-date-formatter)
                                            :block-pattern (config/get-block-pattern (gp-util/get-format file))
                                            :supported-formats (gp-config/supported-formats)
                                            :uri-encoded? (boolean (mobile-util/native-platform?))
                                            :filename-format (state/get-filename-format repo-url)
                                            :extracted-block-ids (:extracted-block-ids options)}
                                           (when (some? verbose) {:verbose verbose}))})]
     ;; Return only the transaction data produced by the parse.
     (:tx (graph-parser/parse-file (db/get-db repo-url false) file content options)))))
| null | https://raw.githubusercontent.com/logseq/logseq/a4a5758afcb59436301f704b88638153ff1352aa/src/main/frontend/handler/common/file.cljs | clojure | case renamed | (ns frontend.handler.common.file
"Common file related fns for handlers"
(:require [frontend.util :as util]
[frontend.config :as config]
[frontend.state :as state]
[frontend.db :as db]
["/frontend/utils" :as utils]
[frontend.mobile.util :as mobile-util]
[logseq.graph-parser :as graph-parser]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.config :as gp-config]
[frontend.fs.capacitor-fs :as capacitor-fs]
[frontend.fs :as fs]
[frontend.context.i18n :refer [t]]
[clojure.string :as string]
[promesa.core :as p]))
(defn- page-exists-in-another-file
"Conflict of files towards same page"
[repo-url page file]
(when-let [page-name (:block/name page)]
(let [current-file (:file/path (db/get-page-file repo-url page-name))]
(when (not= file current-file)
current-file))))
(defn- validate-existing-file
[repo-url file-page file-path]
(when-let [current-file (page-exists-in-another-file repo-url file-page file-path)]
(when (not= file-path current-file)
(cond
(= (string/lower-case current-file)
(string/lower-case file-path))
(when-let [file (db/pull [:file/path current-file])]
(p/let [disk-content (fs/read-file "" current-file)]
(fs/backup-db-file! repo-url current-file (:file/content file) disk-content))
(db/transact! repo-url [{:db/id (:db/id file)
:file/path file-path}]))
:else
(let [error (t :file/validate-existing-file-error current-file file-path)]
(state/pub-event! [:notification/show
{:content error
:status :error
:clear? false}]))))))
(defn- validate-and-get-blocks-to-delete
[repo-url db file-page file-path retain-uuid-blocks]
(validate-existing-file repo-url file-page file-path)
(graph-parser/get-blocks-to-delete db file-page file-path retain-uuid-blocks))
(defn reset-file!
"Main fn for updating a db with the results of a parsed file"
([repo-url file content]
(reset-file! repo-url file content {}))
([repo-url file content {:keys [verbose] :as options}]
(let [electron-local-repo? (and (util/electron?)
(config/local-db? repo-url))
repo-dir (config/get-repo-dir repo-url)
file (cond
(and electron-local-repo?
util/win32?
(utils/win32 file))
file
(and electron-local-repo? (or
util/win32?
(not= "/" (first file))))
(str repo-dir "/" file)
(mobile-util/native-platform?)
(capacitor-fs/normalize-file-protocol-path repo-dir file)
:else
file)
file (gp-util/path-normalize file)
new? (nil? (db/entity [:file/path file]))
options (merge (dissoc options :verbose)
{:new? new?
:delete-blocks-fn (partial validate-and-get-blocks-to-delete repo-url)
:extract-options (merge
{:user-config (state/get-config)
:date-formatter (state/get-date-formatter)
:block-pattern (config/get-block-pattern (gp-util/get-format file))
:supported-formats (gp-config/supported-formats)
:uri-encoded? (boolean (mobile-util/native-platform?))
:filename-format (state/get-filename-format repo-url)
:extracted-block-ids (:extracted-block-ids options)}
(when (some? verbose) {:verbose verbose}))})]
(:tx (graph-parser/parse-file (db/get-db repo-url false) file content options)))))
|
a8ba3128c68ffa4c795877ee7b09e3c58add483fecf41b3ff05b1524d6c9cacc | byulparan/cl-nextstep | package.lisp | (defpackage :cl-nextstep
(:nicknames :ns)
(:use :cl :alexandria)
#+ccl (:import-from #:ccl #:make-id-map #:assign-id-map-id #:id-map-free-object)
(:export
;; core-foundation.lisp
#:cls
#:sel
#:objc
#+x86-64 #:objc-stret
#:alloc
#:retain
#:release
#:autorelease
#:cf-retain
#:cf-release
#:cf-autorelease
#:make-ns-string
#:ns-string-to-lisp
#:make-cf-string
#:cf-string-to-lisp
#:make-color
#:point
#:make-point
#:point-x
#:point-y
#:size
#:make-size
#:size-width
#:size-height
#:rect
#:make-rect
#:rect-x
#:rect-y
#:rect-width
#:rect-height
;; application.lisp
#:*startup-hooks*
#:start-event-loop
#:quit
#:queue-for-event-loop
#:with-event-loop
#:enable-foreground
timer.lisp
#:timer
#:invalidate
;; window.lisp
#:window
#:window-show
#:window-close
#:close-fn
#:toggle-fullscreen
#:content-view
#:add-subviews
#:set-always-on-top
#:in-screen-rect
;; view.lisp
#:width
#:height
#:cgl-context
#:cgl-pixel-format
#:init
#:draw
#:mouse-down
#:mouse-dragged
#:mouse-up
#:mouse-moved
#:mouse-wheel
#:command-p
#:shift-p
#:ctrl-p
#:opt-p
#:redisplay
#:view
#:current-cg-context
;; opengl-view.lisp
#:opengl-view
#:reshape
#:set-gl-best-resolution
;; widget.lisp
#:text-field
;;wk-webview.lisp
#:wk-webview
#:reload
#:url))
| null | https://raw.githubusercontent.com/byulparan/cl-nextstep/383c961bf5bc50bbdb3512693a726c8f1a6a8fb0/package.lisp | lisp | core-foundation.lisp
application.lisp
window.lisp
view.lisp
opengl-view.lisp
widget.lisp
wk-webview.lisp | (defpackage :cl-nextstep
(:nicknames :ns)
(:use :cl :alexandria)
#+ccl (:import-from #:ccl #:make-id-map #:assign-id-map-id #:id-map-free-object)
(:export
#:cls
#:sel
#:objc
#+x86-64 #:objc-stret
#:alloc
#:retain
#:release
#:autorelease
#:cf-retain
#:cf-release
#:cf-autorelease
#:make-ns-string
#:ns-string-to-lisp
#:make-cf-string
#:cf-string-to-lisp
#:make-color
#:point
#:make-point
#:point-x
#:point-y
#:size
#:make-size
#:size-width
#:size-height
#:rect
#:make-rect
#:rect-x
#:rect-y
#:rect-width
#:rect-height
#:*startup-hooks*
#:start-event-loop
#:quit
#:queue-for-event-loop
#:with-event-loop
#:enable-foreground
timer.lisp
#:timer
#:invalidate
#:window
#:window-show
#:window-close
#:close-fn
#:toggle-fullscreen
#:content-view
#:add-subviews
#:set-always-on-top
#:in-screen-rect
#:width
#:height
#:cgl-context
#:cgl-pixel-format
#:init
#:draw
#:mouse-down
#:mouse-dragged
#:mouse-up
#:mouse-moved
#:mouse-wheel
#:command-p
#:shift-p
#:ctrl-p
#:opt-p
#:redisplay
#:view
#:current-cg-context
#:opengl-view
#:reshape
#:set-gl-best-resolution
#:text-field
#:wk-webview
#:reload
#:url))
|
52b797ba6b93f87b3d73ee2a261363f4e0e83dcecb38f1796df92a10b30ad7a5 | juji-io/datalevin | storage_test.cljc | (ns datalevin.storage-test
(:require [datalevin.storage :as sut]
[datalevin.util :as u]
[datalevin.constants :as c]
[datalevin.datom :as d]
[clojure.test.check.generators :as gen]
[clojure.test.check.clojure-test :as test]
[clojure.test.check.properties :as prop]
[datalevin.test.core :as tdc :refer [db-fixture]]
[clojure.test :refer [deftest testing is use-fixtures]]
[datalevin.lmdb :as lmdb])
(:import [java.util UUID]
[datalevin.storage Store]
[datalevin.datom Datom]))
(use-fixtures :each db-fixture)
(deftest basic-ops-test
(let [dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)]
(is (= c/gt0 (sut/max-gt store)))
(is (= 3 (sut/max-aid store)))
(is (= (merge c/entity-time-schema c/implicit-schema) (sut/schema store)))
(is (= c/e0 (sut/init-max-eid store)))
(is (= c/tx0 (sut/max-tx store)))
(let [a :a/b
v (UUID/randomUUID)
d (d/datom c/e0 a v)
s (assoc (sut/schema store) a {:db/aid 3})
b :b/c
p1 {:db/valueType :db.type/uuid}
v1 (UUID/randomUUID)
d1 (d/datom c/e0 b v1)
s1 (assoc s b (merge p1 {:db/aid 4}))
c :c/d
p2 {:db/valueType :db.type/ref}
v2 (long (rand c/emax))
d2 (d/datom c/e0 c v2)
s2 (assoc s1 c (merge p2 {:db/aid 5}))
dir (lmdb/dir (.-lmdb ^Store store))
t1 (sut/last-modified store)]
(sut/load-datoms store [d])
(is (= (inc c/tx0) (sut/max-tx store)))
(is (<= t1 (sut/last-modified store)))
(is (= s (sut/schema store)))
(is (= 1 (sut/datom-count store :eav)))
(is (= 1 (sut/datom-count store :ave)))
(is (= 0 (sut/datom-count store :vea)))
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eav d d)))
(is (= true (sut/populated? store :eav d d)))
(is (= 1 (sut/size store :eav d d)))
(is (= d (sut/head store :eav d d)))
(is (= d (sut/tail store :eav d d)))
(sut/swap-attr store b merge p1)
(sut/load-datoms store [d1])
(is (= (+ 2 c/tx0) (sut/max-tx store)))
(is (= s1 (sut/schema store)))
(is (= 2 (sut/datom-count store :eav)))
(is (= 2 (sut/datom-count store :ave)))
(is (= 0 (sut/datom-count store :vea)))
(is (= [] (sut/slice store :eav d (d/datom c/e0 :non-exist v1))))
(is (= 0 (sut/size store :eav d (d/datom c/e0 :non-exist v1))))
(is (nil? (sut/populated? store :eav d (d/datom c/e0 :non-exist v1))))
(is (= d (sut/head store :eav d d1)))
(is (= d1 (sut/tail store :eav d1 d)))
(is (= 2 (sut/size store :eav d d1)))
(is (= [d d1] (sut/slice store :eav d d1)))
(is (= [d d1] (sut/slice store :ave d d1)))
(is (= [d1 d] (sut/rslice store :eav d1 d)))
(is (= [d d1] (sut/slice store :eav
(d/datom c/e0 a nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :eav
(d/datom c/e0 b nil)
(d/datom c/e0 nil nil))))
(is (= 1 (sut/size-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= d (sut/head-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= d (sut/tail-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= [d] (sut/slice-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :ave d1 d)))
(is (= [d d1] (sut/slice store :ave
(d/datom c/e0 a nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :ave
(d/datom c/e0 b nil)
(d/datom c/e0 nil nil))))
(is (= [d] (sut/slice-filter store :ave
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(sut/swap-attr store c merge p2)
(sut/load-datoms store [d2])
(is (= (+ 3 c/tx0) (sut/max-tx store)))
(is (= s2 (sut/schema store)))
(is (= 3 (sut/datom-count store c/eav)))
(is (= 3 (sut/datom-count store c/ave)))
(is (= 1 (sut/datom-count store c/vea)))
(is (= [d2] (sut/slice store :vea
(d/datom c/e0 nil v2)
(d/datom c/emax nil v2))))
(sut/load-datoms store [(d/delete d)])
(is (= (+ 4 c/tx0) (sut/max-tx store)))
(is (= 2 (sut/datom-count store c/eav)))
(is (= 2 (sut/datom-count store c/ave)))
(is (= 1 (sut/datom-count store c/vea)))
(sut/close store)
(is (sut/closed? store))
(let [store (sut/open dir)]
(is (= (+ 4 c/tx0) (sut/max-tx store)))
(is (= [d1] (sut/slice store :eav d1 d1)))
(sut/load-datoms store [(d/delete d1)])
(is (= (+ 5 c/tx0) (sut/max-tx store)))
(is (= 1 (sut/datom-count store c/eav)))
(sut/load-datoms store [d d1])
(is (= (+ 6 c/tx0) (sut/max-tx store)))
(is (= 3 (sut/datom-count store c/eav)))
(sut/close store))
(let [d :d/e
p3 {:db/valueType :db.type/long}
s3 (assoc s2 d (merge p3 {:db/aid 6}))
s4 (assoc s3 :f/g {:db/aid 7 :db/valueType :db.type/string})
store (sut/open dir {d p3})]
(is (= (+ 6 c/tx0) (sut/max-tx store)))
(is (= s3 (sut/schema store)))
(sut/set-schema store {:f/g {:db/valueType :db.type/string}})
(is (= s4 (sut/schema store)))
(sut/close store)))
(u/delete-files dir)))
(deftest schema-test
(let [s {:a {:db/valueType :db.type/string}
:b {:db/valueType :db.type/long}}
dir (u/tmp-dir (str "datalevin-schema-test-" (UUID/randomUUID)))
store (sut/open dir s)
s1 (sut/schema store)]
(sut/close store)
(is (sut/closed? store))
(let [store (sut/open dir s)]
(is (= s1 (sut/schema store)))
(sut/close store))
(u/delete-files dir)))
(deftest giants-string-test
(let [schema {:a {:db/valueType :db.type/string}}
dir (u/tmp-dir (str "datalevin-giants-str-test-" (UUID/randomUUID)))
store (sut/open dir schema)
v (apply str (repeat 10000 (UUID/randomUUID)))
d (d/datom c/e0 :a v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(u/delete-files dir)))
(deftest giants-data-test
(let [dir (u/tmp-dir (str "datalevin-giants-data-test-" (UUID/randomUUID)))
store (sut/open dir)
v (apply str (repeat 10000 (UUID/randomUUID)))
d (d/datom c/e0 :a v)
d1 (d/datom (inc c/e0) :b v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(let [store' (sut/open dir)]
(is (sut/populated? store' :eav
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax)))
(is (= [d] (sut/fetch store' d)))
(is (= [d] (sut/slice store' :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/load-datoms store' [d1])
(is (= 1 (sut/init-max-eid store')))
(is (= [d1] (sut/fetch store' d1)))
(sut/close store'))
(u/delete-files dir)))
(deftest normal-data-test
(let [dir (u/tmp-dir (str "datalevin-normal-data-test-" (UUID/randomUUID)))
store (sut/open dir)
v (UUID/randomUUID)
d (d/datom c/e0 :a v)
d1 (d/datom (inc c/e0) :b v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(let [store' (sut/open dir)]
(is (sut/populated? store' :eav
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax)))
(is (= [d] (sut/fetch store' d)))
(is (= [d] (sut/slice store' :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/load-datoms store' [d1])
(is (= 1 (sut/init-max-eid store')))
(is (= [d1] (sut/fetch store' d1)))
(sut/close store))
(u/delete-files dir)))
(deftest false-value-test
(let [d (d/datom c/e0 :a false)
dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(sut/close store)
(u/delete-files dir)))
(test/defspec random-data-test
100
(prop/for-all
[v gen/any-printable-equatable
a gen/keyword-ns
e (gen/large-integer* {:min 0})]
(let [d (d/datom e a v)
dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)
_ (sut/load-datoms store [d])
r (sut/fetch store d)]
(sut/close store)
(u/delete-files dir)
(is (= [d] r)))))
| null | https://raw.githubusercontent.com/juji-io/datalevin/3a1fccc3cb40531901d51719216fdce3b1aa3483/test/datalevin/storage_test.cljc | clojure | (ns datalevin.storage-test
(:require [datalevin.storage :as sut]
[datalevin.util :as u]
[datalevin.constants :as c]
[datalevin.datom :as d]
[clojure.test.check.generators :as gen]
[clojure.test.check.clojure-test :as test]
[clojure.test.check.properties :as prop]
[datalevin.test.core :as tdc :refer [db-fixture]]
[clojure.test :refer [deftest testing is use-fixtures]]
[datalevin.lmdb :as lmdb])
(:import [java.util UUID]
[datalevin.storage Store]
[datalevin.datom Datom]))
(use-fixtures :each db-fixture)
(deftest basic-ops-test
(let [dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)]
(is (= c/gt0 (sut/max-gt store)))
(is (= 3 (sut/max-aid store)))
(is (= (merge c/entity-time-schema c/implicit-schema) (sut/schema store)))
(is (= c/e0 (sut/init-max-eid store)))
(is (= c/tx0 (sut/max-tx store)))
(let [a :a/b
v (UUID/randomUUID)
d (d/datom c/e0 a v)
s (assoc (sut/schema store) a {:db/aid 3})
b :b/c
p1 {:db/valueType :db.type/uuid}
v1 (UUID/randomUUID)
d1 (d/datom c/e0 b v1)
s1 (assoc s b (merge p1 {:db/aid 4}))
c :c/d
p2 {:db/valueType :db.type/ref}
v2 (long (rand c/emax))
d2 (d/datom c/e0 c v2)
s2 (assoc s1 c (merge p2 {:db/aid 5}))
dir (lmdb/dir (.-lmdb ^Store store))
t1 (sut/last-modified store)]
(sut/load-datoms store [d])
(is (= (inc c/tx0) (sut/max-tx store)))
(is (<= t1 (sut/last-modified store)))
(is (= s (sut/schema store)))
(is (= 1 (sut/datom-count store :eav)))
(is (= 1 (sut/datom-count store :ave)))
(is (= 0 (sut/datom-count store :vea)))
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eav d d)))
(is (= true (sut/populated? store :eav d d)))
(is (= 1 (sut/size store :eav d d)))
(is (= d (sut/head store :eav d d)))
(is (= d (sut/tail store :eav d d)))
(sut/swap-attr store b merge p1)
(sut/load-datoms store [d1])
(is (= (+ 2 c/tx0) (sut/max-tx store)))
(is (= s1 (sut/schema store)))
(is (= 2 (sut/datom-count store :eav)))
(is (= 2 (sut/datom-count store :ave)))
(is (= 0 (sut/datom-count store :vea)))
(is (= [] (sut/slice store :eav d (d/datom c/e0 :non-exist v1))))
(is (= 0 (sut/size store :eav d (d/datom c/e0 :non-exist v1))))
(is (nil? (sut/populated? store :eav d (d/datom c/e0 :non-exist v1))))
(is (= d (sut/head store :eav d d1)))
(is (= d1 (sut/tail store :eav d1 d)))
(is (= 2 (sut/size store :eav d d1)))
(is (= [d d1] (sut/slice store :eav d d1)))
(is (= [d d1] (sut/slice store :ave d d1)))
(is (= [d1 d] (sut/rslice store :eav d1 d)))
(is (= [d d1] (sut/slice store :eav
(d/datom c/e0 a nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :eav
(d/datom c/e0 b nil)
(d/datom c/e0 nil nil))))
(is (= 1 (sut/size-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= d (sut/head-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= d (sut/tail-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= [d] (sut/slice-filter store :eav
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :ave d1 d)))
(is (= [d d1] (sut/slice store :ave
(d/datom c/e0 a nil)
(d/datom c/e0 nil nil))))
(is (= [d1 d] (sut/rslice store :ave
(d/datom c/e0 b nil)
(d/datom c/e0 nil nil))))
(is (= [d] (sut/slice-filter store :ave
(fn [^Datom d] (= v (.-v d)))
(d/datom c/e0 nil nil)
(d/datom c/e0 nil nil))))
(sut/swap-attr store c merge p2)
(sut/load-datoms store [d2])
(is (= (+ 3 c/tx0) (sut/max-tx store)))
(is (= s2 (sut/schema store)))
(is (= 3 (sut/datom-count store c/eav)))
(is (= 3 (sut/datom-count store c/ave)))
(is (= 1 (sut/datom-count store c/vea)))
(is (= [d2] (sut/slice store :vea
(d/datom c/e0 nil v2)
(d/datom c/emax nil v2))))
(sut/load-datoms store [(d/delete d)])
(is (= (+ 4 c/tx0) (sut/max-tx store)))
(is (= 2 (sut/datom-count store c/eav)))
(is (= 2 (sut/datom-count store c/ave)))
(is (= 1 (sut/datom-count store c/vea)))
(sut/close store)
(is (sut/closed? store))
(let [store (sut/open dir)]
(is (= (+ 4 c/tx0) (sut/max-tx store)))
(is (= [d1] (sut/slice store :eav d1 d1)))
(sut/load-datoms store [(d/delete d1)])
(is (= (+ 5 c/tx0) (sut/max-tx store)))
(is (= 1 (sut/datom-count store c/eav)))
(sut/load-datoms store [d d1])
(is (= (+ 6 c/tx0) (sut/max-tx store)))
(is (= 3 (sut/datom-count store c/eav)))
(sut/close store))
(let [d :d/e
p3 {:db/valueType :db.type/long}
s3 (assoc s2 d (merge p3 {:db/aid 6}))
s4 (assoc s3 :f/g {:db/aid 7 :db/valueType :db.type/string})
store (sut/open dir {d p3})]
(is (= (+ 6 c/tx0) (sut/max-tx store)))
(is (= s3 (sut/schema store)))
(sut/set-schema store {:f/g {:db/valueType :db.type/string}})
(is (= s4 (sut/schema store)))
(sut/close store)))
(u/delete-files dir)))
(deftest schema-test
(let [s {:a {:db/valueType :db.type/string}
:b {:db/valueType :db.type/long}}
dir (u/tmp-dir (str "datalevin-schema-test-" (UUID/randomUUID)))
store (sut/open dir s)
s1 (sut/schema store)]
(sut/close store)
(is (sut/closed? store))
(let [store (sut/open dir s)]
(is (= s1 (sut/schema store)))
(sut/close store))
(u/delete-files dir)))
(deftest giants-string-test
(let [schema {:a {:db/valueType :db.type/string}}
dir (u/tmp-dir (str "datalevin-giants-str-test-" (UUID/randomUUID)))
store (sut/open dir schema)
v (apply str (repeat 10000 (UUID/randomUUID)))
d (d/datom c/e0 :a v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(u/delete-files dir)))
(deftest giants-data-test
(let [dir (u/tmp-dir (str "datalevin-giants-data-test-" (UUID/randomUUID)))
store (sut/open dir)
v (apply str (repeat 10000 (UUID/randomUUID)))
d (d/datom c/e0 :a v)
d1 (d/datom (inc c/e0) :b v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(let [store' (sut/open dir)]
(is (sut/populated? store' :eav
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax)))
(is (= [d] (sut/fetch store' d)))
(is (= [d] (sut/slice store' :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/load-datoms store' [d1])
(is (= 1 (sut/init-max-eid store')))
(is (= [d1] (sut/fetch store' d1)))
(sut/close store'))
(u/delete-files dir)))
(deftest normal-data-test
(let [dir (u/tmp-dir (str "datalevin-normal-data-test-" (UUID/randomUUID)))
store (sut/open dir)
v (UUID/randomUUID)
d (d/datom c/e0 :a v)
d1 (d/datom (inc c/e0) :b v)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(is (= [d] (sut/slice store :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/close store)
(let [store' (sut/open dir)]
(is (sut/populated? store' :eav
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax)))
(is (= [d] (sut/fetch store' d)))
(is (= [d] (sut/slice store' :eavt
(d/datom c/e0 :a c/v0)
(d/datom c/e0 :a c/vmax))))
(sut/load-datoms store' [d1])
(is (= 1 (sut/init-max-eid store')))
(is (= [d1] (sut/fetch store' d1)))
(sut/close store))
(u/delete-files dir)))
(deftest false-value-test
(let [d (d/datom c/e0 :a false)
dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)]
(sut/load-datoms store [d])
(is (= [d] (sut/fetch store d)))
(sut/close store)
(u/delete-files dir)))
(test/defspec random-data-test
100
(prop/for-all
[v gen/any-printable-equatable
a gen/keyword-ns
e (gen/large-integer* {:min 0})]
(let [d (d/datom e a v)
dir (u/tmp-dir (str "storage-test-" (UUID/randomUUID)))
store (sut/open dir)
_ (sut/load-datoms store [d])
r (sut/fetch store d)]
(sut/close store)
(u/delete-files dir)
(is (= [d] r)))))
| |
c91d9ec6c6f9afc41d49707b6406dcab2646e6b5986eadf1ccfa328ff3861ff9 | zotonic/zotonic | zotonic_filewatcher_inotify.erl | @author < >
2011 - 2015 < >
Date : 2011 - 10 - 12
%% @doc Watch for changed files using inotifywait.
%% -tools/wiki
Copyright 2011 - 2015
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(zotonic_filewatcher_inotify).
-author("Arjan Scherpenisse <>").
-behaviour(gen_server).
%% gen_server exports
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([start_link/0]).
-record(state, {
pid :: pid() | undefined,
port :: integer() | undefined,
executable :: string()
}).
%% interface functions
-export([
is_installed/0,
restart/0
]).
-include_lib("kernel/include/logger.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Starts the server
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
case os:find_executable("inotifywait") of
false ->
{error, "inotifywait not found"};
Executable ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Executable], [])
end.
-spec is_installed() -> boolean().
is_installed() ->
os:find_executable("inotifywait") =/= false.
-spec restart() -> ok.
restart() ->
gen_server:cast(?MODULE, restart).
%%====================================================================
%% gen_server callbacks
%%====================================================================
) - > { ok , State } |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% @doc Initiates the server.
init([Executable]) ->
process_flag(trap_exit, true),
State = #state{
executable = Executable,
port = undefined,
pid = undefined
},
timer:send_after(100, start),
{ok, State}.
%% @doc Trap unknown calls
handle_call(Message, _From, State) ->
{stop, {unknown_call, Message}, State}.
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
handle_cast(restart, #state{ pid = undefined } = State) ->
{noreply, State};
handle_cast(restart, #state{ pid = Pid } = State) when is_pid(Pid) ->
?LOG_INFO("[inotify] Stopping inotify file monitor."),
catch exec:stop(Pid),
{noreply, start_inotify(State#state{ port = undefined })};
handle_cast(Message, State) ->
{stop, {unknown_cast, Message}, State}.
%% @doc Reading a line from the inotifywait program. Sets a timer to
%% prevent duplicate file changed message for the same filename
%% (e.g. if a editor saves a file twice for some reason).
handle_info({stdout, _Port, Data}, #state{} = State) ->
Lines = binary:split(
binary:replace(Data, <<"\r\n">>, <<"\n">>, [global]),
<<"\n">>,
[global]),
lists:map(
fun(Line) ->
case re:run(Line, "^(.+) (MODIFY|CREATE|DELETE|MOVED_TO|MOVED_FROM) (.+)", [{capture, all_but_first, binary}]) of
nomatch ->
ok;
{match, [Path, Verb, File]} ->
Filename = filename:join(Path, File),
zotonic_filewatcher_handler:file_changed(verb(Verb), Filename)
end
end,
Lines),
{noreply, State};
handle_info({'DOWN', _Port, process, Pid, Reason}, #state{pid = Pid} = State) ->
?LOG_ERROR(#{
text => <<"[inotify] inotify port closed, restarting in 5 seconds.">>,
result => error,
reason => Reason
}),
State1 = State#state{
pid = undefined,
port = undefined
},
timer:send_after(5000, start),
{noreply, State1};
handle_info({'EXIT', _Pid, _Reason}, State) ->
{noreply, State};
handle_info(start, #state{ port = undefined } = State) ->
{noreply, start_inotify(State)};
handle_info(start, State) ->
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
, State ) - > void ( )
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
terminate(_Reason, #state{pid = undefined}) ->
ok;
terminate(_Reason, #state{pid = Pid}) ->
catch exec:stop(Pid),
ok.
, State , Extra ) - > { ok , NewState }
%% @doc Convert process state when code is changed
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%====================================================================
%% support functions
%%====================================================================
start_inotify(#state{executable = Executable, port = undefined} = State) ->
?LOG_INFO("[inotify] Starting inotify file monitor."),
Args = [
Executable,
"-q", "-e", "modify,create,delete,moved_to,moved_from", "-m", "-r",
"--exclude", zotonic_filewatcher_handler:re_exclude()
]
++ zotonic_filewatcher_sup:watch_dirs_expanded(),
{ok, Pid, Port} = exec:run_link(Args, [stdout, monitor]),
State#state{
port = Port,
pid = Pid
}.
verb(<<"CREATE">>) -> create;
verb(<<"MODIFY">>) -> modify;
verb(<<"DELETE">>) -> delete;
verb(<<"MOVED_FROM">>) -> delete;
verb(<<"MOVED_TO">>) -> create.
| null | https://raw.githubusercontent.com/zotonic/zotonic/1bb4aa8a0688d007dd8ec8ba271546f658312da8/apps/zotonic_filewatcher/src/zotonic_filewatcher_inotify.erl | erlang | @doc Watch for changed files using inotifywait.
-tools/wiki
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
gen_server exports
interface functions
====================================================================
API
====================================================================
@doc Starts the server
====================================================================
gen_server callbacks
====================================================================
ignore |
{stop, Reason}
@doc Initiates the server.
@doc Trap unknown calls
{stop, Reason, State}
@doc Reading a line from the inotifywait program. Sets a timer to
prevent duplicate file changed message for the same filename
(e.g. if a editor saves a file twice for some reason).
@doc This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up. When it returns, the gen_server terminates with Reason.
The return value is ignored.
@doc Convert process state when code is changed
====================================================================
support functions
==================================================================== | @author < >
2011 - 2015 < >
Date : 2011 - 10 - 12
Copyright 2011 - 2015
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(zotonic_filewatcher_inotify).
-author("Arjan Scherpenisse <>").
-behaviour(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([start_link/0]).
-record(state, {
pid :: pid() | undefined,
port :: integer() | undefined,
executable :: string()
}).
-export([
is_installed/0,
restart/0
]).
-include_lib("kernel/include/logger.hrl").
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
case os:find_executable("inotifywait") of
false ->
{error, "inotifywait not found"};
Executable ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Executable], [])
end.
-spec is_installed() -> boolean().
is_installed() ->
os:find_executable("inotifywait") =/= false.
-spec restart() -> ok.
restart() ->
gen_server:cast(?MODULE, restart).
) - > { ok , State } |
{ ok , State , Timeout } |
init([Executable]) ->
process_flag(trap_exit, true),
State = #state{
executable = Executable,
port = undefined,
pid = undefined
},
timer:send_after(100, start),
{ok, State}.
handle_call(Message, _From, State) ->
{stop, {unknown_call, Message}, State}.
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast(restart, #state{ pid = undefined } = State) ->
{noreply, State};
handle_cast(restart, #state{ pid = Pid } = State) when is_pid(Pid) ->
?LOG_INFO("[inotify] Stopping inotify file monitor."),
catch exec:stop(Pid),
{noreply, start_inotify(State#state{ port = undefined })};
handle_cast(Message, State) ->
{stop, {unknown_cast, Message}, State}.
handle_info({stdout, _Port, Data}, #state{} = State) ->
Lines = binary:split(
binary:replace(Data, <<"\r\n">>, <<"\n">>, [global]),
<<"\n">>,
[global]),
lists:map(
fun(Line) ->
case re:run(Line, "^(.+) (MODIFY|CREATE|DELETE|MOVED_TO|MOVED_FROM) (.+)", [{capture, all_but_first, binary}]) of
nomatch ->
ok;
{match, [Path, Verb, File]} ->
Filename = filename:join(Path, File),
zotonic_filewatcher_handler:file_changed(verb(Verb), Filename)
end
end,
Lines),
{noreply, State};
handle_info({'DOWN', _Port, process, Pid, Reason}, #state{pid = Pid} = State) ->
?LOG_ERROR(#{
text => <<"[inotify] inotify port closed, restarting in 5 seconds.">>,
result => error,
reason => Reason
}),
State1 = State#state{
pid = undefined,
port = undefined
},
timer:send_after(5000, start),
{noreply, State1};
handle_info({'EXIT', _Pid, _Reason}, State) ->
{noreply, State};
handle_info(start, #state{ port = undefined } = State) ->
{noreply, start_inotify(State)};
handle_info(start, State) ->
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
, State ) - > void ( )
terminate(_Reason, #state{pid = undefined}) ->
ok;
terminate(_Reason, #state{pid = Pid}) ->
catch exec:stop(Pid),
ok.
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
start_inotify(#state{executable = Executable, port = undefined} = State) ->
?LOG_INFO("[inotify] Starting inotify file monitor."),
Args = [
Executable,
"-q", "-e", "modify,create,delete,moved_to,moved_from", "-m", "-r",
"--exclude", zotonic_filewatcher_handler:re_exclude()
]
++ zotonic_filewatcher_sup:watch_dirs_expanded(),
{ok, Pid, Port} = exec:run_link(Args, [stdout, monitor]),
State#state{
port = Port,
pid = Pid
}.
verb(<<"CREATE">>) -> create;
verb(<<"MODIFY">>) -> modify;
verb(<<"DELETE">>) -> delete;
verb(<<"MOVED_FROM">>) -> delete;
verb(<<"MOVED_TO">>) -> create.
|
880904938c13fb49658244a57e2961961a127e9de1c27e3a7dc202f8129e4da8 | eta-lang/eta-prelude | Foldable.hs | module Eta.Classes.Foldable
( Data.Foldable.Foldable(foldMap, foldr)
, foldRight
, foldLeft
, strictFoldRight
, strictFoldLeft
, monadicFoldRight
, monadicFoldLeft
, Eta.Classes.Foldable.toList
, isEmpty
, Eta.Classes.Foldable.length
, isElementOf
, Eta.Classes.Foldable.maximum
, maximumBy
, unsafeMaximum
, Eta.Classes.Foldable.minimum
, minimumBy
, unsafeMinimum
, Eta.Classes.Foldable.sum
, Eta.Classes.Foldable.product
, findBy
, Eta.Classes.Foldable.any
, Eta.Classes.Foldable.all
, discardTraverse
, foreach
)
where
import Eta.Classes.Applicative
import Eta.Classes.Eq
import Eta.Classes.Monad
import Eta.Classes.Num
import Eta.Classes.Ord
import Eta.Types
import Data.Foldable(Foldable(foldr, foldMap))
import qualified Data.Foldable
-- $ setup
> > > import Eta . Classes . Show
> > > import Eta . Types
-- ** Foldable
|
The ' Foldable ' type class defines that a type
can have a ' foldRight ' operation which can be used
to reduce it by passing a binary operation and a
neutral element . This reduces starting from the right .
The List type is an example of ' Foldable '
> > > ( + ) 0 [ 1 , 2 , 3 ]
6
/Mnemonic : Reduceable/
The 'Foldable' type class defines that a type
can have a 'foldRight' operation which can be used
to reduce it by passing a binary operation and a
neutral element. This reduces starting from the right.
The List type is an example of 'Foldable'
>>> foldRight (+) 0 [1, 2, 3]
6
/Mnemonic: Reduceable/
-}
foldRight :: (Foldable f) => (a -> b -> b) -> b -> f a -> b
foldRight = Data.Foldable.foldr
|
' foldLeft ' starts from the left , using the lazy evaluation
capabilities of Eta . Note that this will get stuck in an
infite loop if you pass an infite list to it .
'foldLeft' starts from the left, using the lazy evaluation
capabilities of Eta. Note that this will get stuck in an
infite loop if you pass an infite list to it.
-}
foldLeft :: (Foldable f) => (b -> a -> b) -> b -> f a -> b
foldLeft = Data.Foldable.foldl
|
A version of ' foldRight ' that evaluates the operations inline ,
hence /strict/ , the opposite of lazy .
A version of 'foldRight' that evaluates the operations inline,
hence /strict/, the opposite of lazy.
-}
strictFoldRight :: (Foldable f) => (a -> b -> b) -> b -> f a -> b
strictFoldRight = Data.Foldable.foldr'
|
A version of ' foldLeft ' that evaluates the operations inline ,
hence /strict/ , the opposite of lazy .
A version of 'foldLeft' that evaluates the operations inline,
hence /strict/, the opposite of lazy.
-}
strictFoldLeft :: (Foldable f) => (b -> a -> b) -> b -> f a -> b
strictFoldLeft = Data.Foldable.foldl'
|
A version of ' foldRight ' that applies functions that are flatmappable ,
in the context of a type that implements a monad , and returns the
result produced by the reduction of the structure , wrapped in that type :
> > > : {
addIntoMaybe : : Int - > Int - > Maybe Int
addIntoMaybe a b = Just ( a + b )
:}
> > > monadicFoldRight 0 [ 1,2,3 ]
Just 6
A version of 'foldRight' that applies functions that are flatmappable,
in the context of a type that implements a monad, and returns the
result produced by the reduction of the structure, wrapped in that type:
>>> :{
addIntoMaybe :: Int -> Int -> Maybe Int
addIntoMaybe a b = Just (a + b)
:}
>>> monadicFoldRight addIntoMaybe 0 [1,2,3]
Just 6
-}
monadicFoldRight :: (Foldable f, Monad m) => (a -> b -> m b) -> b -> f a -> m b
monadicFoldRight = Data.Foldable.foldrM
{-|
Left-biased version of 'monadicFoldRight'
-}
monadicFoldLeft :: (Foldable f, Monad m) => (b -> a -> m b) -> b -> f a -> m b
monadicFoldLeft = Data.Foldable.foldlM
{-|
Converts a type that implements 'Foldable' into a list
-}
toList :: (Foldable f) => f a -> [a]
toList = Data.Foldable.toList
{-|
Checks if a 'Foldable' structure is empty.
-}
isEmpty :: (Foldable f) => f a -> Bool
isEmpty = Data.Foldable.null
{-|
Returns the size of a structure.
-}
length :: (Foldable f) => f a -> Int
length = Data.Foldable.length
{-|
Checks if the element is contained in the structure.
-}
isElementOf :: (Eq a, Foldable f) => a -> f a -> Bool
isElementOf = Data.Foldable.elem
{-|
Largest element in a structure.
Returns 'Nothing' if the structure is empty.
-}
maximum :: (Ord a, Foldable f) => f a -> Maybe a
maximum x =
if isEmpty x
then Nothing
else Just (unsafeMaximum x)
{-|
Given some comparison function, return the maximum of a
structure. Returns 'Nothing' if the structure is empty.
-}
maximumBy :: (Foldable f) => (a -> a -> Ordering) -> f a -> Maybe a
maximumBy pred x =
if isEmpty x
then Nothing
else Just (Data.Foldable.maximumBy pred x)
{-|
Largest element in a structure.
Errors if the structure is empty
-}
unsafeMaximum :: (Ord a, Foldable f) => f a -> a
unsafeMaximum = Data.Foldable.maximum
{-# WARNING unsafeMaximum "Partial functions should be avoided"#-}
{-|
Smallest element in a structure
Returns 'Nothing' if the structure is empty.
-}
minimum :: (Ord a, Foldable f) => f a -> Maybe a
minimum x =
if isEmpty x
then Nothing
else Just (unsafeMinimum x)
{-|
Given some comparison function, return the minimum of a
structure. Returns 'Nothing' if the structure is empty.
-}
minimumBy :: (Foldable f) => (a -> a -> Ordering) -> f a -> Maybe a
minimumBy pred x =
if isEmpty x
then Nothing
else Just (Data.Foldable.minimumBy pred x)
{-|
Largest element in a structure.
Errors if the structure is empty
-}
unsafeMinimum :: (Ord a, Foldable f) => f a -> a
unsafeMinimum = Data.Foldable.minimum
{-# WARNING unsafeMinimum "Partial functions should be avoided"#-}
{-|
Sum of the numbers of a structure
-}
sum :: (Num a, Foldable f) => f a -> a
sum = Data.Foldable.sum
{-|
Product of the numbers of a structure
-}
product :: (Num a, Foldable f) => f a -> a
product = Data.Foldable.product
|
Given some predicate , ' findBy ' will return
the first element that matches the predicate
or ' Nothing ' if there is no such element
Given some predicate, 'findBy' will return
the first element that matches the predicate
or 'Nothing' if there is no such element
-}
findBy :: (Foldable f) => (a -> Bool) -> f a -> Maybe a
findBy = Data.Foldable.find
|
Determines if any element satisfies the predicate
> > > any (= = 1 ) [ 1 , 2 , 3 ]
True
> > > any (= = 5 ) [ 1 , 2 , 3 ]
False
Determines if any element satisfies the predicate
>>> any (== 1) [1, 2, 3]
True
>>> any (== 5) [1, 2, 3]
False
-}
any :: (Foldable f) => (a -> Bool) -> f a -> Bool
any = Data.Foldable.any
|
Determines if all elements satisfy the predicate
> > > all (= = 1 ) [ 1 , 2 , 3 ]
False
> > > all ( < 5 ) [ 1 , 2 , 3 ]
True
Determines if all elements satisfy the predicate
>>> all (== 1) [1, 2, 3]
False
>>> all (< 5) [1, 2, 3]
True
-}
all :: (Foldable f) => (a -> Bool) -> f a -> Bool
all = Data.Foldable.all
-- ** Acting on Foldables
|
Sometimes we need to apply an action to each one of
the elements . The function ' discardTraverse ' maps an
action over each of the elements of the structure
> > > discardTraverse printLine [ " Hello " , " world " , " ! " ]
Hello
world
!
Sometimes we need to apply an action to each one of
the elements. The function 'discardTraverse' maps an
action over each of the elements of the structure
>>> discardTraverse printLine ["Hello", "world", "!"]
Hello
world
!
-}
discardTraverse :: (Foldable f, Applicative m) => (a -> m b) -> f a -> m ()
discardTraverse = Data.Foldable.traverse_
-- *** foreach
|
Another alternative to ' discardTraverse ' is to use the
' foreach ' function , which is very familiar to a lot
of developers .
> > > foreach [ 1 .. 3 ] printShow
1
2
3
Another alternative to 'discardTraverse' is to use the
'foreach' function, which is very familiar to a lot
of developers.
>>> foreach [1..3] printShow
1
2
3
-}
foreach :: (Foldable f, Applicative m) => f a -> (a -> m b) -> m ()
foreach = Data.Foldable.for_
| null | https://raw.githubusercontent.com/eta-lang/eta-prelude/e25e9aa42093e090a86d2728b0cac288a25bc52e/src/Eta/Classes/Foldable.hs | haskell | $ setup
** Foldable
|
Left-biased version of 'monadicFoldRight'
|
Converts a type that implements 'Foldable' into a list
|
Checks if a 'Foldable' structure is empty.
|
Returns the size of a structure.
|
Checks if the element is contained in the structure.
|
Largest element in a structure.
Returns 'Nothing' if the structure is empty.
|
Given some comparison function, return the maximum of a
structure. Returns 'Nothing' if the structure is empty.
|
Largest element in a structure.
Errors if the structure is empty
# WARNING unsafeMaximum "Partial functions should be avoided"#
|
Smallest element in a structure
Returns 'Nothing' if the structure is empty.
|
Given some comparison function, return the minimum of a
structure. Returns 'Nothing' if the structure is empty.
|
Largest element in a structure.
Errors if the structure is empty
# WARNING unsafeMinimum "Partial functions should be avoided"#
|
Sum of the numbers of a structure
|
Product of the numbers of a structure
** Acting on Foldables
*** foreach | module Eta.Classes.Foldable
( Data.Foldable.Foldable(foldMap, foldr)
, foldRight
, foldLeft
, strictFoldRight
, strictFoldLeft
, monadicFoldRight
, monadicFoldLeft
, Eta.Classes.Foldable.toList
, isEmpty
, Eta.Classes.Foldable.length
, isElementOf
, Eta.Classes.Foldable.maximum
, maximumBy
, unsafeMaximum
, Eta.Classes.Foldable.minimum
, minimumBy
, unsafeMinimum
, Eta.Classes.Foldable.sum
, Eta.Classes.Foldable.product
, findBy
, Eta.Classes.Foldable.any
, Eta.Classes.Foldable.all
, discardTraverse
, foreach
)
where
import Eta.Classes.Applicative
import Eta.Classes.Eq
import Eta.Classes.Monad
import Eta.Classes.Num
import Eta.Classes.Ord
import Eta.Types
import Data.Foldable(Foldable(foldr, foldMap))
import qualified Data.Foldable
> > > import Eta . Classes . Show
> > > import Eta . Types
|
The ' Foldable ' type class defines that a type
can have a ' foldRight ' operation which can be used
to reduce it by passing a binary operation and a
neutral element . This reduces starting from the right .
The List type is an example of ' Foldable '
> > > ( + ) 0 [ 1 , 2 , 3 ]
6
/Mnemonic : Reduceable/
The 'Foldable' type class defines that a type
can have a 'foldRight' operation which can be used
to reduce it by passing a binary operation and a
neutral element. This reduces starting from the right.
The List type is an example of 'Foldable'
>>> foldRight (+) 0 [1, 2, 3]
6
/Mnemonic: Reduceable/
-}
foldRight :: (Foldable f) => (a -> b -> b) -> b -> f a -> b
foldRight = Data.Foldable.foldr
|
' foldLeft ' starts from the left , using the lazy evaluation
capabilities of Eta . Note that this will get stuck in an
infite loop if you pass an infite list to it .
'foldLeft' starts from the left, using the lazy evaluation
capabilities of Eta. Note that this will get stuck in an
infite loop if you pass an infite list to it.
-}
foldLeft :: (Foldable f) => (b -> a -> b) -> b -> f a -> b
foldLeft = Data.Foldable.foldl
|
A version of ' foldRight ' that evaluates the operations inline ,
hence /strict/ , the opposite of lazy .
A version of 'foldRight' that evaluates the operations inline,
hence /strict/, the opposite of lazy.
-}
strictFoldRight :: (Foldable f) => (a -> b -> b) -> b -> f a -> b
strictFoldRight = Data.Foldable.foldr'
|
A version of ' foldLeft ' that evaluates the operations inline ,
hence /strict/ , the opposite of lazy .
A version of 'foldLeft' that evaluates the operations inline,
hence /strict/, the opposite of lazy.
-}
strictFoldLeft :: (Foldable f) => (b -> a -> b) -> b -> f a -> b
strictFoldLeft = Data.Foldable.foldl'
|
A version of ' foldRight ' that applies functions that are flatmappable ,
in the context of a type that implements a monad , and returns the
result produced by the reduction of the structure , wrapped in that type :
> > > : {
addIntoMaybe : : Int - > Int - > Maybe Int
addIntoMaybe a b = Just ( a + b )
:}
> > > monadicFoldRight 0 [ 1,2,3 ]
Just 6
A version of 'foldRight' that applies functions that are flatmappable,
in the context of a type that implements a monad, and returns the
result produced by the reduction of the structure, wrapped in that type:
>>> :{
addIntoMaybe :: Int -> Int -> Maybe Int
addIntoMaybe a b = Just (a + b)
:}
>>> monadicFoldRight addIntoMaybe 0 [1,2,3]
Just 6
-}
monadicFoldRight :: (Foldable f, Monad m) => (a -> b -> m b) -> b -> f a -> m b
monadicFoldRight = Data.Foldable.foldrM
monadicFoldLeft :: (Foldable f, Monad m) => (b -> a -> m b) -> b -> f a -> m b
monadicFoldLeft = Data.Foldable.foldlM
toList :: (Foldable f) => f a -> [a]
toList = Data.Foldable.toList
isEmpty :: (Foldable f) => f a -> Bool
isEmpty = Data.Foldable.null
length :: (Foldable f) => f a -> Int
length = Data.Foldable.length
isElementOf :: (Eq a, Foldable f) => a -> f a -> Bool
isElementOf = Data.Foldable.elem
maximum :: (Ord a, Foldable f) => f a -> Maybe a
maximum x =
if isEmpty x
then Nothing
else Just (unsafeMaximum x)
maximumBy :: (Foldable f) => (a -> a -> Ordering) -> f a -> Maybe a
maximumBy pred x =
if isEmpty x
then Nothing
else Just (Data.Foldable.maximumBy pred x)
unsafeMaximum :: (Ord a, Foldable f) => f a -> a
unsafeMaximum = Data.Foldable.maximum
minimum :: (Ord a, Foldable f) => f a -> Maybe a
minimum x =
if isEmpty x
then Nothing
else Just (unsafeMinimum x)
minimumBy :: (Foldable f) => (a -> a -> Ordering) -> f a -> Maybe a
minimumBy pred x =
if isEmpty x
then Nothing
else Just (Data.Foldable.minimumBy pred x)
unsafeMinimum :: (Ord a, Foldable f) => f a -> a
unsafeMinimum = Data.Foldable.minimum
sum :: (Num a, Foldable f) => f a -> a
sum = Data.Foldable.sum
product :: (Num a, Foldable f) => f a -> a
product = Data.Foldable.product
|
Given some predicate , ' findBy ' will return
the first element that matches the predicate
or ' Nothing ' if there is no such element
Given some predicate, 'findBy' will return
the first element that matches the predicate
or 'Nothing' if there is no such element
-}
findBy :: (Foldable f) => (a -> Bool) -> f a -> Maybe a
findBy = Data.Foldable.find
|
Determines if any element satisfies the predicate
> > > any (= = 1 ) [ 1 , 2 , 3 ]
True
> > > any (= = 5 ) [ 1 , 2 , 3 ]
False
Determines if any element satisfies the predicate
>>> any (== 1) [1, 2, 3]
True
>>> any (== 5) [1, 2, 3]
False
-}
any :: (Foldable f) => (a -> Bool) -> f a -> Bool
any = Data.Foldable.any
|
Determines if all elements satisfy the predicate
> > > all (= = 1 ) [ 1 , 2 , 3 ]
False
> > > all ( < 5 ) [ 1 , 2 , 3 ]
True
Determines if all elements satisfy the predicate
>>> all (== 1) [1, 2, 3]
False
>>> all (< 5) [1, 2, 3]
True
-}
all :: (Foldable f) => (a -> Bool) -> f a -> Bool
all = Data.Foldable.all
|
Sometimes we need to apply an action to each one of
the elements . The function ' discardTraverse ' maps an
action over each of the elements of the structure
> > > discardTraverse printLine [ " Hello " , " world " , " ! " ]
Hello
world
!
Sometimes we need to apply an action to each one of
the elements. The function 'discardTraverse' maps an
action over each of the elements of the structure
>>> discardTraverse printLine ["Hello", "world", "!"]
Hello
world
!
-}
discardTraverse :: (Foldable f, Applicative m) => (a -> m b) -> f a -> m ()
discardTraverse = Data.Foldable.traverse_
|
Another alternative to ' discardTraverse ' is to use the
' foreach ' function , which is very familiar to a lot
of developers .
> > > foreach [ 1 .. 3 ] printShow
1
2
3
Another alternative to 'discardTraverse' is to use the
'foreach' function, which is very familiar to a lot
of developers.
>>> foreach [1..3] printShow
1
2
3
-}
foreach :: (Foldable f, Applicative m) => f a -> (a -> m b) -> m ()
foreach = Data.Foldable.for_
|
bf18274d613210dfa1ebf78ac6c22396fcfa98cca99285a4065dc2d59079bb23 | Ralith/cllvm | analysis.lisp | (in-package #:llvm-bindings)
(defcenum verifier-failure-action
:abort-process
:print-message
:return-status)
(defcfun (verify-module "LLVMVerifyModule") :boolean
(module module-ref)
(action verifier-failure-action)
(out-message (:pointer (:pointer :char))))
(defcfun (verify-function "LLVMVerifyFunction") :boolean
(function value-ref)
(action verifier-failure-action))
| null | https://raw.githubusercontent.com/Ralith/cllvm/342860f0f0b1747ddeeffa1941a930fb9adf1c4c/bindings/analysis.lisp | lisp | (in-package #:llvm-bindings)
(defcenum verifier-failure-action
:abort-process
:print-message
:return-status)
(defcfun (verify-module "LLVMVerifyModule") :boolean
(module module-ref)
(action verifier-failure-action)
(out-message (:pointer (:pointer :char))))
(defcfun (verify-function "LLVMVerifyFunction") :boolean
(function value-ref)
(action verifier-failure-action))
| |
ebecd7e64ded9392c846848bb1a90e147d2f274d71796cf0ab2e562fba47a2c6 | mxthevs/Caml_bot | twitch_irc.ml | open Logger
type connection = {
host : string;
port : int;
}
let conn = { host = "irc.chat.twitch.tv"; port = 6667 }
module Irc_protocol = struct
type message_typ =
| JOIN
| NICK
| PASS
| PRIVMSG
| PONG
let typ_to_string = function
| JOIN -> "JOIN #"
| NICK -> "NICK "
| PASS -> "PASS "
| PRIVMSG -> "PRIVMSG "
| PONG -> "PONG :"
let create ~command content = typ_to_string command ^ content ^ "\r\n"
let send_message message =
[%log debug ">>> %s" (String.trim message)];
message
let join channel = create ~command:JOIN channel
let nick username = create ~command:NICK username
let pass password = create ~command:PASS password
let privmsg content ~target = content |> Printf.sprintf "%s :%s" target |> create ~command:PRIVMSG
let pong target = create ~command:PONG target
end
type out_string = out_channel -> string -> unit
let join_and_greet (config : Config.t) (out_string : out_string) (out_descr : out_channel) =
let pass, nick, chan = (config.pass, config.nick, config.chan) in
pass |> Irc_protocol.pass |> out_string out_descr;
nick |> Irc_protocol.nick |> Irc_protocol.send_message |> out_string out_descr;
chan |> Irc_protocol.join |> Irc_protocol.send_message |> out_string out_descr
let list_to_option xs =
match xs with
| x :: _ -> Some x
| [] -> None
let start (config : Config.t) =
[%log info "Trying to connect to %s:%d" conn.host conn.port];
flush stdout;
let client_socket = Unix.socket ~cloexec:true Unix.PF_INET Unix.SOCK_STREAM 0 in
let addr =
match (Unix.gethostbyname conn.host).h_addr_list |> Array.to_list |> list_to_option with
| Some addr -> addr
| None ->
[%log err "Could not resolve %s" conn.host];
exit 1
in
Unix.connect client_socket (ADDR_INET (addr, conn.port));
[%log info "Connected!"];
flush stdout;
let input_channel = client_socket |> Unix.in_channel_of_descr in
let output_channel = client_socket |> Unix.out_channel_of_descr in
let rec wait_for_messages_and_reply () =
let input = input_channel |> input_line in
let handle_privsmg ~target ~message ~sender =
[%log debug "<<< %s" (String.trim message)];
if message.[0] = '!' then
match Bot.handle_command ~message ~user:sender with
| Ok reply ->
Irc_protocol.privmsg ~target reply
|> Irc_protocol.send_message
|> output_string output_channel
| Error () -> ()
in
(match Message.parse input with
| Ok message -> (
match message.command with
| PRIVMSG (target, message, sender) -> handle_privsmg ~target ~message ~sender
| PING (target, _) ->
target |> Irc_protocol.pong |> Irc_protocol.send_message |> output_string output_channel
| _ -> ())
| Error error -> [%log err "%s" error]);
flush_all ();
wait_for_messages_and_reply ()
in
join_and_greet config output_string output_channel;
flush_all ();
wait_for_messages_and_reply ()
| null | https://raw.githubusercontent.com/mxthevs/Caml_bot/891b240ac8047b17cd0ccc420d790a47e29a9725/src/twitch_irc.ml | ocaml | open Logger
type connection = {
host : string;
port : int;
}
let conn = { host = "irc.chat.twitch.tv"; port = 6667 }
module Irc_protocol = struct
type message_typ =
| JOIN
| NICK
| PASS
| PRIVMSG
| PONG
let typ_to_string = function
| JOIN -> "JOIN #"
| NICK -> "NICK "
| PASS -> "PASS "
| PRIVMSG -> "PRIVMSG "
| PONG -> "PONG :"
let create ~command content = typ_to_string command ^ content ^ "\r\n"
let send_message message =
[%log debug ">>> %s" (String.trim message)];
message
let join channel = create ~command:JOIN channel
let nick username = create ~command:NICK username
let pass password = create ~command:PASS password
let privmsg content ~target = content |> Printf.sprintf "%s :%s" target |> create ~command:PRIVMSG
let pong target = create ~command:PONG target
end
type out_string = out_channel -> string -> unit
let join_and_greet (config : Config.t) (out_string : out_string) (out_descr : out_channel) =
let pass, nick, chan = (config.pass, config.nick, config.chan) in
pass |> Irc_protocol.pass |> out_string out_descr;
nick |> Irc_protocol.nick |> Irc_protocol.send_message |> out_string out_descr;
chan |> Irc_protocol.join |> Irc_protocol.send_message |> out_string out_descr
let list_to_option xs =
match xs with
| x :: _ -> Some x
| [] -> None
let start (config : Config.t) =
[%log info "Trying to connect to %s:%d" conn.host conn.port];
flush stdout;
let client_socket = Unix.socket ~cloexec:true Unix.PF_INET Unix.SOCK_STREAM 0 in
let addr =
match (Unix.gethostbyname conn.host).h_addr_list |> Array.to_list |> list_to_option with
| Some addr -> addr
| None ->
[%log err "Could not resolve %s" conn.host];
exit 1
in
Unix.connect client_socket (ADDR_INET (addr, conn.port));
[%log info "Connected!"];
flush stdout;
let input_channel = client_socket |> Unix.in_channel_of_descr in
let output_channel = client_socket |> Unix.out_channel_of_descr in
let rec wait_for_messages_and_reply () =
let input = input_channel |> input_line in
let handle_privsmg ~target ~message ~sender =
[%log debug "<<< %s" (String.trim message)];
if message.[0] = '!' then
match Bot.handle_command ~message ~user:sender with
| Ok reply ->
Irc_protocol.privmsg ~target reply
|> Irc_protocol.send_message
|> output_string output_channel
| Error () -> ()
in
(match Message.parse input with
| Ok message -> (
match message.command with
| PRIVMSG (target, message, sender) -> handle_privsmg ~target ~message ~sender
| PING (target, _) ->
target |> Irc_protocol.pong |> Irc_protocol.send_message |> output_string output_channel
| _ -> ())
| Error error -> [%log err "%s" error]);
flush_all ();
wait_for_messages_and_reply ()
in
join_and_greet config output_string output_channel;
flush_all ();
wait_for_messages_and_reply ()
| |
30ee437874812cdfc317cc58dbf2a668c85542527c713713908c15552de0a114 | fpco/ide-backend | TestPkgG.hs | module Testing.TestPkgG where
testPkgG :: String
testPkgG = "This is test package G-0.1"
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/test-packages/testpkg-G-0.1/Testing/TestPkgG.hs | haskell | module Testing.TestPkgG where
testPkgG :: String
testPkgG = "This is test package G-0.1"
| |
73b738ce0b785f00bfb09601c80860c8fc7b69fdf6265012bcd8e0cfdd9f1703 | andreasabel/miniagda | Warshall.hs | {-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Warshall where
{- construct a graph from constraints
x + n <= y becomes x ---(-n)---> y
x <= n + y becomes x ---(+n)---> y
the default edge (= no edge is) labelled with infinity
building the graph involves keeping track of the node names.
We do this in a finite map, assigning consecutive numbers to nodes.
-}
import Control.Monad.State
import Data.Maybe -- fromJust
import Data.Array
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.List as List
-- import Debug.Trace
-- import Util
traceSolve :: String -> a -> a
traceSolve _msg = id
-- traceSolve = trace
traceSolveM :: Monad m => String -> m ()
traceSolveM _msg = return ()
-- traceSolveM = traceM
-- semi rings ----------------------------------------------------
class SemiRing a where
oplus :: a -> a -> a
otimes :: a -> a -> a
ozero :: a -- neutral for oplus, dominant for otimes
oone :: a -- neutral for otimes
type Matrix a = Array (Int,Int) a
-- assuming a square matrix
warshall :: SemiRing a => Matrix a -> Matrix a
warshall a0 = loop r a0 where
b@((r,c),(r',c')) = bounds a0 -- assuming r == c and r' == c'
loop k a | k <= r' =
loop (k+1) (array b [ ((i,j),
(a!(i,j)) `oplus` ((a!(i,k)) `otimes` (a!(k,j))))
| i <- [r..r'], j <- [c..c'] ])
| otherwise = a
-- edge weight in the graph, forming a semi ring
data Weight = Finite Int | Infinite
deriving (Eq)
inc :: Weight -> Int -> Weight
inc Infinite _ = Infinite
inc (Finite k) n = Finite (k + n)
instance Show Weight where
show (Finite i) = show i
show Infinite = "."
instance Ord Weight where
_ <= Infinite = True
Infinite <= _ = False
Finite a <= Finite b = a <= b
instance SemiRing Weight where
oplus = min
otimes Infinite _ = Infinite
otimes _ Infinite = Infinite
otimes (Finite a) (Finite b) = Finite (a + b)
ozero = Infinite
oone = Finite 0
-- constraints ---------------------------------------------------
-- nodes of the graph are either
-- - flexible variables (with identifiers drawn from Int),
- rigid variables ( also identified by ) , or
-- - constants (like 0, infinity, or anything between)
data Node rigid
= Rigid rigid
| Flex FlexId
deriving (Eq, Ord)
instance Show rigid => Show (Node rigid) where
show (Flex i) = "?" ++ show i
show (Rigid r) = show r
data Rigid = RConst Weight
| RVar RigidId
deriving (Eq, Ord)
instance Show Rigid where
show (RVar i) = "v" ++ show i
show (RConst Infinite) = "#"
show (RConst (Finite n)) = show n
type NodeId = Int
type RigidId = Int
type FlexId = Int
type Scope = RigidId -> Bool
-- which rigid variables a flex may be instatiated to
infinite :: Rigid -> Bool
infinite (RConst Infinite) = True
infinite _ = False
-- isBelow r w r'
-- checks, if r and r' are connected by w (meaning w not infinite)
-- wether r + w <= r'
-- precondition: not the same rigid variable
isBelow :: Rigid -> Weight -> Rigid -> Bool
isBelow _ Infinite _ = True
isBelow _ _ (RConst Infinite) = True
-- isBelow (RConst Infinite) n (RConst (Finite _)) = False
isBelow (RConst (Finite i)) (Finite n) (RConst (Finite j)) = i + n <= j
isBelow _ _ _ = False -- rigid variables are not related
-- a constraint is an edge in the graph
data Constrnt edgeLabel rigid flexScope
= NewFlex FlexId flexScope
| Arc (Node rigid) edgeLabel (Node rigid)
Arc v1 k v2 at least one of v1,v2 is a VMeta ( Flex ) ,
the other a VMeta or a VGen ( Rigid )
-- if k <= 0 this means $^(-k) v1 <= v2
otherwise v1 < = v3
type Constraint = Constrnt Weight Rigid Scope
arc :: Node Rigid -> Int -> Node Rigid -> Constraint
arc a k b = Arc a (Finite k) b
instance Show Constraint where
show (NewFlex i _) = "SizeMeta(?" ++ show i ++ ")"
show (Arc v1 (Finite k) v2)
| k == 0 = show v1 ++ "<=" ++ show v2
| k < 0 = show v1 ++ "+" ++ show (-k) ++ "<=" ++ show v2
| otherwise = show v1 ++ "<=" ++ show v2 ++ "+" ++ show k
show _ = undefined
type Constraints = [Constraint]
emptyConstraints :: Constraints
emptyConstraints = []
-- graph (matrix) ------------------------------------------------
data Graph edgeLabel rigid flexScope = Graph
{ flexScope :: Map FlexId flexScope -- scope for each flexible var
, nodeMap :: Map (Node rigid) NodeId -- node labels to node numbers
, intMap :: Map NodeId (Node rigid) -- node numbers to node labels
, nextNode :: NodeId -- number of nodes (n)
, graph :: NodeId -> NodeId -> edgeLabel -- the edges (restrict to [0..n[)
}
-- the empty graph: no nodes, edges are all undefined (infinity weight)
initGraph :: SemiRing edgeLabel => Graph edgeLabel rigid flexScope
initGraph = Graph Map.empty Map.empty Map.empty 0 (\ _ _ -> ozero)
-- the Graph Monad, for constructing a graph iteratively
type GM edgeLabel rigid flexScope = State (Graph edgeLabel rigid flexScope)
addFlex :: FlexId -> flexScope -> GM edgeLabel rigid flexScope ()
addFlex x scope = do
st <- get
put $ st { flexScope = Map.insert x scope (flexScope st) }
i < - addNode n returns number of node n. if not present , it is added first
addNode :: (Eq rigid, Ord rigid) => (Node rigid) -> GM edgeLabel rigid flexScope Int
addNode n = do
st <- get
case Map.lookup n (nodeMap st) of
Just i -> return i
Nothing -> do let i = nextNode st
put $ st { nodeMap = Map.insert n i (nodeMap st)
, intMap = Map.insert i n (intMap st)
, nextNode = i + 1
}
return i
-- addEdge n1 k n2
improves the weight of egde n1->n2 to be at most k
-- also adds nodes if not yet present
addEdge :: (Eq rigid, Ord rigid, SemiRing edgeLabel) => (Node rigid) -> edgeLabel -> (Node rigid) -> GM edgeLabel rigid flexScope ()
addEdge n1 k n2 = do
i1 <- addNode n1
i2 <- addNode n2
st <- get
let graph' x y = if (x,y) == (i1,i2) then k `oplus` (graph st) x y
else graph st x y
put $ st { graph = graph' }
addConstraint :: (Eq rigid, Ord rigid, SemiRing edgeLabel) =>
Constrnt edgeLabel rigid flexScope -> GM edgeLabel rigid flexScope ()
addConstraint (NewFlex x scope) = do
addFlex x scope
addEdge (Flex x) oone (Flex x) -- add dummy edge to make sure each meta variable
-- is in the matrix and gets solved
addConstraint (Arc n1 k n2) = addEdge n1 k n2
buildGraph :: (Eq rigid, Ord rigid, SemiRing edgeLabel) =>
[Constrnt edgeLabel rigid flexScope] -> Graph edgeLabel rigid flexScope
buildGraph cs = execState (mapM_ addConstraint cs) initGraph
mkMatrix :: Int -> (Int -> Int -> a) -> Matrix a
mkMatrix n g = array ((0,0),(n-1,n-1))
[ ((i,j), g i j) | i <- [0..n-1], j <- [0..n-1]]
-- displaying matrices with row and column labels --------------------
-- a matrix with row descriptions in b and column descriptions in c
data LegendMatrix a b c = LegendMatrix
{ matrix :: Matrix a
, rowdescr :: Int -> b
, coldescr :: Int -> c
}
instance (Show a, Show b, Show c) => Show (LegendMatrix a b c) where
show (LegendMatrix m rd cd) =
-- first show column description
let ((r,c),(r',c')) = bounds m
in foldr (\ j s -> "\t" ++ show (cd j) ++ s) "" [c .. c'] ++
-- then output rows
foldr (\ i s -> "\n" ++ show (rd i) ++
foldr (\ j t -> "\t" ++ show (m!(i,j)) ++ t)
(s)
[c .. c'])
"" [r .. r']
-- solving the constraints -------------------------------------------
-- a solution assigns to each flexible variable a size expression
-- which is either a constant or a v + n for a rigid variable v
type Solution = Map Int MaxExpr
emptySolution :: Solution
emptySolution = Map.empty
extendSolution :: Solution -> Int -> SizeExpr -> Solution
extendSolution subst k v = Map.insertWith (++) k [v] subst
type MaxExpr = [SizeExpr]
newtype MaxExpr = MaxExpr { sizeExprs : : [ SizeExpr ] } deriving ( Show )
e.g. x + 5
| SizeConst Weight -- a number or infinity
instance Show SizeExpr where
show (SizeVar n 0) = show (Rigid (RVar n))
show (SizeVar n k) = show (Rigid (RVar n)) ++ "+" ++ show k
show (SizeConst (Finite i)) = show i
show (SizeConst Infinite) = "#"
-- sizeRigid r n returns the size expression corresponding to r + n
sizeRigid :: Rigid -> Int -> SizeExpr
sizeRigid (RConst k) n = SizeConst (inc k n)
sizeRigid (RVar i) n = SizeVar i n
apply : : SizeExpr - > Solution - > SizeExpr
apply e@(SizeExpr ( Rigid _ ) _ ) = e
apply e@(SizeExpr ( Flex x ) i ) phi = case Map.lookup x phi of
Nothing - > e
Just ( SizeExpr v j ) - > SizeExpr v ( i + j )
after : : Solution - > Solution - > Solution
after = Map.map ( \ e - > e ` apply ` phi ) psi
apply :: SizeExpr -> Solution -> SizeExpr
apply e@(SizeExpr (Rigid _) _) phi = e
apply e@(SizeExpr (Flex x) i) phi = case Map.lookup x phi of
Nothing -> e
Just (SizeExpr v j) -> SizeExpr v (i + j)
after :: Solution -> Solution -> Solution
after psi phi = Map.map (\ e -> e `apply` phi) psi
-}
solve : : Constraints - > Maybe Solution
solve cs = if any ( \ x - > x < Finite 0 ) d then Nothing
else Map .
where gr = = nextNode gr
m = mkMatrix n ( graph gr )
m ' = warshall m
d = [ m!(i , i ) | i < - [ 0 .. ( n-1 ) ] ]
ns = keys ( nodeMap gr )
solve :: Constraints -> Maybe Solution
solve cs = if any (\ x -> x < Finite 0) d then Nothing
else Map.
where gr = buildGraph cs
n = nextNode gr
m = mkMatrix n (graph gr)
m' = warshall m
d = [ m!(i,i) | i <- [0 .. (n-1)] ]
ns = keys (nodeMap gr)
-}
compute solution
a solution CANNOT exist if
v < v for a rigid variable v
v < = v ' for rigid variables v , v '
x < v for a flexible variable x and a rigid variable v
thus , for each flexible x , only one of the following cases is possible
r+n < = < = infty for a unique rigid r ( meaning r --(m - n)-- > x )
x < = r+n for a unique rigid r ( meaning x --(n)-- > r )
we are looking for the least values for flexible variables that solve
the constraints . Algorithm
while flexible variables and rigid rows left
find a rigid variable row i
for all flexible columns j
if i --n-- > j with ( meaning i+n < = j ) then j = i + n
while flexible variables j left
search the row j for entry i
if j --n-- > i with n > = 0 ( meaning j < = i + n ) then j = i
a solution CANNOT exist if
v < v for a rigid variable v
v <= v' for rigid variables v,v'
x < v for a flexible variable x and a rigid variable v
thus, for each flexible x, only one of the following cases is possible
r+n <= x+m <= infty for a unique rigid r (meaning r --(m-n)--> x)
x <= r+n for a unique rigid r (meaning x --(n)--> r)
we are looking for the least values for flexible variables that solve
the constraints. Algorithm
while flexible variables and rigid rows left
find a rigid variable row i
for all flexible columns j
if i --n--> j with n<=0 (meaning i+n <= j) then j = i + n
while flexible variables j left
search the row j for entry i
if j --n--> i with n >= 0 (meaning j <= i + n) then j = i
-}
-- | Solve a set of constraints: returns the least assignment of size
-- expressions to flexible variables, or 'Nothing' if unsatisfiable.
solve :: Constraints -> Maybe Solution
solve cs = traceSolve (show lm0) $ traceSolve (show lm) $ traceSolve (show cs) $
  let solution = if solvable then loop1 rigids emptySolution
                             else Nothing
  in  traceSolve ("solution = " ++ show solution) $
      solution
  where
    -- compute the graph and its transitive closure m
    gr = buildGraph cs
    n  = nextNode gr                -- number of nodes
    m0 = mkMatrix n (graph gr)
    m  = warshall m0

    -- tracing only: build output version of transitive graph
    legend i = fromJust $ Map.lookup i (intMap gr)  -- trace only
    lm0 = LegendMatrix m0 legend legend             -- trace only
    lm  = LegendMatrix m legend legend              -- trace only

    -- compute the sets of flexible and rigid node numbers
    ns = Map.keys (nodeMap gr)

    -- the flexible variables
    flexs = foldl (\ l k -> case k of (Flex i)  -> i : l
                                      (Rigid _) -> l) [] ns

    -- the rigid variables
    rigids = foldl (\ l k -> case k of (Flex _)  -> l
                                       (Rigid i) -> i : l) [] ns

    -- matrix indices of the rigid variables
    rInds = foldl (\ l r -> let Just i = Map.lookup (Rigid r) (nodeMap gr)
                            in i : l) [] rigids

    -- check whether a solution exists:
    -- a rigid variable may not be below itself, so no negative entries
    -- on the rigid part of the diagonal
    solvable = all (\ x -> x >= oone) [ m!(i,i) | i <- rInds ] &&
               -- a rigid variable may not be bounded below by infinity,
               -- bounded above by a constant, or related to another rigid
               all (\ (r, r') -> r == r' ||
                     let Just row = Map.lookup (Rigid r) (nodeMap gr)
                         Just col = Map.lookup (Rigid r') (nodeMap gr)
                         edge = m!(row,col)
                     in  isBelow r edge r')
                   [ (r,r') | r <- rigids, r' <- rigids ]
               &&
               -- a flexible variable may not be strictly below a rigid one
               all (\ (x, v) ->
                     let Just row = Map.lookup (Flex x) (nodeMap gr)
                         Just col = Map.lookup (Rigid (RVar v)) (nodeMap gr)
                         edge = m!(row,col)
                     in  edge >= Finite 0)
                   [ (x,v) | x <- flexs, (RVar v) <- rigids ]

    {- UNUSED:
    inScope :: FlexId -> Rigid -> Bool
    inScope x (RConst _) = True
    inScope x (RVar v) = case Map.lookup x (flexScope gr) of
      Just scope -> scope v
      Nothing -> error $ "Warshall.inScope panic: flexible " ++ show x ++ " does not carry scope info when assigning it rigid variable " ++ show v
    -}

    {- loop1:
       while flexible variables and rigid rows are left,
         find a rigid variable row i;
         for all flexible columns j:
           if i --n--> j with n <= 0 (meaning i + n <= j)
           then add i + n to the solution of j
    -}
    loop1 :: [Rigid] -> Solution -> Maybe Solution
    loop1 (r:rgds) subst = loop1 rgds subst' where
      row = fromJust $ Map.lookup (Rigid r) (nodeMap gr)
      subst' =
        foldl (\ sub f ->
                let col = fromJust $ Map.lookup (Flex f) (nodeMap gr)
                in case (True -- inScope f r -- SEEMS WRONG TO IGNORE THINGS NOT IN SCOPE
                        , m!(row,col)) of
                     (True, Finite z) ->
                       let trunc_z | z >= 0 = 0
                                   | otherwise = -z
                       in extendSolution sub f (sizeRigid r trunc_z)
                     _ -> sub
              ) subst flexs
    loop1 [] subst = case flexs List.\\ (Map.keys subst) of
      [] -> Just subst
      flexs' -> loop2 flexs' subst

    {- loop2:
       while flexible variables j are left,
         search the row j for an entry i:
           if j --n--> i with n >= 0 (meaning j <= i + n) then j = i
    -}
    loop2 :: [FlexId] -> Solution -> Maybe Solution
    loop2 [] = Just
    loop2 (f:flxs) = loop3 0
      where row = fromJust $ Map.lookup (Flex f) (nodeMap gr)
            loop3 col subst | col >= n =
              -- no bounding rigid found: default to infinity
              loop2 flxs (extendSolution subst f (SizeConst Infinite))
            loop3 col subst =
              case Map.lookup col (intMap gr) of
                Just (Rigid r) | not (infinite r) ->
                  case (True -- inScope f r
                       , m!(row,col)) of
                    (True, Finite z) | z >= 0 ->
                      loop2 flxs (extendSolution subst f (sizeRigid r z))
                    (_, Infinite) -> loop3 (col+1) subst
                    _ -> Nothing
                _ -> loop3 (col+1) subst
| null | https://raw.githubusercontent.com/andreasabel/miniagda/55374597238071c2c2c74acf166cf95b34281e1f/src/Warshall.hs | haskell | # LANGUAGE TypeSynonymInstances, FlexibleInstances #
construct a graph from constraints
x + n <= y becomes x ---(-n)---> y
x <= n + y becomes x ---(+n)---> y
the default edge (= no edge is) labelled with infinity
building the graph involves keeping track of the node names.
We do this in a finite map, assigning consecutive numbers to nodes.
fromJust
import Debug.Trace
import Util
traceSolve = trace
traceSolveM = traceM
semi rings ----------------------------------------------------
neutral for oplus, dominant for otimes
neutral for otimes
assuming a square matrix
assuming r == c and r' == c'
edge weight in the graph, forming a semi ring
constraints ---------------------------------------------------
nodes of the graph are either
- flexible variables (with identifiers drawn from Int),
- constants (like 0, infinity, or anything between)
which rigid variables a flex may be instatiated to
isBelow r w r'
checks, if r and r' are connected by w (meaning w not infinite)
wether r + w <= r'
precondition: not the same rigid variable
isBelow (RConst Infinite) n (RConst (Finite _)) = False
rigid variables are not related
a constraint is an edge in the graph
if k <= 0 this means $^(-k) v1 <= v2
graph (matrix) ------------------------------------------------
scope for each flexible var
node labels to node numbers
node numbers to node labels
number of nodes (n)
the edges (restrict to [0..n[)
the empty graph: no nodes, edges are all undefined (infinity weight)
the Graph Monad, for constructing a graph iteratively
addEdge n1 k n2
also adds nodes if not yet present
add dummy edge to make sure each meta variable
is in the matrix and gets solved
displaying matrices with row and column labels --------------------
a matrix with row descriptions in b and column descriptions in c
first show column description
then output rows
solving the constraints -------------------------------------------
a solution assigns to each flexible variable a size expression
which is either a constant or a v + n for a rigid variable v
a number or infinity
sizeRigid r n returns the size expression corresponding to r + n
(m - n)-- > x )
(n)-- > r )
n-- > j with ( meaning i+n < = j ) then j = i + n
n-- > i with n > = 0 ( meaning j < = i + n ) then j = i
(m-n)--> x)
(n)--> r)
n--> j with n<=0 (meaning i+n <= j) then j = i + n
n--> i with n >= 0 (meaning j <= i + n) then j = i
compute the graph and its transitive closure m
number of nodes
tracing only: build output version of transitive graph
trace only
trace only
trace only
compute the sets of flexible and rigid node numbers
a set of flexible variables
a set of rigid variables
rigid matrix indices
check whether there is a solution
d = [ m!(i,i) | i <- [0 .. (n-1)] ] -- diagonal
a rigid variable might not be less than it self, so no -.. on the
rigid part of the diagonal
a rigid variable might not be bounded below by infinity or
bounded above by a constant
it might not be related to another rigid variable
a flexible variable might not be strictly below a rigid variable
inScope :: FlexId -> Rigid -> Bool
inScope x (RConst _) = True
inScope x (RVar v) = case Map.lookup x (flexScope gr) of
Just scope -> scope v
Nothing -> error $ "Warshall.inScope panic: flexible " ++ show x ++ " does not carry scope info when assigning it rigid variable " ++ show v
n-- > j with ( meaning i + n < = j ) then
n--> j with n<=0 (meaning i + n <= j) then
inScope f r -- SEEMS WRONG TO IGNORE THINGS NOT IN SCOPE
Finite z | z <= 0 ->
n-- > i with n > = 0 ( meaning j < = i + n ) then j = i
n--> i with n >= 0 (meaning j <= i + n) then j = i
default to infinity
inScope f r |
module Warshall where
import Control.Monad.State
import Data.Array
import Data.Map (Map)
import Data.Maybe (fromJust)
import qualified Data.List as List
import qualified Data.Map as Map
-- | Tracing hook for the solver; currently disabled (identity).
traceSolve :: String -> a -> a
traceSolve = const id
-- | Monadic tracing hook for the solver; currently disabled (no-op).
traceSolveM :: Monad m => String -> m ()
traceSolveM = const (return ())
-- | A semiring: 'oplus' picks the better of two alternative paths,
-- 'otimes' composes the weights of consecutive edges.
-- Restored 'ozero'/'oone': the @SemiRing Weight@ instance below defines
-- them and they are used by @initGraph@ and @solve@.
class SemiRing a where
  oplus  :: a -> a -> a
  otimes :: a -> a -> a
  ozero  :: a  -- ^ neutral for 'oplus', dominant for 'otimes'
  oone   :: a  -- ^ neutral for 'otimes'
type Matrix a = Array (Int,Int) a
-- | Transitive closure of a weighted graph (Floyd-Warshall) over a
-- 'SemiRing': alternative paths are combined with 'oplus', consecutive
-- edges with 'otimes'.
warshall :: SemiRing a => Matrix a -> Matrix a
warshall a0 = loop r a0 where
  -- Restored binding: @b@, @r@, @r'@, @c@, @c'@ are referenced below.
  -- Assumes a square matrix, i.e. r == c and r' == c'.
  b@((r,c),(r',c')) = bounds a0
  loop k a | k <= r' =
    loop (k+1) (array b [ ((i,j),
                           (a!(i,j)) `oplus` ((a!(i,k)) `otimes` (a!(k,j))))
                        | i <- [r..r'], j <- [c..c'] ])
           | otherwise = a
-- | Edge weight: a finite integer offset, or 'Infinite' meaning
-- "no edge" (the default weight in the graph).
data Weight = Finite Int | Infinite
  deriving (Eq)
-- | @inc w n@ shifts a weight by an integer; infinity is absorbing.
inc :: Weight -> Int -> Weight
inc w n = case w of
  Infinite -> Infinite
  Finite k -> Finite (k + n)
-- | 'Infinite' renders as a dot (used when printing matrices).
instance Show Weight where
  show (Finite i) = show i
  show Infinite = "."
-- | 'Infinite' is the greatest weight.  Only (<=) is defined,
-- a minimal complete definition for 'Ord'.
instance Ord Weight where
  _ <= Infinite = True
  Infinite <= _ = False
  Finite a <= Finite b = a <= b
-- | Weights form a semiring: 'min' chooses the cheaper alternative,
-- addition composes consecutive edges; 'Infinite' (no edge) is zero.
instance SemiRing Weight where
  oplus = min
  otimes Infinite _ = Infinite
  otimes _ Infinite = Infinite
  otimes (Finite a) (Finite b) = Finite (a + b)
  ozero = Infinite
  oone = Finite 0
-- nodes of the graph are either
-- - flexible variables (with identifiers drawn from Int),
-- - rigid variables (also identified by Int), or
-- - constants (like 0, infinity, or anything between)
-- | A graph node: a rigid entity (variable or constant), or a
-- flexible (meta) variable identified by a 'FlexId'.
data Node rigid
  = Rigid rigid
  | Flex FlexId
  deriving (Eq, Ord)
-- | Flexible variables print as @?i@; rigids print as themselves.
instance Show rigid => Show (Node rigid) where
  show (Flex i) = "?" ++ show i
  show (Rigid r) = show r
-- | A rigid node: a constant weight (e.g. 0 or infinity) or a
-- rigid variable.
data Rigid = RConst Weight
           | RVar RigidId
  deriving (Eq, Ord)
-- | Rigid variables print as @vi@, infinity as @#@.
instance Show Rigid where
  show (RVar i) = "v" ++ show i
  show (RConst Infinite) = "#"
  show (RConst (Finite n)) = show n
-- | Node number in the constraint graph.
type NodeId = Int
-- | Identifier of a rigid variable.
type RigidId = Int
-- | Identifier of a flexible (meta) variable.
type FlexId = Int
-- | Which rigid variables a flexible variable may be instantiated to.
type Scope = RigidId -> Bool
-- | Is this rigid node the infinity constant?
infinite :: Rigid -> Bool
infinite r = case r of
  RConst Infinite -> True
  _               -> False
-- | @isBelow r w r'@ checks whether @r@ and @r'@ are compatibly related
-- by an edge of weight @w@, i.e. whether @r + w <= r'@.
-- Precondition: not the same rigid variable.
isBelow :: Rigid -> Weight -> Rigid -> Bool
isBelow _ Infinite _ = True
isBelow _ _ (RConst Infinite) = True
isBelow (RConst (Finite i)) (Finite n) (RConst (Finite j)) = i + n <= j
-- Restored catch-all: distinct rigid variables are never related.
-- Without it the match is non-exhaustive and 'solve' crashes on any
-- pair involving an 'RVar'.
isBelow _ _ _ = False
-- | A constraint: declaration of a flexible variable with its scope,
-- or an edge between two nodes of the graph.
data Constrnt edgeLabel rigid flexScope
  = NewFlex FlexId flexScope
  | Arc (Node rigid) edgeLabel (Node rigid)
-- In @Arc v1 k v2@, at least one of v1, v2 is a Flex (meta variable),
-- the other a Flex or a Rigid.  If k <= 0 this means v1 + (-k) <= v2,
-- otherwise v1 <= v2 + k (cf. the Show instance below).
-- | Concrete constraints: 'Weight'-labelled edges between 'Rigid' nodes.
type Constraint = Constrnt Weight Rigid Scope

-- | @arc a k b@ builds the edge @a --k--> b@ with a finite weight.
arc :: Node Rigid -> Int -> Node Rigid -> Constraint
arc a k b = Arc a (Finite k) b
-- | Render constraints in @v1 <= v2 + k@ style.
-- NOTE(review): an 'Arc' with an 'Infinite' weight hits the 'undefined'
-- catch-all; such arcs are apparently never shown — confirm.
instance Show Constraint where
  show (NewFlex i _) = "SizeMeta(?" ++ show i ++ ")"
  show (Arc v1 (Finite k) v2)
    | k == 0 = show v1 ++ "<=" ++ show v2
    | k < 0 = show v1 ++ "+" ++ show (-k) ++ "<=" ++ show v2
    | otherwise = show v1 ++ "<=" ++ show v2 ++ "+" ++ show k
  show _ = undefined
-- | A set of constraints (order irrelevant).
type Constraints = [Constraint]

emptyConstraints :: Constraints
emptyConstraints = []
-- | The constraint graph under construction.  Field list restored from
-- the five positional arguments of @initGraph@ and the accessors used
-- throughout this module.
data Graph edgeLabel rigid flexScope = Graph
  { flexScope :: Map FlexId flexScope          -- ^ scope for each flexible var
  , nodeMap   :: Map (Node rigid) NodeId       -- ^ node labels to node numbers
  , intMap    :: Map NodeId (Node rigid)       -- ^ node numbers to node labels
  , nextNode  :: NodeId                        -- ^ number of nodes (n)
  , graph     :: NodeId -> NodeId -> edgeLabel -- ^ the edges (restrict to [0..n[)
  }
-- | The empty graph: no nodes; every edge carries the default weight
-- 'ozero' (i.e. infinity, meaning "no edge").
initGraph :: SemiRing edgeLabel => Graph edgeLabel rigid flexScope
initGraph = Graph Map.empty Map.empty Map.empty 0 (\ _ _ -> ozero)

-- | The graph monad, for constructing a graph iteratively.
type GM edgeLabel rigid flexScope = State (Graph edgeLabel rigid flexScope)
-- | Record flexible variable @x@ together with its @scope@.
addFlex :: FlexId -> flexScope -> GM edgeLabel rigid flexScope ()
addFlex x scope =
  modify $ \ st -> st { flexScope = Map.insert x scope (flexScope st) }
-- @i <- addNode n@ returns the number of node @n@; if the node is not
-- present yet, it is added first.
-- | Look up a node's number, inserting the node first if it is new.
addNode :: (Eq rigid, Ord rigid) => (Node rigid) -> GM edgeLabel rigid flexScope Int
addNode node = do
  st <- get
  maybe (fresh st) return (Map.lookup node (nodeMap st))
  where
    -- Assign the next free number to an unseen node and record it in
    -- both direction maps.
    fresh st = do
      let k = nextNode st
      put $ st { nodeMap  = Map.insert node k (nodeMap st)
               , intMap   = Map.insert k node (intMap st)
               , nextNode = k + 1
               }
      return k
-- @addEdge n1 k n2@ improves the weight of edge n1 -> n2 to be at most k.
-- | Lower the weight of the edge @n1 -> n2@ to at most @k@ (combining
-- with any existing weight via 'oplus'); missing nodes are added first.
addEdge :: (Eq rigid, Ord rigid, SemiRing edgeLabel) => (Node rigid) -> edgeLabel -> (Node rigid) -> GM edgeLabel rigid flexScope ()
addEdge n1 k n2 = do
  src <- addNode n1
  tgt <- addNode n2
  st <- get
  let oldEdges = graph st
      newEdges x y
        | (x,y) == (src,tgt) = k `oplus` oldEdges x y
        | otherwise          = oldEdges x y
  put $ st { graph = newEdges }
-- | Interpret a single constraint as a graph-building action.
addConstraint :: (Eq rigid, Ord rigid, SemiRing edgeLabel) =>
                 Constrnt edgeLabel rigid flexScope -> GM edgeLabel rigid flexScope ()
addConstraint c = case c of
  NewFlex x scope -> addFlex x scope
  Arc n1 k n2     -> addEdge n1 k n2
-- | Build the constraint graph by folding all constraints into the
-- empty graph.
buildGraph :: (Eq rigid, Ord rigid, SemiRing edgeLabel) =>
              [Constrnt edgeLabel rigid flexScope] -> Graph edgeLabel rigid flexScope
buildGraph = flip execState initGraph . mapM_ addConstraint
-- | @mkMatrix n g@ tabulates @g@ over the n-by-n index square
-- @[0..n-1] x [0..n-1]@.
mkMatrix :: Int -> (Int -> Int -> a) -> Matrix a
mkMatrix n g =
  -- the comprehension enumerates indices in row-major (Ix) order,
  -- so listArray associates each entry with the intended position
  listArray ((0,0),(n-1,n-1)) [ g i j | i <- [0..n-1], j <- [0..n-1] ]
-- | A matrix with row descriptions in @b@ and column descriptions in
-- @c@, for pretty-printing.
data LegendMatrix a b c = LegendMatrix
  { matrix :: Matrix a
  , rowdescr :: Int -> b
  , coldescr :: Int -> c
  }
-- | Render as a tab-separated table: first the column legend, then one
-- line per row, each prefixed by its row legend.
instance (Show a, Show b, Show c) => Show (LegendMatrix a b c) where
  show (LegendMatrix m rd cd) =
    let ((r,c),(r',c')) = bounds m
    -- header: the column descriptions
    in foldr (\ j s -> "\t" ++ show (cd j) ++ s) "" [c .. c'] ++
       -- body: for each row, the row description then its entries
       foldr (\ i s -> "\n" ++ show (rd i) ++
                       foldr (\ j t -> "\t" ++ show (m!(i,j)) ++ t)
                             (s)
                             [c .. c'])
             "" [r .. r']
-- | A solution maps each flexible variable to a list of size
-- expressions (a 'MaxExpr', interpreted as their maximum).
type Solution = Map Int MaxExpr

emptySolution :: Solution
emptySolution = Map.empty

-- | Add one more bound @v@ for flexible variable @k@.
extendSolution :: Solution -> Int -> SizeExpr -> Solution
extendSolution subst k v = Map.insertWith (++) k [v] subst
type MaxExpr = [SizeExpr]
-- newtype MaxExpr = MaxExpr { sizeExprs :: [SizeExpr] } deriving (Show)

-- | A size expression: a rigid variable plus an offset, or a constant.
-- Declaration restored: its constructors are used by the Show instance
-- below and by 'sizeRigid'.
data SizeExpr
  = SizeVar RigidId Int  -- ^ e.g. x + 5
  | SizeConst Weight     -- ^ a number or infinity

instance Show SizeExpr where
  show (SizeVar n 0) = show (Rigid (RVar n))
  show (SizeVar n k) = show (Rigid (RVar n)) ++ "+" ++ show k
  show (SizeConst (Finite i)) = show i
  show (SizeConst Infinite) = "#"
-- | @sizeRigid r n@ is the size expression corresponding to @r + n@.
sizeRigid :: Rigid -> Int -> SizeExpr
sizeRigid r n = case r of
  RConst k -> SizeConst (inc k n)
  RVar i   -> SizeVar i n
{- UNUSED:
apply :: SizeExpr -> Solution -> SizeExpr
apply e@(SizeExpr (Rigid _) _) phi = e
apply e@(SizeExpr (Flex x) i) phi = case Map.lookup x phi of
   Nothing -> e
   Just (SizeExpr v j) -> SizeExpr v (i + j)

after :: Solution -> Solution -> Solution
after psi phi = Map.map (\ e -> e `apply` phi) psi
-}
{- OLD CODE (incomplete):
solve :: Constraints -> Maybe Solution
solve cs = if any (\ x -> x < Finite 0) d then Nothing
           else Map.
  where gr = buildGraph cs
        n  = nextNode gr
        m  = mkMatrix n (graph gr)
        m' = warshall m
        d  = [ m!(i,i) | i <- [0 .. (n-1)] ]
        ns = keys (nodeMap gr)
-}
{- compute solution

   a solution CANNOT exist if
     v < v   for a rigid variable v
     v <= v' for rigid variables v, v'
     x < v   for a flexible variable x and a rigid variable v

   thus, for each flexible x, only one of the following cases is possible
     r+n <= x+m <= infty  for a unique rigid r  (meaning r --(m-n)--> x)
     x <= r+n             for a unique rigid r  (meaning x --(n)--> r)

   we are looking for the least values for flexible variables that solve
   the constraints.  Algorithm:

   while flexible variables and rigid rows left
     find a rigid variable row i
       for all flexible columns j
         if i --n--> j with n <= 0 (meaning i + n <= j) then j = i + n

   while flexible variables j left
     search the row j for entry i
       if j --n--> i with n >= 0 (meaning j <= i + n) then j = i
-}
-- | Solve a set of constraints: returns the least assignment of size
-- expressions to flexible variables, or 'Nothing' if unsatisfiable.
-- Restored here: the @where@ keyword and the bindings @n@, @legend@,
-- @lm0@, @lm@ (all referenced below), plus the @case (True ...@ lines
-- in loop1/loop3, which were lost together with their inline comments.
solve :: Constraints -> Maybe Solution
solve cs = traceSolve (show lm0) $ traceSolve (show lm) $ traceSolve (show cs) $
  let solution = if solvable then loop1 rigids emptySolution
                             else Nothing
  in  traceSolve ("solution = " ++ show solution) $
      solution
  where
    -- compute the graph and its transitive closure m
    gr = buildGraph cs
    n  = nextNode gr                -- number of nodes
    m0 = mkMatrix n (graph gr)
    m  = warshall m0

    -- tracing only: build output version of transitive graph
    legend i = fromJust $ Map.lookup i (intMap gr)  -- trace only
    lm0 = LegendMatrix m0 legend legend             -- trace only
    lm  = LegendMatrix m legend legend              -- trace only

    -- compute the sets of flexible and rigid node numbers
    ns = Map.keys (nodeMap gr)

    -- the flexible variables
    flexs = foldl (\ l k -> case k of (Flex i)  -> i : l
                                      (Rigid _) -> l) [] ns

    -- the rigid variables
    rigids = foldl (\ l k -> case k of (Flex _)  -> l
                                       (Rigid i) -> i : l) [] ns

    -- matrix indices of the rigid variables
    rInds = foldl (\ l r -> let Just i = Map.lookup (Rigid r) (nodeMap gr)
                            in i : l) [] rigids

    -- check whether a solution exists:
    -- a rigid variable may not be below itself, so no negative entries
    -- on the rigid part of the diagonal
    solvable = all (\ x -> x >= oone) [ m!(i,i) | i <- rInds ] &&
               -- a rigid variable may not be bounded below by infinity,
               -- bounded above by a constant, or related to another rigid
               all (\ (r, r') -> r == r' ||
                     let Just row = Map.lookup (Rigid r) (nodeMap gr)
                         Just col = Map.lookup (Rigid r') (nodeMap gr)
                         edge = m!(row,col)
                     in  isBelow r edge r')
                   [ (r,r') | r <- rigids, r' <- rigids ]
               &&
               -- a flexible variable may not be strictly below a rigid one
               all (\ (x, v) ->
                     let Just row = Map.lookup (Flex x) (nodeMap gr)
                         Just col = Map.lookup (Rigid (RVar v)) (nodeMap gr)
                         edge = m!(row,col)
                     in  edge >= Finite 0)
                   [ (x,v) | x <- flexs, (RVar v) <- rigids ]

    {- loop1:
       while flexible variables and rigid rows are left,
         find a rigid variable row i;
         for all flexible columns j:
           if i --n--> j with n <= 0 (meaning i + n <= j)
           then add i + n to the solution of j
    -}
    loop1 :: [Rigid] -> Solution -> Maybe Solution
    loop1 (r:rgds) subst = loop1 rgds subst' where
      row = fromJust $ Map.lookup (Rigid r) (nodeMap gr)
      subst' =
        foldl (\ sub f ->
                let col = fromJust $ Map.lookup (Flex f) (nodeMap gr)
                in case (True -- inScope f r -- SEEMS WRONG TO IGNORE THINGS NOT IN SCOPE
                        , m!(row,col)) of
                     (True, Finite z) ->
                       let trunc_z | z >= 0 = 0
                                   | otherwise = -z
                       in extendSolution sub f (sizeRigid r trunc_z)
                     _ -> sub
              ) subst flexs
    loop1 [] subst = case flexs List.\\ (Map.keys subst) of
      [] -> Just subst
      flexs' -> loop2 flexs' subst

    {- loop2:
       while flexible variables j are left,
         search the row j for an entry i:
           if j --n--> i with n >= 0 (meaning j <= i + n) then j = i
    -}
    loop2 :: [FlexId] -> Solution -> Maybe Solution
    loop2 [] = Just
    loop2 (f:flxs) = loop3 0
      where row = fromJust $ Map.lookup (Flex f) (nodeMap gr)
            loop3 col subst | col >= n =
              -- no bounding rigid found: default to infinity
              loop2 flxs (extendSolution subst f (SizeConst Infinite))
            loop3 col subst =
              case Map.lookup col (intMap gr) of
                Just (Rigid r) | not (infinite r) ->
                  case (True -- inScope f r
                       , m!(row,col)) of
                    (True, Finite z) | z >= 0 ->
                      loop2 flxs (extendSolution subst f (sizeRigid r z))
                    (_, Infinite) -> loop3 (col+1) subst
                    _ -> Nothing
                _ -> loop3 (col+1) subst
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.