_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
fdfb8fe4ac488d81e5b291e39ff391da8924d8694edc901ed26131d552e511fb | janestreet/ecaml | abbrev.mli | * are words which expand into other text when typed in .
- [ ( Info - goto - node " ( emacs)Abbrevs " ) ]
- [(Info-goto-node "(emacs)Abbrevs")] *)
open! Core
open! Async_kernel
open! Import
(** [(describe-variable 'save-abbrevs)] *)
val save_abbrevs : bool Customization.t
| null | https://raw.githubusercontent.com/janestreet/ecaml/7c16e5720ee1da04e0757cf185a074debf9088df/src/abbrev.mli | ocaml | * [(describe-variable 'save-abbrevs)] | * are words which expand into other text when typed in .
- [ ( Info - goto - node " ( emacs)Abbrevs " ) ]
- [(Info-goto-node "(emacs)Abbrevs")] *)
open! Core
open! Async_kernel
open! Import
val save_abbrevs : bool Customization.t
|
58be10e5828f522c36b1ed4abda3bfa00dd6904dd6e2aabf53870112193696d7 | cdepillabout/servant-static-th | Util.hs | |
Module : Servant . Static . TH.Internal . Util
Copyright : 2017
License : BSD3
Maintainer : ( )
Stability : experimental
Portability : unknown
Utilities functions for use in this package .
Module : Servant.Static.TH.Internal.Util
Copyright : Dennis Gosnell 2017
License : BSD3
Maintainer : Dennis Gosnell ()
Stability : experimental
Portability : unknown
Utilities functions for use in this package.
-}
module Servant.Static.TH.Internal.Util where
import System.FilePath (takeExtension)
-- | Remove a leading period from a 'String'.
--
-- >>> removeLeadingPeriod ".jpg"
-- "jpg"
--
-- Just return the 'String' if it doesn't start with a period:
--
-- >>> removeLeadingPeriod "hello"
-- "hello"
--
-- Return an empty string if the only character in the string is a period:
--
-- >>> removeLeadingPeriod "."
-- ""
--
-- Remove at most one period:
--
-- >>> removeLeadingPeriod "..bye"
-- ".bye"
removeLeadingPeriod :: String -> String
removeLeadingPeriod str =
  case str of
    ('.' : rest) -> rest
    _            -> str
-- | Return an extension for a 'FilePath'. Just like 'takeExtension', but
-- doesn't return the leading period.
--
-- >>> getExtension "/some/file.html"
-- "html"
--
-- Empty string is returned for files with no extension:
--
-- >>> getExtension "file"
-- ""
getExtension :: FilePath -> FilePath
getExtension path = removeLeadingPeriod (takeExtension path)
| null | https://raw.githubusercontent.com/cdepillabout/servant-static-th/5ec0027ed2faa6f8c42a2259a963f52e6b30edad/src/Servant/Static/TH/Internal/Util.hs | haskell | | Remove a leading period from a 'String'.
>>> removeLeadingPeriod ".jpg"
"jpg"
Just return the 'String' if it doesn't start with a period:
>>> removeLeadingPeriod "hello"
"hello"
Return an empty string if the only character in the string is a period:
>>> removeLeadingPeriod "."
""
Remove at most one period:
>>> removeLeadingPeriod "..bye"
".bye"
| Return an extension for a 'FilePath'. Just like 'takeExtension', but
doesn't return the leading period.
>>> getExtension "/some/file.html"
"html"
Empty string is returned for files with no extension:
>>> getExtension "file"
"" | |
Module : Servant . Static . TH.Internal . Util
Copyright : 2017
License : BSD3
Maintainer : ( )
Stability : experimental
Portability : unknown
Utilities functions for use in this package .
Module : Servant.Static.TH.Internal.Util
Copyright : Dennis Gosnell 2017
License : BSD3
Maintainer : Dennis Gosnell ()
Stability : experimental
Portability : unknown
Utilities functions for use in this package.
-}
module Servant.Static.TH.Internal.Util where
import System.FilePath (takeExtension)
removeLeadingPeriod :: String -> String
removeLeadingPeriod ('.':chars) = chars
removeLeadingPeriod string = string
getExtension :: FilePath -> FilePath
getExtension = removeLeadingPeriod . takeExtension
|
886452a8799f89e50dbeaae775111a14d33470f342663e12814ee79ece05f3a2 | haskell/hackage-server | CreateUserTest.hs |
This a separate part of the high - level test of the hackage server
( see HighLevelTest.hs ) . This set of tests check that the user self
registration wors . This test needs local outgoing email , which is n't
available on all hosts , so we keep it as a separate test .
System requirements :
1 . Port ` testPort ` ( currently 8392 ) must be available on localhost
2 . You must have sendmail configured so that it can send emails to external
domains ( for user registration ) -- currently we use mailinator.com accounts
3 . You must allow for outgoing HTTP traffic , as we POST to validator.w3.org
for HTML validation .
This a separate part of the high-level test of the hackage server
(see HighLevelTest.hs). This set of tests check that the user self
registration wors. This test needs local outgoing email, which isn't
available on all hosts, so we keep it as a separate test.
System requirements:
1. Port `testPort` (currently 8392) must be available on localhost
2. You must have sendmail configured so that it can send emails to external
domains (for user registration) -- currently we use mailinator.com accounts
3. You must allow for outgoing HTTP traffic, as we POST to validator.w3.org
for HTML validation.
-}
module Main (main) where
import Control.Exception
import Control.Monad
import Data.List (isInfixOf)
import Data.String ()
import System.Directory
import System.Exit (die)
import System.FilePath
import System.IO
import System.Random
import MailUtils
import Util
import HttpUtils (Authorization(..))
import HackageClientUtils
-- | Entry point: make a fresh scratch directory under tests/, run the
-- user-registration tests from inside it, and always remove the
-- directory again afterwards.
main :: IO ()
main = do hSetBuffering stdout LineBuffering
          info "Initialising"
          root <- getCurrentDirectory
          info "Setting up test directory"
          -- start from a clean slate if a previous run left the directory behind
          exists <- doesDirectoryExist (testDir root)
          when exists $ removeDirectoryRecursive (testDir root)
          createDirectory (testDir root)
          -- 'finally' guarantees the scratch directory is removed on any exit path
          (setCurrentDirectory (testDir root) >> doit root)
            `finally` removeDirectoryRecursive (testDir root)
-- | Name of this test; also used as the name of its scratch directory.
testName :: FilePath
testName = "CreateUserTestTemp"
-- | Scratch directory for this test, relative to the repository root.
testDir :: FilePath -> FilePath
testDir root = root </> "tests" </> testName
-- | Initialise a fresh hackage database, then run the user tests while a
-- server started from @root@ is up.
doit :: FilePath -> IO ()
doit root
 = do info "initialising hackage database"
      runServerChecked root ["init"]
      withServerRunning root runUserTests
-- | Drive the self-registration scenario end-to-end against the running
-- server: check the initial user lists, self-register and confirm two
-- users via emailed confirmation links, then verify the new users'
-- presence, name/contact info and admin metadata.
runUserTests :: IO ()
runUserTests = do
    do info "Getting user list"
       xs <- getUsers
       unless (xs == ["admin"]) $
           die ("Bad user list: " ++ show xs)
    do info "Getting admin user list"
       xs <- getAdmins
       unless (groupMembers xs == ["admin"]) $
           die ("Bad admin user list: " ++ show xs)
    testEmail1 <- do
        -- Create random test email addresses so that we don't confuse
        -- confirmation emails from other sessions
        testEmail1 <- mkTestEmail `liftM` randomIO
        testEmail2 <- mkTestEmail `liftM` randomIO
        createUserSelfRegister "HackageTestUser1" "Test User 1" testEmail1
        createUserSelfRegister "HackageTestUser2" "Test User 2" testEmail2
        -- follow the emailed confirmation links, setting each password
        confirmUser testEmail1 "testpass1"
        confirmUser testEmail2 "testpass2"
        return (testEmailAddress testEmail1)
    do info "Checking new users are now in user list"
       xs <- getUsers
       unless (xs == ["admin","HackageTestUser1","HackageTestUser2"]) $
           die ("Bad user list: " ++ show xs)
    do info "Checking new users are not in admin list"
       xs <- getAdmins
       unless (groupMembers xs == ["admin"]) $
           die ("Bad admin user list: " ++ show xs)
    do info "Checking new users name & contact info"
       ncinf <- getNameContactInfo (Auth "HackageTestUser1" "testpass1")
                                   "/user/HackageTestUser1/name-contact.json"
       unless (realName ncinf == "Test User 1") $
           die ("Bad user real name: " ++ realName ncinf)
       unless (contactEmailAddress ncinf == testEmail1) $
           die ("Bad user email: " ++ contactEmailAddress ncinf)
    do info "Checking new users admin info"
       uainf <- getUserAdminInfo (Auth "admin" "admin") "/user/HackageTestUser1/admin-info.json"
       unless (accountKind uainf == Just "AccountKindRealUser") $
           die ("Bad user account kind: " ++ show (accountKind uainf))
       unless ("self-registration" `isInfixOf` accountNotes uainf) $
           die ("Bad user notes: " ++ accountNotes uainf)
  where
    mkTestEmail :: Int -> String
    mkTestEmail n = "HackageTestUser" ++ show n
| null | https://raw.githubusercontent.com/haskell/hackage-server/5e3389e8c44decb45d2b9ffca83cfdf7ab6b9a46/tests/CreateUserTest.hs | haskell | currently we use mailinator.com accounts
currently we use mailinator.com accounts
Create random test email addresses so that we don't confuse
confirmation emails from other sessions |
This a separate part of the high - level test of the hackage server
( see HighLevelTest.hs ) . This set of tests check that the user self
registration wors . This test needs local outgoing email , which is n't
available on all hosts , so we keep it as a separate test .
System requirements :
1 . Port ` testPort ` ( currently 8392 ) must be available on localhost
2 . You must have sendmail configured so that it can send emails to external
3 . You must allow for outgoing HTTP traffic , as we POST to validator.w3.org
for HTML validation .
This a separate part of the high-level test of the hackage server
(see HighLevelTest.hs). This set of tests check that the user self
registration wors. This test needs local outgoing email, which isn't
available on all hosts, so we keep it as a separate test.
System requirements:
1. Port `testPort` (currently 8392) must be available on localhost
2. You must have sendmail configured so that it can send emails to external
3. You must allow for outgoing HTTP traffic, as we POST to validator.w3.org
for HTML validation.
-}
module Main (main) where
import Control.Exception
import Control.Monad
import Data.List (isInfixOf)
import Data.String ()
import System.Directory
import System.Exit (die)
import System.FilePath
import System.IO
import System.Random
import MailUtils
import Util
import HttpUtils (Authorization(..))
import HackageClientUtils
main :: IO ()
main = do hSetBuffering stdout LineBuffering
info "Initialising"
root <- getCurrentDirectory
info "Setting up test directory"
exists <- doesDirectoryExist (testDir root)
when exists $ removeDirectoryRecursive (testDir root)
createDirectory (testDir root)
(setCurrentDirectory (testDir root) >> doit root)
`finally` removeDirectoryRecursive (testDir root)
testName :: FilePath
testName = "CreateUserTestTemp"
testDir :: FilePath -> FilePath
testDir root = root </> "tests" </> testName
doit :: FilePath -> IO ()
doit root
= do info "initialising hackage database"
runServerChecked root ["init"]
withServerRunning root runUserTests
runUserTests :: IO ()
runUserTests = do
do info "Getting user list"
xs <- getUsers
unless (xs == ["admin"]) $
die ("Bad user list: " ++ show xs)
do info "Getting admin user list"
xs <- getAdmins
unless (groupMembers xs == ["admin"]) $
die ("Bad admin user list: " ++ show xs)
testEmail1 <- do
testEmail1 <- mkTestEmail `liftM` randomIO
testEmail2 <- mkTestEmail `liftM` randomIO
createUserSelfRegister "HackageTestUser1" "Test User 1" testEmail1
createUserSelfRegister "HackageTestUser2" "Test User 2" testEmail2
confirmUser testEmail1 "testpass1"
confirmUser testEmail2 "testpass2"
return (testEmailAddress testEmail1)
do info "Checking new users are now in user list"
xs <- getUsers
unless (xs == ["admin","HackageTestUser1","HackageTestUser2"]) $
die ("Bad user list: " ++ show xs)
do info "Checking new users are not in admin list"
xs <- getAdmins
unless (groupMembers xs == ["admin"]) $
die ("Bad admin user list: " ++ show xs)
do info "Checking new users name & contact info"
ncinf <- getNameContactInfo (Auth "HackageTestUser1" "testpass1")
"/user/HackageTestUser1/name-contact.json"
unless (realName ncinf == "Test User 1") $
die ("Bad user real name: " ++ realName ncinf)
unless (contactEmailAddress ncinf == testEmail1) $
die ("Bad user email: " ++ contactEmailAddress ncinf)
do info "Checking new users admin info"
uainf <- getUserAdminInfo (Auth "admin" "admin") "/user/HackageTestUser1/admin-info.json"
unless (accountKind uainf == Just "AccountKindRealUser") $
die ("Bad user account kind: " ++ show (accountKind uainf))
unless ("self-registration" `isInfixOf` accountNotes uainf) $
die ("Bad user notes: " ++ accountNotes uainf)
where
mkTestEmail :: Int -> String
mkTestEmail n = "HackageTestUser" ++ show n
|
100a8f22145185e810d3843ab39688f5ba85aa61550b23a0ae760312d96528ed | atlas-engineer/nyxt | editor.lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(in-package :nyxt/tests)
;; Editor modes must toggle cleanly on a dedicated editor buffer: both the
;; generic editor-mode and the concrete plaintext-editor-mode can be
;; enabled and then disabled without error.
(define-test toggle-editor-mode ()
  (let ((editor-buffer (make-instance 'nyxt/editor-mode:editor-buffer)))
    (with-current-buffer editor-buffer
      (assert-true (enable-modes* 'nyxt/editor-mode:editor-mode editor-buffer))
      (assert-true (disable-modes* 'nyxt/editor-mode:editor-mode editor-buffer))
      (assert-true (enable-modes* 'nyxt/editor-mode:plaintext-editor-mode
                                  editor-buffer))
      (assert-true (disable-modes* 'nyxt/editor-mode:plaintext-editor-mode
                                   editor-buffer)))))
| null | https://raw.githubusercontent.com/atlas-engineer/nyxt/65a9aa568b16c109281fa403a7c045b3b6d87025/tests/offline/mode/editor.lisp | lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(in-package :nyxt/tests)
(define-test toggle-editor-mode ()
(let ((editor-buffer (make-instance 'nyxt/editor-mode:editor-buffer)))
(with-current-buffer editor-buffer
(assert-true (enable-modes* 'nyxt/editor-mode:editor-mode editor-buffer))
(assert-true (disable-modes* 'nyxt/editor-mode:editor-mode editor-buffer))
(assert-true (enable-modes* 'nyxt/editor-mode:plaintext-editor-mode
editor-buffer))
(assert-true (disable-modes* 'nyxt/editor-mode:plaintext-editor-mode
editor-buffer)))))
| |
e89cc192138a8e273e4c9977f2105d8a050b146313cb9391be02e69b40502c88 | rabbitmq/rabbitmq-stomp | amqqueue_SUITE.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(amqqueue_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_stomp.hrl").
-include("rabbit_stomp_frame.hrl").
-include("rabbit_stomp_headers.hrl").
-define(QUEUE, <<"TestQueue">>).
-define(DESTINATION, "/amq/queue/TestQueue").
%% CT entry point: run every group, one group per supported STOMP
%% protocol version.
all() ->
    lists:map(fun(Version) -> {group, version_to_group_name(Version)} end,
              ?SUPPORTED_VERSIONS).
%% One CT group per supported STOMP version; every group runs the same
%% case list in sequence (the cases share the broker node and ?QUEUE).
groups() ->
    Tests = [
        publish_no_dest_error,
        publish_unauthorized_error,
        subscribe_error,
        subscribe,
        unsubscribe_ack,
        subscribe_ack,
        send,
        delete_queue_subscribe,
        temp_destination_queue,
        temp_destination_in_send,
        blank_destination_in_send
    ],
    [{version_to_group_name(V), [sequence], Tests}
     || V <- ?SUPPORTED_VERSIONS].
%% Map a STOMP version string such as "1.2" to a CT group-name atom such
%% as 'version_1_2' ("." is replaced with "_" so the atom needs no quoting).
version_to_group_name(Version) ->
    list_to_atom([case Char of $. -> $_; _ -> Char end
                  || Char <- "version_" ++ Version]).
%% Suite setup: start one broker node whose name is suffixed with this
%% module's name.
init_per_suite(Config) ->
    Config1 = rabbit_ct_helpers:set_config(Config,
                                           [{rmq_nodename_suffix, ?MODULE}]),
    rabbit_ct_helpers:log_environment(),
    rabbit_ct_helpers:run_setup_steps(Config1,
                                      rabbit_ct_broker_helpers:setup_steps()).
%% Suite teardown: stop the broker node started in init_per_suite/1.
end_per_suite(Config) ->
    rabbit_ct_helpers:run_teardown_steps(Config,
                                         rabbit_ct_broker_helpers:teardown_steps()).
%% Recover the protocol version string from the group name atom
%% ('version_1_2' -> "1.2"): the "version_" prefix is 8 characters, so the
%% version part starts at character 9. Stash it in the config for the cases.
init_per_group(Group, Config) ->
    Suffix = string:sub_string(atom_to_list(Group), 9),
    Version = re:replace(Suffix, "_", ".", [global, {return, list}]),
    rabbit_ct_helpers:set_config(Config, [{version, Version}]).
end_per_group(_Group, Config) -> Config.
%% Per-case setup: open a direct AMQP connection and channel to node 0,
%% plus a STOMP client speaking this group's protocol version, and stash
%% all three in the CT config for the test body.
init_per_testcase(TestCase, Config) ->
    Version = ?config(version, Config),
    StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
    {ok, Connection} = amqp_connection:start(#amqp_params_direct{
        node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename)
    }),
    {ok, Channel} = amqp_connection:open_channel(Connection),
    {ok, Client} = rabbit_stomp_client:connect(Version, StompPort),
    Config1 = rabbit_ct_helpers:set_config(Config, [
        {amqp_connection, Connection},
        {amqp_channel, Channel},
        {stomp_client, Client}
    ]),
    init_per_testcase0(TestCase, Config1).
%% Per-case teardown: close the STOMP client, the channel and the
%% connection, then run any case-specific teardown.
end_per_testcase(TestCase, Config) ->
    Connection = ?config(amqp_connection, Config),
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    rabbit_stomp_client:disconnect(Client),
    amqp_channel:close(Channel),
    amqp_connection:close(Connection),
    end_per_testcase0(TestCase, Config).
%% Extra setup for publish_unauthorized_error: declare RestrictedQueue and
%% create a "user" account whose permission regexps ("nothing") match none
%% of the queues used in this suite, then connect a STOMP client as that
%% user.
init_per_testcase0(publish_unauthorized_error, Config) ->
    Channel = ?config(amqp_channel, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = <<"RestrictedQueue">>,
                                                    auto_delete = true}),
    rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, add_user,
                                 [<<"user">>, <<"pass">>, <<"acting-user">>]),
    rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, set_permissions, [
        <<"user">>, <<"/">>, <<"nothing">>, <<"nothing">>, <<"nothing">>, <<"acting-user">>]),
    Version = ?config(version, Config),
    StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
    {ok, ClientFoo} = rabbit_stomp_client:connect(Version, "user", "pass", StompPort),
    rabbit_ct_helpers:set_config(Config, [{client_foo, ClientFoo}]);
%% All other cases need no extra setup.
init_per_testcase0(_, Config) ->
    Config.
%% Matching teardown: drop the restricted client and the "user" account.
end_per_testcase0(publish_unauthorized_error, Config) ->
    ClientFoo = ?config(client_foo, Config),
    rabbit_stomp_client:disconnect(ClientFoo),
    rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, delete_user,
                                 [<<"user">>, <<"acting-user">>]),
    Config;
end_per_testcase0(_, Config) ->
    Config.
%% SEND to a non-existent exchange destination must yield a not_found
%% ERROR frame.
publish_no_dest_error(Config) ->
    Client = ?config(stomp_client, Config),
    rabbit_stomp_client:send(
      Client, "SEND", [{"destination", "/exchange/non-existent"}], ["hello"]),
    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
    "not_found" = proplists:get_value("message", Hdrs),
    ok.
%% SEND as a user without write permission (set up in init_per_testcase0)
%% must yield an access_refused ERROR frame.
publish_unauthorized_error(Config) ->
    ClientFoo = ?config(client_foo, Config),
    rabbit_stomp_client:send(
      ClientFoo, "SEND", [{"destination", "/amq/queue/RestrictedQueue"}], ["hello"]),
    {ok, _Client1, Hdrs, _} = stomp_receive(ClientFoo, "ERROR"),
    "access_refused" = proplists:get_value("message", Hdrs),
    ok.
%% SUBSCRIBE to a queue that was never declared must yield a not_found
%% ERROR frame.
subscribe_error(Config) ->
    Client = ?config(stomp_client, Config),
    %% SUBSCRIBE to missing queue
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
    "not_found" = proplists:get_value("message", Hdrs),
    ok.
%% A STOMP SUBSCRIBE on /amq/queue/... must deliver messages that are
%% published to the queue over AMQP.
subscribe(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    %% subscribe and wait for receipt
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
    %% send from amqp
    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
                                                 payload = <<"hello">>}),
    {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
    ok.
%% ACK-ing a message after its subscription has been removed must be
%% rejected with a "Subscription not found" ERROR frame.
unsubscribe_ack(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    Version = ?config(version, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    %% subscribe (client acks, explicit subscription id) and wait for receipt
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
                            {"receipt", "rcpt1"},
                            {"ack", "client"},
                            {"id", "subscription-id"}]),
    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
    %% send from amqp
    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
                                                 payload = <<"hello">>}),
    {ok, Client2, Hdrs1, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
    %% drop the subscription, then try to ack the already-delivered message
    rabbit_stomp_client:send(
      Client2, "UNSUBSCRIBE", [{"destination", ?DESTINATION},
                               {"id", "subscription-id"}]),
    rabbit_stomp_client:send(
      Client2, "ACK", [{rabbit_stomp_util:ack_header_name(Version),
                        proplists:get_value(
                          rabbit_stomp_util:msg_header_name(Version), Hdrs1)},
                       {"receipt", "rcpt2"}]),
    {ok, _Client3, Hdrs2, _Body2} = stomp_receive(Client2, "ERROR"),
    ?assertEqual("Subscription not found",
                 proplists:get_value("message", Hdrs2)),
    ok.
%% With ack mode "client" a delivered message stays on the queue until the
%% STOMP client ACKs it; after the ACK the queue must be empty.
subscribe_ack(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    Version = ?config(version, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    %% subscribe and wait for receipt
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
                            {"receipt", "foo"},
                            {"ack", "client"}]),
    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
    %% send from amqp
    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
                                                 payload = <<"hello">>}),
    {ok, _Client2, Headers, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
    %% the "ack" header must be present exactly when the protocol is 1.2
    false = (Version == "1.2") xor proplists:is_defined(?HEADER_ACK, Headers),
    MsgHeader = rabbit_stomp_util:msg_header_name(Version),
    AckValue = proplists:get_value(MsgHeader, Headers),
    AckHeader = rabbit_stomp_util:ack_header_name(Version),
    %% NOTE(review): the ACK is sent on Client rather than Client1/_Client2;
    %% confirm the client state threading is intentional here
    rabbit_stomp_client:send(Client, "ACK", [{AckHeader, AckValue}]),
    %% after the ack the message must be gone from the queue
    #'basic.get_empty'{} =
        amqp_channel:call(Channel, #'basic.get'{queue = ?QUEUE}),
    ok.
%% A STOMP SEND to a subscribed /amq/queue/ destination is delivered back
%% to the subscriber as a MESSAGE frame.
send(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    %% subscribe and wait for receipt
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
    %% send from stomp
    rabbit_stomp_client:send(
      Client1, "SEND", [{"destination", ?DESTINATION}], ["hello"]),
    {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
    ok.
%% Deleting the underlying queue while a STOMP client is subscribed to it
%% must produce an ERROR frame that names the dead subscription.
delete_queue_subscribe(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    %% subscribe and wait for receipt
    rabbit_stomp_client:send(
      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "bah"}]),
    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
    %% delete queue while subscribed
    #'queue.delete_ok'{} =
        amqp_channel:call(Channel, #'queue.delete'{queue = ?QUEUE}),
    {ok, _Client2, Headers, _} = stomp_receive(Client1, "ERROR"),
    ?DESTINATION = proplists:get_value("subscription", Headers),
    % server closes connection
    ok.
%% A SEND carrying a /temp-queue/ reply-to must make the broker create a
%% private reply queue: replying over AMQP to the generated reply_to
%% routes the answer back to the STOMP client as a MESSAGE.
temp_destination_queue(Config) ->
    Channel = ?config(amqp_channel, Config),
    Client = ?config(stomp_client, Config),
    #'queue.declare_ok'{} =
        amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
                                                    auto_delete = true}),
    rabbit_stomp_client:send( Client, "SEND", [{"destination", ?DESTINATION},
                                               {"reply-to", "/temp-queue/foo"}],
                              ["ping"]),
    %% consume the "ping" over AMQP and capture the generated reply_to
    amqp_channel:call(Channel,#'basic.consume'{queue = ?QUEUE, no_ack = true}),
    receive #'basic.consume_ok'{consumer_tag = _Tag} -> ok end,
    ReplyTo = receive {#'basic.deliver'{delivery_tag = _DTag},
                       #'amqp_msg'{payload = <<"ping">>,
                                   props = #'P_basic'{reply_to = RT}}} -> RT
              end,
    %% answer on the temp queue; the STOMP client must receive it back
    ok = amqp_channel:call(Channel,
                           #'basic.publish'{routing_key = ReplyTo},
                           #amqp_msg{payload = <<"pong">>}),
    {ok, _Client1, _, [<<"pong">>]} = stomp_receive(Client, "MESSAGE"),
    ok.
%% /temp-queue/ destinations are only valid in reply-to headers, not as a
%% SEND destination.
temp_destination_in_send(Config) ->
    Client = ?config(stomp_client, Config),
    rabbit_stomp_client:send( Client, "SEND", [{"destination", "/temp-queue/foo"}],
                              ["poing"]),
    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
    "Invalid destination" = proplists:get_value("message", Hdrs),
    ok.
%% An empty destination header is rejected the same way.
blank_destination_in_send(Config) ->
    Client = ?config(stomp_client, Config),
    rabbit_stomp_client:send( Client, "SEND", [{"destination", ""}],
                              ["poing"]),
    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
    "Invalid destination" = proplists:get_value("message", Hdrs),
    ok.
%% Receive one frame and assert its command via the pattern match; returns
%% the updated client state plus the frame's headers and body.
stomp_receive(Client, Command) ->
    {#stomp_frame{command = Command,
                  headers = Hdrs,
                  body_iolist = Body}, Client1} =
        rabbit_stomp_client:recv(Client),
    {ok, Client1, Hdrs, Body}.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-stomp/925d78e2c7152723a68452b38fbc2713d2797b8b/test/amqqueue_SUITE.erl | erlang |
SUBSCRIBE to missing queue
subscribe and wait for receipt
subscribe and wait for receipt
subscribe and wait for receipt
subscribe and wait for receipt
send from stomp
subscribe and wait for receipt
delete queue while subscribed
server closes connection | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(amqqueue_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_stomp.hrl").
-include("rabbit_stomp_frame.hrl").
-include("rabbit_stomp_headers.hrl").
-define(QUEUE, <<"TestQueue">>).
-define(DESTINATION, "/amq/queue/TestQueue").
all() ->
[{group, version_to_group_name(V)} || V <- ?SUPPORTED_VERSIONS].
groups() ->
Tests = [
publish_no_dest_error,
publish_unauthorized_error,
subscribe_error,
subscribe,
unsubscribe_ack,
subscribe_ack,
send,
delete_queue_subscribe,
temp_destination_queue,
temp_destination_in_send,
blank_destination_in_send
],
[{version_to_group_name(V), [sequence], Tests}
|| V <- ?SUPPORTED_VERSIONS].
version_to_group_name(V) ->
list_to_atom(re:replace("version_" ++ V,
"\\.",
"_",
[global, {return, list}])).
init_per_suite(Config) ->
Config1 = rabbit_ct_helpers:set_config(Config,
[{rmq_nodename_suffix, ?MODULE}]),
rabbit_ct_helpers:log_environment(),
rabbit_ct_helpers:run_setup_steps(Config1,
rabbit_ct_broker_helpers:setup_steps()).
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config,
rabbit_ct_broker_helpers:teardown_steps()).
init_per_group(Group, Config) ->
Suffix = string:sub_string(atom_to_list(Group), 9),
Version = re:replace(Suffix, "_", ".", [global, {return, list}]),
rabbit_ct_helpers:set_config(Config, [{version, Version}]).
end_per_group(_Group, Config) -> Config.
init_per_testcase(TestCase, Config) ->
Version = ?config(version, Config),
StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
{ok, Connection} = amqp_connection:start(#amqp_params_direct{
node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename)
}),
{ok, Channel} = amqp_connection:open_channel(Connection),
{ok, Client} = rabbit_stomp_client:connect(Version, StompPort),
Config1 = rabbit_ct_helpers:set_config(Config, [
{amqp_connection, Connection},
{amqp_channel, Channel},
{stomp_client, Client}
]),
init_per_testcase0(TestCase, Config1).
end_per_testcase(TestCase, Config) ->
Connection = ?config(amqp_connection, Config),
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
rabbit_stomp_client:disconnect(Client),
amqp_channel:close(Channel),
amqp_connection:close(Connection),
end_per_testcase0(TestCase, Config).
init_per_testcase0(publish_unauthorized_error, Config) ->
Channel = ?config(amqp_channel, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = <<"RestrictedQueue">>,
auto_delete = true}),
rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, add_user,
[<<"user">>, <<"pass">>, <<"acting-user">>]),
rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, set_permissions, [
<<"user">>, <<"/">>, <<"nothing">>, <<"nothing">>, <<"nothing">>, <<"acting-user">>]),
Version = ?config(version, Config),
StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
{ok, ClientFoo} = rabbit_stomp_client:connect(Version, "user", "pass", StompPort),
rabbit_ct_helpers:set_config(Config, [{client_foo, ClientFoo}]);
init_per_testcase0(_, Config) ->
Config.
end_per_testcase0(publish_unauthorized_error, Config) ->
ClientFoo = ?config(client_foo, Config),
rabbit_stomp_client:disconnect(ClientFoo),
rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, delete_user,
[<<"user">>, <<"acting-user">>]),
Config;
end_per_testcase0(_, Config) ->
Config.
publish_no_dest_error(Config) ->
Client = ?config(stomp_client, Config),
rabbit_stomp_client:send(
Client, "SEND", [{"destination", "/exchange/non-existent"}], ["hello"]),
{ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
"not_found" = proplists:get_value("message", Hdrs),
ok.
publish_unauthorized_error(Config) ->
ClientFoo = ?config(client_foo, Config),
rabbit_stomp_client:send(
ClientFoo, "SEND", [{"destination", "/amq/queue/RestrictedQueue"}], ["hello"]),
{ok, _Client1, Hdrs, _} = stomp_receive(ClientFoo, "ERROR"),
"access_refused" = proplists:get_value("message", Hdrs),
ok.
subscribe_error(Config) ->
Client = ?config(stomp_client, Config),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
{ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
"not_found" = proplists:get_value("message", Hdrs),
ok.
subscribe(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
{ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
send from amqp
Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
payload = <<"hello">>}),
{ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
ok.
unsubscribe_ack(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
Version = ?config(version, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
{"receipt", "rcpt1"},
{"ack", "client"},
{"id", "subscription-id"}]),
{ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
send from amqp
Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
payload = <<"hello">>}),
{ok, Client2, Hdrs1, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
rabbit_stomp_client:send(
Client2, "UNSUBSCRIBE", [{"destination", ?DESTINATION},
{"id", "subscription-id"}]),
rabbit_stomp_client:send(
Client2, "ACK", [{rabbit_stomp_util:ack_header_name(Version),
proplists:get_value(
rabbit_stomp_util:msg_header_name(Version), Hdrs1)},
{"receipt", "rcpt2"}]),
{ok, _Client3, Hdrs2, _Body2} = stomp_receive(Client2, "ERROR"),
?assertEqual("Subscription not found",
proplists:get_value("message", Hdrs2)),
ok.
subscribe_ack(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
Version = ?config(version, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
{"receipt", "foo"},
{"ack", "client"}]),
{ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
send from amqp
Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
payload = <<"hello">>}),
{ok, _Client2, Headers, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
false = (Version == "1.2") xor proplists:is_defined(?HEADER_ACK, Headers),
MsgHeader = rabbit_stomp_util:msg_header_name(Version),
AckValue = proplists:get_value(MsgHeader, Headers),
AckHeader = rabbit_stomp_util:ack_header_name(Version),
rabbit_stomp_client:send(Client, "ACK", [{AckHeader, AckValue}]),
#'basic.get_empty'{} =
amqp_channel:call(Channel, #'basic.get'{queue = ?QUEUE}),
ok.
send(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
{ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
rabbit_stomp_client:send(
Client1, "SEND", [{"destination", ?DESTINATION}], ["hello"]),
{ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
ok.
delete_queue_subscribe(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send(
Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "bah"}]),
{ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
#'queue.delete_ok'{} =
amqp_channel:call(Channel, #'queue.delete'{queue = ?QUEUE}),
{ok, _Client2, Headers, _} = stomp_receive(Client1, "ERROR"),
?DESTINATION = proplists:get_value("subscription", Headers),
ok.
temp_destination_queue(Config) ->
Channel = ?config(amqp_channel, Config),
Client = ?config(stomp_client, Config),
#'queue.declare_ok'{} =
amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
auto_delete = true}),
rabbit_stomp_client:send( Client, "SEND", [{"destination", ?DESTINATION},
{"reply-to", "/temp-queue/foo"}],
["ping"]),
amqp_channel:call(Channel,#'basic.consume'{queue = ?QUEUE, no_ack = true}),
receive #'basic.consume_ok'{consumer_tag = _Tag} -> ok end,
ReplyTo = receive {#'basic.deliver'{delivery_tag = _DTag},
#'amqp_msg'{payload = <<"ping">>,
props = #'P_basic'{reply_to = RT}}} -> RT
end,
ok = amqp_channel:call(Channel,
#'basic.publish'{routing_key = ReplyTo},
#amqp_msg{payload = <<"pong">>}),
{ok, _Client1, _, [<<"pong">>]} = stomp_receive(Client, "MESSAGE"),
ok.
temp_destination_in_send(Config) ->
Client = ?config(stomp_client, Config),
rabbit_stomp_client:send( Client, "SEND", [{"destination", "/temp-queue/foo"}],
["poing"]),
{ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
"Invalid destination" = proplists:get_value("message", Hdrs),
ok.
blank_destination_in_send(Config) ->
Client = ?config(stomp_client, Config),
rabbit_stomp_client:send( Client, "SEND", [{"destination", ""}],
["poing"]),
{ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
"Invalid destination" = proplists:get_value("message", Hdrs),
ok.
stomp_receive(Client, Command) ->
{#stomp_frame{command = Command,
headers = Hdrs,
body_iolist = Body}, Client1} =
rabbit_stomp_client:recv(Client),
{ok, Client1, Hdrs, Body}.
|
7e99687a9ac4cee6b5952285490a34405988a3f95965c55aa8fb18a183f85b8c | evdubs/renegade-way | save-condor-analysis.rkt | #lang racket/base
; This (module) is a hack to get this code to load before the (requires) call below.
We want to first set up the command line args before initializing stuff in db-queries.rkt .
(module cmd racket/base
(require gregor
racket/cmdline
"params.rkt")
(command-line
#:program "racket save-condor-analysis.rkt"
#:once-each
[("-d" "--end-date") end-date
"End date for saving. Defaults to today"
(save-end-date (iso8601->date end-date))]
[("-m" "--markets") markets
"Markets to save. Defaults to SPY,MDY,SLY"
(save-markets markets)]
[("-n" "--db-name") name
"Database name. Defaults to 'local'"
(db-name name)]
[("-p" "--db-pass") password
"Database password"
(db-pass password)]
[("-u" "--db-user") user
"Database user name. Defaults to 'user'"
(db-user user)]))
(require 'cmd
gregor
racket/list
"db-queries.rkt"
"params.rkt"
"condor-analysis.rkt"
"structs.rkt")
(cond [(and (or (= 0 (->wday (save-end-date)))
(= 6 (->wday (save-end-date)))))
(displayln (string-append "Requested date " (date->iso8601 (save-end-date)) " falls on a weekend. Terminating."))
(exit)])
(run-condor-analysis (save-markets) "" (date->iso8601 (-months (save-end-date) 5)) (date->iso8601 (save-end-date)))
(for-each (λ (msis)
(with-handlers
([exn:fail? (λ (e) (displayln (string-append "Failed to process " (condor-analysis-stock msis) " for date "
(date->iso8601 (save-end-date))))
(displayln e))])
(insert-condor-analysis (date->iso8601 (save-end-date))
msis
(first (hash-ref condor-analysis-hash (condor-analysis-market msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-market msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-sector msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-sector msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-industry msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-industry msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-stock msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-stock msis))))))
condor-analysis-list)
| null | https://raw.githubusercontent.com/evdubs/renegade-way/995e61a7871365fa92133ed054cd889826fb34e0/save-condor-analysis.rkt | racket | This (module) is a hack to get this code to load before the (requires) call below. | #lang racket/base
We want to first set up the command line args before initializing stuff in db-queries.rkt .
(module cmd racket/base
(require gregor
racket/cmdline
"params.rkt")
(command-line
#:program "racket save-condor-analysis.rkt"
#:once-each
[("-d" "--end-date") end-date
"End date for saving. Defaults to today"
(save-end-date (iso8601->date end-date))]
[("-m" "--markets") markets
"Markets to save. Defaults to SPY,MDY,SLY"
(save-markets markets)]
[("-n" "--db-name") name
"Database name. Defaults to 'local'"
(db-name name)]
[("-p" "--db-pass") password
"Database password"
(db-pass password)]
[("-u" "--db-user") user
"Database user name. Defaults to 'user'"
(db-user user)]))
(require 'cmd
gregor
racket/list
"db-queries.rkt"
"params.rkt"
"condor-analysis.rkt"
"structs.rkt")
(cond [(and (or (= 0 (->wday (save-end-date)))
(= 6 (->wday (save-end-date)))))
(displayln (string-append "Requested date " (date->iso8601 (save-end-date)) " falls on a weekend. Terminating."))
(exit)])
(run-condor-analysis (save-markets) "" (date->iso8601 (-months (save-end-date) 5)) (date->iso8601 (save-end-date)))
(for-each (λ (msis)
(with-handlers
([exn:fail? (λ (e) (displayln (string-append "Failed to process " (condor-analysis-stock msis) " for date "
(date->iso8601 (save-end-date))))
(displayln e))])
(insert-condor-analysis (date->iso8601 (save-end-date))
msis
(first (hash-ref condor-analysis-hash (condor-analysis-market msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-market msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-sector msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-sector msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-industry msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-industry msis)))
(first (hash-ref condor-analysis-hash (condor-analysis-stock msis)))
(second (hash-ref condor-analysis-hash (condor-analysis-stock msis))))))
condor-analysis-list)
|
4295834d4aaf8ff9b8b7f9f2532a50975c69ee70ff1b6901e3b12e98d6194292 | sdanzan/erlang-systools | plimiter_tests.erl | -module(plimiter_tests).
-export([a_process/1]).
-include_lib("eunit/include/eunit.hrl").
start_stop_test() ->
Limiter = plimiter:start(10),
?assert(is_pid(Limiter)),
?assert(is_process_alive(Limiter)),
Monitor = monitor(process, Limiter),
plimiter:stop(Limiter),
Dead =
receive
{ 'DOWN', Monitor, process, Limiter, normal } -> true
after 5000 -> false end,
?assert(Dead).
start_stop_registered_test() ->
?assert(plimiter:start(limiter, 10)),
?assert(is_pid(whereis(limiter))),
Limiter = whereis(limiter),
Monitor = monitor(process, Limiter),
plimiter:stop(limiter),
Dead =
receive
{ 'DOWN', Monitor, process, Limiter, normal } -> true
after 5000 -> false end,
?assert(Dead).
start_bad_args_test_() ->
[
{ "negative max", ?_assertError(function_clause, plimiter:start(-2)) }
, { "bad name", ?_assertError(badarg, plimiter:start("test", 10)) }
].
complete_flow_test_() ->
{ inorder, [
{
setup,
fun() -> setup(Max) end,
fun cleanup/1,
[
{ "flow_" ++ integer_to_list(Max), fun() -> flow({limiter, Max}) end }
]
}
|| Max <- lists:seq(1, 1001, 200)
]}.
setup(Max) -> plimiter:start(limiter, Max), Max.
cleanup(_) -> plimiter:stop(limiter), unregister(limiter).
flow({Limiter, Max}) ->
Self = self(),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
Pid = wait_for_a_process(),
Pid ! { Self, stop },
ok = wait_for_a_process_stop(Pid),
plimiter:spawn(Limiter, plimiter_tests, a_process, [Self]),
Pid2 = wait_for_a_process(),
Pid2 ! { Self, stop },
ok = wait_for_a_process_stop(Pid2),
lists:foreach(fun(_) -> plimiter:spawn(Limiter, fun() -> a_process(Self) end) end,
lists:seq(1, Max)),
[ H | Pids ] = AllPids = [ wait_for_a_process() || _ <- lists:seq(1, Max) ],
?assertEqual(Max, length(AllPids)),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
wait_for_a_process(),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
?assertMatch(none, wait_for_a_process()),
H ! { Self, stop },
ok = wait_for_a_process_stop(H),
L = wait_for_a_process(),
?assert(is_pid(L)),
lists:foreach(fun(P) -> P ! { Self, stop } end, [ L | Pids ]),
flush().
flush() -> receive _ -> flush() after 0 -> ok end.
wait_for_a_process() -> receive { Pid, a_process_started } -> Pid after 5 -> none end.
wait_for_a_process_stop(Pid) -> receive { Pid, a_process_stopped } -> ok end.
a_process(Parent) ->
Parent ! { self(), a_process_started },
receive { Parent, stop } -> Parent ! { self(), a_process_stopped } end.
| null | https://raw.githubusercontent.com/sdanzan/erlang-systools/ced3faf1c807d36c528e53cbb366d69f464ff4e5/test/plimiter_tests.erl | erlang | -module(plimiter_tests).
-export([a_process/1]).
-include_lib("eunit/include/eunit.hrl").
start_stop_test() ->
Limiter = plimiter:start(10),
?assert(is_pid(Limiter)),
?assert(is_process_alive(Limiter)),
Monitor = monitor(process, Limiter),
plimiter:stop(Limiter),
Dead =
receive
{ 'DOWN', Monitor, process, Limiter, normal } -> true
after 5000 -> false end,
?assert(Dead).
start_stop_registered_test() ->
?assert(plimiter:start(limiter, 10)),
?assert(is_pid(whereis(limiter))),
Limiter = whereis(limiter),
Monitor = monitor(process, Limiter),
plimiter:stop(limiter),
Dead =
receive
{ 'DOWN', Monitor, process, Limiter, normal } -> true
after 5000 -> false end,
?assert(Dead).
start_bad_args_test_() ->
[
{ "negative max", ?_assertError(function_clause, plimiter:start(-2)) }
, { "bad name", ?_assertError(badarg, plimiter:start("test", 10)) }
].
complete_flow_test_() ->
{ inorder, [
{
setup,
fun() -> setup(Max) end,
fun cleanup/1,
[
{ "flow_" ++ integer_to_list(Max), fun() -> flow({limiter, Max}) end }
]
}
|| Max <- lists:seq(1, 1001, 200)
]}.
setup(Max) -> plimiter:start(limiter, Max), Max.
cleanup(_) -> plimiter:stop(limiter), unregister(limiter).
flow({Limiter, Max}) ->
Self = self(),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
Pid = wait_for_a_process(),
Pid ! { Self, stop },
ok = wait_for_a_process_stop(Pid),
plimiter:spawn(Limiter, plimiter_tests, a_process, [Self]),
Pid2 = wait_for_a_process(),
Pid2 ! { Self, stop },
ok = wait_for_a_process_stop(Pid2),
lists:foreach(fun(_) -> plimiter:spawn(Limiter, fun() -> a_process(Self) end) end,
lists:seq(1, Max)),
[ H | Pids ] = AllPids = [ wait_for_a_process() || _ <- lists:seq(1, Max) ],
?assertEqual(Max, length(AllPids)),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
wait_for_a_process(),
plimiter:spawn(Limiter, fun() -> a_process(Self) end),
?assertMatch(none, wait_for_a_process()),
H ! { Self, stop },
ok = wait_for_a_process_stop(H),
L = wait_for_a_process(),
?assert(is_pid(L)),
lists:foreach(fun(P) -> P ! { Self, stop } end, [ L | Pids ]),
flush().
flush() -> receive _ -> flush() after 0 -> ok end.
wait_for_a_process() -> receive { Pid, a_process_started } -> Pid after 5 -> none end.
wait_for_a_process_stop(Pid) -> receive { Pid, a_process_stopped } -> ok end.
a_process(Parent) ->
Parent ! { self(), a_process_started },
receive { Parent, stop } -> Parent ! { self(), a_process_stopped } end.
| |
959b5b3a5468faedf3678c338139dda14a5c01cbe8c00ab2e17971460663a8cd | haskell/hackage-server | Types.hs | # LANGUAGE DeriveDataTypeable , , TemplateHaskell #
# LANGUAGE TypeFamilies #
module Distribution.Server.Users.Types (
module Distribution.Server.Users.Types,
module Distribution.Server.Users.AuthToken,
module Distribution.Server.Framework.AuthTypes
) where
import Distribution.Server.Framework.AuthTypes
import Distribution.Server.Framework.MemSize
import Distribution.Server.Users.AuthToken
import Distribution.Pretty (Pretty(..))
import Distribution.Parsec (Parsec(..))
import qualified Distribution.Parsec as P
import qualified Distribution.Compat.Parsing as P
import qualified Distribution.Compat.CharParsing as P
import qualified Text.PrettyPrint as Disp
import qualified Data.Char as Char
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Data.List as L
import Data.Aeson (ToJSON, FromJSON)
import Data.SafeCopy (base, extension, deriveSafeCopy, Migrate(..))
import Data.Typeable (Typeable)
import Data.Hashable
newtype UserId = UserId Int
deriving (Eq, Ord, Read, Show, Typeable, MemSize, ToJSON, FromJSON, Pretty)
newtype UserName = UserName String
deriving (Eq, Ord, Read, Show, Typeable, MemSize, ToJSON, FromJSON, Hashable)
data UserInfo = UserInfo {
userName :: !UserName,
userStatus :: !UserStatus,
userTokens :: !(M.Map AuthToken T.Text) -- tokens and descriptions
} deriving (Eq, Show, Typeable)
data UserStatus = AccountEnabled UserAuth
| AccountDisabled (Maybe UserAuth)
| AccountDeleted
deriving (Eq, Show, Typeable)
newtype UserAuth = UserAuth PasswdHash
deriving (Show, Eq, Typeable)
isActiveAccount :: UserStatus -> Bool
isActiveAccount (AccountEnabled _) = True
isActiveAccount (AccountDisabled _) = True
isActiveAccount AccountDeleted = False
instance MemSize UserInfo where
memSize (UserInfo a b c) = memSize3 a b c
instance MemSize UserStatus where
memSize (AccountEnabled a) = memSize1 a
memSize (AccountDisabled a) = memSize1 a
memSize (AccountDeleted) = memSize0
instance MemSize UserAuth where
memSize (UserAuth a) = memSize1 a
instance Parsec UserId where
parse a non - negative integer . No redundant leading zeros allowed .
-- (this is effectively a relabeled versionDigitParser)
parsec = (P.some d >>= (fmap UserId . toNumber)) P.<?> "UserId (natural number without redunant leading zeroes)"
where
toNumber :: P.CabalParsing m => [Int] -> m Int
toNumber [0] = return 0
toNumber (0:_) = P.unexpected "UserId with redundant leading zero"
-- TODO: Add sanity check this doesn't overflow
toNumber xs = return $ L.foldl' (\a b -> a * 10 + b) 0 xs
d :: P.CharParsing m => m Int
d = f <$> P.satisfyRange '0' '9'
f c = Char.ord c - Char.ord '0'
instance Pretty UserName where
pretty (UserName name) = Disp.text name
instance Parsec UserName where
parsec = UserName <$> P.munch1 isValidUserNameChar
isValidUserNameChar :: Char -> Bool
isValidUserNameChar c = (c < '\127' && Char.isAlphaNum c) || (c == '_')
data UserInfo_v0 = UserInfo_v0 {
userName_v0 :: !UserName,
userStatus_v0 :: !UserStatus
} deriving (Eq, Show, Typeable)
$(deriveSafeCopy 0 'base ''UserId)
$(deriveSafeCopy 0 'base ''UserName)
$(deriveSafeCopy 1 'base ''UserAuth)
$(deriveSafeCopy 0 'base ''UserStatus)
$(deriveSafeCopy 0 'base ''UserInfo_v0)
instance Migrate UserInfo where
type MigrateFrom UserInfo = UserInfo_v0
migrate v0 =
UserInfo
{ userName = userName_v0 v0
, userStatus = userStatus_v0 v0
, userTokens = M.empty
}
$(deriveSafeCopy 1 'extension ''UserInfo)
| null | https://raw.githubusercontent.com/haskell/hackage-server/1543b15b6f4b96d732b5b352f9991b421f6a7316/src/Distribution/Server/Users/Types.hs | haskell | tokens and descriptions
(this is effectively a relabeled versionDigitParser)
TODO: Add sanity check this doesn't overflow | # LANGUAGE DeriveDataTypeable , , TemplateHaskell #
# LANGUAGE TypeFamilies #
module Distribution.Server.Users.Types (
module Distribution.Server.Users.Types,
module Distribution.Server.Users.AuthToken,
module Distribution.Server.Framework.AuthTypes
) where
import Distribution.Server.Framework.AuthTypes
import Distribution.Server.Framework.MemSize
import Distribution.Server.Users.AuthToken
import Distribution.Pretty (Pretty(..))
import Distribution.Parsec (Parsec(..))
import qualified Distribution.Parsec as P
import qualified Distribution.Compat.Parsing as P
import qualified Distribution.Compat.CharParsing as P
import qualified Text.PrettyPrint as Disp
import qualified Data.Char as Char
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Data.List as L
import Data.Aeson (ToJSON, FromJSON)
import Data.SafeCopy (base, extension, deriveSafeCopy, Migrate(..))
import Data.Typeable (Typeable)
import Data.Hashable
newtype UserId = UserId Int
deriving (Eq, Ord, Read, Show, Typeable, MemSize, ToJSON, FromJSON, Pretty)
newtype UserName = UserName String
deriving (Eq, Ord, Read, Show, Typeable, MemSize, ToJSON, FromJSON, Hashable)
data UserInfo = UserInfo {
userName :: !UserName,
userStatus :: !UserStatus,
} deriving (Eq, Show, Typeable)
data UserStatus = AccountEnabled UserAuth
| AccountDisabled (Maybe UserAuth)
| AccountDeleted
deriving (Eq, Show, Typeable)
newtype UserAuth = UserAuth PasswdHash
deriving (Show, Eq, Typeable)
isActiveAccount :: UserStatus -> Bool
isActiveAccount (AccountEnabled _) = True
isActiveAccount (AccountDisabled _) = True
isActiveAccount AccountDeleted = False
instance MemSize UserInfo where
memSize (UserInfo a b c) = memSize3 a b c
instance MemSize UserStatus where
memSize (AccountEnabled a) = memSize1 a
memSize (AccountDisabled a) = memSize1 a
memSize (AccountDeleted) = memSize0
instance MemSize UserAuth where
memSize (UserAuth a) = memSize1 a
instance Parsec UserId where
parse a non - negative integer . No redundant leading zeros allowed .
parsec = (P.some d >>= (fmap UserId . toNumber)) P.<?> "UserId (natural number without redunant leading zeroes)"
where
toNumber :: P.CabalParsing m => [Int] -> m Int
toNumber [0] = return 0
toNumber (0:_) = P.unexpected "UserId with redundant leading zero"
toNumber xs = return $ L.foldl' (\a b -> a * 10 + b) 0 xs
d :: P.CharParsing m => m Int
d = f <$> P.satisfyRange '0' '9'
f c = Char.ord c - Char.ord '0'
instance Pretty UserName where
pretty (UserName name) = Disp.text name
instance Parsec UserName where
parsec = UserName <$> P.munch1 isValidUserNameChar
isValidUserNameChar :: Char -> Bool
isValidUserNameChar c = (c < '\127' && Char.isAlphaNum c) || (c == '_')
data UserInfo_v0 = UserInfo_v0 {
userName_v0 :: !UserName,
userStatus_v0 :: !UserStatus
} deriving (Eq, Show, Typeable)
$(deriveSafeCopy 0 'base ''UserId)
$(deriveSafeCopy 0 'base ''UserName)
$(deriveSafeCopy 1 'base ''UserAuth)
$(deriveSafeCopy 0 'base ''UserStatus)
$(deriveSafeCopy 0 'base ''UserInfo_v0)
instance Migrate UserInfo where
type MigrateFrom UserInfo = UserInfo_v0
migrate v0 =
UserInfo
{ userName = userName_v0 v0
, userStatus = userStatus_v0 v0
, userTokens = M.empty
}
$(deriveSafeCopy 1 'extension ''UserInfo)
|
024e072f8e9928ec2d74765c0b0c8621cf0b4e4954d33fa109b4d34a8a349110 | agentm/project-m36 | InformationOperator.hs | # LANGUAGE CPP #
module TutorialD.Interpreter.InformationOperator where
import Data.Text
import Text.Megaparsec
import TutorialD.Interpreter.Base
-- older versions of stack fail to
#if !defined(VERSION_project_m36)
# warning Failed to discover proper version from cabal_macros.h
# define VERSION_project_m36 "<unknown>"
#endif
-- this module provides information about the current interpreter
data InformationOperator = HelpOperator |
GetVersionOperator
deriving (Show)
infoOpP :: Parser InformationOperator
infoOpP = helpOpP <|> getVersionP
helpOpP :: Parser InformationOperator
helpOpP = reserved ":help" >> pure HelpOperator
getVersionP :: Parser InformationOperator
getVersionP = reserved ":version" >> pure GetVersionOperator
evalInformationOperator :: InformationOperator -> Either Text Text
evalInformationOperator GetVersionOperator = Right ("tutd " `append` VERSION_project_m36)
-- display generic help
evalInformationOperator HelpOperator = Right $ intercalate "\n" help
where
help = ["tutd Help",
"Quick Examples:",
":showexpr true",
":showexpr relation{name Text, address Text}{tuple{name \"Steve\", address \"Main St.\"}}",
"address := relation{tuple{name \"Steve\", address \"Main St.\"}}",
":showexpr true join false = false",
"Relational Operators:",
":showexpr relation{a Int, b Text}{} -- relation creation",
":showexpr relation{tuple{c t}} -- relation creation",
":showexpr relation{tuple{a 4, b 4}}{a} -- projection",
":showexpr relation{tuple{a 5}} rename {a as num} -- rename",
":showexpr relation{tuple{d 10}} where d=10 or d=5 -- restriction",
":showexpr relation{tuple{d 10}} : {e:=add(@d,5)} -- extension",
"Database Context Operators:",
"animal := relation{tuple{name \"octopus\", legs_count 8}} -- assignment",
"insert animal relation{tuple{name \"cat\", legs_count 4}} -- insertion",
"car :: {model Text, make Text, year Int} -- definition",
"undefine car -- undefine",
"delete animal where legs_count=4 -- deletion",
"update animal where name=\"octopus\" (name:=\"Mr. Octopus\") -- updating",
"employee:=relation{id Int, name Text, age Int}{}; key emp_unique_id {id} employee --uniqueness constraint",
"constraint age_gt_zero (employee{age} where ^lt(@age,0)){} equals false -- constraint",
"notify teenager_added employee where ^lt(@age,20) and ^gte(@age,13) employee{age} where ^lt(@age,20) and ^gte(@age,13) -- change notification",
"Graph Operators: ",
":jumphead <head_name> - change the current database context to point to a current head",
":jump <transaction_id> - change the current database context to that of a past transaction",
":commit - push the current context into the current head and make it immutable",
":rollback - discard any changes made in the current context",
":showgraph - display the transaction graph",
"View more documentation at: -m36/blob/master/docs/tutd_tutorial.markdown"
]
| null | https://raw.githubusercontent.com/agentm/project-m36/57a75b35e84bebf0945db6dae53350fda83f24b6/src/bin/TutorialD/Interpreter/InformationOperator.hs | haskell | older versions of stack fail to
this module provides information about the current interpreter
display generic help | # LANGUAGE CPP #
module TutorialD.Interpreter.InformationOperator where
import Data.Text
import Text.Megaparsec
import TutorialD.Interpreter.Base
#if !defined(VERSION_project_m36)
# warning Failed to discover proper version from cabal_macros.h
# define VERSION_project_m36 "<unknown>"
#endif
data InformationOperator = HelpOperator |
GetVersionOperator
deriving (Show)
infoOpP :: Parser InformationOperator
infoOpP = helpOpP <|> getVersionP
helpOpP :: Parser InformationOperator
helpOpP = reserved ":help" >> pure HelpOperator
getVersionP :: Parser InformationOperator
getVersionP = reserved ":version" >> pure GetVersionOperator
evalInformationOperator :: InformationOperator -> Either Text Text
evalInformationOperator GetVersionOperator = Right ("tutd " `append` VERSION_project_m36)
evalInformationOperator HelpOperator = Right $ intercalate "\n" help
where
help = ["tutd Help",
"Quick Examples:",
":showexpr true",
":showexpr relation{name Text, address Text}{tuple{name \"Steve\", address \"Main St.\"}}",
"address := relation{tuple{name \"Steve\", address \"Main St.\"}}",
":showexpr true join false = false",
"Relational Operators:",
":showexpr relation{a Int, b Text}{} -- relation creation",
":showexpr relation{tuple{c t}} -- relation creation",
":showexpr relation{tuple{a 4, b 4}}{a} -- projection",
":showexpr relation{tuple{a 5}} rename {a as num} -- rename",
":showexpr relation{tuple{d 10}} where d=10 or d=5 -- restriction",
":showexpr relation{tuple{d 10}} : {e:=add(@d,5)} -- extension",
"Database Context Operators:",
"animal := relation{tuple{name \"octopus\", legs_count 8}} -- assignment",
"insert animal relation{tuple{name \"cat\", legs_count 4}} -- insertion",
"car :: {model Text, make Text, year Int} -- definition",
"undefine car -- undefine",
"delete animal where legs_count=4 -- deletion",
"update animal where name=\"octopus\" (name:=\"Mr. Octopus\") -- updating",
"employee:=relation{id Int, name Text, age Int}{}; key emp_unique_id {id} employee --uniqueness constraint",
"constraint age_gt_zero (employee{age} where ^lt(@age,0)){} equals false -- constraint",
"notify teenager_added employee where ^lt(@age,20) and ^gte(@age,13) employee{age} where ^lt(@age,20) and ^gte(@age,13) -- change notification",
"Graph Operators: ",
":jumphead <head_name> - change the current database context to point to a current head",
":jump <transaction_id> - change the current database context to that of a past transaction",
":commit - push the current context into the current head and make it immutable",
":rollback - discard any changes made in the current context",
":showgraph - display the transaction graph",
"View more documentation at: -m36/blob/master/docs/tutd_tutorial.markdown"
]
|
eacf7a943bf4c734f51ee243865dcbcfe03e81eb6027ced0a32118cd795a844e | ucsd-progsys/nate | depend.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : depend.mli , v 1.2 2002/04/18 07:27:43 garrigue Exp $
(** Module dependencies. *)
module StringSet : Set.S with type elt = string
val free_structure_names : StringSet.t ref
val add_use_file : StringSet.t -> Parsetree.toplevel_phrase list -> unit
val add_signature : StringSet.t -> Parsetree.signature -> unit
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/tools/depend.mli | ocaml | *********************************************************************
Objective Caml
*********************************************************************
* Module dependencies. | , projet Cristal , INRIA Rocquencourt
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : depend.mli , v 1.2 2002/04/18 07:27:43 garrigue Exp $
module StringSet : Set.S with type elt = string
val free_structure_names : StringSet.t ref
val add_use_file : StringSet.t -> Parsetree.toplevel_phrase list -> unit
val add_signature : StringSet.t -> Parsetree.signature -> unit
|
a55c273288d52b1f363e09780dd5fd57b437cab4cdba43fe6e6b9bacae219f85 | racketscript/racketscript | keyword-args.rkt | #lang racket
(define (foo #:a a #:b b)
(displayln a)
(displayln b))
(foo #:a 'hello #:b 'world)
(foo #:b 'world #:a 'hello)
(define (bar a b #:c c #:d d)
(displayln (list a b c d)))
(bar 1 2 #:c 3 #:d 4)
(bar 1 2 #:d 3 #:c 4)
| null | https://raw.githubusercontent.com/racketscript/racketscript/f94006d11338a674ae10f6bd83fc53e6806d07d8/tests/experimental/keyword-args.rkt | racket | #lang racket
(define (foo #:a a #:b b)
(displayln a)
(displayln b))
(foo #:a 'hello #:b 'world)
(foo #:b 'world #:a 'hello)
(define (bar a b #:c c #:d d)
(displayln (list a b c d)))
(bar 1 2 #:c 3 #:d 4)
(bar 1 2 #:d 3 #:c 4)
| |
fc3238bf9dfc771bdb6c1f265a9c11fa83092dff7b7004698f26a7cfb53af527 | earl-ducaine/cl-garnet | framed-text-list-pix.lisp | (when (boundp 'win) (opal:destroy win))
(create-instance 'win inter:interactor-window
(:left 800) (:top 300)
(:height 170) (:width 180)
(:aggregate (create-instance 'agg opal:aggregate)))
(create-instance 'FRAMED-TEXT-LIST opal:aggrelist
(:left 20) (:top 20)
(:items '("An aggrelist" "using an" "aggregate"
"as an" "item-prototype"))
(:item-prototype
`(,opal:aggregadget
(:parts
((:frame ,opal:rectangle
(:left ,(o-formula (gvl :parent :left)))
(:top ,(o-formula (gvl :parent :top)))
(:width ,(o-formula (+ (gvl :parent :text :width) 4)))
(:height ,(o-formula (+ (gvl :parent :text :height) 4))))
(:text ,opal:cursor-text
(:left ,(o-formula (+ (gvl :parent :left) 2)))
(:top ,(o-formula (+ (gvl :parent :top) 2)))
(:cursor-index NIL)
(:font ,(create-instance NIL opal:font
(:size :large)))
(:string ,(o-formula
(nth (gvl :parent :rank)
(gvl :parent :parent :items)))))))
(:interactors
((:text-inter ,inter:text-interactor
(:window ,(o-formula
(gv-local :self :operates-on :window)))
(:feedback-obj NIL)
(:start-where ,(o-formula
(list :in (gvl :operates-on :text))))
(:abort-event #\control-\g)
(:stop-event (:leftdown #\RETURN))
(:final-function
,#'(lambda (inter text event string x y)
(let ((elem (g-value inter :operates-on)))
(change-item (g-value elem :parent)
string
(g-value elem :rank))))) ))))))
(opal:add-components agg framed-text-list)
(opal:update win T)
| null | https://raw.githubusercontent.com/earl-ducaine/cl-garnet/f0095848513ba69c370ed1dc51ee01f0bb4dd108/doc/previous-version/src/aggregadgets/garnet-code/framed-text-list-pix.lisp | lisp | (when (boundp 'win) (opal:destroy win))
(create-instance 'win inter:interactor-window
(:left 800) (:top 300)
(:height 170) (:width 180)
(:aggregate (create-instance 'agg opal:aggregate)))
(create-instance 'FRAMED-TEXT-LIST opal:aggrelist
(:left 20) (:top 20)
(:items '("An aggrelist" "using an" "aggregate"
"as an" "item-prototype"))
(:item-prototype
`(,opal:aggregadget
(:parts
((:frame ,opal:rectangle
(:left ,(o-formula (gvl :parent :left)))
(:top ,(o-formula (gvl :parent :top)))
(:width ,(o-formula (+ (gvl :parent :text :width) 4)))
(:height ,(o-formula (+ (gvl :parent :text :height) 4))))
(:text ,opal:cursor-text
(:left ,(o-formula (+ (gvl :parent :left) 2)))
(:top ,(o-formula (+ (gvl :parent :top) 2)))
(:cursor-index NIL)
(:font ,(create-instance NIL opal:font
(:size :large)))
(:string ,(o-formula
(nth (gvl :parent :rank)
(gvl :parent :parent :items)))))))
(:interactors
((:text-inter ,inter:text-interactor
(:window ,(o-formula
(gv-local :self :operates-on :window)))
(:feedback-obj NIL)
(:start-where ,(o-formula
(list :in (gvl :operates-on :text))))
(:abort-event #\control-\g)
(:stop-event (:leftdown #\RETURN))
(:final-function
,#'(lambda (inter text event string x y)
(let ((elem (g-value inter :operates-on)))
(change-item (g-value elem :parent)
string
(g-value elem :rank))))) ))))))
(opal:add-components agg framed-text-list)
(opal:update win T)
| |
4c18b03aea25e002b4e8b1115a7f2a45cb129e552ec16dd75f4ba0a6bf033251 | turtl/core-cl | config.lisp | (in-package :turtl-core)
;; client self-awareness
(defparameter *client* "core")
(defparameter *version* (asdf/component:component-version (asdf/system:find-system :turtl-core)))
(defparameter *root* (asdf:system-relative-pathname :turtl-core #P"")
"Defines the directory we're loading from.")
;; data directory
(defvar *data-directory* "~/.turtl"
"Holds our Turtl database and any other files associated with storage.")
(defparameter *comm-url* "inproc"
"The nanomsg URL we use to talk to Turtl.")
;; api stuff
(defparameter *api-url* ":8181/api")
(defparameter *api-key* "")
(defparameter *api-auth*
'((:post . "/users")
(:post . "/log/error"))
"API resources that *don't* need auth.")
(defparameter *db-schema*
'(("kv")
("users")
("keychain"
:indexes (("item_id" . :id)))
("personas")
("boards"
:indexes (("user_id" . :id)))
("notes"
:indexes (("user_id" . :id)
("board_id" . :id)
("has_file" . :bool)))
("files"
:binary-data t
:indexes (("note_id" . :id)
("synced" . :bool)
("has_data" . :bool)))
;; kinda lame to jam the queue into the document-store, but it sort of fits
;; due to the fact that jobs will be document data and we can still run
;; custom queries on it (plus it can benefit from lossless schema upgrades).
("queue"
:id :rowid
:indexes (("qid" . :integer)
("grabbed" . :integer)
("failed" . :integer))))
"Holds the local DB schema. This is really a table name and a set of indexes
for that table. Anything else for each table is implemented as a JSON blob,
effectively giving us an object store without th hassle of codifying every
last possible field. So in general the setup is:
ID | [indexed_field1] | [indexed_field2] | ... | data
where `data` is a JSON blob, or possibly a binary blob (if :binary-data t is
given in the table's metadata).")
(vom:config :turtl-core :debug)
| null | https://raw.githubusercontent.com/turtl/core-cl/8fdfaaf918f731318e734eaa7adcd083494f44b8/app/config.lisp | lisp | client self-awareness
data directory
api stuff
kinda lame to jam the queue into the document-store, but it sort of fits
due to the fact that jobs will be document data and we can still run
custom queries on it (plus it can benefit from lossless schema upgrades). | (in-package :turtl-core)
(defparameter *client* "core")
(defparameter *version* (asdf/component:component-version (asdf/system:find-system :turtl-core)))
(defparameter *root* (asdf:system-relative-pathname :turtl-core #P"")
"Defines the directory we're loading from.")
(defvar *data-directory* "~/.turtl"
"Holds our Turtl database and any other files associated with storage.")
(defparameter *comm-url* "inproc"
"The nanomsg URL we use to talk to Turtl.")
(defparameter *api-url* ":8181/api")
(defparameter *api-key* "")
(defparameter *api-auth*
'((:post . "/users")
(:post . "/log/error"))
"API resources that *don't* need auth.")
(defparameter *db-schema*
'(("kv")
("users")
("keychain"
:indexes (("item_id" . :id)))
("personas")
("boards"
:indexes (("user_id" . :id)))
("notes"
:indexes (("user_id" . :id)
("board_id" . :id)
("has_file" . :bool)))
("files"
:binary-data t
:indexes (("note_id" . :id)
("synced" . :bool)
("has_data" . :bool)))
("queue"
:id :rowid
:indexes (("qid" . :integer)
("grabbed" . :integer)
("failed" . :integer))))
"Holds the local DB schema. This is really a table name and a set of indexes
for that table. Anything else for each table is implemented as a JSON blob,
effectively giving us an object store without th hassle of codifying every
last possible field. So in general the setup is:
ID | [indexed_field1] | [indexed_field2] | ... | data
where `data` is a JSON blob, or possibly a binary blob (if :binary-data t is
given in the table's metadata).")
(vom:config :turtl-core :debug)
|
badf7721899ccb0424449a2f2882df60eb5ee66d905b24b6327a2cbad59a1772 | shirok/Gauche | 125.scm | ;; SRFI-125 became scheme.hash-table
(define-module srfi.125 (extend scheme.hash-table))
| null | https://raw.githubusercontent.com/shirok/Gauche/e606bfe5a94b100d5807bca9c2bb95df94f60aa6/lib/srfi/125.scm | scheme | SRFI-125 became scheme.hash-table | (define-module srfi.125 (extend scheme.hash-table))
|
ab5e364b6479a0bd0a51808261d0bdfafc14f73b8456b7073fce8463c403726d | tweag/ormolu | type-applications.hs | # LANGUAGE TypeApplications #
foo = f @String a b c
bar = f @(Maybe Int) a b
baz = f @Int @String
a b
goo = hash
@(HASH TPraosStandardCrypto)
@ByteString
"And the lamb lies down on Broadway"
test x = case x of
Foo @t -> show @t 0
Bar
@t @u v
-> ""
| null | https://raw.githubusercontent.com/tweag/ormolu/570b3175a2328f9cd5f6e6e648c200b89f23af2f/data/examples/declaration/value/function/type-applications.hs | haskell | # LANGUAGE TypeApplications #
foo = f @String a b c
bar = f @(Maybe Int) a b
baz = f @Int @String
a b
goo = hash
@(HASH TPraosStandardCrypto)
@ByteString
"And the lamb lies down on Broadway"
test x = case x of
Foo @t -> show @t 0
Bar
@t @u v
-> ""
| |
cd0c2bcf2b2c27ae90c8c138d2c5511f8ac6ee13729ef913f820ae78135fd387 | facebookarchive/JSCaml | file_util.ml |
* Copyright ( c ) 2013 - present , Facebook , Inc.
* All rights reserved .
*
* This source code is licensed under the BSD - style license found in the
* LICENSE file in the root directory of this source tree . An additional grant
* of patent rights can be found in the PATENTS file in the same directory .
*
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*)
external char_code: char -> int = "%identity"
external char_chr: int -> char = "%identity"
let escape_name name =
let rec find_escape_pos i n str =
if i = n then -1, ' '
else
let ch = String.unsafe_get str i in
if ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') || ch = '_' then
find_escape_pos (i+1) n str
else
i, ch in
let char_a = char_code 'a' in
let append_escape ch buffer =
let b = char_code ch in
Buffer.add_char buffer (char_chr (char_a + b / 16));
Buffer.add_char buffer (char_chr (char_a + b mod 16)) in
let rec find_append_escape_and_repeat buffer i n str =
match find_escape_pos i n str with
| (-1), _ ->
if i = 0 then Buffer.add_string buffer "_";
Buffer.add_substring buffer str i (n-i);
| j, ch ->
if i = 0 then Buffer.add_string buffer "__";
Buffer.add_substring buffer str i (j-i);
append_escape ch buffer;
find_append_escape_and_repeat buffer (j+1) n str in
let n = (String.length name) in
let buffer = Buffer.create n in
find_append_escape_and_repeat buffer 0 n name;
Buffer.contents buffer
| null | https://raw.githubusercontent.com/facebookarchive/JSCaml/adf48cc4aa87e02b6a70765dc1e0904c4739804f/compiler/file_util.ml | ocaml |
* Copyright ( c ) 2013 - present , Facebook , Inc.
* All rights reserved .
*
* This source code is licensed under the BSD - style license found in the
* LICENSE file in the root directory of this source tree . An additional grant
* of patent rights can be found in the PATENTS file in the same directory .
*
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*)
external char_code: char -> int = "%identity"
external char_chr: int -> char = "%identity"
let escape_name name =
let rec find_escape_pos i n str =
if i = n then -1, ' '
else
let ch = String.unsafe_get str i in
if ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') || ch = '_' then
find_escape_pos (i+1) n str
else
i, ch in
let char_a = char_code 'a' in
let append_escape ch buffer =
let b = char_code ch in
Buffer.add_char buffer (char_chr (char_a + b / 16));
Buffer.add_char buffer (char_chr (char_a + b mod 16)) in
let rec find_append_escape_and_repeat buffer i n str =
match find_escape_pos i n str with
| (-1), _ ->
if i = 0 then Buffer.add_string buffer "_";
Buffer.add_substring buffer str i (n-i);
| j, ch ->
if i = 0 then Buffer.add_string buffer "__";
Buffer.add_substring buffer str i (j-i);
append_escape ch buffer;
find_append_escape_and_repeat buffer (j+1) n str in
let n = (String.length name) in
let buffer = Buffer.create n in
find_append_escape_and_repeat buffer 0 n name;
Buffer.contents buffer
| |
285640e58b0efb0bcc4567889d0254ec9c8a60ba6a8f4fd40eeaa55d60d6af59 | dandoh/web-haskell-graphql-postgres-boilerplate | JWT.hs | {-# LANGUAGE OverloadedStrings #-}
module Authentication.JWT where
import Control.Monad (guard)
import Data.Aeson.Types (Value (Bool, Number))
import qualified Data.Map as Map
import Data.Scientific (base10Exponent, coefficient)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime)
import Data.Time.Clock.POSIX
import Web.JWT
import Prelude hiding (exp)
-------------------------------------------------------------------------------
type SecretKey = Text
type Token = Text
userIDKey :: Text
userIDKey = "USERID"
-- |
-------------------------------------------------------------------------------
makeJWT :: UTCTime -> SecretKey -> Int -> Token
makeJWT currentTime secret userId =
let cs =
mempty returns a default JWTClaimsSet
{ iss = stringOrURI "webhaskell",
unregisteredClaims =
ClaimsMap $
Map.fromList [(userIDKey, Number $ fromIntegral userId)],
exp =
numericDate $
utcTimeToPOSIXSeconds currentTime + 30 * posixDayLength
}
signer = hmacSecret secret
in encodeSigned signer mempty cs
-- |
-------------------------------------------------------------------------------
verifyJWT :: UTCTime -> SecretKey -> Token -> Maybe Int
verifyJWT currentTime secret token = do
let signer = hmacSecret secret
unverifiedJWT <- decode token
verifiedJWT <- verify signer unverifiedJWT
expTime <- exp . claims $ verifiedJWT
now <- numericDate $ utcTimeToPOSIXSeconds currentTime
guard (now < expTime)
let kv = unClaimsMap . unregisteredClaims . claims $ verifiedJWT
userIDVal <- Map.lookup userIDKey kv
case userIDVal of
Number userID -> return . fromIntegral $ coefficient userID
_ -> Nothing
| null | https://raw.githubusercontent.com/dandoh/web-haskell-graphql-postgres-boilerplate/e673e9ee07ce7a4dd9b023328038664e8fdfdd78/src/Authentication/JWT.hs | haskell | # LANGUAGE OverloadedStrings #
-----------------------------------------------------------------------------
|
-----------------------------------------------------------------------------
|
----------------------------------------------------------------------------- |
module Authentication.JWT where
import Control.Monad (guard)
import Data.Aeson.Types (Value (Bool, Number))
import qualified Data.Map as Map
import Data.Scientific (base10Exponent, coefficient)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime)
import Data.Time.Clock.POSIX
import Web.JWT
import Prelude hiding (exp)
type SecretKey = Text
type Token = Text
userIDKey :: Text
userIDKey = "USERID"
makeJWT :: UTCTime -> SecretKey -> Int -> Token
makeJWT currentTime secret userId =
let cs =
mempty returns a default JWTClaimsSet
{ iss = stringOrURI "webhaskell",
unregisteredClaims =
ClaimsMap $
Map.fromList [(userIDKey, Number $ fromIntegral userId)],
exp =
numericDate $
utcTimeToPOSIXSeconds currentTime + 30 * posixDayLength
}
signer = hmacSecret secret
in encodeSigned signer mempty cs
verifyJWT :: UTCTime -> SecretKey -> Token -> Maybe Int
verifyJWT currentTime secret token = do
let signer = hmacSecret secret
unverifiedJWT <- decode token
verifiedJWT <- verify signer unverifiedJWT
expTime <- exp . claims $ verifiedJWT
now <- numericDate $ utcTimeToPOSIXSeconds currentTime
guard (now < expTime)
let kv = unClaimsMap . unregisteredClaims . claims $ verifiedJWT
userIDVal <- Map.lookup userIDKey kv
case userIDVal of
Number userID -> return . fromIntegral $ coefficient userID
_ -> Nothing
|
4cfd4bf6d9329f63a65a9c47273a171fd3e41baa433a8e1cb71a437951ab27f2 | esb-lwb/lwb | prop_rules.clj | lwb Logic WorkBench -- Natural deduction
Copyright ( c ) 2016 , THM . All rights reserved .
; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php ) .
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
(ns lwb.nd.examples.prop-rules
(:require [lwb.nd.repl :refer :all]))
; interactive checking in the repl for nd
(load-logic :prop)
; -----------------------------------------------------------------------------------------
; and-introduction
; forward
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i 1 2)
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i 1)
(swap '?1 'P2)
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i :? 2)
(swap '?1 'P1)
; --------------------------------------------------
1 : P1 premise
2 : P2 premise
3 : ( and P1 P2 ) : and - i [ 1 2 ]
; --------------------------------------------------
(proof '[P1 P2] '(and P1 P2))
(step-b :and-i 4)
(proof 'A '(and A A))
(step-b :and-i 3)
; -----------------------------------------------------------------------------------------
; and-elimination
; forward preserving the left side
(proof '(and P1 P2) 'P1)
(step-f :and-e1 1)
; --------------------------------------------------
1 : ( and P1 P2 ) premise
2 : P1 : and - e1 [ 1 ]
; --------------------------------------------------
; forward preserving the right side
(proof '(and P1 P2) 'P2)
(step-f :and-e2 1)
(proof '(and A A) 'A)
(step-f :and-e1 1)
; -----------------------------------------------------------------------------------------
; or-introduction
; forward inventing the right side
(proof 'P1 '(or P1 P2))
(step-f :or-i1 1)
(swap '?1 'P2)
; backward preserving the left side
(proof 'P1 '(or P1 P2))
(step-b :or-i1 3)
;
; --------------------------------------------------
1 : P1 premise
2 : ( or P1 P2 ) : or - i1 [ 1 ]
; --------------------------------------------------
; forward inventing the left side
(proof 'P2 '(or P1 P2))
(step-f :or-i2 1)
(swap '?1 'P1)
; backward preserving the right side
(proof 'P2 '(or P1 P2))
(step-b :or-i2 3)
(proof 'A '(or A A))
(step-b :or-i1 3)
; -----------------------------------------------------------------------------------------
; or-elimination
; forward
(proof '(or (and P R) (and Q R)) 'R)
(step-f :or-e 1 3)
(step-f :and-e2 2)
(step-f :and-e2 4)
;
; --------------------------------------------------
1 : ( or ( and P R ) ( and Q R ) ) : premise
; ------------------------------------------------
2 : | ( and P R ) : assumption
3 : | R : and - e2 [ 2 ]
; ------------------------------------------------
; ------------------------------------------------
4 : | ( and Q R ) : assumption
5 : | R : and - e2 [ 4 ]
; ------------------------------------------------
6 : R : or - e [ 1 [ 2 3 ] [ 4 5 ] ]
; --------------------------------------------------
; backward
(proof '(or (and P R) (and Q R)) 'R)
(step-b :or-e 3 1)
(step-f :and-e2 2)
(step-f :and-e2 4)
(proof '(or A A) 'A)
(step-f :or-e 1 3)
; --------------------------------------------------
1 : ( or A A ) : premise
; ------------------------------------------------
2 : | A : assumption
3 : | A : repeat [ 2 ]
; ------------------------------------------------
; ------------------------------------------------
4 : | A : assumption
5 : | A : repeat [ 4 ]
; ------------------------------------------------
6 : A : or - e [ 1 [ 2 3 ] [ 4 5 ] ]
; --------------------------------------------------
; -----------------------------------------------------------------------------------------
; impl-introduction
; backwards
(proof '(impl (and P (not P)) Q))
(step-b :impl-i 2)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-b :efq 6)
(proof 'A '(impl A A))
(step-b :impl-i 3)
; -----------------------------------------------------------------------------------------
; impl-elimination
; forward
(proof '[P (impl P Q)] 'Q)
(step-f :impl-e 2 1)
; backward
(proof '[P (impl P Q)] 'Q)
(step-b :impl-e 4 2)
(proof '[P (impl P Q)] 'Q)
(step-b :impl-e 4 :? 1)
(proof '(impl truth A) 'A)
(step-f :truth)
(step-f :impl-e 1 2)
; -----------------------------------------------------------------------------------------
; reductio ad absurdum
; backwards
(proof '(or P (not P)))
(step-b :raa 2)
(step-f :tnd)
(swap '?1 'P)
(step-f :not-e 1 2)
; -----------------------------------------------------------------------------------------
ex falso
; forward
(proof '(and P (not P)) 'Q)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-b :efq 6)
(proof '(and P (not P)) 'Q)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-f :efq 4)
(swap '?1 'Q)
; backward
(proof '(and P (not P)) 'Q)
(step-b :efq 3)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
; -----------------------------------------------------------------------------------------
; Derived rules
; -----------------------------------------------------------------------------------------
; notnot-introduction
(proof 'P '(not (not P)))
(step-b :not-i 3)
(step-f :not-e 2 1)
; -----------------------------------------------------------------------------------------
; notnot-elimination
(proof '(not (not P)) 'P)
(step-b :raa 3)
(step-f :not-e 1 2)
; -----------------------------------------------------------------------------------------
Modus Tollens
(proof '[(impl P Q) (not Q)] '(not P))
(step-b :not-i 4)
(step-f :impl-e 1 3)
(step-f :not-e 2 4)
; -----------------------------------------------------------------------------------------
; Tertium non datur
(proof '(or P (not P)))
(step-b :raa 2)
(step-b :not-e 3 1)
(step-b :or-i2 3)
(step-b :not-i 3)
(step-f :or-i1 2)
(swap '?1 '(not P))
(step-f :not-e 1 3)
; --------------------------------------------------
; ------------------------------------------------
1 : | ( not ( or P ( not P ) ) ) : assumption
; | ----------------------------------------------
2 : | | P : assumption
3 : | | ( or P ( not P ) ) : or - i1 [ 2 ]
4 : | | contradiction : not - e [ 1 3 ]
; | ----------------------------------------------
5 : | ( not P ) : not - i [ [ 2 4 ] ]
6 : | ( or P ( not P ) ) : or - i2 [ 5 ]
7 : | contradiction : not - e [ 1 6 ]
; ------------------------------------------------
8 : ( or P ( not P ) ) : raa [ [ 1 7 ] ]
; --------------------------------------------------
; -----------------------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/esb-lwb/lwb/bba51ada7f7316341733d37b0dc4848c4891ef3a/src/lwb/nd/examples/prop_rules.clj | clojure | The use and distribution terms for this software are covered by the
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
interactive checking in the repl for nd
-----------------------------------------------------------------------------------------
and-introduction
forward
--------------------------------------------------
--------------------------------------------------
-----------------------------------------------------------------------------------------
and-elimination
forward preserving the left side
--------------------------------------------------
--------------------------------------------------
forward preserving the right side
-----------------------------------------------------------------------------------------
or-introduction
forward inventing the right side
backward preserving the left side
--------------------------------------------------
--------------------------------------------------
forward inventing the left side
backward preserving the right side
-----------------------------------------------------------------------------------------
or-elimination
forward
--------------------------------------------------
------------------------------------------------
------------------------------------------------
------------------------------------------------
------------------------------------------------
--------------------------------------------------
backward
--------------------------------------------------
------------------------------------------------
------------------------------------------------
------------------------------------------------
------------------------------------------------
--------------------------------------------------
-----------------------------------------------------------------------------------------
impl-introduction
backwards
-----------------------------------------------------------------------------------------
impl-elimination
forward
backward
-----------------------------------------------------------------------------------------
reductio ad absurdum
backwards
-----------------------------------------------------------------------------------------
forward
backward
-----------------------------------------------------------------------------------------
Derived rules
-----------------------------------------------------------------------------------------
notnot-introduction
-----------------------------------------------------------------------------------------
notnot-elimination
-----------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------
Tertium non datur
--------------------------------------------------
------------------------------------------------
| ----------------------------------------------
| ----------------------------------------------
------------------------------------------------
--------------------------------------------------
----------------------------------------------------------------------------------------- | lwb Logic WorkBench -- Natural deduction
Copyright ( c ) 2016 , THM . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php ) .
(ns lwb.nd.examples.prop-rules
(:require [lwb.nd.repl :refer :all]))
(load-logic :prop)
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i 1 2)
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i 1)
(swap '?1 'P2)
(proof '[P1 P2] '(and P1 P2))
(step-f :and-i :? 2)
(swap '?1 'P1)
1 : P1 premise
2 : P2 premise
3 : ( and P1 P2 ) : and - i [ 1 2 ]
(proof '[P1 P2] '(and P1 P2))
(step-b :and-i 4)
(proof 'A '(and A A))
(step-b :and-i 3)
(proof '(and P1 P2) 'P1)
(step-f :and-e1 1)
1 : ( and P1 P2 ) premise
2 : P1 : and - e1 [ 1 ]
(proof '(and P1 P2) 'P2)
(step-f :and-e2 1)
(proof '(and A A) 'A)
(step-f :and-e1 1)
(proof 'P1 '(or P1 P2))
(step-f :or-i1 1)
(swap '?1 'P2)
(proof 'P1 '(or P1 P2))
(step-b :or-i1 3)
1 : P1 premise
2 : ( or P1 P2 ) : or - i1 [ 1 ]
(proof 'P2 '(or P1 P2))
(step-f :or-i2 1)
(swap '?1 'P1)
(proof 'P2 '(or P1 P2))
(step-b :or-i2 3)
(proof 'A '(or A A))
(step-b :or-i1 3)
(proof '(or (and P R) (and Q R)) 'R)
(step-f :or-e 1 3)
(step-f :and-e2 2)
(step-f :and-e2 4)
1 : ( or ( and P R ) ( and Q R ) ) : premise
2 : | ( and P R ) : assumption
3 : | R : and - e2 [ 2 ]
4 : | ( and Q R ) : assumption
5 : | R : and - e2 [ 4 ]
6 : R : or - e [ 1 [ 2 3 ] [ 4 5 ] ]
(proof '(or (and P R) (and Q R)) 'R)
(step-b :or-e 3 1)
(step-f :and-e2 2)
(step-f :and-e2 4)
(proof '(or A A) 'A)
(step-f :or-e 1 3)
1 : ( or A A ) : premise
2 : | A : assumption
3 : | A : repeat [ 2 ]
4 : | A : assumption
5 : | A : repeat [ 4 ]
6 : A : or - e [ 1 [ 2 3 ] [ 4 5 ] ]
(proof '(impl (and P (not P)) Q))
(step-b :impl-i 2)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-b :efq 6)
(proof 'A '(impl A A))
(step-b :impl-i 3)
(proof '[P (impl P Q)] 'Q)
(step-f :impl-e 2 1)
(proof '[P (impl P Q)] 'Q)
(step-b :impl-e 4 2)
(proof '[P (impl P Q)] 'Q)
(step-b :impl-e 4 :? 1)
(proof '(impl truth A) 'A)
(step-f :truth)
(step-f :impl-e 1 2)
(proof '(or P (not P)))
(step-b :raa 2)
(step-f :tnd)
(swap '?1 'P)
(step-f :not-e 1 2)
ex falso
(proof '(and P (not P)) 'Q)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-b :efq 6)
(proof '(and P (not P)) 'Q)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(step-f :efq 4)
(swap '?1 'Q)
(proof '(and P (not P)) 'Q)
(step-b :efq 3)
(step-f :and-e1 1)
(step-f :and-e2 1)
(step-f :not-e 3 2)
(proof 'P '(not (not P)))
(step-b :not-i 3)
(step-f :not-e 2 1)
(proof '(not (not P)) 'P)
(step-b :raa 3)
(step-f :not-e 1 2)
Modus Tollens
(proof '[(impl P Q) (not Q)] '(not P))
(step-b :not-i 4)
(step-f :impl-e 1 3)
(step-f :not-e 2 4)
(proof '(or P (not P)))
(step-b :raa 2)
(step-b :not-e 3 1)
(step-b :or-i2 3)
(step-b :not-i 3)
(step-f :or-i1 2)
(swap '?1 '(not P))
(step-f :not-e 1 3)
1 : | ( not ( or P ( not P ) ) ) : assumption
2 : | | P : assumption
3 : | | ( or P ( not P ) ) : or - i1 [ 2 ]
4 : | | contradiction : not - e [ 1 3 ]
5 : | ( not P ) : not - i [ [ 2 4 ] ]
6 : | ( or P ( not P ) ) : or - i2 [ 5 ]
7 : | contradiction : not - e [ 1 6 ]
8 : ( or P ( not P ) ) : raa [ [ 1 7 ] ]
|
630409af316aea0fd088693620dc0664de06b9f826320c7d5a718b2561c543c3 | johnlawrenceaspden/hobby-code | clojure-is-fast-part-one.clj | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Clojure is Fast !
said that Common Lisp was two languages , one for writing programs
;; fast, and one for writing fast programs.
I 've never tried to find Clojure 's fast bits before , but I thought I 'd give
it a try , using a simple example of a numerical algorithm that C and FORTRAN
;; would be very good for.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Let's try to integrate a differential equation.
;; Don't be scared! That means that we've got a number that needs to change over
;; time, and a function that tells us how much it needs to change by.
You 've got a variable y , say it 's 0 ( feet ) . We start at time 0 ( seconds ) .
We calculate f(0,0 ) , lets say that 's 0 . ( feet / second )
Then y has to change by 0 feet per second . So after a tenth of a second we
calculate that t should be 0.1 seconds , y should still be about 0 feet , and
;; that lets us work out roughly what f is now.
Say f is 0.1 : then y needs to change by 0.1 feet / second . So after another
tenth of a second , t is 0.2 , y is roughly 0.01 , and we can work out f again .
;; And repeat, for as many steps as you're interested in.
;; And that's how you find an approximate numerical solution to the differential
;; equation:
;; dy/dt = f(t,y) where f(t, y) = t-y and y=0 when t=0.
using a step size of one - tenth of a second .
This challenging procedure is known as 's Method , or sometimes as
first - order Runge - Kutta .
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; A Test Case
;; As it happens, we can work out by devious mathematical trickery that the
;; exact solution (which is what happens if you make the steps so small that you
;; can't tell they're steps any more, and everything is nice and smooth) to this
;; equation is y=e^(-t)+t-1
So if we write our program correctly then when t is 1 ,
y should be close to ( Math / exp -1 ) = 0.36787944117144233
;; And it should get closer if we make our steps smaller.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; So that's the scene set. Here is the program in which I am interested:
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
And here 's an invocation : start from 0.0 at time 0.0 , step size is 0.1 , run for 10 iterations
(solveit 0.0 0.0 0.1 10)
[ 0.9999999999999999 0.34867844010000004 0.1 0 ]
The answer tells us that after 10 steps t is 0.999 ... , or 1 as it 's
traditionally known , and y is 0.348678 .... The other two numbers are the
;; step size and the remaining iteration count, now down to 0 because the
process has done its ten steps .
In the exact answer , when t is 1 , y should be e^-1 , or 0.36787944117144233 .
So the answer 's right to within 0.02 , which is a good indicator that the
;; process works.
;; Let's have a look at the answers with different numbers of steps:
(let [steps '(1 10 100 1000 10000 100000)
results (map #(second (solveit 0.0 0.0 (/ 1.0 %) %)) steps )
errors (map #(- (Math/exp -1) %) results)]
(partition 3 (interleave steps results errors)))
;; steps result error
((1 0.0 0.36787944117144233)
(10 0.34867844010000004 0.019201001071442292)
(100 0.3660323412732297 0.001847099898212634)
(1000 0.36769542477096434 1.8401640047799317E-4)
(10000 0.367861046432899 1.8394738543314748E-5)
(100000 0.3678776017662642 1.8394051781167597E-6))
Ten times more iterations leads to a ten times better result , which we 'd
expect from theory . That 's why it 's called a first order method .
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; For this program, I care how fast the iteration is.
;; What gets measured gets improved:
My computer runs at 2.399 GHz
;; We can define a microbenchmarking macro which takes an expression
;; and the number of iterations that its calculation represents, and
;; tell us how many cpu cycles went into every iteration.
(defmacro cyclesperit [expr its]
`(let [start# (. System (nanoTime))
ret# ( ~@expr (/ 1.0 ~its) ~its )
finish# (. System (nanoTime))]
(int (/ (* *cpuspeed* (- finish# start#)) ~its))))
So here 's an expression which times the loop over 100000 iterations .
(cyclesperit (solveit 0.0 1.0) 1000000)
;; What are we expecting? Well, if modern computers work the same way as the
;; computers I used to write assembly language for, then we can estimate thus:
;; Here's the program again:
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
;; For every go round the loop we have to:
;; compare its with 0,
;; branch depending on the result,
;; add t0 to h,
call f with t0 and y0 ,
;; multiply h and the result,
add that to y0 ,
;; jump.
;; So if this was an assembly language program that worked the way you'd expect,
each loop would take 7 cycles .
;; This estimate turns out to have been a little optimistic.
;; On my desktop machine, the results of the timing expression
(cyclesperit (solveit 0.0 1.0) 1000000)
over four trial runs are :
2382
2290
2278
2317
So we 're looking at a slowdown of about 300 times over what we could probably
;; achieve coding in assembler or in C with a good optimizing compiler (and of
course I 'm assuming that floating point operations take one cycle each )
;; This is about the sort of speed that you'd expect from a dynamic language
;; without any optimization or type hinting.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; So how do we make it faster?
;; There's a fairly significant speed-up to be had from killing off function
;; calls. I think this is because primitives don't make it through function
;; boundaries They need to be boxed and unboxed.
;; There is something a bit odd about a functional language where function calls
;; are inefficient, but I understand that great men are working on the problem,
so it will probably not be a problem for clojure 1.3
;; In the meantime however, we'll inline f by hand and we'll create an internal
;; target for recur, using casts on the initial values to make sure that inside
;; the loop/recur, only the java primitives int and double are seen:
(defn solveit-2 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
;; Let's time that and see how it goes:
(cyclesperit (solveit-2 0.0 1.0) 10000000)
488
506
486
That 's much better . The slowdown is now about 70 times compared with the
;; program and CPU in my head.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
The first law of optimizing things is that you need a profiler to find out
;; where the slow bits are.
;; At this point, we'll bring in jvisualvm, an excellent piece of software that
;; can be installed on Ubuntu with:
;; # sudo apt-get visualvm
and probably with something similar on any other system where Java will run .
;; Just run it. How it works should be fairly obvious. I'm sure there are
;; docs and stuff. I haven't looked.
;; When using jvisualvm, you should be careful to use the most stripped-down
;; clojure image possible.
;; I usually 'require' all of contrib on startup, and
this means that the poor profiler has to instrument something like 10000
;; classes. This takes ages.
;; If you start with a clean image (it's ok to have everything on the classpath,
just do n't load it if you do n't need it ) , then it 's only about 1000 classes ,
and everything happens 10 times faster . You still need to wait about 10
;; seconds while turning profiling on or off, but that's bearable.
;; Attach jvisualvm to your clojure, and then run
(cyclesperit (solveit-2 0.0 1.0) 1000000)
;; The profiling slows everything down to treacle, even the REPL, so remember to
;; de-attach it before trying to do anything that might take noticeable time.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Results of profiling:
There are four innocent looking calls to add , minus , and multi , all with
signature ( double , double ) . There 's one to dec(int ) . But there 's also one to
gt(int , Object ) . That only takes 20 % of the time , apparently , but under it
;; there's a whole tree of other calls. Something is getting resolved at run
;; time, which is usually bad for speed.
;; The profiler suggest that function overload resolutions are being done every
;; time round the loop. Weirdly, it suggests that they're not very expensive
;; compared to add(double,double). I am suspicious, so I'm going to try
;; changing (> its 0) to (> its (int 0)). That should allow the compiler to work
;; out the type of the call to > at compile time, rather than every time round.
(defn solveit-3 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its (int 0))
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
;; Let's time that:
Remember to detach the profiler ! If you do n't you 'll get cycle counts in the 100000s
(cyclesperit (solveit-3 0.0 1.0) 1000000)
79
79
63
;; Wow! That's made a vast difference. I don't understand why.
;; Apparently the literal 0 was being treated as a generic object. I can see why
that would be slow , but the profiler said that it was only 20 % of the running
;; cost. It seems more likely that removing it has somehow decontaminated the
;; other calls. Maybe it's allowing the variables to stay in registers where
;; before they were being pushed out back onto the heap, or something?
;; I wonder if there's a way to examine the code that clojure generates for a
;; function?
At any rate , the loop is now about six times faster than it was .
;; Let's have another look with the profiler:
;; Attach it and run:
(cyclesperit (solveit-3 0.0 1.0) 1000000)
;; Again, the profiling looks about what you'd expect, except that a method
;; called RT.intCast is being called just as often as the multiplies, minuses,
;; and decs that I'm expecting to see. The profiler claims that it's not taking
;; up much time, but let's try to get rid of it by making an explicit local
variable for zero . For some reason this reminds me of ZX81 BASIC .
(defn solveit-4 [t0 y0 h its]
(let [zero (int 0)]
(loop [t0 (double t0) y0 (double y0) h (double h) its (int its)]
(if (> its zero)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))))
;; Remove the profiler and re-time:
(cyclesperit (solveit-4 0.0 1.0) 100000000)
23
23
23
;; Doing the (int 0) outside the loop again seems to have tripled the speed of
;; the loop again.
The profiler is now telling me that there are : 2 adds(double , double ) , 1
gt(int , int ) , 1 minus(double , double ) , 1 dec(int ) and 1 multiply(double ,
double ) in every loop , which is what I 'd expect if I was writing C or Java to
;; do this, but I'm suspicious that it can tell! Presumably there's still some
;; dispatching going on? These should be single assembler instructions, and
;; invisible to a profiler working at function level.
With 4 floating point , 1 gt , 1 dec , and 1 conditional branch I 'd imagine that
7 cycles / loop would be as fast as this loop could be made to run without being clever .
So it appears that there 's now only around a factor of 3 between this loop as
written , and what I 'd expect from a C , Java or assembler program .
;; In absolute terms:
"Elapsed time: 1019.442664 msecs"
[1.0000000022898672 0.7357588790870762 1.0E-8 0]
(time (solveit-4 0.0 1.0 (/ 1.0 100000000) 100000000))
1 second to do 100 000 000 iterations on my desktop , at about 23 cycles / loop
;; I'm pretty happy with that, especially given that the loop is still readable!
;; It's only slightly more complicated than the original. Optimizing Common Lisp
;; tends to make it look horrible.
;; Does anyone have any ideas how to squeeze a few more cycles out of the loop?
One more thing . We can make it go pretty fast . Does it still work ?
Remember y(1 ) should approximate e^-1 0.36787944117144233 , and our vast
speedup means that it 's now not unreasonable to throw 1 000 000 000
;; iterations at the problem.
(let [steps '(1 10 100 1000 10000 100000 1000000 10000000 100000000 1000000000)
results (map #(second (solveit-4 0.0 0.0 (/ 1.0 %) %)) steps )
errors (map #(- (Math/exp -1) %) results)]
(partition 3 (interleave steps results errors)))
((1 0.0 0.36787944117144233)
(10 0.34867844010000004 0.019201001071442292)
(100 0.3660323412732297 0.001847099898212634)
(1000 0.36769542477096434 1.8401640047799317E-4)
(10000 0.367861046432899 1.8394738543314748E-5)
(100000 0.3678776017662642 1.8394051781167597E-6)
(1000000 0.3678792572317447 1.8393969763996765E-7)
(10000000 0.3678794227282174 1.8443224947262138E-8)
(100000000 0.3678794397549051 1.4165372208552185E-9)
(1000000000 0.3678794410553999 1.1604245342411446E-10))
Cool ! Accuracy improves as predicted , and with 10 ^ 9 steps we get nine
significant figures in about ten seconds .
(time (solveit-4 0.0 1.0 (/ 1.0 1000000000) 1000000000))
;; Note:
;;
;; Just in order to keep my credibility as a numerical analyst intact, I ought
;; to point out that if I really was trying to solve a smooth ODE (instead of
investigating how fast I could make some simple numeric code in Clojure ) , I
would n't be using Euler 's method . Optimize your algorithm before you optimize
;; your code.
;; No differential equations were harmed in the making of this blogpost.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Conclusion
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Clojure is a fast language , if you write so that your intentions are clear to
;; the compiler. Something tells me that as clojure gets older, it will be
;; getting better at working out what your intentions are.
;; It would not surprise me in the slightest if very soon, the code as originally
;; written runs as fast or faster than my speeded up version.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; | null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/clojure-is-fast-part-one.clj | clojure |
fast, and one for writing fast programs.
would be very good for.
Let's try to integrate a differential equation.
Don't be scared! That means that we've got a number that needs to change over
time, and a function that tells us how much it needs to change by.
that lets us work out roughly what f is now.
And repeat, for as many steps as you're interested in.
And that's how you find an approximate numerical solution to the differential
equation:
dy/dt = f(t,y) where f(t, y) = t-y and y=0 when t=0.
A Test Case
As it happens, we can work out by devious mathematical trickery that the
exact solution (which is what happens if you make the steps so small that you
can't tell they're steps any more, and everything is nice and smooth) to this
equation is y=e^(-t)+t-1
And it should get closer if we make our steps smaller.
So that's the scene set. Here is the program in which I am interested:
step size and the remaining iteration count, now down to 0 because the
process works.
Let's have a look at the answers with different numbers of steps:
steps result error
For this program, I care how fast the iteration is.
What gets measured gets improved:
We can define a microbenchmarking macro which takes an expression
and the number of iterations that its calculation represents, and
tell us how many cpu cycles went into every iteration.
What are we expecting? Well, if modern computers work the same way as the
computers I used to write assembly language for, then we can estimate thus:
Here's the program again:
For every go round the loop we have to:
compare its with 0,
branch depending on the result,
add t0 to h,
multiply h and the result,
jump.
So if this was an assembly language program that worked the way you'd expect,
This estimate turns out to have been a little optimistic.
On my desktop machine, the results of the timing expression
achieve coding in assembler or in C with a good optimizing compiler (and of
This is about the sort of speed that you'd expect from a dynamic language
without any optimization or type hinting.
So how do we make it faster?
There's a fairly significant speed-up to be had from killing off function
calls. I think this is because primitives don't make it through function
boundaries They need to be boxed and unboxed.
There is something a bit odd about a functional language where function calls
are inefficient, but I understand that great men are working on the problem,
In the meantime however, we'll inline f by hand and we'll create an internal
target for recur, using casts on the initial values to make sure that inside
the loop/recur, only the java primitives int and double are seen:
Let's time that and see how it goes:
program and CPU in my head.
where the slow bits are.
At this point, we'll bring in jvisualvm, an excellent piece of software that
can be installed on Ubuntu with:
# sudo apt-get visualvm
Just run it. How it works should be fairly obvious. I'm sure there are
docs and stuff. I haven't looked.
When using jvisualvm, you should be careful to use the most stripped-down
clojure image possible.
I usually 'require' all of contrib on startup, and
classes. This takes ages.
If you start with a clean image (it's ok to have everything on the classpath,
seconds while turning profiling on or off, but that's bearable.
Attach jvisualvm to your clojure, and then run
The profiling slows everything down to treacle, even the REPL, so remember to
de-attach it before trying to do anything that might take noticeable time.
Results of profiling:
there's a whole tree of other calls. Something is getting resolved at run
time, which is usually bad for speed.
The profiler suggest that function overload resolutions are being done every
time round the loop. Weirdly, it suggests that they're not very expensive
compared to add(double,double). I am suspicious, so I'm going to try
changing (> its 0) to (> its (int 0)). That should allow the compiler to work
out the type of the call to > at compile time, rather than every time round.
Let's time that:
Wow! That's made a vast difference. I don't understand why.
Apparently the literal 0 was being treated as a generic object. I can see why
cost. It seems more likely that removing it has somehow decontaminated the
other calls. Maybe it's allowing the variables to stay in registers where
before they were being pushed out back onto the heap, or something?
I wonder if there's a way to examine the code that clojure generates for a
function?
Let's have another look with the profiler:
Attach it and run:
Again, the profiling looks about what you'd expect, except that a method
called RT.intCast is being called just as often as the multiplies, minuses,
and decs that I'm expecting to see. The profiler claims that it's not taking
up much time, but let's try to get rid of it by making an explicit local
Remove the profiler and re-time:
Doing the (int 0) outside the loop again seems to have tripled the speed of
the loop again.
do this, but I'm suspicious that it can tell! Presumably there's still some
dispatching going on? These should be single assembler instructions, and
invisible to a profiler working at function level.
In absolute terms:
I'm pretty happy with that, especially given that the loop is still readable!
It's only slightly more complicated than the original. Optimizing Common Lisp
tends to make it look horrible.
Does anyone have any ideas how to squeeze a few more cycles out of the loop?
iterations at the problem.
Note:
Just in order to keep my credibility as a numerical analyst intact, I ought
to point out that if I really was trying to solve a smooth ODE (instead of
your code.
No differential equations were harmed in the making of this blogpost.
Conclusion
the compiler. Something tells me that as clojure gets older, it will be
getting better at working out what your intentions are.
It would not surprise me in the slightest if very soon, the code as originally
written runs as fast or faster than my speeded up version.
|
Clojure is Fast !
said that Common Lisp was two languages , one for writing programs
I 've never tried to find Clojure 's fast bits before , but I thought I 'd give
it a try , using a simple example of a numerical algorithm that C and FORTRAN
You 've got a variable y , say it 's 0 ( feet ) . We start at time 0 ( seconds ) .
We calculate f(0,0 ) , lets say that 's 0 . ( feet / second )
Then y has to change by 0 feet per second . So after a tenth of a second we
calculate that t should be 0.1 seconds , y should still be about 0 feet , and
Say f is 0.1 : then y needs to change by 0.1 feet / second . So after another
tenth of a second , t is 0.2 , y is roughly 0.01 , and we can work out f again .
using a step size of one - tenth of a second .
This challenging procedure is known as 's Method , or sometimes as
first - order Runge - Kutta .
So if we write our program correctly then when t is 1 ,
y should be close to ( Math / exp -1 ) = 0.36787944117144233
(defn f
  "Right-hand side of the test ODE dy/dt = t - y."
  [t y]
  (- t y))

(defn solveit
  "Plain Euler stepper: advance t by h and y by h*f(t, y) until the
  iteration budget `its` runs out.  Returns [t y h its-remaining]."
  [t0 y0 h its]
  (if-not (pos? its)
    [t0 y0 h its]
    (recur (+ t0 h)
           (+ y0 (* h (f t0 y0)))
           h
           (dec its))))
And here 's an invocation : start from 0.0 at time 0.0 , step size is 0.1 , run for 10 iterations
(solveit 0.0 0.0 0.1 10)
[ 0.9999999999999999 0.34867844010000004 0.1 0 ]
The answer tells us that after 10 steps t is 0.999 ... , or 1 as it 's
traditionally known , and y is 0.348678 .... The other two numbers are the
process has done its ten steps .
In the exact answer , when t is 1 , y should be e^-1 , or 0.36787944117144233 .
So the answer 's right to within 0.02 , which is a good indicator that the
(let [steps '(1 10 100 1000 10000 100000)
results (map #(second (solveit 0.0 0.0 (/ 1.0 %) %)) steps )
errors (map #(- (Math/exp -1) %) results)]
(partition 3 (interleave steps results errors)))
((1 0.0 0.36787944117144233)
(10 0.34867844010000004 0.019201001071442292)
(100 0.3660323412732297 0.001847099898212634)
(1000 0.36769542477096434 1.8401640047799317E-4)
(10000 0.367861046432899 1.8394738543314748E-5)
(100000 0.3678776017662642 1.8394051781167597E-6))
Ten times more iterations leads to a ten times better result , which we 'd
expect from theory . That 's why it 's called a first order method .
My computer runs at 2.399 GHz
;; Microbenchmark helper: expands to code that invokes `expr` with the
;; extra arguments (/ 1.0 its) and its, times the call with
;; System.nanoTime, and reports estimated CPU cycles per iteration.
;; ret# binds the timed result only to force its evaluation; the value
;; returned is the cycle count.
;; NOTE(review): multiplies elapsed nanoseconds by a dynamic var
;; *cpuspeed* (apparently the clock rate in GHz) that must be bound
;; elsewhere -- confirm before reuse.
(defmacro cyclesperit [expr its]
  `(let [start# (. System (nanoTime))
         ret# ( ~@expr (/ 1.0 ~its) ~its )
         finish# (. System (nanoTime))]
     (int (/ (* *cpuspeed* (- finish# start#)) ~its))))
So here 's an expression which times the loop over 100000 iterations .
(cyclesperit (solveit 0.0 1.0) 1000000)
;; (Repeated from above for context.)  f is the right-hand side of the
;; test ODE dy/dt = t - y.
(defn f [t y] (- t y))

;; Naive Euler stepper: each step advances t by h and y by h*f(t, y);
;; returns [t y h its-remaining] once the iteration count reaches zero.
(defn solveit [t0 y0 h its]
  (if (> its 0)
    (let [t1 (+ t0 h)
          y1 (+ y0 (* h (f t0 y0)))]
      (recur t1 y1 h (dec its)))
    [t0 y0 h its]))
call f with t0 and y0 ,
add that to y0 ,
each loop would take 7 cycles .
(cyclesperit (solveit 0.0 1.0) 1000000)
over four trial runs are :
2382
2290
2278
2317
So we 're looking at a slowdown of about 300 times over what we could probably
course I 'm assuming that floating point operations take one cycle each )
so it will probably not be a problem for clojure 1.3
(defn solveit-2
  "Euler stepper with the loop state coerced to Java primitives
  (double/int) via an inner loop/recur target, so boxing stops at the
  function boundary.  Note the guard `(> n 0)` still compares against a
  boxed literal zero -- that is what solveit-3 fixes.
  Returns [t y h its-remaining]."
  [t0 y0 h its]
  (loop [t (double t0), y (double y0), step (double h), n (int its)]
    (if (> n 0)
      (recur (+ t step) (+ y (* step (- t y))) step (dec n))
      [t y step n])))
(cyclesperit (solveit-2 0.0 1.0) 10000000)
488
506
486
That 's much better . The slowdown is now about 70 times compared with the
The first law of optimizing things is that you need a profiler to find out
and probably with something similar on any other system where Java will run .
this means that the poor profiler has to instrument something like 10000
just do n't load it if you do n't need it ) , then it 's only about 1000 classes ,
and everything happens 10 times faster . You still need to wait about 10
(cyclesperit (solveit-2 0.0 1.0) 1000000)
There are four innocent looking calls to add , minus , and multi , all with
signature ( double , double ) . There 's one to dec(int ) . But there 's also one to
gt(int , Object ) . That only takes 20 % of the time , apparently , but under it
(defn solveit-3
  "Like solveit-2, but the loop guard compares against `(int 0)` so the
  `>` call can resolve over primitive ints instead of a boxed literal.
  Returns [t y h its-remaining]."
  [t0 y0 h its]
  (loop [t (double t0), y (double y0), step (double h), n (int its)]
    (if-not (> n (int 0))
      [t y step n]
      (recur (+ t step) (+ y (* step (- t y))) step (dec n)))))
Remember to detach the profiler ! If you do n't you 'll get cycle counts in the 100000s
(cyclesperit (solveit-3 0.0 1.0) 1000000)
79
79
63
that would be slow , but the profiler said that it was only 20 % of the running
At any rate , the loop is now about six times faster than it was .
(cyclesperit (solveit-3 0.0 1.0) 1000000)
variable for zero . For some reason this reminds me of ZX81 BASIC .
;; Final version: primitive-hinted loop state plus a pre-cast int zero
;; bound once outside the loop, so the guard compiles down to a
;; primitive int comparison.  Returns [t y h its-remaining].
(defn solveit-4 [t0 y0 h its]
  (let [zero (int 0)]
    (loop [t0 (double t0) y0 (double y0) h (double h) its (int its)]
      (if (> its zero)
        (let [t1 (+ t0 h)
              y1 (+ y0 (* h (- t0 y0)))]
          (recur t1 y1 h (dec its)))
        [t0 y0 h its]))))
(cyclesperit (solveit-4 0.0 1.0) 100000000)
23
23
23
The profiler is now telling me that there are : 2 adds(double , double ) , 1
gt(int , int ) , 1 minus(double , double ) , 1 dec(int ) and 1 multiply(double ,
double ) in every loop , which is what I 'd expect if I was writing C or Java to
With 4 floating point , 1 gt , 1 dec , and 1 conditional branch I 'd imagine that
7 cycles / loop would be as fast as this loop could be made to run without being clever .
So it appears that there 's now only around a factor of 3 between this loop as
written , and what I 'd expect from a C , Java or assembler program .
"Elapsed time: 1019.442664 msecs"
[1.0000000022898672 0.7357588790870762 1.0E-8 0]
(time (solveit-4 0.0 1.0 (/ 1.0 100000000) 100000000))
1 second to do 100 000 000 iterations on my desktop , at about 23 cycles / loop
One more thing . We can make it go pretty fast . Does it still work ?
Remember y(1 ) should approximate e^-1 0.36787944117144233 , and our vast
speedup means that it 's now not unreasonable to throw 1 000 000 000
(let [steps '(1 10 100 1000 10000 100000 1000000 10000000 100000000 1000000000)
results (map #(second (solveit-4 0.0 0.0 (/ 1.0 %) %)) steps )
errors (map #(- (Math/exp -1) %) results)]
(partition 3 (interleave steps results errors)))
((1 0.0 0.36787944117144233)
(10 0.34867844010000004 0.019201001071442292)
(100 0.3660323412732297 0.001847099898212634)
(1000 0.36769542477096434 1.8401640047799317E-4)
(10000 0.367861046432899 1.8394738543314748E-5)
(100000 0.3678776017662642 1.8394051781167597E-6)
(1000000 0.3678792572317447 1.8393969763996765E-7)
(10000000 0.3678794227282174 1.8443224947262138E-8)
(100000000 0.3678794397549051 1.4165372208552185E-9)
(1000000000 0.3678794410553999 1.1604245342411446E-10))
Cool ! Accuracy improves as predicted , and with 10 ^ 9 steps we get nine
significant figures in about ten seconds .
(time (solveit-4 0.0 1.0 (/ 1.0 1000000000) 1000000000))
investigating how fast I could make some simple numeric code in Clojure ) , I
would n't be using Euler 's method . Optimize your algorithm before you optimize
Clojure is a fast language , if you write so that your intentions are clear to
|
90216a75ee48e451f2233472495496c285d986046769184f9420c9a383e88781 | z3z1ma/pipette | core_test.clj | (ns pipette.core-test
(:require [clojure.test :refer :all]
[pipette.core :refer :all]))
;; Placeholder spec so the test runner has something to execute; it
;; always passes.  Replace with real assertions once pipette.core has
;; behaviour worth pinning down.
(deftest a-test
  (testing "TODO: Build out test suite."
    (is (= 1 1))))
| null | https://raw.githubusercontent.com/z3z1ma/pipette/bf01b61c563a2f54e3688bf8579ef62ac22fbf9d/test/pipewise/core_test.clj | clojure | (ns pipette.core-test
(:require [clojure.test :refer :all]
[pipette.core :refer :all]))
(deftest a-test
(testing "TODO: Build out test suite."
(is (= 1 1))))
| |
fb719c6de474e3503226d9fd92448b9dbce45e21d76e0d6670f4037c727ceb88 | 23Skidoo/ghc-parmake | Parse.hs | # LANGUAGE TupleSections #
-- Parsing.
module GHC.ParMake.Parse (getModuleDeps, depsListToDeps)
where
import Control.Concurrent
import Control.Monad
import Data.Char (isAlphaNum, isSpace)
import Data.Functor ((<$>))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import qualified Data.Set as Set
import System.Exit (ExitCode(..))
import System.FilePath ((</>))
import System.IO.Temp (withSystemTempDirectory)
import Distribution.Compat.ReadP
import GHC.ParMake.Types (Dep(..))
import GHC.ParMake.Util (Verbosity, debug', fatal,
defaultOutputHooks, runProcess)
TODO This random choice of characters is * insane * , this will NOT WORK when
-- some unexpected character is in the filename.
-- Worse even, `parseLine` will just return Nothing, silencing the
problem and making ghc - parmake exit with code 1 without reason .
--
-- This filename parsing and "careful" parsing (returning Nothing by
-- default instead of erroring) must be changed!
-- | Accept a run (one or more) of characters that @ghc -M@ may emit in
-- a module name or file path: alphanumerics plus @.@, @-@, @/@ and @_@.
-- NOTE(review): any path containing other characters (spaces,
-- backslashes, ...) makes the surrounding line unparseable -- see the
-- warning above about silently dropped lines.
parseModuleName :: ReadP r String
parseModuleName = munch1 (\c -> isAlphaNum c || c == '.'
                             || c == '-' || c == '/' || c == '_')
-- | Parse a single @target : dependency@ line from a @ghc -M@ Makefile.
-- Yields 'Nothing' unless exactly one complete parse consumes the whole
-- line (modulo trailing whitespace); malformed lines are dropped
-- silently, as warned above.
parseLine :: String -> Maybe (String, String)
parseLine input =
    case completeParses of
      [pair] -> Just pair
      _      -> Nothing
  where
    -- Keep only parses whose leftover input is all whitespace.
    completeParses =
      [ pair | (pair, leftover) <- readP_to_S targetDep input
             , all isSpace leftover ]

    -- "  <target>  :  <dependency>  "
    targetDep = do
      skipSpaces
      target <- parseModuleName
      skipSpaces
      _ <- char ':'
      skipSpaces
      dep <- parseModuleName
      skipSpaces
      return (target, dep)
-- | Drop Makefile comment lines (those starting with @#@); every other
-- line is kept unchanged.
trimLines :: [String] -> [String]
trimLines = filter keep
  where
    keep ('#':_) = False
    keep _       = True
-- Interaction with the outside world.
-- | Run @ghc -M@ and return dependencies for every module.
--
-- Two passes are made in a fresh temporary directory: a plain @-M@ run
-- (intra-package dependencies only) and a @-M -include-pkg-deps@ run
-- (intra- plus package dependencies).  Each target's external deps are
-- then computed as the all-deps list minus the internal list.
getModuleDeps :: Verbosity   -- ^ logging verbosity
              -> FilePath    -- ^ path to the GHC executable
              -> [String]    -- ^ extra arguments passed through to GHC
              -> [FilePath]  -- ^ source files to analyse
              -> IO [Dep]
getModuleDeps v ghcPath ghcArgs files =
  withSystemTempDirectory "ghc-parmake" $ \tmpDir -> do
    let tmpFileInternal = tmpDir </> "depends.internal.mk"
        tmpFileExternal = tmpDir </> "depends.external.mk"
    let ghcArgsInternal = files ++ ("-M":"-dep-makefile":tmpFileInternal:ghcArgs)
        ghcArgsExternal = files ++
          ("-M":"-dep-makefile":tmpFileExternal:"-include-pkg-deps":ghcArgs)

    -- Get all internal dependencies in this package.
    let getInternalMakeDeps = do
          debug' v $ "Running compiler with -M to get internal module deps: "
            ++ ghcPath ++ " " ++ show ghcArgsInternal
          failOnError <$> runProcess defaultOutputHooks Nothing
                                     ghcPath ghcArgsInternal
          parseDepsFromFile tmpFileInternal

    -- Pass -include-pkg-deps to also find out the external dependencies.
    let getAllMakeDeps = do
          debug' v $ "Running compiler with '-M -include-pkg-deps' "
            ++ "to get external module deps: "
            ++ ghcPath ++ " " ++ show ghcArgsExternal
          failOnError <$> runProcess defaultOutputHooks Nothing
                                     ghcPath ghcArgsExternal
          parseDepsFromFile tmpFileExternal

    -- The two ghc -M runs are mainly CPU-bound, so run them in parallel.
    [internalMakeDeps, allMakeDeps] <- parallelIO [ getInternalMakeDeps
                                                  , getAllMakeDeps ]

    -- Pair up, per target, the internal deps with the internal+external
    -- deps.
    let depsIntAll = mergeValues (groupByTarget internalMakeDeps)
                                 (groupByTarget allMakeDeps)

    -- External deps are (all - internal) ones.
    return [ Dep target int (intExt `diff` int)
           | (target, (int, intExt)) <- Map.toList depsIntAll ]
  where
    -- Abort loudly if ghc -M itself failed; its own stderr output has
    -- the details.
    failOnError (ExitSuccess ) = ()
    failOnError (ExitFailure n) =
      fatal $ "ghc -M exited with status " ++ show n

    -- Parse "target : dep" pairs out of one generated Makefile.
    parseDepsFromFile :: FilePath -> IO [(String, String)]
    parseDepsFromFile file = catMaybes . map parseLine . trimLines . lines
                             <$> readFile file
-- * Helpers
-- | Fast, order-preserving list difference: every occurrence of every
-- element of @ys@ is removed from @xs@; the surviving elements keep
-- their original order and multiplicity.  Uses 'Set.difference' so the
-- membership tests are logarithmic.
diff :: (Ord a) => [a] -> [a] -> [a]
diff xs ys = filter (`Set.member` keepers) xs
  where
    keepers = Set.fromList xs `Set.difference` Set.fromList ys
-- | Run the IO actions, each on its own thread, and wait until all are
-- finished.  Results come back in the same order as the actions.
-- NOTE(review): if a forked action throws, its MVar is never filled and
-- the final 'readMVar' blocks forever -- confirm that callers only pass
-- actions that report failure by other means.
parallelIO :: [IO a] -> IO [a]
parallelIO actions = mapM readMVar =<< mapM spawn actions
  where
    -- Fork the action and hand back the cell its result will arrive in.
    spawn action = do
      cell <- newEmptyMVar
      _ <- forkIO (action >>= putMVar cell)
      return cell
-- | Bucket a flat (target, dependency) list by target.  Because
-- 'Map.fromListWith' prepends the later of two duplicate keys, each
-- target's dependency list comes out in reverse input order.
groupByTarget :: (Ord target) => [(target, dep)] -> Map target [dep]
groupByTarget pairs = Map.fromListWith (++) (map singleton pairs)
  where
    singleton (target, dep) = (target, [dep])
-- | Merge two maps keyed alike, pairing up their value lists.  A key
-- present in only one input still appears in the result, with an empty
-- list on the missing side (via 'Map.unionWith').
mergeValues :: (Ord k) => Map k [a] -> Map k [b] -> Map k ([a], [b])
mergeValues lefts rights =
    Map.unionWith combine (fmap toLeft lefts) (fmap toRight rights)
  where
    toLeft as  = (as, [])
    toRight bs = ([], bs)
    combine (as, bs) (as', bs') = (as ++ as', bs ++ bs')
-- | Lift a raw (target, dependency) pair list into 'Dep' records,
-- recording no external dependencies for any target.
depsListToDeps :: [(FilePath, FilePath)] -> [Dep]
depsListToDeps pairs = map toDep (Map.toList (groupByTarget pairs))
  where
    toDep (target, deps) = Dep target deps []
| null | https://raw.githubusercontent.com/23Skidoo/ghc-parmake/c4d7fb042f5138588fa54d0153be1aee93db3835/src/GHC/ParMake/Parse.hs | haskell | Parsing.
some unexpected character is in the filename.
Worse even, `parseLine` will just return Nothing, silencing the
This filename parsing and "careful" parsing (returning Nothing by
default instead of erroring) must be changed!
Interaction with the outside world.
Get all internal dependencies in this package.
Pass -include-pkg-deps to also find out the external dependencies.
External deps are (all - internal) ones.
* Helpers
| Fast list difference. Uses `Set.difference`, but preserves order.
| Groups a list of (targets, dependencies) by the targets.
| Converts a list of (targets, dependencies) to a `Dep` list
with no external dependencies. | # LANGUAGE TupleSections #
module GHC.ParMake.Parse (getModuleDeps, depsListToDeps)
where
import Control.Concurrent
import Control.Monad
import Data.Char (isAlphaNum, isSpace)
import Data.Functor ((<$>))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import qualified Data.Set as Set
import System.Exit (ExitCode(..))
import System.FilePath ((</>))
import System.IO.Temp (withSystemTempDirectory)
import Distribution.Compat.ReadP
import GHC.ParMake.Types (Dep(..))
import GHC.ParMake.Util (Verbosity, debug', fatal,
defaultOutputHooks, runProcess)
TODO This random choice of characters is * insane * , this will NOT WORK when
problem and making ghc - parmake exit with code 1 without reason .
parseModuleName :: ReadP r String
parseModuleName = munch1 (\c -> isAlphaNum c || c == '.'
|| c == '-' || c == '/' || c == '_')
parseLine :: String -> Maybe (String, String)
parseLine l = case [ r | (r, rest) <- readP_to_S parser l, all isSpace rest] of
[] -> Nothing
[r] -> Just r
_ -> Nothing
where
parser = do skipSpaces
m <- parseModuleName
skipSpaces
_ <- char ':'
skipSpaces
d <- parseModuleName
skipSpaces
return (m,d)
trimLines :: [String] -> [String]
trimLines ls = [ l | l <- ls, isValidLine l]
where
isValidLine ('#':_) = False
isValidLine _ = True
Run ' ghc -M ' and return dependencies for every module .
getModuleDeps :: Verbosity
-> FilePath
-> [String]
-> [FilePath]
-> IO [Dep]
getModuleDeps v ghcPath ghcArgs files =
withSystemTempDirectory "ghc-parmake" $ \tmpDir -> do
let tmpFileInternal = tmpDir </> "depends.internal.mk"
tmpFileExternal = tmpDir </> "depends.external.mk"
let ghcArgsInternal = files ++ ("-M":"-dep-makefile":tmpFileInternal:ghcArgs)
ghcArgsExternal = files ++
("-M":"-dep-makefile":tmpFileExternal:"-include-pkg-deps":ghcArgs)
let getInternalMakeDeps = do
debug' v $ "Running compiler with -M to get internal module deps: "
++ ghcPath ++ " " ++ show ghcArgsInternal
failOnError <$> runProcess defaultOutputHooks Nothing
ghcPath ghcArgsInternal
parseDepsFromFile tmpFileInternal
let getAllMakeDeps = do
debug' v $ "Running compiler with '-M -include-pkg-deps' "
++ "to get external module deps: "
++ ghcPath ++ " " ++ show ghcArgsExternal
failOnError <$> runProcess defaultOutputHooks Nothing
ghcPath ghcArgsExternal
parseDepsFromFile tmpFileExternal
The two ghc -M are mainly CPU - bound . Run them in parallel .
[internalMakeDeps, allMakeDeps] <- parallelIO [ getInternalMakeDeps
, getAllMakeDeps ]
Put internal and internal + external together
let depsIntAll = mergeValues (groupByTarget internalMakeDeps)
(groupByTarget allMakeDeps)
return [ Dep target int (intExt `diff` int)
| (target, (int, intExt)) <- Map.toList depsIntAll ]
where
failOnError (ExitSuccess ) = ()
failOnError (ExitFailure n) =
fatal $ "ghc -M exited with status " ++ show n
parseDepsFromFile :: FilePath -> IO [(String, String)]
parseDepsFromFile file = catMaybes . map parseLine . trimLines . lines
<$> readFile file
diff :: (Ord a) => [a] -> [a] -> [a]
xs `diff` ys = filter (`Set.member` diffSet) xs
where
diffSet = Set.fromList xs `Set.difference` Set.fromList ys
| Runs the IO actions in parallel , and waits until all are finished .
parallelIO :: [IO a] -> IO [a]
parallelIO ios = do
mvars <- forM ios $ \io -> do m <- newEmptyMVar
_ <- forkIO $ io >>= putMVar m
return m
mapM readMVar mvars
groupByTarget :: (Ord target) => [(target, dep)] -> Map target [dep]
groupByTarget deps = Map.fromListWith (++) [ (t, [d]) | (t, d) <- deps ]
| Merges two maps that have the same keys .
mergeValues :: (Ord k) => Map k [a] -> Map k [b] -> Map k ([a], [b])
mergeValues m1 m2 = Map.unionWith (\(a,b) (x,y) -> (a ++ x, b ++ y))
(fmap (, []) m1)
(fmap ([], ) m2)
depsListToDeps :: [(FilePath, FilePath)] -> [Dep]
depsListToDeps l = [ Dep t ds [] | (t, ds) <- Map.toList (groupByTarget l) ]
|
5e2c20d8b3dd7eb2ff8d612638c8c4e05c6ed95d748460c5bf5a493c9c4f1d90 | fission-codes/fission | Public.hs | module Crypto.Key.Asymmetric.Public
( genRSA2048
, decodeASN1DERRSAPublicKey
, encodeASN1DERRSAPublicKey
) where
import qualified Crypto.PubKey.RSA as RSA
import qualified Data.ASN1.BitArray as ASN1
import qualified Data.ASN1.Types as ASN1
import qualified Data.ByteString as BS
import qualified Data.X509 as X509
import qualified OpenSSL.RSA as OpenSSL
import RIO
import Web.UCAN.Internal.Orphanage.RSA2048.Private ()
-- | Generate a fresh 2048-bit RSA keypair with public exponent 65537,
-- using OpenSSL for the generation and repackaging the result as a
-- "Crypto.PubKey.RSA" 'RSA.PrivateKey'.
--
-- OpenSSL only optionally exposes the CRT parameters (in its naming:
-- dmp1 = d mod (p-1), dmq1 = d mod (q-1), iqmp = q^-1 mod p); if any of
-- them comes back 'Nothing' we simply retry with a brand-new key rather
-- than reconstruct them ourselves.
genRSA2048 :: MonadIO m => m RSA.PrivateKey
genRSA2048 = do
  pair <- liftIO $ OpenSSL.generateRSAKey' 2048 65537

  let
    public_size = OpenSSL.rsaSize pair
    public_n    = OpenSSL.rsaN pair
    public_e    = OpenSSL.rsaE pair

    private_pub = RSA.PublicKey {public_size, public_n, public_e}
    private_d   = OpenSSL.rsaD pair
    private_p   = OpenSSL.rsaP pair
    private_q   = OpenSSL.rsaQ pair

  -- CRT exponent d mod (p-1); missing => regenerate.
  case OpenSSL.rsaDMP1 pair of
    Nothing ->
      genRSA2048
    Just private_dP ->
      -- CRT exponent d mod (q-1); missing => regenerate.
      case OpenSSL.rsaDMQ1 pair of
        Nothing ->
          genRSA2048
        Just private_dQ ->
          -- CRT coefficient q^-1 mod p; missing => regenerate.
          case OpenSSL.rsaIQMP pair of
            Nothing ->
              genRSA2048
            Just private_qinv ->
              return RSA.PrivateKey {..}
-- | Decode an RSA public key from its raw ASN.1 DER bytes.
--
-- The bytes are wrapped in a SubjectPublicKeyInfo structure (the OID
-- 1.2.840.113549.1.1.1 identifies rsaEncryption) and then handed to
-- 'ASN1.fromASN1', which does the actual parsing via the X.509
-- machinery.  Any non-RSA key or parse failure is reported as 'Left'.
decodeASN1DERRSAPublicKey :: ByteString -> Either String RSA.PublicKey
decodeASN1DERRSAPublicKey bs =
  let
    -- SubjectPublicKeyInfo envelope; the payload travels as a
    -- BitString whose length is counted in bits, hence the * 8.
    spki =
      [ ASN1.Start ASN1.Sequence
      , ASN1.Start ASN1.Sequence
      , ASN1.OID [1,2,840,113549,1,1,1]
      , ASN1.Null
      , ASN1.End ASN1.Sequence
      , ASN1.BitString (ASN1.toBitArray bs (BS.length bs * 8))
      , ASN1.End ASN1.Sequence
      ]
  in case ASN1.fromASN1 spki of
    Right (X509.PubKeyRSA pk, _) ->
      Right pk
    Right (pk, _) ->
      Left $ "Couldn't parse RSAPublicKey (ASN1 DER encoded). Different format provided: " <> show pk
    Left err ->
      Left $ show err
-- | Encode an RSA public key as raw ASN.1 DER bytes.
--
-- Inverse of 'decodeASN1DERRSAPublicKey': serialise via X.509, then
-- strip the SubjectPublicKeyInfo envelope and return only the BitString
-- payload.  The fall-through calls 'error' -- treated as unreachable,
-- firing only if the X.509 library changed its encoding shape.
encodeASN1DERRSAPublicKey :: RSA.PublicKey -> ByteString
encodeASN1DERRSAPublicKey pk =
  case ASN1.toASN1 (X509.PubKeyRSA pk) [] of
    [ ASN1.Start ASN1.Sequence
      , ASN1.Start ASN1.Sequence
      , ASN1.OID [1,2,840,113549,1,1,1]
      , ASN1.Null
      , ASN1.End ASN1.Sequence
      , ASN1.BitString bitArray
      , ASN1.End ASN1.Sequence
      ] ->
        ASN1.bitArrayGetData bitArray
    _ ->
      error "Unexpected ASN1 SubjectPublicKeyInfo encoding of RSA public keys"
| null | https://raw.githubusercontent.com/fission-codes/fission/c7f5907867e94b76bdcbed3a499a4128263d7bfd/hs-ucan/library/Crypto/Key/Asymmetric/Public.hs | haskell | module Crypto.Key.Asymmetric.Public
( genRSA2048
, decodeASN1DERRSAPublicKey
, encodeASN1DERRSAPublicKey
) where
import qualified Crypto.PubKey.RSA as RSA
import qualified Data.ASN1.BitArray as ASN1
import qualified Data.ASN1.Types as ASN1
import qualified Data.ByteString as BS
import qualified Data.X509 as X509
import qualified OpenSSL.RSA as OpenSSL
import RIO
import Web.UCAN.Internal.Orphanage.RSA2048.Private ()
genRSA2048 :: MonadIO m => m RSA.PrivateKey
genRSA2048 = do
pair <- liftIO $ OpenSSL.generateRSAKey' 2048 65537
let
public_size = OpenSSL.rsaSize pair
public_n = OpenSSL.rsaN pair
public_e = OpenSSL.rsaE pair
private_pub = RSA.PublicKey {public_size, public_n, public_e}
private_d = OpenSSL.rsaD pair
private_p = OpenSSL.rsaP pair
private_q = OpenSSL.rsaQ pair
case OpenSSL.rsaDMP1 pair of
Nothing ->
genRSA2048
Just private_dP ->
case OpenSSL.rsaDMQ1 pair of
Nothing ->
genRSA2048
Just private_dQ ->
case OpenSSL.rsaIQMP pair of
Nothing ->
genRSA2048
Just private_qinv ->
return RSA.PrivateKey {..}
decodeASN1DERRSAPublicKey :: ByteString -> Either String RSA.PublicKey
decodeASN1DERRSAPublicKey bs =
let
spki =
[ ASN1.Start ASN1.Sequence
, ASN1.Start ASN1.Sequence
, ASN1.OID [1,2,840,113549,1,1,1]
, ASN1.Null
, ASN1.End ASN1.Sequence
, ASN1.BitString (ASN1.toBitArray bs (BS.length bs * 8))
, ASN1.End ASN1.Sequence
]
in case ASN1.fromASN1 spki of
Right (X509.PubKeyRSA pk, _) ->
Right pk
Right (pk, _) ->
Left $ "Couldn't parse RSAPublicKey (ASN1 DER encoded). Different format provided: " <> show pk
Left err ->
Left $ show err
encodeASN1DERRSAPublicKey :: RSA.PublicKey -> ByteString
encodeASN1DERRSAPublicKey pk =
case ASN1.toASN1 (X509.PubKeyRSA pk) [] of
[ ASN1.Start ASN1.Sequence
, ASN1.Start ASN1.Sequence
, ASN1.OID [1,2,840,113549,1,1,1]
, ASN1.Null
, ASN1.End ASN1.Sequence
, ASN1.BitString bitArray
, ASN1.End ASN1.Sequence
] ->
ASN1.bitArrayGetData bitArray
_ ->
error "Unexpected ASN1 SubjectPublicKeyInfo encoding of RSA public keys"
| |
2be164782576d3ea01b742a59c3adb61e14cba2f00b35be4e3d0466167ab1eec | discus-lang/ddc | Bind.hs |
module DDC.Type.Bind
(getBindType)
where
import DDC.Type.Exp
-- | Lookup the type of a bound thing from the binder stack.
--   The binder stack contains the binders of all the @TForall@s we've
--   entered under so far.
--
--   On success also returns how many binders were walked past before
--   the match (the matching binder's position in the stack).
getBindType :: Eq n => [Bind n] -> Bound n -> Maybe (Int, Type n)
getBindType bs' u'
 = go 0 u' bs'
 where
       -- Named binder: matches only a 'UName' carrying the same name.
       go n u (BName n1 t : bs)
        | UName n2 <- u
        , n1 == n2  = Just (n, t)
        | otherwise = go (n + 1) u bs

       -- Debruijn index against an anonymous binder: index 0 matches
       -- here; a positive index is decremented as we step past the
       -- binder; a negative index can never match.
       go n (UIx i) (BAnon t : bs)
        | i < 0     = Nothing
        | i == 0    = Just (n, t)
        | otherwise = go (n + 1) (UIx (i - 1)) bs

       -- Anonymous binder against any other bound: just skip it.
       -- (Clause order matters: the UIx case above must come first.)
       go n u (BAnon _ : bs)
        | otherwise = go (n + 1) u bs

       -- A 'BNone' binds nothing, so it can never be the match.
       go n u (BNone _ : bs)
        = go (n + 1) u bs

       -- Ran off the end of the stack without a match.
       go _ _ [] = Nothing
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-core/DDC/Type/Bind.hs | haskell | | Lookup the type of a bound thing from the binder stack.
entered under so far. |
module DDC.Type.Bind
(getBindType)
where
import DDC.Type.Exp
The binder stack contains the binders of all the ` TForall`s we 've
getBindType :: Eq n => [Bind n] -> Bound n -> Maybe (Int, Type n)
getBindType bs' u'
= go 0 u' bs'
where go n u (BName n1 t : bs)
| UName n2 <- u
, n1 == n2 = Just (n, t)
| otherwise = go (n + 1) u bs
go n (UIx i) (BAnon t : bs)
| i < 0 = Nothing
| i == 0 = Just (n, t)
| otherwise = go (n + 1) (UIx (i - 1)) bs
go n u (BAnon _ : bs)
| otherwise = go (n + 1) u bs
go n u (BNone _ : bs)
= go (n + 1) u bs
go _ _ [] = Nothing
|
f462439bd5a67763e3b9dfaba00d0f0afa9cfca54585aab113d72f4593a62edb | iskandr/parakeet-retired | Value_to_GenericValue.mli | open Base
open Llvm_executionengine
(* Conversion from Parakeet runtime values to LLVM execution-engine
   generic values.  NOTE(review): behaviour below is described from the
   signatures only -- confirm against the implementation. *)

(** Convert a scalar number to an LLVM generic value. *)
val parnum_to_generic : ParNum.t -> GenericValue.t

(** Convert a pointer-backed runtime value to an LLVM generic value. *)
val to_llvm : Ptr.t Value.t -> GenericValue.t

(** As 'to_llvm', but presumably yields the value boxed behind a
    pointer -- verify against callers. *)
val to_llvm_pointer : Ptr.t Value.t -> GenericValue.t

(** Release the resources behind a generic value of the given Imp
    type. *)
val delete_llvm_gv : GenericValue.t -> ImpType.t -> unit
| null | https://raw.githubusercontent.com/iskandr/parakeet-retired/3d7e6e5b699f83ce8a1c01290beed0b78c0d0945/LLVM/Value_to_GenericValue.mli | ocaml | open Base
open Llvm_executionengine
val parnum_to_generic : ParNum.t -> GenericValue.t
val to_llvm : Ptr.t Value.t -> GenericValue.t
val to_llvm_pointer : Ptr.t Value.t -> GenericValue.t
val delete_llvm_gv : GenericValue.t -> ImpType.t -> unit
| |
81eeafdf23909b93c121c85a8ce2b1945f121785662aeb53bd0ad6021164a7d1 | fpco/haskell-multi-docker-example | TestImport.hs | # LANGUAGE QuasiQuotes #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module TestImport
( module TestImport
, module X
) where
import Application (makeFoundation, makeLogWare)
import ClassyPrelude as X hiding (delete, deleteBy, Handler)
import Database.Persist as X hiding (get)
import Database.Persist.Sql (SqlPersistM, SqlBackend, runSqlPersistMPool, rawExecute, rawSql, unSingle, connEscapeName)
import Foundation as X
import Model as X
import Test.Hspec as X
import Yesod.Default.Config2 (useEnv, loadYamlSettings)
import Yesod.Auth as X
import Yesod.Test as X
import Yesod.Core.Unsafe (fakeHandlerGetLogger)
-- Wiping the database
import Database.Persist.Sqlite (sqlDatabase, mkSqliteConnectionInfo, fkEnabled, createSqlitePoolFromInfo)
import Control.Monad.Logger (runLoggingT)
import Lens.Micro (set)
import Settings (appDatabaseConf)
import Yesod.Core (messageLoggerSource)
-- | Run a persistent action against the test application's connection pool.
runDB :: SqlPersistM a -> YesodExample App a
runDB query = do
    app <- getTestYesod
    liftIO $ runSqlPersistMPool query (appConnPool app)
-- | Run a 'Handler' action inside a test, wiring in the app's logger.
runHandler :: Handler a -> YesodExample App a
runHandler handler =
    getTestYesod >>= \app -> fakeHandlerGetLogger appLogger app handler
-- | Spec hook: build a fresh 'App' foundation before every test case.
-- Settings are layered: test-settings.yml overrides settings.yml, and
-- environment variables override both (via 'useEnv').
withApp :: SpecWith (TestApp App) -> Spec
withApp = before $ do
    settings <- loadYamlSettings
        ["config/test-settings.yml", "config/settings.yml"]
        []
        useEnv
    foundation <- makeFoundation settings
    -- Start every test from an empty database.
    wipeDB foundation
    logWare <- liftIO $ makeLogWare foundation
    return (foundation, logWare)
-- This function will truncate all of the tables in your database.
-- 'withApp' calls it before each test, creating a clean environment for each
-- spec to run in.
wipeDB :: App -> IO ()
wipeDB app = do
    -- In order to wipe the database, we need to use a connection which has
    -- foreign key checks disabled. Foreign key checks are enabled or disabled
    -- per connection, so this won't effect queries outside this function.
    --
    -- Aside: foreign key checks are enabled by persistent-sqlite, as of
    -- version 2.6.2, unless they are explicitly disabled in the
    -- SqliteConnectionInfo.
    let logFunc = messageLoggerSource app (appLogger app)
    let dbName = sqlDatabase $ appDatabaseConf $ appSettings app
        connInfo = set fkEnabled False $ mkSqliteConnectionInfo dbName
    -- A single-connection pool is enough: we only issue the DELETEs below.
    pool <- runLoggingT (createSqlitePoolFromInfo connInfo 1) logFunc
    flip runSqlPersistMPool pool $ do
        tables <- getTables
        sqlBackend <- ask
        -- Escape each table name with the backend's own quoting rules.
        let queries = map (\t -> "DELETE FROM " ++ (connEscapeName sqlBackend $ DBName t)) tables
        forM_ queries (\q -> rawExecute q [])
-- | List the name of every table in the connected SQLite database.
getTables :: MonadIO m => ReaderT SqlBackend m [Text]
getTables = do
    rows <- rawSql "SELECT name FROM sqlite_master WHERE type = 'table';" []
    return (map unSingle rows)
-- | Log the given user in through the dummy auth plugin.  This relies on
-- the @auth-dummy-login: true@ flag being set in test-settings.yaml, which
-- enables dummy authentication in Foundation.hs.
authenticateAs :: Entity User -> YesodExample App ()
authenticateAs (Entity _ u) =
    request $ do
        setMethod "POST"
        addPostParam "ident" (userIdent u)
        setUrl (AuthR (PluginR "dummy" []))
-- | Create a user.  The dummy email entry helps to confirm that foreign-key
-- checking is switched off in wipeDB for those database backends which need it.
createUser :: Text -> YesodExample App (Entity User)
createUser ident = runDB $ do
    userEntity <- insertEntity User
        { userIdent = ident
        , userPassword = Nothing
        }
    _ <- insert Email
        { emailEmail = ident
        , emailUserId = Just (entityKey userEntity)
        , emailVerkey = Nothing
        }
    return userEntity
| null | https://raw.githubusercontent.com/fpco/haskell-multi-docker-example/0b3c08813a20e20ec60d3a00ecef6d15c125ad8d/test/TestImport.hs | haskell | # LANGUAGE OverloadedStrings #
Wiping the database
This function will truncate all of the tables in your database.
'withApp' calls it before each test, creating a clean environment for each
spec to run in.
In order to wipe the database, we need to use a connection which has
foreign key checks disabled. Foreign key checks are enabled or disabled
per connection, so this won't effect queries outside this function.
Aside: foreign key checks are enabled by persistent-sqlite, as of
SqliteConnectionInfo.
| Authenticate as a user. This relies on the `auth-dummy-login: true` flag
being set in test-settings.yaml, which enables dummy authentication in
| Create a user. The dummy email entry helps to confirm that foreign-key | # LANGUAGE QuasiQuotes #
# LANGUAGE NoImplicitPrelude #
module TestImport
( module TestImport
, module X
) where
import Application (makeFoundation, makeLogWare)
import ClassyPrelude as X hiding (delete, deleteBy, Handler)
import Database.Persist as X hiding (get)
import Database.Persist.Sql (SqlPersistM, SqlBackend, runSqlPersistMPool, rawExecute, rawSql, unSingle, connEscapeName)
import Foundation as X
import Model as X
import Test.Hspec as X
import Yesod.Default.Config2 (useEnv, loadYamlSettings)
import Yesod.Auth as X
import Yesod.Test as X
import Yesod.Core.Unsafe (fakeHandlerGetLogger)
import Database.Persist.Sqlite (sqlDatabase, mkSqliteConnectionInfo, fkEnabled, createSqlitePoolFromInfo)
import Control.Monad.Logger (runLoggingT)
import Lens.Micro (set)
import Settings (appDatabaseConf)
import Yesod.Core (messageLoggerSource)
runDB :: SqlPersistM a -> YesodExample App a
runDB query = do
pool <- fmap appConnPool getTestYesod
liftIO $ runSqlPersistMPool query pool
runHandler :: Handler a -> YesodExample App a
runHandler handler = do
app <- getTestYesod
fakeHandlerGetLogger appLogger app handler
withApp :: SpecWith (TestApp App) -> Spec
withApp = before $ do
settings <- loadYamlSettings
["config/test-settings.yml", "config/settings.yml"]
[]
useEnv
foundation <- makeFoundation settings
wipeDB foundation
logWare <- liftIO $ makeLogWare foundation
return (foundation, logWare)
wipeDB :: App -> IO ()
wipeDB app = do
version 2.6.2 , unless they are explicitly disabled in the
let logFunc = messageLoggerSource app (appLogger app)
let dbName = sqlDatabase $ appDatabaseConf $ appSettings app
connInfo = set fkEnabled False $ mkSqliteConnectionInfo dbName
pool <- runLoggingT (createSqlitePoolFromInfo connInfo 1) logFunc
flip runSqlPersistMPool pool $ do
tables <- getTables
sqlBackend <- ask
let queries = map (\t -> "DELETE FROM " ++ (connEscapeName sqlBackend $ DBName t)) tables
forM_ queries (\q -> rawExecute q [])
getTables :: MonadIO m => ReaderT SqlBackend m [Text]
getTables = do
tables <- rawSql "SELECT name FROM sqlite_master WHERE type = 'table';" []
return (fmap unSingle tables)
Foundation.hs
authenticateAs :: Entity User -> YesodExample App ()
authenticateAs (Entity _ u) = do
request $ do
setMethod "POST"
addPostParam "ident" $ userIdent u
setUrl $ AuthR $ PluginR "dummy" []
checking is switched off in wipeDB for those database backends which need it .
createUser :: Text -> YesodExample App (Entity User)
createUser ident = runDB $ do
user <- insertEntity User
{ userIdent = ident
, userPassword = Nothing
}
_ <- insert Email
{ emailEmail = ident
, emailUserId = Just $ entityKey user
, emailVerkey = Nothing
}
return user
|
8dfbce898076f2bc0110adce7904ae81fa67bc8b2391f423b3214641736329e3 | tweag/network-transport-zeromq | n-t-zmq.hs | # LANGUAGE CPP , BangPatterns , OverloadedStrings #
module Main where
import Control.Monad
import Data.Int
import System.Environment (getArgs, withArgs)
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import System.IO (withFile, IOMode(..), hPutStrLn, Handle, stderr)
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar, takeMVar, putMVar)
import qualified Network.Socket as N
import Debug.Trace
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack, unpack)
import qualified Data.ByteString as BS
import qualified Network.Socket.ByteString as NBS
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import Network.Transport
import Network.Transport.ZMQ
-- | Ping/pong latency benchmark over network-transport-zeromq.
-- Spawns a server and a client endpoint on loopback, exchanges the number
-- of pings given on the command line, and exits once the client is done.
-- The two MVars swap endpoint addresses between the threads.
main :: IO ()
main = do
  [pingsStr] <- getArgs
  serverAddr <- newEmptyMVar
  clientAddr <- newEmptyMVar
  clientDone <- newEmptyMVar

  -- Start the server
  forkIO $ do
    -- establish transport and endpoint
    putStrLn "server: creating TCP connection"
    Right transport <- createTransport defaultZMQParameters "127.0.0.1"
    Right endpoint <- newEndPoint transport
    putMVar serverAddr (address endpoint)

    -- Connect to the client so that we can reply
    theirAddr <- takeMVar clientAddr
    Right conn <- connect endpoint theirAddr ReliableOrdered defaultConnectHints

    -- reply to pings with pongs
    putStrLn "server: awaiting client connection"
    ConnectionOpened _ _ _ <- receive endpoint
    pong endpoint conn

  -- Start the client
  forkIO $ do
    let pings = read pingsStr
    -- establish transport and endpoint
    Right transport <- createTransport defaultZMQParameters "127.0.0.1"
    Right endpoint <- newEndPoint transport
    putMVar clientAddr (address endpoint)

    -- Connect to the server to send pings
    theirAddr <- takeMVar serverAddr
    Right conn <- connect endpoint theirAddr ReliableOrdered defaultConnectHints

    -- Send pings, waiting for a reply after every ping
    ConnectionOpened _ _ _ <- receive endpoint
    ping endpoint conn pings
    putMVar clientDone ()

  -- Wait for the client to finish
  takeMVar clientDone
-- | Fixed payload sent with every ping (echoed back verbatim by the server).
pingMessage :: [ByteString]
pingMessage = [pack "ping123"]
-- | Send the given number of pings, waiting for the echo after each one,
-- and log the per-ping round-trip latency (in microseconds) to stderr.
ping :: EndPoint -> Connection -> Int -> IO ()
ping endpoint conn pings = go pings
  where
    -- Counts down from 'pings' to zero; the strict accumulator avoids
    -- building thunks across iterations.
    go :: Int -> IO ()
    go 0 = do
      putStrLn $ "client did " ++ show pings ++ " pings"
    go !i = do
      before <- getCurrentTime
      send conn pingMessage
      Received _ _payload <- receive endpoint
      after <- getCurrentTime
      -- putStrLn $ "client received " ++ show _payload
      -- Convert the measured round trip from seconds to microseconds.
      let latency = (1e6 :: Double) * realToFrac (diffUTCTime after before)
      hPutStrLn stderr $ show i ++ " " ++ show latency
      go (i - 1)
-- | Echo every received payload back over the connection until the
-- remote side closes it; any other event aborts the benchmark.
pong :: EndPoint -> Connection -> IO ()
pong endpoint conn = loop
  where
    loop = do
      event <- receive endpoint
      case event of
        Received _ payload -> send conn payload >> loop
        ConnectionClosed _ -> return ()
        _                  -> fail "Unexpected message"
| null | https://raw.githubusercontent.com/tweag/network-transport-zeromq/491b0cc1321b6e17b89b4a7287c325df2502f527/benchmarks/subparts/n-t-zmq.hs | haskell | Start the server
establish transport and endpoint
reply to pings with pongs
Start the client
establish transport and endpoint
Send pings, waiting for a reply after every ping
Wait for the client to finish
putStrLn $ "client received " ++ show _payload | # LANGUAGE CPP , BangPatterns , OverloadedStrings #
module Main where
import Control.Monad
import Data.Int
import System.Environment (getArgs, withArgs)
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import System.IO (withFile, IOMode(..), hPutStrLn, Handle, stderr)
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar, takeMVar, putMVar)
import qualified Network.Socket as N
import Debug.Trace
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack, unpack)
import qualified Data.ByteString as BS
import qualified Network.Socket.ByteString as NBS
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import Network.Transport
import Network.Transport.ZMQ
main :: IO ()
main = do
[pingsStr] <- getArgs
serverAddr <- newEmptyMVar
clientAddr <- newEmptyMVar
clientDone <- newEmptyMVar
forkIO $ do
putStrLn "server: creating TCP connection"
Right transport <- createTransport defaultZMQParameters "127.0.0.1"
Right endpoint <- newEndPoint transport
putMVar serverAddr (address endpoint)
Connect to the client so that we can reply
theirAddr <- takeMVar clientAddr
Right conn <- connect endpoint theirAddr ReliableOrdered defaultConnectHints
putStrLn "server: awaiting client connection"
ConnectionOpened _ _ _ <- receive endpoint
pong endpoint conn
forkIO $ do
let pings = read pingsStr
Right transport <- createTransport defaultZMQParameters "127.0.0.1"
Right endpoint <- newEndPoint transport
putMVar clientAddr (address endpoint)
Connect to the server to send pings
theirAddr <- takeMVar serverAddr
Right conn <- connect endpoint theirAddr ReliableOrdered defaultConnectHints
ConnectionOpened _ _ _ <- receive endpoint
ping endpoint conn pings
putMVar clientDone ()
takeMVar clientDone
pingMessage :: [ByteString]
pingMessage = [pack "ping123"]
ping :: EndPoint -> Connection -> Int -> IO ()
ping endpoint conn pings = go pings
where
go :: Int -> IO ()
go 0 = do
putStrLn $ "client did " ++ show pings ++ " pings"
go !i = do
before <- getCurrentTime
send conn pingMessage
Received _ _payload <- receive endpoint
after <- getCurrentTime
let latency = (1e6 :: Double) * realToFrac (diffUTCTime after before)
hPutStrLn stderr $ show i ++ " " ++ show latency
go (i - 1)
pong :: EndPoint -> Connection -> IO ()
pong endpoint conn = go
where
go = do
msg <- receive endpoint
case msg of
Received _ payload -> send conn payload >> go
ConnectionClosed _ -> return ()
_ -> fail "Unexpected message"
|
abfef9a7fa7f163bd129248766ed7a6788b6aedbb06280f56b697b82e8277531 | gvolpe/split-morphism | SplitMonoSpec.hs | module SplitMonoSpec (
checkProps
) where
import Control.Lens.SplitMono
import Data.Maybe (fromMaybe)
import Test.QuickCheck
import Text.Read (readMaybe)
-- | Int <-> Integer: 'toInteger' is injective and 'fromInteger' recovers it.
mono1 :: SplitMono Int Integer
mono1 = SplitMono toInteger fromInteger

-- | Integer <-> String via 'show'; unparsable strings normalize to 0.
mono2 :: SplitMono Integer String
mono2 = SplitMono show (fromMaybe 0 . readMaybe)

-- | Composite Int <-> String split mono built from the two above.
mono3 :: SplitMono Int String
mono3 = mono1 `composeSplitMono` mono2
-- | 'normalize' must not change what 'reverseGet' recovers.
prop_normalize :: Eq a => SplitMono a b -> b -> Bool
prop_normalize mono b = viaNormalize == direct
  where
    viaNormalize = reverseGet mono (normalize mono b)
    direct       = reverseGet mono b
-- | After normalization, @get . reverseGet@ is the identity.
prop_normalized_reverse_get_round_trip :: Eq b => SplitMono a b -> b -> Bool
prop_normalized_reverse_get_round_trip mono b =
    get mono (reverseGet mono normalized) == normalized
  where
    normalized = normalize mono b
-- | @reverseGet . get@ is the identity on the source type.
prop_get_round_trip :: Eq a => SplitMono a b -> a -> Bool
prop_get_round_trip mono a = reverseGet mono (get mono a) == a
-- | QuickCheck every property against each of the three split monos.
checkProps :: IO ()
checkProps = do
  quickCheck (prop_normalize mono1)
  quickCheck (prop_normalize mono2)
  quickCheck (prop_normalize mono3)
  quickCheck (prop_normalized_reverse_get_round_trip mono1)
  quickCheck (prop_normalized_reverse_get_round_trip mono2)
  quickCheck (prop_normalized_reverse_get_round_trip mono3)
  quickCheck (prop_get_round_trip mono1)
  quickCheck (prop_get_round_trip mono2)
  quickCheck (prop_get_round_trip mono3)
checkProps
) where
import Control.Lens.SplitMono
import Data.Maybe (fromMaybe)
import Test.QuickCheck
import Text.Read (readMaybe)
mono1 :: SplitMono Int Integer
mono1 = SplitMono toInteger fromInteger
mono2 :: SplitMono Integer String
mono2 = SplitMono show (fromMaybe 0 . readMaybe)
mono3 :: SplitMono Int String
mono3 = mono1 `composeSplitMono` mono2
prop_normalize :: Eq a => SplitMono a b -> b -> Bool
prop_normalize mono x =
reverseGet mono (normalize mono x) == reverseGet mono x
prop_normalized_reverse_get_round_trip :: Eq b => SplitMono a b -> b -> Bool
prop_normalized_reverse_get_round_trip mono x =
(get mono . reverseGet mono) x' == x'
where
x' = normalize mono x
prop_get_round_trip :: Eq a => SplitMono a b -> a -> Bool
prop_get_round_trip mono x =
(reverseGet mono . get mono) x == x
checkProps :: IO ()
checkProps = do
quickCheck (prop_normalize mono1)
quickCheck (prop_normalize mono2)
quickCheck (prop_normalize mono3)
quickCheck (prop_normalized_reverse_get_round_trip mono1)
quickCheck (prop_normalized_reverse_get_round_trip mono2)
quickCheck (prop_normalized_reverse_get_round_trip mono3)
quickCheck (prop_get_round_trip mono1)
quickCheck (prop_get_round_trip mono2)
quickCheck (prop_get_round_trip mono3)
| |
f3bd197a9ff4f6a9aeef7f6176156b5a845d2130d4a80242a4354f73776d7959 | hasktorch/hasktorch | Constraints.hs | # LANGUAGE DataKinds #
module Torch.Distributions.Constraints
( Constraint,
dependent,
boolean,
integerInterval,
integerLessThan,
integerGreaterThan,
integerLessThanEq,
integerGreaterThanEq,
real,
greaterThan,
greaterThanEq,
lessThan,
lessThanEq,
interval,
halfOpenInterval,
simplex,
nonNegativeInteger,
positiveInteger,
positive,
unitInterval,
)
where
import qualified Torch.Functional as F
import qualified Torch.Functional.Internal as I
import Torch.Scalar
import qualified Torch.Tensor as D
import qualified Torch.TensorFactories as D
-- | A constraint is a tensor predicate: applied to a tensor it yields a
-- boolean tensor marking which elements satisfy the constraint.
type Constraint = D.Tensor -> D.Tensor

-- | Constraint whose validity depends on context; evaluating it is an error.
dependent :: Constraint
dependent _tensor = error "Cannot determine validity of dependent constraint"

-- | Elements must equal 0 or 1.
boolean :: Constraint
boolean tensor = (tensor `F.eq` D.zerosLike tensor) `I.logical_or` (tensor `F.eq` D.onesLike tensor)

-- | Integer elements within the closed interval [lower_bound, upper_bound].
integerInterval :: Int -> Int -> Constraint
integerInterval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.le` fullLike' upper_bound tensor)

-- | Integer elements strictly below the bound.
integerLessThan :: Int -> Constraint
integerLessThan upper_bound tensor = tensor `F.lt` fullLike' upper_bound tensor

-- | Integer elements strictly above the bound.
integerGreaterThan :: Int -> Constraint
integerGreaterThan lower_bound tensor = tensor `F.gt` fullLike' lower_bound tensor

-- | Integer elements at most the bound.
integerLessThanEq :: Int -> Constraint
integerLessThanEq upper_bound tensor = tensor `F.le` fullLike' upper_bound tensor

-- | Integer elements at least the bound.
integerGreaterThanEq :: Int -> Constraint
integerGreaterThanEq lower_bound tensor = tensor `F.ge` fullLike' lower_bound tensor

-- | Finite values only (rejects NaN and infinities).
real :: Constraint
real = I.isfinite

-- | Elements strictly above the bound.
greaterThan :: Float -> Constraint
greaterThan lower_bound tensor = tensor `F.gt` fullLike' lower_bound tensor

-- | Elements at least the bound.
greaterThanEq :: Float -> Constraint
greaterThanEq lower_bound tensor = tensor `F.ge` fullLike' lower_bound tensor

-- | Elements strictly below the bound.
lessThan :: Float -> Constraint
lessThan upper_bound tensor = tensor `F.lt` fullLike' upper_bound tensor

-- | Elements at most the bound.
lessThanEq :: Float -> Constraint
lessThanEq upper_bound tensor = tensor `F.le` fullLike' upper_bound tensor

-- | Elements within the closed interval [lower_bound, upper_bound].
interval :: Float -> Float -> Constraint
interval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.le` fullLike' upper_bound tensor)

-- | Elements within the half-open interval [lower_bound, upper_bound).
halfOpenInterval :: Float -> Float -> Constraint
halfOpenInterval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.lt` fullLike' upper_bound tensor)

-- | Along the last dimension: every entry non-negative and the entries
-- summing to 1 within an absolute tolerance of 1e-6.
simplex :: Constraint
simplex tensor = F.allDim (F.Dim $ -1) False (greaterThanEq 0.0 tensor) `I.logical_and` (lessThan 1e-6 $ F.abs $ summed `F.sub` D.onesLike summed)
  where
    summed = F.sumDim (F.Dim $ -1) F.RemoveDim (D.dtype tensor) tensor

-- TODO: lowerTriangular
-- TODO: positiveDefinite
-- TODO: realVector
-- TODO: cat
-- TODO: stack

-- | Integers >= 0.
nonNegativeInteger :: Constraint
nonNegativeInteger = integerGreaterThanEq 0

-- | Integers >= 1.
positiveInteger :: Constraint
positiveInteger = integerGreaterThanEq 1

-- | Strictly positive values.
positive :: Constraint
positive = greaterThan 0.0

-- | Values in the closed interval [0, 1].
unitInterval :: Constraint
unitInterval = interval 0.0 1.0

-- | A tensor shaped like @t@ filled with the scalar @i@
-- (implemented as @i * onesLike t@).
fullLike' :: (Scalar a) => a -> D.Tensor -> D.Tensor
fullLike' i t = F.mulScalar i $ D.onesLike t
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/c34996b0a401a5b1b98b5774e892fde88adaa079/hasktorch/src/Torch/Distributions/Constraints.hs | haskell | TODO: lowerTriangular
TODO: positiveDefinite
TODO: realVector
TODO: cat
TODO: stack | # LANGUAGE DataKinds #
module Torch.Distributions.Constraints
( Constraint,
dependent,
boolean,
integerInterval,
integerLessThan,
integerGreaterThan,
integerLessThanEq,
integerGreaterThanEq,
real,
greaterThan,
greaterThanEq,
lessThan,
lessThanEq,
interval,
halfOpenInterval,
simplex,
nonNegativeInteger,
positiveInteger,
positive,
unitInterval,
)
where
import qualified Torch.Functional as F
import qualified Torch.Functional.Internal as I
import Torch.Scalar
import qualified Torch.Tensor as D
import qualified Torch.TensorFactories as D
type Constraint = D.Tensor -> D.Tensor
dependent :: Constraint
dependent _tensor = error "Cannot determine validity of dependent constraint"
boolean :: Constraint
boolean tensor = (tensor `F.eq` D.zerosLike tensor) `I.logical_or` (tensor `F.eq` D.onesLike tensor)
integerInterval :: Int -> Int -> Constraint
integerInterval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.le` fullLike' upper_bound tensor)
integerLessThan :: Int -> Constraint
integerLessThan upper_bound tensor = tensor `F.lt` fullLike' upper_bound tensor
integerGreaterThan :: Int -> Constraint
integerGreaterThan lower_bound tensor = tensor `F.gt` fullLike' lower_bound tensor
integerLessThanEq :: Int -> Constraint
integerLessThanEq upper_bound tensor = tensor `F.le` fullLike' upper_bound tensor
integerGreaterThanEq :: Int -> Constraint
integerGreaterThanEq lower_bound tensor = tensor `F.ge` fullLike' lower_bound tensor
real :: Constraint
real = I.isfinite
greaterThan :: Float -> Constraint
greaterThan lower_bound tensor = tensor `F.gt` fullLike' lower_bound tensor
greaterThanEq :: Float -> Constraint
greaterThanEq lower_bound tensor = tensor `F.ge` fullLike' lower_bound tensor
lessThan :: Float -> Constraint
lessThan upper_bound tensor = tensor `F.lt` fullLike' upper_bound tensor
lessThanEq :: Float -> Constraint
lessThanEq upper_bound tensor = tensor `F.le` fullLike' upper_bound tensor
interval :: Float -> Float -> Constraint
interval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.le` fullLike' upper_bound tensor)
halfOpenInterval :: Float -> Float -> Constraint
halfOpenInterval lower_bound upper_bound tensor = (tensor `F.ge` fullLike' lower_bound tensor) `I.logical_and` (tensor `F.lt` fullLike' upper_bound tensor)
simplex :: Constraint
simplex tensor = F.allDim (F.Dim $ -1) False (greaterThanEq 0.0 tensor) `I.logical_and` (lessThan 1e-6 $ F.abs $ summed `F.sub` D.onesLike summed)
where
summed = F.sumDim (F.Dim $ -1) F.RemoveDim (D.dtype tensor) tensor
TODO :
nonNegativeInteger :: Constraint
nonNegativeInteger = integerGreaterThanEq 0
positiveInteger :: Constraint
positiveInteger = integerGreaterThanEq 1
positive :: Constraint
positive = greaterThan 0.0
unitInterval :: Constraint
unitInterval = interval 0.0 1.0
fullLike' :: (Scalar a) => a -> D.Tensor -> D.Tensor
fullLike' i t = F.mulScalar i $ D.onesLike t
|
f63e63f09d8f79dde1dec19f15551aef6ae58cc49cd519250c48f80ef930274c | acieroid/scala-am | indexer5.scm | Example taken from Dynamic Partial Order Reduction , Figure 1
;; Expected result: #t
;; Five threads concurrently insert values into a shared hash table,
;; resolving collisions by linear probing with compare-and-swap.
(let* ((size 128)                      ; number of buckets
       (max 4)                         ; insertions per thread
       (table (make-vector size 0))    ; 0 marks an empty bucket
       (thread (lambda (tid)
                 (letrec ((hash (lambda (w) (modulo (* w 7) size)))
                          (process (lambda (m)
                                     (if (< m max)
                                         (letrec ((w (+ (* 11 (+ m 1)) tid))
                                                  ;; Probe forward until a CAS
                                                  ;; on an empty bucket succeeds.
                                                  (update (lambda (h)
                                                            (if (cas-vector table h 0 w)
                                                                #t
                                                                (update (modulo (+ h 1) size))))))
                                           (update (hash w))
                                           (process (+ m 1)))
                                         #t))))
                   (process 0))))
       (t1 (fork (thread 1)))
       (t2 (fork (thread 2)))
       (t3 (fork (thread 3)))
       (t4 (fork (thread 4)))
       (t5 (fork (thread 5))))
  ;; Wait for all workers, then yield the expected result #t.
  (join t1)
  (join t2)
  (join t3)
  (join t4)
  (join t5)
  #t)
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/concurrentScheme/threads/variations/indexer5.scm | scheme | Expected result: #t | Example taken from Dynamic Partial Order Reduction , Figure 1
(let* ((size 128)
(max 4)
(table (make-vector size 0))
(thread (lambda (tid)
(letrec ((hash (lambda (w) (modulo (* w 7) size)))
(process (lambda (m)
(if (< m max)
(letrec ((w (+ (* 11 (+ m 1)) tid))
(update (lambda (h)
(if (cas-vector table h 0 w)
#t
(update (modulo (+ h 1) size))))))
(update (hash w))
(process (+ m 1)))
#t))))
(process 0))))
(t1 (fork (thread 1)))
(t2 (fork (thread 2)))
(t3 (fork (thread 3)))
(t4 (fork (thread 4)))
(t5 (fork (thread 5))))
(join t1)
(join t2)
(join t3)
(join t4)
(join t5)
#t)
|
9f3a19d5c34c0d78ddc540526d056b939590c4091cbfe414b0b56ef95bf1e467 | janestreet/shexp | signal.ml | type t = int
(* this function is a copy&paste from stdune *)
let name =
let table =
let open Sys in
[ sigabrt, "ABRT"
; sigalrm, "ALRM"
; sigfpe, "FPE"
; sighup, "HUP"
; sigill, "ILL"
; sigint, "INT"
; sigkill, "KILL"
; sigpipe, "PIPE"
; sigquit, "QUIT"
; sigsegv, "SEGV"
; sigterm, "TERM"
; sigusr1, "USR1"
; sigusr2, "USR2"
; sigchld, "CHLD"
; sigcont, "CONT"
; sigstop, "STOP"
; sigtstp, "TSTP"
; sigttin, "TTIN"
; sigttou, "TTOU"
; sigvtalrm, "VTALRM"
; sigprof, "PROF"
; sigbus, "BUS"
; sigpoll, "POLL"
; sigsys, "SYS"
; sigtrap, "TRAP"
; sigurg, "URG"
; sigxcpu, "XCPU"
; sigxfsz, "XFSZ"
]
in
fun (n : int) ->
match List.assoc_opt n table with
| None -> if n > 0 then Printf.sprintf "%d" n else Printf.sprintf "caml:%d" n
| Some s -> s
;;
| null | https://raw.githubusercontent.com/janestreet/shexp/635989a9065f94e309707f113d6647dc62d6932f/process-lib/src/signal.ml | ocaml | this function is a copy&paste from stdune | type t = int
let name =
let table =
let open Sys in
[ sigabrt, "ABRT"
; sigalrm, "ALRM"
; sigfpe, "FPE"
; sighup, "HUP"
; sigill, "ILL"
; sigint, "INT"
; sigkill, "KILL"
; sigpipe, "PIPE"
; sigquit, "QUIT"
; sigsegv, "SEGV"
; sigterm, "TERM"
; sigusr1, "USR1"
; sigusr2, "USR2"
; sigchld, "CHLD"
; sigcont, "CONT"
; sigstop, "STOP"
; sigtstp, "TSTP"
; sigttin, "TTIN"
; sigttou, "TTOU"
; sigvtalrm, "VTALRM"
; sigprof, "PROF"
; sigbus, "BUS"
; sigpoll, "POLL"
; sigsys, "SYS"
; sigtrap, "TRAP"
; sigurg, "URG"
; sigxcpu, "XCPU"
; sigxfsz, "XFSZ"
]
in
fun (n : int) ->
match List.assoc_opt n table with
| None -> if n > 0 then Printf.sprintf "%d" n else Printf.sprintf "caml:%d" n
| Some s -> s
;;
|
630c4730256066e5de8e3f3ad716ac75f59925bba501a41d959d28a2fe5832d7 | arichiardi/replumb | runner.cljs | (ns launcher.runner
(:require [doo.runner :as doo :refer-macros [doo-tests]]
replumb.core-test
replumb.repl-test
replumb.common-test
replumb.load-test
replumb.options-test
replumb.macro-test
replumb.require-test
replumb.source-test
replumb.ast-test
replumb.cache-test))
(enable-console-print!)
(doo-tests
'replumb.core-test
'replumb.repl-test
'replumb.common-test
'replumb.load-test
'replumb.options-test
'replumb.macro-test
'replumb.require-test
'replumb.source-test
'replumb.ast-test
'replumb.cache-test)
| null | https://raw.githubusercontent.com/arichiardi/replumb/dde2228f2e364c3bafdf6585bb1bc1c27a3e336c/test/node/launcher/runner.cljs | clojure | (ns launcher.runner
(:require [doo.runner :as doo :refer-macros [doo-tests]]
replumb.core-test
replumb.repl-test
replumb.common-test
replumb.load-test
replumb.options-test
replumb.macro-test
replumb.require-test
replumb.source-test
replumb.ast-test
replumb.cache-test))
(enable-console-print!)
(doo-tests
'replumb.core-test
'replumb.repl-test
'replumb.common-test
'replumb.load-test
'replumb.options-test
'replumb.macro-test
'replumb.require-test
'replumb.source-test
'replumb.ast-test
'replumb.cache-test)
| |
e14d990da4a418e855d8f42eccaafdf3a3203a4fcf44ed6c164e06044ae405a5 | MinaProtocol/mina | mina_state_registers.ml | module V1 = struct
type ('ledger, 'pending_coinbase_stack, 'local_state) t =
{ first_pass_ledger : 'ledger
; second_pass_ledger : 'ledger
; pending_coinbase_stack : 'pending_coinbase_stack
; local_state : 'local_state
}
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/57e2ea1b87fe1a24517e1c62f51cc59fe9bc87cd/src/lib/mina_wire_types/mina_state/mina_state_registers.ml | ocaml | module V1 = struct
type ('ledger, 'pending_coinbase_stack, 'local_state) t =
{ first_pass_ledger : 'ledger
; second_pass_ledger : 'ledger
; pending_coinbase_stack : 'pending_coinbase_stack
; local_state : 'local_state
}
end
| |
85bb6480a7ff6b44288091ee846528fad346e62415199909a01b3f13499e8435 | gbour/wave | mqtt_topic_registry.erl | %%
%% Wave - MQTT Broker
%% Copyright (C) 2014-2016 Guillaume Bour
%%
%% This program is free software: you can redistribute it and/or modify
%% it under the terms of the GNU Affero General Public License as published
%% by the Free Software Foundation, version 3 of the License.
%%
%% This program is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU Affero General Public License for more details.
%%
%% You should have received a copy of the GNU Affero General Public License
%% along with this program. If not, see </>.
%%
%% NOTE: current implementation is really naive
%% we have to go through the whole list to match each subscriber topic regex
%% will be really slow w/ hundred thousand subscribers!
%%
-module(mqtt_topic_registry).
-author("Guillaume Bour <>").
-behaviour(gen_server).
-include("mqtt_msg.hrl").
%% Registry state. `subscriptions' is the flat list of active
%% {TopicName, Fields, Qos, Subscriber} tuples (see subscription()).
%% The original field declaration was garbled (only the comment remnants
%% "{topicname, [Pid*]} peers" survived), leaving the record without the
%% `subscriptions' field that every handle_call/3 clause pattern-matches.
-record(state, {
  subscriptions = [] :: list(subscription())
}).
-type subscriber() :: {Module :: module(), Fun :: atom(), Pid :: pid(), DeviceID :: mqtt_clientid()|undefined}.
-type subscription() :: {Re :: binary(), Fields :: list(integer()), Qos :: integer(), Subscriber :: subscriber()}.
-type match() :: {Position :: integer(), Value :: binary()}.
-type match_result() :: {Re :: binary(), Qos :: integer(), Subscriber :: subscriber(), Matches :: list(match())}.
%
-export([count/0, dump/0, subscribe/3, unsubscribe/1, unsubscribe/2, match/1]).
-ifdef(DEBUG).
-export([debug_cleanup/0]).
-endif.
% gen_server API
-export([start_link/0, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
%% @doc Starts the registry, registered locally under the module name.
%% NOTE: currently we set ONE global registry service for the current
%% erlang node (the comment marker was lost in the source, which made
%% this line a syntax error).
start_link() ->
  gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% @hidden gen_server init: zero the exometer subscription gauge and
%% start with an empty registry.
init(_) ->
exometer:update([wave,subscriptions], 0),
{ok, #state{}}.
%%
%% PUBLIC API
%%
%
% todo: there may be wildcard in topic name => need to do a search
%
% Name: TopicName | {TopicName, Fields}
%% @doc Registers `Subscriber' for topic `Name' (a topic name, or a
%% pre-split {TopicName, Fields} tuple) with the given QoS.
-spec subscribe(Topic :: binary(), Qos :: integer(), Subscriber :: subscriber()) -> ok | duplicate.
subscribe(Name, Qos, Subscriber) ->
gen_server:call(?MODULE, {subscribe, Name, Qos, Subscriber}).
%% @doc Removes one exact subscription tuple from the registry.
-spec unsubscribe(Subscription :: subscription()) -> ok.
unsubscribe(Subscription) ->
gen_server:call(?MODULE, {unsubscribe, Subscription}).
%% @doc Removes `Subscriber' from topic `Name', whatever its fields/qos.
-spec unsubscribe(Topic :: binary(), Subscriber :: subscriber()) -> ok.
unsubscribe(Name, Subscriber) ->
gen_server:call(?MODULE, {unsubscribe, Name, Subscriber}).
%% @doc Lists subscriptions matching the concrete topic `Name'.
-spec match(Topic :: binary()) -> list(match_result()).
match(Name) ->
gen_server:call(?MODULE, {match, Name}).
%% @doc Number of registered subscriptions.
-spec count() -> {ok, integer()}.
count() ->
{ok, gen_server:call(?MODULE, count)}.
%% @doc Logs every registered subscription (via lager; see priv_dump/1).
-spec dump() -> ok.
dump() ->
gen_server:call(?MODULE, dump).
% flush all registry
-ifdef(DEBUG).
%% @doc Flushes the whole registry (compiled only when DEBUG is set).
debug_cleanup() ->
gen_server:call(?MODULE, debug_cleanup).
-endif.
%%
%% PRIVATE API
%%
%% @hidden gen_server call dispatcher.
%%
%%   count         -> number of registered subscriptions.
%%   dump          -> log every subscription (see priv_dump/1).
%%   debug_cleanup -> drop the whole registry (DEBUG builds only).
%%   subscribe     -> append the subscription unless the exact
%%                    {Topic, Fields, Subscriber} triple already exists.
%%   unsubscribe   -> remove subscription(s), either by full subscription
%%                    tuple or by {TopicName, Subscriber} pair.
%%   match         -> subscriptions matching a concrete topic name.
handle_call(count, _, State=#state{subscriptions=S}) ->
  {reply, erlang:length(S), State};
handle_call(dump, _, State=#state{subscriptions=S}) ->
  priv_dump(S),
  {reply, ok, State};
handle_call(debug_cleanup, _, _State) ->
  lager:warning("clearing registry"),
  exometer:update([wave,subscriptions], 0),
  {reply, ok, #state{}};
handle_call({subscribe, Topic, Qos, Subscriber}, _, State=#state{subscriptions=Subscriptions}) ->
  lager:debug("~p: subscribe to '~p' topic w/ qos ~p", [Subscriber, Topic, Qos]),
  %% Topic may arrive pre-split as {TopicName, Fields}; otherwise derive
  %% the wildcard fields from the raw topic name.
  {TopicName, Fields} = case Topic of
    {T, M} ->
      {T, M};
    Topic ->
      {Topic, mqtt_topic_match:fields(Topic)}
  end,
  {Reply, S2} = case lists:filter(fun({T,F,_,S}) -> {T,F,S} =:= {TopicName,Fields,Subscriber} end, Subscriptions) of
    [] ->
      exometer:update([wave,subscriptions], length(Subscriptions)+1),
      {ok, Subscriptions ++ [{TopicName,Fields,Qos,Subscriber}]};
    _ ->
      lager:notice("~p already subscribed to ~p (~p)", [Subscriber, TopicName, Fields]),
      {duplicate, Subscriptions}
  end,
  {reply, Reply, State#state{subscriptions=S2}};
handle_call({unsubscribe, Subscriber}, _, State=#state{subscriptions=S}) ->
  S2 = priv_unsubscribe(Subscriber, S, []),
  exometer:update([wave,subscriptions], length(S2)),
  {reply, ok, State#state{subscriptions=S2}};
handle_call({unsubscribe, TopicName, Subscriber}, _, State=#state{subscriptions=S}) ->
  lager:debug("unsubscribe ~p from ~p", [Subscriber, TopicName]),
  S2 = lists:filter(fun({T,_,_,Sub}) ->
      {T,Sub} =/= {TopicName, Subscriber}
    end, S
  ),
  exometer:update([wave,subscriptions], length(S2)),
  %% NOTE(review): the next debug line was garbled in the source
  %% ("lager : ~p / ~p ...") and broke compilation; restored here as a
  %% comment so the clause compiles:
  %% lager:debug("unsubscribe: ~p / ~p", [S, S2]),
  {reply, ok, State#state{subscriptions=S2}};
handle_call({match, TopicName}, _, State=#state{subscriptions=S}) ->
  Match = priv_match(TopicName, S, []),
  {reply, Match, State};
handle_call(_,_,State) ->
  {reply, ok, State}.
%% @hidden No casts are used by this registry.
handle_cast(_, State) ->
{noreply, State}.
%% @hidden No out-of-band messages are expected.
handle_info(_, State) ->
{noreply, State}.
%% @hidden Nothing to clean up on shutdown.
terminate(_,_) ->
ok.
%% @hidden State is carried over unchanged on code upgrade.
code_change(_, State, _) ->
{ok, State}.
%%
%% PRIVATE FUNS
%%
% dump subscribers list
%
%% Log one info line per subscription: topic, wildcard fields, qos and
%% the subscriber tuple. Always returns ok.
-spec priv_dump(list(subscription())) -> ok.
priv_dump(Subscriptions) ->
  lists:foreach(
    fun({Topic, Fields, Qos, Subscriber}) ->
      lager:info("~p (~p) (qos ~p) -> ~p", [Topic, Fields, Qos, Subscriber])
    end,
    Subscriptions).
% remove a subscriber from subscription list
%
%% Removes every occurrence of subscription `S' from `Subscribers'.
%% Fixes two defects of the accumulator version:
%%   - the spec said list(subscription) (a list of the *atom*
%%     `subscription'), not list(subscription());
%%   - the result order was reversed, inconsistent with the
%%     lists:filter/2 path used for {unsubscribe, TopicName, Subscriber}.
%% The Acc parameter is kept for interface compatibility (callers pass []).
-spec priv_unsubscribe(Subscriber :: subscription(), Subscribers :: list(subscription()),
                       Acc :: list(subscription())) -> list(subscription()).
priv_unsubscribe(S, Subscribers, Acc) ->
  Acc ++ [Sub || Sub <- Subscribers, Sub =/= S].
% find subscriptions matching topic
%
%% Collect every subscription matching `Topic', prepending hits onto the
%% accumulator. An exact topic-name hit carries no captured fields; a
%% wildcard hit carries the fields returned by mqtt_topic_match:match/2.
-spec priv_match(Topic :: binary(), Subscriptions :: list(subscription()), Acc :: list(match_result())) ->
list(match_result()).
priv_match(Topic, Subscriptions, Acc0) ->
  lists:foldl(
    fun({T, _Fields, Qos, Sub}, Acc) when T =:= Topic ->
        %% exact match: no wildcard fields captured
        [{T, Qos, Sub, []} | Acc];
       ({Re, Fields, Qos, Sub}, Acc) ->
        %% wildcard pattern: delegate to the topic matcher
        case mqtt_topic_match:match(Re, {Topic, Fields}) of
          {ok, MatchFields} -> [{Re, Qos, Sub, MatchFields} | Acc];
          fail -> Acc
        end
    end,
    Acc0, Subscriptions).
| null | https://raw.githubusercontent.com/gbour/wave/fe5b78408a7c6e723b19cd454068958058e5e072/apps/wave/src/mqtt_topic_registry.erl | erlang |
This program is free software: you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
NOTE: current implementation is really naive
we have to go through the whole list to match each subscriber topic regex
gen_server API
PUBLIC API
todo: there may be wildcard in topic name => need to do a search
flush all registry
PRIVATE API
PRIVATE FUNS
dump subscribers list
remove a subscriber from subscription list
find subscriptions matching topic
exact match
regex | Wave - MQTT Broker
Copyright ( C ) 2014 - 2016 -
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation , version 3 of the License .
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
will be really slow w/ hundred thousand subscribers !
-module(mqtt_topic_registry).
-author("Guillaume Bour <>").
-behaviour(gen_server).
-include("mqtt_msg.hrl").
-record(state, {
{ topicname , [ Pid * ] }
peers
}).
-type subscriber() :: {Module :: module(), Fun :: atom(), Pid :: pid(), DeviceID :: mqtt_clientid()|undefined}.
-type subscription() :: {Re :: binary(), Fields :: list(integer()), Qos :: integer(), Subscriber :: subscriber()}.
-type match() :: {Position :: integer(), Value :: binary()}.
-type match_result() :: {Re :: binary(), Qos :: integer(), Subscriber :: subscriber(), Matches :: list(match())}.
-export([count/0, dump/0, subscribe/3, unsubscribe/1, unsubscribe/2, match/1]).
-ifdef(DEBUG).
-export([debug_cleanup/0]).
-endif.
-export([start_link/0, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
start_link() ->
NOTE : currently we set ONE global registry service for the current erlang server
gen_server:start_link({local,?MODULE}, ?MODULE, [], []).
init(_) ->
exometer:update([wave,subscriptions], 0),
{ok, #state{}}.
Name : TopicName | { TopicName , }
-spec subscribe(Topic :: binary(), Qos :: integer(), Subscriber :: subscriber()) -> ok | duplicate.
subscribe(Name, Qos, Subscriber) ->
gen_server:call(?MODULE, {subscribe, Name, Qos, Subscriber}).
-spec unsubscribe(Subscription :: subscription()) -> ok.
unsubscribe(Subscription) ->
gen_server:call(?MODULE, {unsubscribe, Subscription}).
-spec unsubscribe(Topic :: binary(), Subscriber :: subscriber()) -> ok.
unsubscribe(Name, Subscriber) ->
gen_server:call(?MODULE, {unsubscribe, Name, Subscriber}).
-spec match(Topic :: binary()) -> list(match_result()).
match(Name) ->
gen_server:call(?MODULE, {match, Name}).
-spec count() -> {ok, integer()}.
count() ->
{ok, gen_server:call(?MODULE, count)}.
-spec dump() -> ok.
dump() ->
gen_server:call(?MODULE, dump).
-ifdef(DEBUG).
debug_cleanup() ->
gen_server:call(?MODULE, debug_cleanup).
-endif.
handle_call(count, _, State=#state{subscriptions=S}) ->
{reply, erlang:length(S), State};
handle_call(dump, _, State=#state{subscriptions=S}) ->
priv_dump(S),
{reply, ok, State};
handle_call(debug_cleanup, _, _State) ->
lager:warning("clearing registry"),
exometer:update([wave,subscriptions], 0),
{reply, ok, #state{}};
handle_call({subscribe, Topic, Qos, Subscriber}, _, State=#state{subscriptions=Subscriptions}) ->
lager:debug("~p: subscribe to '~p' topic w/ qos ~p", [Subscriber, Topic, Qos]),
{TopicName, Fields} = case Topic of
{T, M} ->
{T, M};
Topic ->
{Topic, mqtt_topic_match:fields(Topic)}
end,
{Reply, S2} = case lists:filter(fun({T,F,_,S}) -> {T,F,S} =:= {TopicName,Fields,Subscriber} end, Subscriptions) of
[] ->
exometer:update([wave,subscriptions], length(Subscriptions)+1),
{ok, Subscriptions ++ [{TopicName,Fields,Qos,Subscriber}]};
_ ->
lager:notice("~p already subscribed to ~p (~p)", [Subscriber, TopicName, Fields]),
{duplicate, Subscriptions}
end,
{reply, Reply, State#state{subscriptions=S2}};
handle_call({unsubscribe, Subscriber}, _, State=#state{subscriptions=S}) ->
S2 = priv_unsubscribe(Subscriber, S, []),
exometer:update([wave,subscriptions], length(S2)),
{reply, ok, State#state{subscriptions=S2}};
handle_call({unsubscribe, TopicName, Subscriber}, _, State=#state{subscriptions=S}) ->
lager:debug("unsubscribe ~p from ~p", [Subscriber, TopicName]),
S2 = lists:filter(fun({T,_,_,Sub}) ->
{T,Sub} =/= {TopicName, Subscriber}
end, S
),
exometer:update([wave,subscriptions], length(S2)),
lager : ~p / ~p " , [ S , S2 ] ) ,
{reply, ok, State#state{subscriptions=S2}};
handle_call({match, TopicName}, _, State=#state{subscriptions=S}) ->
Match = priv_match(TopicName, S, []),
{reply, Match, State};
handle_call(_,_,State) ->
{reply, ok, State}.
handle_cast(_, State) ->
{noreply, State}.
handle_info(_, State) ->
{noreply, State}.
terminate(_,_) ->
ok.
code_change(_, State, _) ->
{ok, State}.
-spec priv_dump(list(subscription())) -> ok.
priv_dump([{Topic, Fields, Qos, Subscriber} |T]) ->
lager:info("~p (~p) (qos ~p) -> ~p", [Topic, Fields, Qos, Subscriber]),
priv_dump(T);
priv_dump([]) ->
ok.
-spec priv_unsubscribe(Subscriber :: subscription(), Subscribers :: list(subscription()),
Acc :: list(subscription)) -> list(subscription()).
priv_unsubscribe(_, [], S2) ->
S2;
priv_unsubscribe(S, [S|T], S2) ->
priv_unsubscribe(S, T, S2);
priv_unsubscribe(S, [H|T], S2) ->
priv_unsubscribe(S, T, [H|S2]).
-spec priv_match(Topic :: binary(), Subscriptions :: list(subscription()), Acc :: list(match_result())) ->
list(match_result()).
priv_match(Topic, [{Topic, _, Qos, S}|T], M) ->
priv_match(Topic, T, [{Topic,Qos,S,[]}|M]);
priv_match(Topic, [{Re, Fields, Qos, S}|T], M) ->
MatchList = case mqtt_topic_match:match(Re, {Topic, Fields}) of
{ok, MatchFields} ->
[{Re,Qos,S,MatchFields}|M];
fail ->
M
end,
priv_match(Topic, T, MatchList);
priv_match(_, [], M) ->
M.
|
906af07fefd3894a68abc324a4fc1ec9cdea12102c98c58a54eac34aee7e1327 | flavioc/cl-hurd | proxy-translator.lisp |
(defpackage :proxy-translator
(:use :cl :hurd-common :mach
:hurd :hurd-translator
:hurd-tree-translator))
(in-package :proxy-translator)
;; Translator class; tree-translator supplies the directory-tree plumbing.
(defclass proxy-translator (tree-translator)
())
;; Mixin holding the port to the corresponding node in the underlying
;; (proxied) filesystem.
(defclass underlying-entry ()
((port :initarg :port
:initform nil
:accessor port)))
;; File and directory entry classes that carry their underlying port.
(defclass proxy-entry (underlying-entry entry) ())
(defclass proxy-dir-entry (underlying-entry dir-entry) ())
;; read-file callback: serve bytes from the underlying file, upcasing
;; ASCII lowercase letters (codes 97..122 become code-32, i.e. #\a -> #\A)
;; before writing them to STREAM. Only runs when USER has :read access
;; on NODE; returns t in that case even if io-read yielded no data.
(define-callback read-file proxy-translator
(node user start amount stream)
(when (has-access-p node user :read)
(let ((data (io-read (port node)
:amount amount
:offset start)))
(when data
(loop for item across data
do (cond
((and (>= item 97)
(<= item 122))
(write-byte (- item 32) stream))
(t (write-byte item stream)))))
t)))
(defun %get-file-type (stat)
  "Entry class to instantiate for STAT: PROXY-DIR-ENTRY for
directories, PROXY-ENTRY for everything else."
  (if (is-dir-p stat)
      'proxy-dir-entry
      'proxy-entry))
;; Upper bound on directory entries requested per dir-readdir call.
(defconstant +max-dir-entries+ 2048)
;; Recursively mirror the underlying directory tree below NODE:
;; for every entry except "." and "..", look up its port (without
;; following links or translators), stat it, build the matching proxy
;; entry, register it on NODE, and recurse into subdirectories.
;; No-op when NODE is not a directory.
(defun %fetch-nodes (node)
(when (is-dir-p (stat node))
(let ((entries (dir-readdir (port node)
:nentries +max-dir-entries+)))
(loop for dirent in entries
do (let ((name (dirent-name dirent)))
(unless (or (string= name ".")
(string= name ".."))
(let* ((port (file-name-lookup name
:under (port node)
:flags '(:read :nolink :notrans)))
(stat (io-stat port))
(entry (make-instance (%get-file-type stat)
:stat stat
:port port
:parent node)))
(add-entry node entry name)
(when (is-dir-p stat)
(%fetch-nodes entry)))))))))
;; Root-node factory callback: wrap the underlying root in the proxy
;; entry class chosen from its stat, then prefetch the whole tree.
(define-callback make-root-node proxy-translator
(underlying-node underlying-stat)
(let ((node (make-instance (%get-file-type underlying-stat)
:stat underlying-stat
:port underlying-node)))
(%fetch-nodes node)
node))
;; Entry point: instantiate the translator and hand control to the
;; translator loop; the underlying node is opened :notrans :read.
(defun main ()
(let ((translator (make-instance 'proxy-translator
:name "proxy-translator")))
(run-translator translator :flags '(:notrans :read))))
(main)
| null | https://raw.githubusercontent.com/flavioc/cl-hurd/982232f47d1a0ff4df5fde2edad03b9df871470a/examples/proxy-translator.lisp | lisp |
(defpackage :proxy-translator
(:use :cl :hurd-common :mach
:hurd :hurd-translator
:hurd-tree-translator))
(in-package :proxy-translator)
(defclass proxy-translator (tree-translator)
())
(defclass underlying-entry ()
((port :initarg :port
:initform nil
:accessor port)))
(defclass proxy-entry (underlying-entry entry) ())
(defclass proxy-dir-entry (underlying-entry dir-entry) ())
(define-callback read-file proxy-translator
(node user start amount stream)
(when (has-access-p node user :read)
(let ((data (io-read (port node)
:amount amount
:offset start)))
(when data
(loop for item across data
do (cond
((and (>= item 97)
(<= item 122))
(write-byte (- item 32) stream))
(t (write-byte item stream)))))
t)))
(defun %get-file-type (stat)
(cond
((is-dir-p stat) 'proxy-dir-entry)
(t 'proxy-entry)))
(defconstant +max-dir-entries+ 2048)
(defun %fetch-nodes (node)
(when (is-dir-p (stat node))
(let ((entries (dir-readdir (port node)
:nentries +max-dir-entries+)))
(loop for dirent in entries
do (let ((name (dirent-name dirent)))
(unless (or (string= name ".")
(string= name ".."))
(let* ((port (file-name-lookup name
:under (port node)
:flags '(:read :nolink :notrans)))
(stat (io-stat port))
(entry (make-instance (%get-file-type stat)
:stat stat
:port port
:parent node)))
(add-entry node entry name)
(when (is-dir-p stat)
(%fetch-nodes entry)))))))))
(define-callback make-root-node proxy-translator
(underlying-node underlying-stat)
(let ((node (make-instance (%get-file-type underlying-stat)
:stat underlying-stat
:port underlying-node)))
(%fetch-nodes node)
node))
(defun main ()
(let ((translator (make-instance 'proxy-translator
:name "proxy-translator")))
(run-translator translator :flags '(:notrans :read))))
(main)
| |
26240d10e01d4cf8143d47e56c75956d1f38bd3d250788cdb30cf825641544a2 | cabol/erlbus | ebus.erl | %%%-------------------------------------------------------------------
%%% @doc
%%% Main `ebus` interface. This module also works as a wrapper on top
%%% of `ebus_ps' module.
%%% @see ebus_ps
%%% @end
%%%-------------------------------------------------------------------
-module(ebus).
-behaviour(application).
%% PubSub API
-export([sub/2, sub/3, sub/4]).
-export([unsub/2, unsub/3]).
-export([pub/2, pub/3, pub_from/3, pub_from/4]).
-export([subscribers/1, subscribers/2]).
-export([local_subscribers/1, local_subscribers/2]).
-export([topics/0, topics/1, local_topics/0, local_topics/1]).
-export([dispatch/2, dispatch/3, dispatch/4]).
%% Application callbacks and functions
-export([start/0, stop/0]).
-export([start/2, stop/1]).
%% Utilities
-export([server/0, default_ps_server/0]).
%%%===================================================================
%%% Types
%%%===================================================================
-type topic() :: iodata().
-type handler() :: pid().
%% Receives as argument the subscribers list. Then it should choose
%% one subscriber from the given list and return it.
-type dispatch_fun() :: fun(([term()]) -> term()).
%% Available dispatch options.
-type dispatch_opt() :: {scope, local | global} |
{dispatch_fun, dispatch_fun()}.
%% Dispatch options.
-type dispatch_opts() :: [dispatch_opt()].
%% `sub/4' options.
-type options() :: ebus_ps_local:options().
% Exported types
-export_type([
topic/0,
handler/0,
dispatch_fun/0,
dispatch_opts/0,
options/0
]).
%%%===================================================================
%%% PubSub API
%%%===================================================================
%% @equiv sub(server(), Handler, Topic)
sub(Handler, Topic) ->
sub(server(), Handler, Topic).
%% @equiv sub(Server, Handler, Topic, [])
sub(Server, Handler, Topic) ->
sub(Server, Handler, Topic, []).
%% @doc
%% Subscribes the `Handler' given `Topic'.
%%
%% <ul>
< li>`Server ' : The Pid registered name of the server.</li >
%% <li>`Handler': The subscriber pid to receive pubsub messages.</li>
%% <li>`Topic': The topic to subscribe to, ie: `"users:123"'.</li>
%% <li>`Opts': The optional list of options. See below.</li>
%% </ul>
%%
%% <b>Options:</b>
%% <br/>
%% <ul>
< li>`{link , _ } ' : links the subscriber to the pubsub adapter.</li >
%% <li>`{fastlane, ebus_ps_local:fastlane()}': Provides a fastlane path
%% for the broadcasts for `broadcast()' events. The fastlane process is
%% notified of a cached message instead of the normal subscriber.
Fastlane handlers must implement ` fastlane/1 ' callbacks which accepts a
%% `broadcast()' struct and returns a fastlaned format for the handler.</li>
%% </ul>
%%
%% Examples:
%%
%% ```
%% > ebus:sub(self(), <<"foo">>).
%% ok
> ebus : , self ( ) , < < " foo " > > ) .
%% ok
> ebus : , self ( ) , < < " foo " > > , [ ] ) .
%% ok
> ebus : , self ( ) , < < " foo " > > ,
[ { fastlane , { FastPid , , [ < < " event1 " > > ] } ] ) .
%% ok
%% '''
%% @end
-spec sub(atom(), handler(), topic(), options()) -> ok | {error, term()}.
sub(Server, Handler, Topic, Opts) ->
ebus_ps:subscribe(Server, Handler, ebus_common:to_bin(Topic), Opts).
%% @equiv unsub(server(), Handler, Topic)
unsub(Handler, Topic) ->
unsub(server(), Handler, Topic).
%% @doc
the given ` Handler ' from the ` Topic ' .
%%
%% <ul>
%% <li>`Server': The registered server name or pid.</li>
%% <li>`Handler': The subscriber pid.</li>
%% <li>`Topic': The string topic, for example `<<"users:123">>'.</li>
%% </ul>
%%
%% Example:
%%
%% ```
%% > ebus:unsub(self(), <<"foo">>).
%% ok
%% > ebus:unsub(ebus_ps, self(), <<"foo">>).
%% ok
%% '''
%% @end
-spec unsub(atom(), handler(), topic()) -> ok | {error, term()}.
unsub(Server, Handler, Topic) ->
ebus_ps:unsubscribe(Server, Handler, ebus_common:to_bin(Topic)).
%% @equiv pub(server(), Topic, Message)
pub(Topic, Message) ->
pub(server(), Topic, Message).
%% @doc
%% Sends a message to all subscribers of a topic.
%%
%% <ul>
%% <li>`Server': The registered server name or pid.</li>
%% <li>`Topic': The string topic, for example `<<"users:123">>'.</li>
%% <li>`Message': Any erlang term.</li>
%% </ul>
%%
%% Examples:
%%
%% ```
> ebus : pub("bar " , # { topic = > " foo " , payload = > " hi " } ) .
%% ok
> ebus : , " bar " , # { topic = > " foo " , payload = > " hi " } ) .
%% ok
%% '''
%% @end
-spec pub(atom(), topic(), term()) -> ok | {error, term()}.
pub(Server, Topic, Message) ->
ebus_ps:broadcast(Server, ebus_common:to_bin(Topic), Message).
%% @equiv pub_from(server(), From, Topic, Message)
pub_from(From, Topic, Message) ->
pub_from(server(), From, Topic, Message).
%% @doc
Same as ` pub/3 ' but message is not sent to ` FromHandler ' .
%%
%% Examples:
%%
%% ```
%% > ebus:pub_from(self(), "foo", <<"message">>).
%% ok
%% > ebus:pub_from(ebus_ps, self(), "foo", <<"message">>).
%% ok
%% '''
%% @end
-spec pub_from(atom(), handler(), topic(), term()) -> ok | {error, term()}.
pub_from(Server, FromHandler, Topic, Message) ->
BinTopic = ebus_common:to_bin(Topic),
ebus_ps:broadcast_from(Server, FromHandler, BinTopic, Message).
%% @equiv subscribers(server(), Topic)
subscribers(Topic) ->
subscribers(server(), Topic).
%% @doc
%% Returns a set of all subscribers handlers (local and global)
%% for the given `Topic'.
%%
%% <ul>
%% <li>`Server': The registered server name or pid.</li>
%% <li>`Topic': The string topic, for example `<<"users:123">>'.</li>
%% </ul>
%%
%% Example:
%%
%% ```
> ebus : subscribers(ebus_ps , < < " foo " > > ) .
[ < 0.48.0 > , < 0.49.0 > ]
%% '''
%% @end
-spec subscribers(atom(), topic()) -> [pid()].
subscribers(Server, Topic) ->
BinTopic = ebus_common:to_bin(Topic),
%% Ask every node in the cluster for its local subscribers; failed nodes
%% (second element of the multicall result) are ignored. lists:merge/1
%% assumes each per-node list is sorted.
{ResL, _} = rpc:multicall(?MODULE, local_subscribers, [Server, BinTopic]),
lists:merge(ResL).
%% @equiv local_subscribers(server(), Topic)
local_subscribers(Topic) ->
local_subscribers(server(), Topic).
%% @doc
%% Same as `subscribers/2' but only local subscribers handlers for the
%% given `Topic' are returned.
%%
%% Example:
%%
%% ```
%% > ebus:local_subscribers(ebus_ps, <<"foo">>).
[ < 0.48.0 > , < 0.49.0 > ]
%% '''
%% @end
-spec local_subscribers(atom(), topic()) -> [pid()].
local_subscribers(Server, Topic) ->
ebus_ps:subscribers(Server, ebus_common:to_bin(Topic)).
%% @equiv topics(server())
topics() ->
topics(server()).
%% @doc
%% Returns the list of all topics (local and global) in use.
%% This is an expensive and private operation.
%%
%% <p>This is an expensive operation. <b> DO NOT USE IT IN PROD</b></p>
%%
%% Example:
%%
%% ```
%% > ebus:topics().
%% [<<"foo">>, <<"bar">>]
%% > ebus:topics(ebus_ps).
%% [<<"foo">>, <<"bar">>]
%% '''
%% @end
-spec topics(atom(), ) -> [binary()].
topics(Server) ->
%% Gather topics from every node; usort deduplicates topics that exist
%% on more than one node.
{ResL, _} = rpc:multicall(?MODULE, local_topics, [Server]),
lists:usort(lists:merge(ResL)).
%% @equiv local_topics(server())
local_topics() ->
local_topics(server()).
%% @doc
%% Same as `topics/1' but only local topics are returned.
%%
%% Example:
%%
%% ```
> ebus : ( ) .
%% [<<"foo">>, <<"bar">>]
> ebus : local_topics(ebus_ps ) .
%% [<<"foo">>, <<"bar">>]
%% '''
%% @end
-spec local_topics(atom()) -> [binary()].
local_topics(Server) ->
ebus_ps:list(Server).
%% @equiv dispatch(Topic, Message, [])
dispatch(Topic, Message) ->
dispatch(Topic, Message, []).
%% @equiv dispatch(server(), Topic, Message, Opts)
dispatch(Topic, Message, Opts) ->
dispatch(server(), Topic, Message, Opts).
%% @doc
Sends a message only to one subscriber handler of the ` Topic ' .
%%
%% <ul>
%% <li>`Server': The registered server name or pid.</li>
%% <li>`Topic': The string topic, for example `<<"users:123">>'.</li>
%% <li>`Message': Any erlang term.</li>
%% <li>`Opts': The optional list of options. See below.</li>
%% </ul>
%%
%% <b>Options:</b>
%% <br/>
%% <ul>
%% <li>`{scope, local | global}': define if the message must be delivered
%% to a local or global (any) process. Default is `local'.</li>
%% <li>`{dispatch_fun, dispatch_fun()}': allows to pass a function to
%% choose a subscriber from the current subscribers handlers to a topic.</li>
%% </ul>
%%
%% Examples:
%%
%% ```
%% > ebus:dispatch("bar", #{topic => "foo", payload => "hi"}).
%% ok
%% > ebus:dispatch("bar", #{topic => "foo", payload => "hi"}, []).
%% ok
%% > ebus:dispatch(ebus_ps, "bar", "my message",
%% [{scope, global}, {dispatch_fun, fun([H | _]) -> H end}]).
%% ok
%% '''
%% @end
%% Implementation notes: `scope' picks the candidate set (node-local by
%% default, cluster-wide otherwise); `dispatch_fun' picks one target among
%% the candidates (a random subscriber by default). Throws
%% `no_subscribers_available' when there is no candidate.
-spec dispatch(atom(), topic(), term(), dispatch_opts()) -> ok.
dispatch(Server, Topic, Message, Opts) ->
  BinTopic = ebus_common:to_bin(Topic),
  Subscribers = case ebus_common:keyfind(scope, Opts, local) of
    local -> local_subscribers(Server, BinTopic);
    _     -> subscribers(Server, BinTopic)
  end,
  %% keyfind/3 with a default keeps this lookup consistent with the
  %% `scope' lookup above (the previous nil-check did the same thing).
  DispatchFun = ebus_common:keyfind(dispatch_fun, Opts, fun ebus_common:rand_elem/1),
  case Subscribers of
    [] -> throw(no_subscribers_available);
    _  -> DispatchFun(Subscribers) ! Message, ok
  end.
%%%===================================================================
%%% Application callbacks and functions
%%%===================================================================
%% @doc Starts `ebus' application.
-spec start() -> {ok, _} | {error, term()}.
start() -> application:ensure_all_started(ebus).
%% @doc Stops `ebus' application.
-spec stop() -> ok | {error, term()}.
stop() -> application:stop(ebus).
%% @hidden
start(_StartType, _StartArgs) -> ebus_sup:start_link().
%% @hidden
stop(_State) -> ok.
%%%===================================================================
Utilities
%%%===================================================================
%% @doc Returns the registered `ebus' server name.
-spec server() -> atom().
server() ->
%% `pubsub' options come from the `ebus' application environment; the
%% `name' option, when present, overrides the default server name.
PubSub = application:get_env(ebus, pubsub, []),
ebus_common:keyfind(name, PubSub, default_ps_server()).
%% @doc Returns default `ebus' server name: `ebus_ps'.
-spec default_ps_server() -> ebus_ps.
default_ps_server() -> ebus_ps.
| null | https://raw.githubusercontent.com/cabol/erlbus/050cb728ef09a0ad51c9297281602e362b5e233d/src/ebus.erl | erlang | -------------------------------------------------------------------
@doc
Main `ebus` interface. This module also works as a wrapper on top
of `ebus_ps' module.
@end
-------------------------------------------------------------------
Application callbacks and functions
===================================================================
Types
===================================================================
Receives as argument the subscribers list. Then it should choose
Available dispatch options.
Exported types
===================================================================
===================================================================
@equiv sub(server(), Handler, Topic)
@equiv sub(Server, Handler, Topic, [])
@doc
Subscribes the `Handler' given `Topic'.
<ul>
<li>`Handler': The subscriber pid to receive pubsub messages.</li>
<li>`Topic': The topic to subscribe to, ie: `"users:123"'.</li>
<li>`Opts': The optional list of options. See below.</li>
</ul>
<b>Options:</b>
<br/>
<ul>
<li>`{fastlane, ebus_ps_local:fastlane()}': Provides a fastlane path
for the broadcasts for `broadcast()' events. The fastlane process is
notified of a cached message instead of the normal subscriber.
`broadcast()' struct and returns a fastlaned format for the handler.</li>
</ul>
Examples:
```
> ebus:sub(self(), <<"foo">>).
ok
ok
ok
ok
'''
@end
@equiv unsub(server(), Handler, Topic)
@doc
<ul>
<li>`Server': The registered server name or pid.</li>
<li>`Handler': The subscriber pid.</li>
<li>`Topic': The string topic, for example `<<"users:123">>'.</li>
</ul>
Example:
```
> ebus:unsub(self(), <<"foo">>).
ok
> ebus:unsub(ebus_ps, self(), <<"foo">>).
ok
'''
@end
@equiv pub(server(), Topic, Message)
@doc
Sends a message to all subscribers of a topic.
<ul>
<li>`Server': The registered server name or pid.</li>
<li>`Topic': The string topic, for example `<<"users:123">>'.</li>
<li>`Message': Any erlang term.</li>
</ul>
Examples:
```
ok
ok
'''
@end
@equiv pub_from(server(), From, Topic, Message)
@doc
Examples:
```
> ebus:pub_from(self(), "foo", <<"message">>).
ok
> ebus:pub_from(ebus_ps, self(), "foo", <<"message">>).
ok
'''
@end
@equiv subscribers(server(), Topic)
@doc
Returns a set of all subscribers handlers (local and global)
for the given `Topic'.
<ul>
<li>`Server': The registered server name or pid.</li>
<li>`Topic': The string topic, for example `<<"users:123">>'.</li>
</ul>
Example:
```
'''
@end
@equiv local_subscribers(server(), Topic)
@doc
Same as `subscribers/2' but only local subscribers handlers for the
given `Topic' are returned.
Example:
```
> ebus:local_subscribers(ebus_ps, <<"foo">>).
'''
@end
@equiv topics(server())
@doc
Returns the list of all topics (local and global) in use.
This is an expensive and private operation.
<p>This is an expensive operation. <b> DO NOT USE IT IN PROD</b></p>
Example:
```
> ebus:topics().
[<<"foo">>, <<"bar">>]
> ebus:topics(ebus_ps).
[<<"foo">>, <<"bar">>]
'''
@end
@equiv local_topics(server())
@doc
Same as `topics/1' but only local topics are returned.
Example:
```
[<<"foo">>, <<"bar">>]
[<<"foo">>, <<"bar">>]
'''
@end
@equiv dispatch(Topic, Message, [])
@doc
<ul>
<li>`Server': The registered server name or pid.</li>
<li>`Topic': The string topic, for example `<<"users:123">>'.</li>
<li>`Message': Any erlang term.</li>
<li>`Opts': The optional list of options. See below.</li>
</ul>
<b>Options:</b>
<br/>
<ul>
<li>`{scope, local | global}': define if the message must be delivered
to a local or global (any) process. Default is `local'.</li>
<li>`{dispatch_fun, dispatch_fun()}': allows to pass a function to
choose a subscriber from the current subscribers handlers to a topic.</li>
</ul>
Examples:
```
> ebus:dispatch("bar", #{topic => "foo", payload => "hi"}).
ok
> ebus:dispatch("bar", #{topic => "foo", payload => "hi"}, []).
ok
> ebus:dispatch(ebus_ps, "bar", "my message",
[{scope, global}, {dispatch_fun, fun([H | _]) -> H end}]).
ok
'''
@end
===================================================================
Application callbacks and functions
===================================================================
@doc Starts `ebus' application.
@doc Stops `ebus' application.
@hidden
@hidden
===================================================================
===================================================================
@doc Returns the registered `ebus' server name.
@doc Returns default `ebus' server name: `ebus_ps'. | @see
-module(ebus).
-behaviour(application).
PubSub API
-export([sub/2, sub/3, sub/4]).
-export([unsub/2, unsub/3]).
-export([pub/2, pub/3, pub_from/3, pub_from/4]).
-export([subscribers/1, subscribers/2]).
-export([local_subscribers/1, local_subscribers/2]).
-export([topics/0, topics/1, local_topics/0, local_topics/1]).
-export([dispatch/2, dispatch/3, dispatch/4]).
-export([start/0, stop/0]).
-export([start/2, stop/1]).
Utilities
-export([server/0, default_ps_server/0]).
-type topic() :: iodata().
-type handler() :: pid().
one subscriber from the given list and return it .
-type dispatch_fun() :: fun(([term()]) -> term()).
-type dispatch_opt() :: {scope, local | global} |
{dispatch_fun, dispatch_fun()}.
Dispatch options .
-type dispatch_opts() :: [dispatch_opt()].
` sub/4 ' options .
-type options() :: ebus_ps_local:options().
-export_type([
topic/0,
handler/0,
dispatch_fun/0,
dispatch_opts/0,
options/0
]).
PubSub API
sub(Handler, Topic) ->
sub(server(), Handler, Topic).
sub(Server, Handler, Topic) ->
sub(Server, Handler, Topic, []).
< li>`Server ' : The Pid registered name of the server.</li >
< li>`{link , _ } ' : links the subscriber to the pubsub adapter.</li >
Fastlane handlers must implement ` fastlane/1 ' callbacks which accepts a
> ebus : , self ( ) , < < " foo " > > ) .
> ebus : , self ( ) , < < " foo " > > , [ ] ) .
> ebus : , self ( ) , < < " foo " > > ,
[ { fastlane , { FastPid , , [ < < " event1 " > > ] } ] ) .
-spec sub(atom(), handler(), topic(), options()) -> ok | {error, term()}.
sub(Server, Handler, Topic, Opts) ->
ebus_ps:subscribe(Server, Handler, ebus_common:to_bin(Topic), Opts).
unsub(Handler, Topic) ->
unsub(server(), Handler, Topic).
the given ` Handler ' from the ` Topic ' .
-spec unsub(atom(), handler(), topic()) -> ok | {error, term()}.
unsub(Server, Handler, Topic) ->
ebus_ps:unsubscribe(Server, Handler, ebus_common:to_bin(Topic)).
pub(Topic, Message) ->
pub(server(), Topic, Message).
> ebus : pub("bar " , # { topic = > " foo " , payload = > " hi " } ) .
> ebus : , " bar " , # { topic = > " foo " , payload = > " hi " } ) .
-spec pub(atom(), topic(), term()) -> ok | {error, term()}.
pub(Server, Topic, Message) ->
ebus_ps:broadcast(Server, ebus_common:to_bin(Topic), Message).
pub_from(From, Topic, Message) ->
pub_from(server(), From, Topic, Message).
Same as ` pub/3 ' but message is not sent to ` FromHandler ' .
-spec pub_from(atom(), handler(), topic(), term()) -> ok | {error, term()}.
pub_from(Server, FromHandler, Topic, Message) ->
BinTopic = ebus_common:to_bin(Topic),
ebus_ps:broadcast_from(Server, FromHandler, BinTopic, Message).
subscribers(Topic) ->
subscribers(server(), Topic).
> ebus : subscribers(ebus_ps , < < " foo " > > ) .
[ < 0.48.0 > , < 0.49.0 > ]
-spec subscribers(atom(), topic()) -> [pid()].
subscribers(Server, Topic) ->
BinTopic = ebus_common:to_bin(Topic),
{ResL, _} = rpc:multicall(?MODULE, local_subscribers, [Server, BinTopic]),
lists:merge(ResL).
local_subscribers(Topic) ->
local_subscribers(server(), Topic).
[ < 0.48.0 > , < 0.49.0 > ]
-spec local_subscribers(atom(), topic()) -> [pid()].
local_subscribers(Server, Topic) ->
ebus_ps:subscribers(Server, ebus_common:to_bin(Topic)).
topics() ->
topics(server()).
-spec topics(atom()) -> [binary()].
topics(Server) ->
{ResL, _} = rpc:multicall(?MODULE, local_topics, [Server]),
lists:usort(lists:merge(ResL)).
local_topics() ->
local_topics(server()).
> ebus : ( ) .
> ebus : local_topics(ebus_ps ) .
-spec local_topics(atom()) -> [binary()].
local_topics(Server) ->
ebus_ps:list(Server).
dispatch(Topic, Message) ->
dispatch(Topic, Message, []).
@equiv dispatch(server ( ) , Topic , Message , )
dispatch(Topic, Message, Opts) ->
dispatch(server(), Topic, Message, Opts).
Sends a message only to one subscriber handler of the ` Topic ' .
-spec dispatch(atom(), topic(), term(), dispatch_opts()) -> ok.
dispatch(Server, Topic, Message, Opts) ->
BinTopic = ebus_common:to_bin(Topic),
Subscribers = case ebus_common:keyfind(scope, Opts, local) of
local -> local_subscribers(Server, BinTopic);
_ -> subscribers(Server, BinTopic)
end,
DispatchFun = case ebus_common:keyfind(dispatch_fun, Opts) of
nil -> fun ebus_common:rand_elem/1;
Fun -> Fun
end,
case Subscribers of
[] -> throw(no_subscribers_available);
_ -> DispatchFun(Subscribers) ! Message, ok
end.
-spec start() -> {ok, _} | {error, term()}.
start() -> application:ensure_all_started(ebus).
-spec stop() -> ok | {error, term()}.
stop() -> application:stop(ebus).
start(_StartType, _StartArgs) -> ebus_sup:start_link().
stop(_State) -> ok.
Utilities
-spec server() -> atom().
server() ->
PubSub = application:get_env(ebus, pubsub, []),
ebus_common:keyfind(name, PubSub, default_ps_server()).
-spec default_ps_server() -> ebus_ps.
default_ps_server() -> ebus_ps.
|
cfff7a2e0cd9d88bc953f605459ca9f7a1f86862300755d9ffe250677a1dddc8 | dwayne/haskell-programming | more-structure.hs | 16.13 More structure , more functors
data Wrap f a
= Wrap (f a)
deriving (Eq, Show)
-- This looks so strange to me
--
-- But let's try to break it down
-- f and a are types
-- (f a) has to mean that f :: * -> * and a :: *
--
-- So, Wrap Maybe Int is a possible type
--
let a = 1 : : Int
-- let x = Wrap (Just a) :: Wrap Maybe Int
instance Functor f => Functor (Wrap f) where
-- f :: a -> b
-- fa :: f a
--
Hence , we need to fmap f over . But , that would mean
-- we need f to be a Functor.
--
-- Thus,
fmap f (Wrap fa) = Wrap (fmap f fa)
| null | https://raw.githubusercontent.com/dwayne/haskell-programming/d08679e76cfd39985fa2ee3cd89d55c9aedfb531/ch16/more-structure.hs | haskell | This looks so strange to me
But let's try to break it down
f and a are types
(f a) has to mean that f :: * -> * and a :: *
So, Wrap Maybe Int is a possible type
let x = Wrap (Just a) :: Wrap Maybe Int
f :: a -> b
fa :: f a
we need f to be a Functor.
Thus, | 16.13 More structure , more functors
data Wrap f a
= Wrap (f a)
deriving (Eq, Show)
let a = 1 : : Int
instance Functor f => Functor (Wrap f) where
Hence , we need to fmap f over . But , that would mean
fmap f (Wrap fa) = Wrap (fmap f fa)
|
849840f50bd6dd968560321fddc38cf2884c4a2e2a2ac0dc9d0ee6425d37d724 | dparis/gen-phzr | point_proxy.cljs | (ns phzr.physics.p2.point-proxy
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->PointProxy
"A PointProxy is an internal class that allows for direct getter/setter style property access to Arrays and TypedArrays.
Parameters:
* world (Phaser.Physics.P2) - A reference to the P2 World.
* destination (any) - The object to bind to."
([world destination]
(js/Phaser.Physics.P2.PointProxy. (clj->phaser world)
(clj->phaser destination))))
| null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/physics/p2/point_proxy.cljs | clojure | (ns phzr.physics.p2.point-proxy
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->PointProxy
"A PointProxy is an internal class that allows for direct getter/setter style property access to Arrays and TypedArrays.
Parameters:
* world (Phaser.Physics.P2) - A reference to the P2 World.
* destination (any) - The object to bind to."
([world destination]
(js/Phaser.Physics.P2.PointProxy. (clj->phaser world)
(clj->phaser destination))))
| |
8407f71efdd3ee6ed746292f026960d76c5d52834f011dd8d95ebb10fab52e11 | jonatack/cl-kraken | asset-pairs.lisp | ;;;; cl-kraken/tests/asset-pairs.lisp
(defpackage #:cl-kraken/tests/asset-pairs
(:use #:cl #:cl-kraken #:rove)
(:import-from #:cl-kraken/tests/kraken-public-data
#:*all-pairs*
#:*xbteur-pair*
#:*xbtusd-and-xmreur-pairs*
#:*raw-pairs*))
(in-package #:cl-kraken/tests/asset-pairs)
(deftest asset-pairs
(testing "with no argument passed, evaluates to all asset pairs"
(ok (equal (cl-kraken:asset-pairs) *all-pairs*)))
;; Test PAIR parameter.
(testing "when passed \"XBTEUR\", evaluates to XBTEUR pair"
(ok (equal (cl-kraken:asset-pairs :pair "XBTEUR") *xbteur-pair*)))
(testing "when passed \"xbteur\", evaluates to XBTEUR pair"
(ok (equal (cl-kraken:asset-pairs :pair "xbteur") *xbteur-pair*)))
(testing "when passed \"xbtusd,xmreur\", evaluates to XBTUSD+XMREUR pairs"
(ok (equal (cl-kraken:asset-pairs :pair "xbtusd,xmreur")
*xbtusd-and-xmreur-pairs*)))
(testing "when passed \" XBTusd , xmrEUR \" evaluates to XBTUSD+XMREUR pairs"
(ok (equal (cl-kraken:asset-pairs :pair " XBTusd , xmrEUR ")
*xbtusd-and-xmreur-pairs*)))
(testing "when passed an invalid PAIR, evaluates to unknown asset pair error"
(ok (equal (cl-kraken:asset-pairs :pair "abc")
'(:OBJ ("error" "EQuery:Unknown asset pair")))))
(testing "when passed an empty PAIR, evaluates to unknown asset pair error"
(ok (equal (cl-kraken:asset-pairs :pair "")
'(:OBJ ("error" "EQuery:Unknown asset pair")))))
(testing "when passed a symbol PAIR, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :pair 'xbtusd) 'type-error)
"The value of PAIR is XBTUSD, which is not of type (OR STRING NULL)."))
(testing "when passed a keyword PAIR, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :pair :xbtusd) 'type-error)
"The value of PAIR is :XBTUSD, which is not of type (OR STRING NULL)."))
;; Test RAW parameter.
(testing "when passed RAW T, evaluates to the raw response string"
(let ((response (cl-kraken:asset-pairs :pair "xbtusd, xbteur" :raw t)))
(ok (stringp response))
(ok (string= response *raw-pairs*))))
(testing "when passed RAW NIL, evaluates as if no RAW argument was passed"
(ok (equal (cl-kraken:asset-pairs :pair "xbteur" :raw nil) *xbteur-pair*)))
;; Test invalid RAW values.
(testing "when passed a string RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw "1") 'type-error)
"The value of RAW is \"1\", which is not of type (MEMBER T NIL)."))
(testing "when passed a symbol RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw 'a) 'type-error)
"The value of RAW is 'a, which is not of type (MEMBER T NIL)."))
(testing "when passed a keyword RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw :1) 'type-error)
"The value of RAW is :|1|, which is not of type (MEMBER T NIL).")))
| null | https://raw.githubusercontent.com/jonatack/cl-kraken/e5b438eb821bf7dc3514a44ff84f15f393b4e393/tests/asset-pairs.lisp | lisp | cl-kraken/tests/asset-pairs.lisp
Test PAIR parameter.
Test RAW parameter.
Test invalid RAW values. |
(defpackage #:cl-kraken/tests/asset-pairs
(:use #:cl #:cl-kraken #:rove)
(:import-from #:cl-kraken/tests/kraken-public-data
#:*all-pairs*
#:*xbteur-pair*
#:*xbtusd-and-xmreur-pairs*
#:*raw-pairs*))
(in-package #:cl-kraken/tests/asset-pairs)
(deftest asset-pairs
(testing "with no argument passed, evaluates to all asset pairs"
(ok (equal (cl-kraken:asset-pairs) *all-pairs*)))
(testing "when passed \"XBTEUR\", evaluates to XBTEUR pair"
(ok (equal (cl-kraken:asset-pairs :pair "XBTEUR") *xbteur-pair*)))
(testing "when passed \"xbteur\", evaluates to XBTEUR pair"
(ok (equal (cl-kraken:asset-pairs :pair "xbteur") *xbteur-pair*)))
(testing "when passed \"xbtusd,xmreur\", evaluates to XBTUSD+XMREUR pairs"
(ok (equal (cl-kraken:asset-pairs :pair "xbtusd,xmreur")
*xbtusd-and-xmreur-pairs*)))
(testing "when passed \" XBTusd , xmrEUR \" evaluates to XBTUSD+XMREUR pairs"
(ok (equal (cl-kraken:asset-pairs :pair " XBTusd , xmrEUR ")
*xbtusd-and-xmreur-pairs*)))
(testing "when passed an invalid PAIR, evaluates to unknown asset pair error"
(ok (equal (cl-kraken:asset-pairs :pair "abc")
'(:OBJ ("error" "EQuery:Unknown asset pair")))))
(testing "when passed an empty PAIR, evaluates to unknown asset pair error"
(ok (equal (cl-kraken:asset-pairs :pair "")
'(:OBJ ("error" "EQuery:Unknown asset pair")))))
(testing "when passed a symbol PAIR, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :pair 'xbtusd) 'type-error)
"The value of PAIR is XBTUSD, which is not of type (OR STRING NULL)."))
(testing "when passed a keyword PAIR, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :pair :xbtusd) 'type-error)
"The value of PAIR is :XBTUSD, which is not of type (OR STRING NULL)."))
(testing "when passed RAW T, evaluates to the raw response string"
(let ((response (cl-kraken:asset-pairs :pair "xbtusd, xbteur" :raw t)))
(ok (stringp response))
(ok (string= response *raw-pairs*))))
(testing "when passed RAW NIL, evaluates as if no RAW argument was passed"
(ok (equal (cl-kraken:asset-pairs :pair "xbteur" :raw nil) *xbteur-pair*)))
(testing "when passed a string RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw "1") 'type-error)
"The value of RAW is \"1\", which is not of type (MEMBER T NIL)."))
(testing "when passed a symbol RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw 'a) 'type-error)
"The value of RAW is 'a, which is not of type (MEMBER T NIL)."))
(testing "when passed a keyword RAW, a type error is signaled"
(ok (signals (cl-kraken:asset-pairs :raw :1) 'type-error)
"The value of RAW is :|1|, which is not of type (MEMBER T NIL).")))
|
58ef5ff4f9040783cdbf22e4f57bb783125cf41a5502eebc686e562b8a91b793 | tdammers/ginger | Spec.hs | module Main where
import Test.Tasty
import Text.Ginger.SimulationTests (simulationTests)
import Text.Ginger.PropertyTests (propertyTests)
main = defaultMain allTests
allTests :: TestTree
allTests =
testGroup "All Tests"
[ simulationTests
, propertyTests
]
| null | https://raw.githubusercontent.com/tdammers/ginger/bd8cb39c1853d4fb4f663c4c201884575906acea/test/Spec.hs | haskell | module Main where
import Test.Tasty
import Text.Ginger.SimulationTests (simulationTests)
import Text.Ginger.PropertyTests (propertyTests)
main = defaultMain allTests
allTests :: TestTree
allTests =
testGroup "All Tests"
[ simulationTests
, propertyTests
]
| |
2b74bed5295a349021fa3a0e3ee035adb83b06244e93fe9e284eccda7776f0b4 | cartazio/language-c | Node.hs | {-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.Syntax.Attributes
Copyright : ( c ) [ 1995 .. 1999 ]
( c ) 2008 ( stripped radically )
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
Portability : ghc
--
source position and unqiue name
-----------------------------------------------------------------------------
module Language.C.Data.Node (
NodeInfo(..), undefNode, isUndefNode,
mkNodeInfoOnlyPos,mkNodeInfoPosLen, mkNodeInfo,mkNodeInfo',
internalNode, -- deprecated, use undefNode
CNode(nodeInfo), fileOfNode,
posOfNode, nameOfNode, getLastTokenPos, lengthOfNode,
eqByName,
) where
import Language.C.Data.Position
import Language.C.Data.Name (Name)
import Data.Generics
| entity attribute
data NodeInfo = OnlyPos Position {-# UNPACK #-} !PosLength -- only pos and last token (for internal stuff only)
| NodeInfo Position {-# UNPACK #-} !PosLength !Name -- pos, last token and unique name
deriving (Data,Typeable)
instance Show NodeInfo where
showsPrec d (OnlyPos p l) =
(showString "(OnlyPos ") . (showsPrec d p) . (showString " ") . (showsPrec d l) . (showString ")")
showsPrec d (NodeInfo p l n) =
(showString "(NodeInfo ") . (showsPrec d p) . (showString " ") . (showsPrec d l) . (showString " ") . (showsPrec d n) . (showString ")")
-- name equality of attributes, used to define (name) equality of objects
instance Eq NodeInfo where
(NodeInfo _ _ id1) == (NodeInfo _ _ id2) = id1 == id2
_ == _ =
error "Attributes: Attempt to compare `OnlyPos' attributes!"
-- attribute ordering
instance Ord NodeInfo where
(NodeInfo _ _ id1) <= (NodeInfo _ _ id2) = id1 <= id2
_ <= _ =
error "Attributes: Attempt to compare `OnlyPos' attributes!"
instance Pos NodeInfo where
posOf (OnlyPos pos _) = pos
posOf (NodeInfo pos _ _) = pos
-- | get the number of characters an AST node spans
lengthOfNode :: NodeInfo -> Maybe Int
lengthOfNode ni = len
where
len = case ni of NodeInfo firstPos lastTok _ -> computeLength firstPos lastTok
OnlyPos firstPos lastTok -> computeLength firstPos lastTok
computeLength pos (lastPos,len) | len < 0 = Nothing
| otherwise = Just (posOffset lastPos + len - posOffset pos)
-- | get the position and length of the last token
getLastTokenPos :: NodeInfo -> PosLength
getLastTokenPos (NodeInfo _ lastTok _) = lastTok
getLastTokenPos (OnlyPos _ lastTok) = lastTok
-- | a class for convenient access to the attributes of an attributed object
class CNode a where
nodeInfo :: a -> NodeInfo
instance CNode NodeInfo where
nodeInfo = id
instance (CNode a, CNode b) => CNode (Either a b) where
nodeInfo = either nodeInfo nodeInfo
nameOfNode :: NodeInfo -> Maybe Name
nameOfNode (OnlyPos _ _) = Nothing
nameOfNode (NodeInfo _ _ name) = Just name
posOfNode :: NodeInfo -> Position
posOfNode ni = case ni of (OnlyPos pos _) -> pos; (NodeInfo pos _ _) -> pos
fileOfNode :: (CNode a) => a -> Maybe FilePath
fileOfNode = fmap posFile . justIf isSourcePos . posOfNode . nodeInfo where
justIf predicate x | predicate x = Just x
| otherwise = Nothing
-- | equality by name
eqByName :: CNode a => a -> a -> Bool
eqByName obj1 obj2 = (nodeInfo obj1) == (nodeInfo obj2)
-- attribute identifier creation
-- -----------------------------
{-# DEPRECATED internalNode "use undefNode instead" #-}
internalNode :: NodeInfo
internalNode = undefNode
-- | create a node with neither name nor positional information
undefNode :: NodeInfo
undefNode = OnlyPos nopos (nopos,-1)
-- | return True if the node carries neither name nor positional information
isUndefNode :: NodeInfo -> Bool
isUndefNode (OnlyPos p _) | isNoPos p = True
| otherwise = False
isUndefNode _ = False
-- |
-- | Given only a source position, create a new node attribute
mkNodeInfoOnlyPos :: Position -> NodeInfo
mkNodeInfoOnlyPos pos = OnlyPos pos (nopos,-1)
-- | Given a source position and the position and length of the last token, create a new node attribute
mkNodeInfoPosLen :: Position -> PosLength -> NodeInfo
mkNodeInfoPosLen = OnlyPos
-- | Given a source position and a unique name, create a new attribute
-- identifier
mkNodeInfo :: Position -> Name -> NodeInfo
mkNodeInfo pos name = NodeInfo pos (nopos,-1) name
-- | Given a source position, the position and length of the last token and a unique name, create a new attribute
-- identifier. Strict in
mkNodeInfo' :: Position -> PosLength -> Name -> NodeInfo
mkNodeInfo' pos lasttok name = NodeInfo pos lasttok name
| null | https://raw.githubusercontent.com/cartazio/language-c/d003206a45baec4e2a2a85026d88bd225162d951/src/Language/C/Data/Node.hs | haskell | # LANGUAGE DeriveDataTypeable #
---------------------------------------------------------------------------
|
Module : Language.C.Syntax.Attributes
License : BSD-style
Maintainer :
Stability : experimental
---------------------------------------------------------------------------
deprecated, use undefNode
# UNPACK #
only pos and last token (for internal stuff only)
# UNPACK #
pos, last token and unique name
name equality of attributes, used to define (name) equality of objects
attribute ordering
| get the number of characters an AST node spans
| get the position and length of the last token
| a class for convenient access to the attributes of an attributed object
| equality by name
attribute identifier creation
-----------------------------
# DEPRECATED internalNode "use undefNode instead" #
| create a node with neither name nor positional information
| return True if the node carries neither name nor positional information
|
| Given only a source position, create a new node attribute
| Given a source position and the position and length of the last token, create a new node attribute
| Given a source position and a unique name, create a new attribute
identifier
| Given a source position, the position and length of the last token and a unique name, create a new attribute
identifier. Strict in | Copyright : ( c ) [ 1995 .. 1999 ]
( c ) 2008 ( stripped radically )
Portability : ghc
source position and unqiue name
module Language.C.Data.Node (
NodeInfo(..), undefNode, isUndefNode,
mkNodeInfoOnlyPos,mkNodeInfoPosLen, mkNodeInfo,mkNodeInfo',
CNode(nodeInfo), fileOfNode,
posOfNode, nameOfNode, getLastTokenPos, lengthOfNode,
eqByName,
) where
import Language.C.Data.Position
import Language.C.Data.Name (Name)
import Data.Generics
| entity attribute
deriving (Data,Typeable)
instance Show NodeInfo where
showsPrec d (OnlyPos p l) =
(showString "(OnlyPos ") . (showsPrec d p) . (showString " ") . (showsPrec d l) . (showString ")")
showsPrec d (NodeInfo p l n) =
(showString "(NodeInfo ") . (showsPrec d p) . (showString " ") . (showsPrec d l) . (showString " ") . (showsPrec d n) . (showString ")")
instance Eq NodeInfo where
(NodeInfo _ _ id1) == (NodeInfo _ _ id2) = id1 == id2
_ == _ =
error "Attributes: Attempt to compare `OnlyPos' attributes!"
instance Ord NodeInfo where
(NodeInfo _ _ id1) <= (NodeInfo _ _ id2) = id1 <= id2
_ <= _ =
error "Attributes: Attempt to compare `OnlyPos' attributes!"
instance Pos NodeInfo where
posOf (OnlyPos pos _) = pos
posOf (NodeInfo pos _ _) = pos
lengthOfNode :: NodeInfo -> Maybe Int
lengthOfNode ni = len
where
len = case ni of NodeInfo firstPos lastTok _ -> computeLength firstPos lastTok
OnlyPos firstPos lastTok -> computeLength firstPos lastTok
computeLength pos (lastPos,len) | len < 0 = Nothing
| otherwise = Just (posOffset lastPos + len - posOffset pos)
getLastTokenPos :: NodeInfo -> PosLength
getLastTokenPos (NodeInfo _ lastTok _) = lastTok
getLastTokenPos (OnlyPos _ lastTok) = lastTok
class CNode a where
nodeInfo :: a -> NodeInfo
instance CNode NodeInfo where
nodeInfo = id
instance (CNode a, CNode b) => CNode (Either a b) where
nodeInfo = either nodeInfo nodeInfo
nameOfNode :: NodeInfo -> Maybe Name
nameOfNode (OnlyPos _ _) = Nothing
nameOfNode (NodeInfo _ _ name) = Just name
posOfNode :: NodeInfo -> Position
posOfNode ni = case ni of (OnlyPos pos _) -> pos; (NodeInfo pos _ _) -> pos
fileOfNode :: (CNode a) => a -> Maybe FilePath
fileOfNode = fmap posFile . justIf isSourcePos . posOfNode . nodeInfo where
justIf predicate x | predicate x = Just x
| otherwise = Nothing
eqByName :: CNode a => a -> a -> Bool
eqByName obj1 obj2 = (nodeInfo obj1) == (nodeInfo obj2)
internalNode :: NodeInfo
internalNode = undefNode
undefNode :: NodeInfo
undefNode = OnlyPos nopos (nopos,-1)
isUndefNode :: NodeInfo -> Bool
isUndefNode (OnlyPos p _) | isNoPos p = True
| otherwise = False
isUndefNode _ = False
mkNodeInfoOnlyPos :: Position -> NodeInfo
mkNodeInfoOnlyPos pos = OnlyPos pos (nopos,-1)
mkNodeInfoPosLen :: Position -> PosLength -> NodeInfo
mkNodeInfoPosLen = OnlyPos
mkNodeInfo :: Position -> Name -> NodeInfo
mkNodeInfo pos name = NodeInfo pos (nopos,-1) name
mkNodeInfo' :: Position -> PosLength -> Name -> NodeInfo
mkNodeInfo' pos lasttok name = NodeInfo pos lasttok name
|
f26ebd0b59242c235ce4a6be1090f70789cbaac6fbb79b4036afe1303db4f90e | VERIMAG-Polyhedra/VPL | EqSet.ml | module Cs = Cstr.Rat
type 'c t = (Var.t * 'c Cons.t) list
let to_string: (Var.t -> string) -> 'c t -> string
= fun varPr e ->
List.fold_right
(fun (_, c) s -> s ^ (Cons.to_string varPr c) ^ "\n") e ""
let to_string_ext: 'c Factory.t -> (Var.t -> string) -> 'c t -> string
= fun factory varPr e->
List.fold_right
(fun (x, c) s -> Printf.sprintf "%s(%s, %s)\n"
s (varPr x) (Cons.to_string_ext factory varPr c)) e ""
type 'c rel_t =
| NoIncl
| Incl of 'c list
let nil : 'c t = []
let isTop (s : 'c t) = (s = [])
let list x = List.map Stdlib.snd x
let equal (s1 : 'c1 t) (s2 : 'c2 t) =
if List.length s1 = List.length s2 then
List.for_all2
(fun (x1 ,(c1,_)) (x2, (c2,_)) ->
x1 = x2 && Cs.inclSyn c1 c2)
s1 s2
else
false
L'ordre fold_right est important pour les réécritures
let filter : 'c Factory.t -> 'c t -> 'c Cons.t -> 'c Cons.t
= fun factory s c ->
let filter1 (x, (c2,cert2)) (c1,cert1) =
if Cs.Vec.Coeff.cmpz (Cs.Vec.get (Cs.get_v c1) x) = 0 then
(c1,cert1)
else
Cons.elimc factory x (c1,cert1) (c2,cert2)
in
List.fold_right filter1 s c
let filter2 : 'c Factory.t -> 'c t -> Cs.t -> Cs.t * 'c Cons.t
= fun factory s c ->
let filter1 (c_res,(cons_c, cons_cert)) (x, (c,cert)) =
let (c_res',(cstr_res',cert_res')) = Cons.elim factory x (c,cert) c_res in
(c_res', (Cs.add cstr_res' cons_c, factory.Factory.add cert_res' cons_cert))
in
List.fold_left filter1 (c, Cons.triv factory) s
let leq : 'c1 Factory.t -> 'c1 t -> 'c2 t -> 'c1 rel_t
= fun factory s1 s2 ->
if List.length s1 < List.length s2 then
NoIncl
else
let rec _incl certs =
function
| [] -> Incl certs
| (_, (c,_))::t ->
let (c2,(_,cert2)) = filter2 factory s1 c in
match Cs.tellProp c2 with
| Cs.Contrad | Cs.Nothing -> NoIncl
| Cs.Trivial ->
_incl (cert2::certs) t
in
_incl [] s2
let choose : Cs.t -> Var.t * Cs.Vec.Coeff.t
= fun c ->
match Rtree.findPred (fun n -> Cs.Vec.Coeff.cmpz n <> 0) (Cs.get_v c) with
| None -> failwith "EqSet.choose"
| Some (x, a) -> (x, a)
let rename factory s fromX toY =
let rename1 (x, c) =
((if x = fromX then toY else x), Cons.rename factory fromX toY c)
in
List.map rename1 s
let pick (msk : Var.t option Rtree.t) ((e,_) : 'c Cons.t) =
match Rtree.findPred2
(fun n1 n2 -> n1 <> None && not (Cs.Vec.Coeff.cmpz n2 = 0))
msk (Cs.get_v e)
with
| Some (_,Some n1,_) -> Some n1
| _ -> None
let rec subst : 'c Factory.t -> Var.t -> 'c Cons.t -> 'c t -> 'c t
= fun factory x e ->
function
| [] -> []
| (x1, (e1,cert1))::l1 ->
if Var.equal x1 x then
failwith "EqSet.subst"
else
let e2 =
if Cs.Vec.Coeff.cmpz (Cs.Vec.get (Cs.get_v e1) x) = 0 then
(e1,cert1)
else
Cons.elimc factory x e (e1,cert1)
in
(x1,e2) :: subst factory x e l1
let rec tryDefs : 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
= fun factory msk ->
function
| [] -> (None, [])
| (x, e)::l ->
if Rtree.get None msk x = None
then
let (def, l1) = tryDefs factory msk l in
(def, (x, e)::l1)
else
let l1 = subst factory x e l in
(Some (e, x), l1)
let trySubstM : 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
= fun factory msk l ->
let (def, l1) = tryDefs factory msk l in
if def = None then
let rec _trysubstm msk =
function
| [] -> (None, [])
| (x, e)::l ->
match pick msk e with
| None ->
let (def, l1) = _trysubstm msk l in
(def, (x, e)::l1)
| Some x ->
let l1 = subst factory x e l in
(Some (e, x), l1)
in
_trysubstm msk l
else
(def, l1)
let trySubst : 'c Factory.t -> Var.t -> 'c t -> 'c Cons.t option * 'c t
= fun factory x l ->
let msk = Rtree.set None Rtree.empty x (Some x) in
let (optx, s1) = trySubstM factory msk l in
match optx with
| None -> (None, s1)
| Some (e, _) -> (Some e, s1)
type 'c meetT =
| Added of 'c t
| Bot of 'c
(* XXX: doit on comparer les certificats? *)
let meetEq: 'c meetT -> 'c meetT -> bool
= fun ar ar' ->
match ar, ar' with
| Added e, Added e' -> equal e e'
| Bot _, Bot _ -> true
| Added _, Bot _
| Bot _, Added _ -> false
let meet_to_string : 'c Factory.t -> (Var.t -> string) -> 'c meetT -> string
= fun factory varPr -> function
| Added e ->
Printf.sprintf "Added %s" (to_string_ext factory varPr e)
| Bot f -> Printf.sprintf "Bot : %s" (factory.Factory.to_string f)
let addM: 'c Factory.t -> 'c t -> 'c Cons.t list -> 'c meetT
= fun factory s conss ->
let add : 'c Cons.t -> 'c meetT -> 'c meetT
= fun (c,cert) ->
function
| Bot _ as r -> r
| Added s ->
match Cs.get_typ c with
| Cstr_type.Le | Cstr_type.Lt -> failwith "EqSet.addM"
| Cstr_type.Eq ->
let (c1,cert1) = filter factory s (c,cert) in
match Cs.tellProp c1 with
| Cs.Trivial -> Added s
| Cs.Contrad -> Bot cert1
| Cs.Nothing ->
let (x, ax) = choose c1 in
let c2 = Cs.mulc (Cs.Vec.Coeff.inv ax) c1
and cert2 = factory.Factory.mul (Cs.Vec.Coeff.inv ax) cert1 in
(* rewritting of the rest of the equality set with the new one. *)
let s' = if !Flags.row_echelon_equalities
then subst factory x (c2,cert2) s
else s
in
Added ((x, (c2,cert2)) :: s')
in
List.fold_left (fun res c -> add c res) (Added s) conss
let add: 'c Factory.t -> 'c t -> 'c Cons.t -> 'c meetT
= fun factory s c ->
addM factory s [c]
let joinSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c1 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
= fun factory2 nxt relocTbl alpha s ->
let apply (x, c) (nxt1, relocTbl1, s1) =
let (nxt2, relocTbl2, c1) = Cons.joinSetup_1 factory2 nxt1 relocTbl1 alpha c in
let x1 = match Rtree.get None relocTbl2 x with
| None -> failwith "EqSet.joinSetup_1"
| Some x1 -> x1
in
(nxt2, relocTbl2, (x1, c1)::s1)
in
is necessary because order needs to be preserved ( echelon form )
List.fold_right apply s (nxt, relocTbl, nil)
let joinSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c2 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
= fun factory1 nxt relocTbl alpha s ->
let apply (x, c) (nxt1, relocTbl1, s1) =
let (nxt2, relocTbl2, c1) = Cons.joinSetup_2 factory1 nxt1 relocTbl1 alpha c in
let x1 = x in
(nxt2, relocTbl2, (x1, c1)::s1)
in
is necessary because order needs to be preserved ( echelon form )
List.fold_right apply s (nxt, relocTbl, nil)
let minkowskiSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c1 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
= fun factory2 nxt relocTbl s ->
let apply (x, c) (nxt1, relocTbl1, s1) =
let (nxt2, relocTbl2, c1) = Cons.minkowskiSetup_1 factory2 nxt1 relocTbl1 c in
let x1 = match Rtree.get None relocTbl2 x with
| None -> failwith "EqSet.minkowskiSetup_1"
| Some x1 -> x1
in
(nxt2, relocTbl2, (x1, c1)::s1)
in
is necessary because order needs to be preserved ( echelon form )
List.fold_right apply s (nxt, relocTbl, nil)
let minkowskiSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c2 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
= fun factory1 nxt relocTbl s ->
let apply (x, c) (nxt1, relocTbl1, s1) =
let (nxt2, relocTbl2, c1) = Cons.minkowskiSetup_2 factory1 nxt1 relocTbl1 c in
let x1 = x in
(nxt2, relocTbl2, (x1, c1)::s1)
in
is necessary because order needs to be preserved ( echelon form )
List.fold_right apply s (nxt, relocTbl, nil)
let satisfy : 'c t -> Cs.Vec.t -> bool
= fun eqs point ->
List.for_all
(fun (_,cons) ->
Cons.get_c cons
|> Cs.satisfy point)
eqs
| null | https://raw.githubusercontent.com/VERIMAG-Polyhedra/VPL/cd78d6e7d120508fd5a694bdb01300477e5646f8/ocaml/core/EqSet.ml | ocaml | XXX: doit on comparer les certificats?
rewritting of the rest of the equality set with the new one. | module Cs = Cstr.Rat
type 'c t = (Var.t * 'c Cons.t) list
let to_string: (Var.t -> string) -> 'c t -> string
= fun varPr e ->
List.fold_right
(fun (_, c) s -> s ^ (Cons.to_string varPr c) ^ "\n") e ""
let to_string_ext: 'c Factory.t -> (Var.t -> string) -> 'c t -> string
= fun factory varPr e->
List.fold_right
(fun (x, c) s -> Printf.sprintf "%s(%s, %s)\n"
s (varPr x) (Cons.to_string_ext factory varPr c)) e ""
type 'c rel_t =
| NoIncl
| Incl of 'c list
let nil : 'c t = []
let isTop (s : 'c t) = (s = [])
let list x = List.map Stdlib.snd x
let equal (s1 : 'c1 t) (s2 : 'c2 t) =
if List.length s1 = List.length s2 then
List.for_all2
(fun (x1 ,(c1,_)) (x2, (c2,_)) ->
x1 = x2 && Cs.inclSyn c1 c2)
s1 s2
else
false
L'ordre fold_right est important pour les réécritures
let filter : 'c Factory.t -> 'c t -> 'c Cons.t -> 'c Cons.t
= fun factory s c ->
let filter1 (x, (c2,cert2)) (c1,cert1) =
if Cs.Vec.Coeff.cmpz (Cs.Vec.get (Cs.get_v c1) x) = 0 then
(c1,cert1)
else
Cons.elimc factory x (c1,cert1) (c2,cert2)
in
List.fold_right filter1 s c
let filter2 : 'c Factory.t -> 'c t -> Cs.t -> Cs.t * 'c Cons.t
= fun factory s c ->
let filter1 (c_res,(cons_c, cons_cert)) (x, (c,cert)) =
let (c_res',(cstr_res',cert_res')) = Cons.elim factory x (c,cert) c_res in
(c_res', (Cs.add cstr_res' cons_c, factory.Factory.add cert_res' cons_cert))
in
List.fold_left filter1 (c, Cons.triv factory) s
let leq : 'c1 Factory.t -> 'c1 t -> 'c2 t -> 'c1 rel_t
= fun factory s1 s2 ->
if List.length s1 < List.length s2 then
NoIncl
else
let rec _incl certs =
function
| [] -> Incl certs
| (_, (c,_))::t ->
let (c2,(_,cert2)) = filter2 factory s1 c in
match Cs.tellProp c2 with
| Cs.Contrad | Cs.Nothing -> NoIncl
| Cs.Trivial ->
_incl (cert2::certs) t
in
_incl [] s2
let choose : Cs.t -> Var.t * Cs.Vec.Coeff.t
= fun c ->
match Rtree.findPred (fun n -> Cs.Vec.Coeff.cmpz n <> 0) (Cs.get_v c) with
| None -> failwith "EqSet.choose"
| Some (x, a) -> (x, a)
let rename factory s fromX toY =
let rename1 (x, c) =
((if x = fromX then toY else x), Cons.rename factory fromX toY c)
in
List.map rename1 s
let pick (msk : Var.t option Rtree.t) ((e,_) : 'c Cons.t) =
match Rtree.findPred2
(fun n1 n2 -> n1 <> None && not (Cs.Vec.Coeff.cmpz n2 = 0))
msk (Cs.get_v e)
with
| Some (_,Some n1,_) -> Some n1
| _ -> None
let rec subst : 'c Factory.t -> Var.t -> 'c Cons.t -> 'c t -> 'c t
= fun factory x e ->
function
| [] -> []
| (x1, (e1,cert1))::l1 ->
if Var.equal x1 x then
failwith "EqSet.subst"
else
let e2 =
if Cs.Vec.Coeff.cmpz (Cs.Vec.get (Cs.get_v e1) x) = 0 then
(e1,cert1)
else
Cons.elimc factory x e (e1,cert1)
in
(x1,e2) :: subst factory x e l1
let rec tryDefs : 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
= fun factory msk ->
function
| [] -> (None, [])
| (x, e)::l ->
if Rtree.get None msk x = None
then
let (def, l1) = tryDefs factory msk l in
(def, (x, e)::l1)
else
let l1 = subst factory x e l in
(Some (e, x), l1)
let trySubstM : 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
= fun factory msk l ->
let (def, l1) = tryDefs factory msk l in
if def = None then
let rec _trysubstm msk =
function
| [] -> (None, [])
| (x, e)::l ->
match pick msk e with
| None ->
let (def, l1) = _trysubstm msk l in
(def, (x, e)::l1)
| Some x ->
let l1 = subst factory x e l in
(Some (e, x), l1)
in
_trysubstm msk l
else
(def, l1)
let trySubst : 'c Factory.t -> Var.t -> 'c t -> 'c Cons.t option * 'c t
= fun factory x l ->
let msk = Rtree.set None Rtree.empty x (Some x) in
let (optx, s1) = trySubstM factory msk l in
match optx with
| None -> (None, s1)
| Some (e, _) -> (Some e, s1)
type 'c meetT =
| Added of 'c t
| Bot of 'c
let meetEq: 'c meetT -> 'c meetT -> bool
= fun ar ar' ->
match ar, ar' with
| Added e, Added e' -> equal e e'
| Bot _, Bot _ -> true
| Added _, Bot _
| Bot _, Added _ -> false
let meet_to_string : 'c Factory.t -> (Var.t -> string) -> 'c meetT -> string
= fun factory varPr -> function
| Added e ->
Printf.sprintf "Added %s" (to_string_ext factory varPr e)
| Bot f -> Printf.sprintf "Bot : %s" (factory.Factory.to_string f)
let addM: 'c Factory.t -> 'c t -> 'c Cons.t list -> 'c meetT
= fun factory s conss ->
let add : 'c Cons.t -> 'c meetT -> 'c meetT
= fun (c,cert) ->
function
| Bot _ as r -> r
| Added s ->
match Cs.get_typ c with
| Cstr_type.Le | Cstr_type.Lt -> failwith "EqSet.addM"
| Cstr_type.Eq ->
let (c1,cert1) = filter factory s (c,cert) in
match Cs.tellProp c1 with
| Cs.Trivial -> Added s
| Cs.Contrad -> Bot cert1
| Cs.Nothing ->
let (x, ax) = choose c1 in
let c2 = Cs.mulc (Cs.Vec.Coeff.inv ax) c1
and cert2 = factory.Factory.mul (Cs.Vec.Coeff.inv ax) cert1 in
let s' = if !Flags.row_echelon_equalities
then subst factory x (c2,cert2) s
else s
in
Added ((x, (c2,cert2)) :: s')
in
List.fold_left (fun res c -> add c res) (Added s) conss
let add: 'c Factory.t -> 'c t -> 'c Cons.t -> 'c meetT
= fun factory s c ->
addM factory s [c]
(* Join setup, first operand: every equality of [s] is rewritten through
   [Cons.joinSetup_1], threading the fresh-variable counter [nxt] and the
   relocation table, and its defining variable is renamed via the table.
   Fails if a variable has no relocation entry. *)
let joinSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c1 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
  = fun factory2 nxt relocTbl alpha s ->
  let apply (x, c) (nxt1, relocTbl1, s1) =
    let (nxt2, relocTbl2, c1) = Cons.joinSetup_1 factory2 nxt1 relocTbl1 alpha c in
    let x1 = match Rtree.get None relocTbl2 x with
      | None -> failwith "EqSet.joinSetup_1"
      | Some x1 -> x1
    in
    (nxt2, relocTbl2, (x1, c1)::s1)
  in
  (* fold_right is necessary because order needs to be preserved (echelon form) *)
  List.fold_right apply s (nxt, relocTbl, nil)
(* Join setup, second operand: like [joinSetup_1] but each defining variable
   is kept unchanged (no renaming through the relocation table). *)
let joinSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c2 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
  = fun factory1 nxt relocTbl alpha s ->
  let apply (x, c) (nxt1, relocTbl1, s1) =
    let (nxt2, relocTbl2, c1) = Cons.joinSetup_2 factory1 nxt1 relocTbl1 alpha c in
    let x1 = x in
    (nxt2, relocTbl2, (x1, c1)::s1)
  in
  (* fold_right is necessary because order needs to be preserved (echelon form) *)
  List.fold_right apply s (nxt, relocTbl, nil)
(* Minkowski-sum setup, first operand: rewrites each equality through
   [Cons.minkowskiSetup_1] and renames its defining variable via the
   relocation table; fails if a variable has no relocation entry. *)
let minkowskiSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c1 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
  = fun factory2 nxt relocTbl s ->
  let apply (x, c) (nxt1, relocTbl1, s1) =
    let (nxt2, relocTbl2, c1) = Cons.minkowskiSetup_1 factory2 nxt1 relocTbl1 c in
    let x1 = match Rtree.get None relocTbl2 x with
      | None -> failwith "EqSet.minkowskiSetup_1"
      | Some x1 -> x1
    in
    (nxt2, relocTbl2, (x1, c1)::s1)
  in
  (* fold_right is necessary because order needs to be preserved (echelon form) *)
  List.fold_right apply s (nxt, relocTbl, nil)
(* Minkowski-sum setup, second operand: like [minkowskiSetup_1] but defining
   variables are kept unchanged. *)
let minkowskiSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c2 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
  = fun factory1 nxt relocTbl s ->
  let apply (x, c) (nxt1, relocTbl1, s1) =
    let (nxt2, relocTbl2, c1) = Cons.minkowskiSetup_2 factory1 nxt1 relocTbl1 c in
    let x1 = x in
    (nxt2, relocTbl2, (x1, c1)::s1)
  in
  (* fold_right is necessary because order needs to be preserved (echelon form) *)
  List.fold_right apply s (nxt, relocTbl, nil)
(* A point satisfies the equality set iff it satisfies every stored
   constraint. *)
let satisfy : 'c t -> Cs.Vec.t -> bool
  = fun eqs point ->
  List.for_all (fun (_, cons) -> Cs.satisfy point (Cons.get_c cons)) eqs
|
e82f6719be31f3b703478be7dd4ec11d1fb567a6b3053756ed3b502023d79214 | naoto-ogawa/h-xproto-mysql | Parameter.hs | # LANGUAGE BangPatterns , DeriveDataTypeable , DeriveGeneric , FlexibleInstances , MultiParamTypeClasses #
# OPTIONS_GHC -fno - warn - unused - imports #
module Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter (Parameter(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Parameter = CURRENT_SCHEMA
| ACCOUNT_EXPIRED
| GENERATED_INSERT_ID
| ROWS_AFFECTED
| ROWS_FOUND
| ROWS_MATCHED
| TRX_COMMITTED
| TRX_ROLLEDBACK
| PRODUCED_MESSAGE
| CLIENT_ID_ASSIGNED
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data,
Prelude'.Generic)
instance P'.Mergeable Parameter
instance Prelude'.Bounded Parameter where
minBound = CURRENT_SCHEMA
maxBound = CLIENT_ID_ASSIGNED
instance P'.Default Parameter where
defaultValue = CURRENT_SCHEMA
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Parameter
toMaybe'Enum 1 = Prelude'.Just CURRENT_SCHEMA
toMaybe'Enum 2 = Prelude'.Just ACCOUNT_EXPIRED
toMaybe'Enum 3 = Prelude'.Just GENERATED_INSERT_ID
toMaybe'Enum 4 = Prelude'.Just ROWS_AFFECTED
toMaybe'Enum 5 = Prelude'.Just ROWS_FOUND
toMaybe'Enum 6 = Prelude'.Just ROWS_MATCHED
toMaybe'Enum 7 = Prelude'.Just TRX_COMMITTED
toMaybe'Enum 9 = Prelude'.Just TRX_ROLLEDBACK
toMaybe'Enum 10 = Prelude'.Just PRODUCED_MESSAGE
toMaybe'Enum 11 = Prelude'.Just CLIENT_ID_ASSIGNED
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Parameter where
fromEnum CURRENT_SCHEMA = 1
fromEnum ACCOUNT_EXPIRED = 2
fromEnum GENERATED_INSERT_ID = 3
fromEnum ROWS_AFFECTED = 4
fromEnum ROWS_FOUND = 5
fromEnum ROWS_MATCHED = 6
fromEnum TRX_COMMITTED = 7
fromEnum TRX_ROLLEDBACK = 9
fromEnum PRODUCED_MESSAGE = 10
fromEnum CLIENT_ID_ASSIGNED = 11
toEnum
= P'.fromMaybe
(Prelude'.error "hprotoc generated code: toEnum failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter")
. toMaybe'Enum
succ CURRENT_SCHEMA = ACCOUNT_EXPIRED
succ ACCOUNT_EXPIRED = GENERATED_INSERT_ID
succ GENERATED_INSERT_ID = ROWS_AFFECTED
succ ROWS_AFFECTED = ROWS_FOUND
succ ROWS_FOUND = ROWS_MATCHED
succ ROWS_MATCHED = TRX_COMMITTED
succ TRX_COMMITTED = TRX_ROLLEDBACK
succ TRX_ROLLEDBACK = PRODUCED_MESSAGE
succ PRODUCED_MESSAGE = CLIENT_ID_ASSIGNED
succ _ = Prelude'.error "hprotoc generated code: succ failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter"
pred ACCOUNT_EXPIRED = CURRENT_SCHEMA
pred GENERATED_INSERT_ID = ACCOUNT_EXPIRED
pred ROWS_AFFECTED = GENERATED_INSERT_ID
pred ROWS_FOUND = ROWS_AFFECTED
pred ROWS_MATCHED = ROWS_FOUND
pred TRX_COMMITTED = ROWS_MATCHED
pred TRX_ROLLEDBACK = TRX_COMMITTED
pred PRODUCED_MESSAGE = TRX_ROLLEDBACK
pred CLIENT_ID_ASSIGNED = PRODUCED_MESSAGE
pred _ = Prelude'.error "hprotoc generated code: pred failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter"
instance P'.Wire Parameter where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Parameter
instance P'.MessageAPI msg' (msg' -> Parameter) Parameter where
getVal m' f' = f' m'
instance P'.ReflectEnum Parameter where
reflectEnum
= [(1, "CURRENT_SCHEMA", CURRENT_SCHEMA), (2, "ACCOUNT_EXPIRED", ACCOUNT_EXPIRED),
(3, "GENERATED_INSERT_ID", GENERATED_INSERT_ID), (4, "ROWS_AFFECTED", ROWS_AFFECTED), (5, "ROWS_FOUND", ROWS_FOUND),
(6, "ROWS_MATCHED", ROWS_MATCHED), (7, "TRX_COMMITTED", TRX_COMMITTED), (9, "TRX_ROLLEDBACK", TRX_ROLLEDBACK),
(10, "PRODUCED_MESSAGE", PRODUCED_MESSAGE), (11, "CLIENT_ID_ASSIGNED", CLIENT_ID_ASSIGNED)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".Mysqlx.Notice.SessionStateChanged.Parameter") []
["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "SessionStateChanged"]
"Parameter")
["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "SessionStateChanged", "Parameter.hs"]
[(1, "CURRENT_SCHEMA"), (2, "ACCOUNT_EXPIRED"), (3, "GENERATED_INSERT_ID"), (4, "ROWS_AFFECTED"), (5, "ROWS_FOUND"),
(6, "ROWS_MATCHED"), (7, "TRX_COMMITTED"), (9, "TRX_ROLLEDBACK"), (10, "PRODUCED_MESSAGE"), (11, "CLIENT_ID_ASSIGNED")]
instance P'.TextType Parameter where
tellT = P'.tellShow
getT = P'.getRead | null | https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/Com/Mysql/Cj/Mysqlx/Protobuf/SessionStateChanged/Parameter.hs | haskell | # LANGUAGE BangPatterns , DeriveDataTypeable , DeriveGeneric , FlexibleInstances , MultiParamTypeClasses #
# OPTIONS_GHC -fno - warn - unused - imports #
module Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter (Parameter(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Parameter = CURRENT_SCHEMA
| ACCOUNT_EXPIRED
| GENERATED_INSERT_ID
| ROWS_AFFECTED
| ROWS_FOUND
| ROWS_MATCHED
| TRX_COMMITTED
| TRX_ROLLEDBACK
| PRODUCED_MESSAGE
| CLIENT_ID_ASSIGNED
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data,
Prelude'.Generic)
instance P'.Mergeable Parameter
instance Prelude'.Bounded Parameter where
minBound = CURRENT_SCHEMA
maxBound = CLIENT_ID_ASSIGNED
instance P'.Default Parameter where
defaultValue = CURRENT_SCHEMA
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Parameter
toMaybe'Enum 1 = Prelude'.Just CURRENT_SCHEMA
toMaybe'Enum 2 = Prelude'.Just ACCOUNT_EXPIRED
toMaybe'Enum 3 = Prelude'.Just GENERATED_INSERT_ID
toMaybe'Enum 4 = Prelude'.Just ROWS_AFFECTED
toMaybe'Enum 5 = Prelude'.Just ROWS_FOUND
toMaybe'Enum 6 = Prelude'.Just ROWS_MATCHED
toMaybe'Enum 7 = Prelude'.Just TRX_COMMITTED
toMaybe'Enum 9 = Prelude'.Just TRX_ROLLEDBACK
toMaybe'Enum 10 = Prelude'.Just PRODUCED_MESSAGE
toMaybe'Enum 11 = Prelude'.Just CLIENT_ID_ASSIGNED
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Parameter where
fromEnum CURRENT_SCHEMA = 1
fromEnum ACCOUNT_EXPIRED = 2
fromEnum GENERATED_INSERT_ID = 3
fromEnum ROWS_AFFECTED = 4
fromEnum ROWS_FOUND = 5
fromEnum ROWS_MATCHED = 6
fromEnum TRX_COMMITTED = 7
fromEnum TRX_ROLLEDBACK = 9
fromEnum PRODUCED_MESSAGE = 10
fromEnum CLIENT_ID_ASSIGNED = 11
toEnum
= P'.fromMaybe
(Prelude'.error "hprotoc generated code: toEnum failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter")
. toMaybe'Enum
succ CURRENT_SCHEMA = ACCOUNT_EXPIRED
succ ACCOUNT_EXPIRED = GENERATED_INSERT_ID
succ GENERATED_INSERT_ID = ROWS_AFFECTED
succ ROWS_AFFECTED = ROWS_FOUND
succ ROWS_FOUND = ROWS_MATCHED
succ ROWS_MATCHED = TRX_COMMITTED
succ TRX_COMMITTED = TRX_ROLLEDBACK
succ TRX_ROLLEDBACK = PRODUCED_MESSAGE
succ PRODUCED_MESSAGE = CLIENT_ID_ASSIGNED
succ _ = Prelude'.error "hprotoc generated code: succ failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter"
pred ACCOUNT_EXPIRED = CURRENT_SCHEMA
pred GENERATED_INSERT_ID = ACCOUNT_EXPIRED
pred ROWS_AFFECTED = GENERATED_INSERT_ID
pred ROWS_FOUND = ROWS_AFFECTED
pred ROWS_MATCHED = ROWS_FOUND
pred TRX_COMMITTED = ROWS_MATCHED
pred TRX_ROLLEDBACK = TRX_COMMITTED
pred PRODUCED_MESSAGE = TRX_ROLLEDBACK
pred CLIENT_ID_ASSIGNED = PRODUCED_MESSAGE
pred _ = Prelude'.error "hprotoc generated code: pred failure for type Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter"
instance P'.Wire Parameter where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Parameter
instance P'.MessageAPI msg' (msg' -> Parameter) Parameter where
getVal m' f' = f' m'
instance P'.ReflectEnum Parameter where
reflectEnum
= [(1, "CURRENT_SCHEMA", CURRENT_SCHEMA), (2, "ACCOUNT_EXPIRED", ACCOUNT_EXPIRED),
(3, "GENERATED_INSERT_ID", GENERATED_INSERT_ID), (4, "ROWS_AFFECTED", ROWS_AFFECTED), (5, "ROWS_FOUND", ROWS_FOUND),
(6, "ROWS_MATCHED", ROWS_MATCHED), (7, "TRX_COMMITTED", TRX_COMMITTED), (9, "TRX_ROLLEDBACK", TRX_ROLLEDBACK),
(10, "PRODUCED_MESSAGE", PRODUCED_MESSAGE), (11, "CLIENT_ID_ASSIGNED", CLIENT_ID_ASSIGNED)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".Mysqlx.Notice.SessionStateChanged.Parameter") []
["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "SessionStateChanged"]
"Parameter")
["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "SessionStateChanged", "Parameter.hs"]
[(1, "CURRENT_SCHEMA"), (2, "ACCOUNT_EXPIRED"), (3, "GENERATED_INSERT_ID"), (4, "ROWS_AFFECTED"), (5, "ROWS_FOUND"),
(6, "ROWS_MATCHED"), (7, "TRX_COMMITTED"), (9, "TRX_ROLLEDBACK"), (10, "PRODUCED_MESSAGE"), (11, "CLIENT_ID_ASSIGNED")]
instance P'.TextType Parameter where
tellT = P'.tellShow
getT = P'.getRead | |
91f1df85804d90b437083bf17f0684b2806b5fdecd6e251dc8ff512bead4bd0c | fpco/say | Say.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
module Say
( -- * Stdout
say
, sayString
, sayShow
-- * Stderr
, sayErr
, sayErrString
, sayErrShow
-- * Handle
, hSay
, hSayString
, hSayShow
) where
import Control.Monad (join, void)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.ByteString as S
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Builder.Prim as BBP
import qualified Data.ByteString.Char8 as S8
import Data.IORef
import Data.Monoid (mappend)
import Data.Text (Text, pack)
import qualified Data.Text.Encoding as TE
import Data.Text.Internal.Fusion (stream)
import Data.Text.Internal.Fusion.Types (Step (..), Stream (..))
import GHC.IO.Buffer (Buffer (..), BufferState (..),
CharBufElem, CharBuffer,
RawCharBuffer, emptyBuffer,
newCharBuffer, writeCharBuf)
import GHC.IO.Encoding.Types (textEncodingName)
import GHC.IO.Handle.Internals (wantWritableHandle)
import GHC.IO.Handle.Text (commitBuffer')
import GHC.IO.Handle.Types (BufferList (..),
Handle__ (..))
import System.IO (Handle, Newline (..), stderr,
stdout)
-- | Send a 'Text' to standard output, appending a newline, and chunking the
data . By default , the chunk size is 2048 characters , so any messages below
that size will be sent as one contiguous unit . If larger messages are used ,
-- it is possible for interleaving with other threads to occur.
--
@since 0.1.0.0
say :: MonadIO m => Text -> m ()
say = hSay stdout
# INLINE say #
-- | Same as 'say', but operates on a 'String'. Note that this will
-- force the entire @String@ into memory at once, and will fail for
-- infinite @String@s.
--
@since 0.1.0.0
sayString :: MonadIO m => String -> m ()
sayString = hSayString stdout
# INLINE sayString #
-- | Same as 'say', but for instances of 'Show'.
--
-- If your @Show@ instance generates infinite output, this will fail. However,
-- an infinite result for @show@ would generally be considered an invalid
-- instance anyway.
--
@since 0.1.0.0
sayShow :: (MonadIO m, Show a) => a -> m ()
sayShow = hSayShow stdout
# INLINE sayShow #
-- | Same as 'say', but data is sent to standard error.
--
@since 0.1.0.0
sayErr :: MonadIO m => Text -> m ()
sayErr = hSay stderr
# INLINE sayErr #
| Same as ' ' , but data is sent to standard error .
--
@since 0.1.0.0
sayErrString :: MonadIO m => String -> m ()
sayErrString = hSayString stderr
# INLINE sayErrString #
-- | Same as 'sayShow', but data is sent to standard error.
--
@since 0.1.0.0
sayErrShow :: (MonadIO m, Show a) => a -> m ()
sayErrShow = hSayShow stderr
# INLINE sayErrShow #
-- | Same as 'say', but data is sent to the provided 'Handle'.
--
@since 0.1.0.0
hSay :: MonadIO m => Handle -> Text -> m ()
hSay h msg =
liftIO $ join $ wantWritableHandle "hSay" h $ \h_ -> do
let nl = haOutputNL h_
if fmap textEncodingName (haCodec h_) == Just "UTF-8"
then return $ case nl of
LF -> viaUtf8Raw
CRLF -> viaUtf8CRLF
else do
buf <- getSpareBuffer h_
return $
case nl of
CRLF -> writeBlocksCRLF buf str
LF -> writeBlocksRaw buf str
-- Note that the release called below will return the buffer to the
-- list of spares
where
str = stream msg
viaUtf8Raw :: IO ()
viaUtf8Raw = BB.hPutBuilder h (TE.encodeUtf8Builder msg `mappend` BB.word8 10)
viaUtf8CRLF :: IO ()
viaUtf8CRLF =
BB.hPutBuilder h (builder `mappend` BBP.primFixed crlf (error "viaUtf8CRLF"))
where
builder = TE.encodeUtf8BuilderEscaped escapeLF msg
escapeLF =
BBP.condB
(== 10)
(BBP.liftFixedToBounded crlf)
(BBP.liftFixedToBounded BBP.word8)
crlf =
fixed2 (13, 10)
where
fixed2 x = const x BBP.>$< BBP.word8 BBP.>*< BBP.word8
getSpareBuffer :: Handle__ -> IO CharBuffer
getSpareBuffer Handle__{haCharBuffer=ref, haBuffers=spare_ref} = do
-- Despite appearances, IORef operations here are not a race
condition , since we 're already inside the MVar lock
buf <- readIORef ref
bufs <- readIORef spare_ref
case bufs of
BufferListCons b rest -> do
writeIORef spare_ref rest
return (emptyBuffer b (bufSize buf) WriteBuffer)
BufferListNil -> do
new_buf <- newCharBuffer (bufSize buf) WriteBuffer
return new_buf
writeBlocksRaw :: Buffer CharBufElem -> Stream Char -> IO ()
writeBlocksRaw buf0 (Stream next0 s0 _len) =
outer s0 buf0
where
outer s1 Buffer{bufRaw=raw, bufSize=len} =
inner s1 0
where
commit = commitBuffer h raw len
inner !s !n =
case next0 s of
Done
| n + 1 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\n'
void $ commit n1 False{-no flush-} True{-release-}
Skip s' -> inner s' n
Yield x s'
| n + 1 >= len -> flush
| otherwise -> writeCharBuf raw n x >>= inner s'
where
flush = commit n True{-needs flush-} False{-don't release-} >>= outer s
writeBlocksCRLF :: Buffer CharBufElem -> Stream Char -> IO ()
writeBlocksCRLF buf0 (Stream next0 s0 _len) =
outer s0 buf0
where
outer s1 Buffer{bufRaw=raw, bufSize=len} =
inner s1 0
where
commit = commitBuffer h raw len
inner !s !n =
case next0 s of
Done
| n + 2 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\r'
n2 <- writeCharBuf raw n1 '\n'
void $ commit n2 False{-no flush-} True{-release-}
Skip s' -> inner s' n
Yield '\n' s'
| n + 2 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\r'
n2 <- writeCharBuf raw n1 '\n'
inner s' n2
Yield x s'
| n + 1 >= len -> flush
| otherwise -> writeCharBuf raw n x >>= inner s'
where
flush = commit n True{-needs flush-} False{-don't release-} >>= outer s
commitBuffer :: Handle -> RawCharBuffer -> Int -> Int -> Bool -> Bool
-> IO CharBuffer
commitBuffer hdl !raw !sz !count flush release =
wantWritableHandle "commitAndReleaseBuffer" hdl $
commitBuffer' raw sz count flush release
{-# SPECIALIZE hSay :: Handle -> Text -> IO () #-}
| Same as ' ' , but data is sent to the provided ' Handle ' .
--
@since 0.1.0.0
hSayString :: MonadIO m => Handle -> String -> m ()
hSayString h = hSay h . pack
# INLINE hSayString #
-- | Same as 'sayShow', but data is sent to the provided 'Handle'.
--
@since 0.1.0.0
hSayShow :: (MonadIO m, Show a) => Handle -> a -> m ()
hSayShow h = hSayString h . show
# INLINE hSayShow #
| null | https://raw.githubusercontent.com/fpco/say/e667d40e7e09bf94385892c3fe101fb290d25ed5/src/Say.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
* Stdout
* Stderr
* Handle
| Send a 'Text' to standard output, appending a newline, and chunking the
it is possible for interleaving with other threads to occur.
| Same as 'say', but operates on a 'String'. Note that this will
force the entire @String@ into memory at once, and will fail for
infinite @String@s.
| Same as 'say', but for instances of 'Show'.
If your @Show@ instance generates infinite output, this will fail. However,
an infinite result for @show@ would generally be considered an invalid
instance anyway.
| Same as 'say', but data is sent to standard error.
| Same as 'sayShow', but data is sent to standard error.
| Same as 'say', but data is sent to the provided 'Handle'.
Note that the release called below will return the buffer to the
list of spares
Despite appearances, IORef operations here are not a race
no flush
release
needs flush
don't release
no flush
release
needs flush
don't release
# SPECIALIZE hSay :: Handle -> Text -> IO () #
| Same as 'sayShow', but data is sent to the provided 'Handle'.
| module Say
say
, sayString
, sayShow
, sayErr
, sayErrString
, sayErrShow
, hSay
, hSayString
, hSayShow
) where
import Control.Monad (join, void)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.ByteString as S
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Builder.Prim as BBP
import qualified Data.ByteString.Char8 as S8
import Data.IORef
import Data.Monoid (mappend)
import Data.Text (Text, pack)
import qualified Data.Text.Encoding as TE
import Data.Text.Internal.Fusion (stream)
import Data.Text.Internal.Fusion.Types (Step (..), Stream (..))
import GHC.IO.Buffer (Buffer (..), BufferState (..),
CharBufElem, CharBuffer,
RawCharBuffer, emptyBuffer,
newCharBuffer, writeCharBuf)
import GHC.IO.Encoding.Types (textEncodingName)
import GHC.IO.Handle.Internals (wantWritableHandle)
import GHC.IO.Handle.Text (commitBuffer')
import GHC.IO.Handle.Types (BufferList (..),
Handle__ (..))
import System.IO (Handle, Newline (..), stderr,
stdout)
data . By default , the chunk size is 2048 characters , so any messages below
that size will be sent as one contiguous unit . If larger messages are used ,
@since 0.1.0.0
say :: MonadIO m => Text -> m ()
say = hSay stdout
# INLINE say #
@since 0.1.0.0
sayString :: MonadIO m => String -> m ()
sayString = hSayString stdout
# INLINE sayString #
@since 0.1.0.0
sayShow :: (MonadIO m, Show a) => a -> m ()
sayShow = hSayShow stdout
# INLINE sayShow #
@since 0.1.0.0
sayErr :: MonadIO m => Text -> m ()
sayErr = hSay stderr
# INLINE sayErr #
| Same as ' ' , but data is sent to standard error .
@since 0.1.0.0
sayErrString :: MonadIO m => String -> m ()
sayErrString = hSayString stderr
# INLINE sayErrString #
@since 0.1.0.0
sayErrShow :: (MonadIO m, Show a) => a -> m ()
sayErrShow = hSayShow stderr
# INLINE sayErrShow #
@since 0.1.0.0
hSay :: MonadIO m => Handle -> Text -> m ()
hSay h msg =
liftIO $ join $ wantWritableHandle "hSay" h $ \h_ -> do
let nl = haOutputNL h_
if fmap textEncodingName (haCodec h_) == Just "UTF-8"
then return $ case nl of
LF -> viaUtf8Raw
CRLF -> viaUtf8CRLF
else do
buf <- getSpareBuffer h_
return $
case nl of
CRLF -> writeBlocksCRLF buf str
LF -> writeBlocksRaw buf str
where
str = stream msg
viaUtf8Raw :: IO ()
viaUtf8Raw = BB.hPutBuilder h (TE.encodeUtf8Builder msg `mappend` BB.word8 10)
viaUtf8CRLF :: IO ()
viaUtf8CRLF =
BB.hPutBuilder h (builder `mappend` BBP.primFixed crlf (error "viaUtf8CRLF"))
where
builder = TE.encodeUtf8BuilderEscaped escapeLF msg
escapeLF =
BBP.condB
(== 10)
(BBP.liftFixedToBounded crlf)
(BBP.liftFixedToBounded BBP.word8)
crlf =
fixed2 (13, 10)
where
fixed2 x = const x BBP.>$< BBP.word8 BBP.>*< BBP.word8
getSpareBuffer :: Handle__ -> IO CharBuffer
getSpareBuffer Handle__{haCharBuffer=ref, haBuffers=spare_ref} = do
condition , since we 're already inside the MVar lock
buf <- readIORef ref
bufs <- readIORef spare_ref
case bufs of
BufferListCons b rest -> do
writeIORef spare_ref rest
return (emptyBuffer b (bufSize buf) WriteBuffer)
BufferListNil -> do
new_buf <- newCharBuffer (bufSize buf) WriteBuffer
return new_buf
writeBlocksRaw :: Buffer CharBufElem -> Stream Char -> IO ()
writeBlocksRaw buf0 (Stream next0 s0 _len) =
outer s0 buf0
where
outer s1 Buffer{bufRaw=raw, bufSize=len} =
inner s1 0
where
commit = commitBuffer h raw len
inner !s !n =
case next0 s of
Done
| n + 1 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\n'
Skip s' -> inner s' n
Yield x s'
| n + 1 >= len -> flush
| otherwise -> writeCharBuf raw n x >>= inner s'
where
writeBlocksCRLF :: Buffer CharBufElem -> Stream Char -> IO ()
writeBlocksCRLF buf0 (Stream next0 s0 _len) =
outer s0 buf0
where
outer s1 Buffer{bufRaw=raw, bufSize=len} =
inner s1 0
where
commit = commitBuffer h raw len
inner !s !n =
case next0 s of
Done
| n + 2 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\r'
n2 <- writeCharBuf raw n1 '\n'
Skip s' -> inner s' n
Yield '\n' s'
| n + 2 >= len -> flush
| otherwise -> do
n1 <- writeCharBuf raw n '\r'
n2 <- writeCharBuf raw n1 '\n'
inner s' n2
Yield x s'
| n + 1 >= len -> flush
| otherwise -> writeCharBuf raw n x >>= inner s'
where
commitBuffer :: Handle -> RawCharBuffer -> Int -> Int -> Bool -> Bool
-> IO CharBuffer
commitBuffer hdl !raw !sz !count flush release =
wantWritableHandle "commitAndReleaseBuffer" hdl $
commitBuffer' raw sz count flush release
| Same as ' ' , but data is sent to the provided ' Handle ' .
@since 0.1.0.0
hSayString :: MonadIO m => Handle -> String -> m ()
hSayString h = hSay h . pack
# INLINE hSayString #
@since 0.1.0.0
hSayShow :: (MonadIO m, Show a) => Handle -> a -> m ()
hSayShow h = hSayString h . show
# INLINE hSayShow #
|
456f28e645c2e3d030469ef68cfdc67e992106caebebae228d15bfa8a58e3645 | bkirwi/ethereum-haskell | Item.hs | module Ethereum.RLP.Item(Item(String, List), encode, decode) where
import Control.Error
import qualified Data.ByteString as BS
import Data.Attoparsec.ByteString as A
import Data.Bits
import Ethereum.Prelude
data Item = String ByteString
| List [Item]
deriving (Eq)
instance Show Item where
show (String str) = show str
show (List list) = show list
encode :: Item -> ByteString
encode x = case x of
String bytes ->
if BS.length bytes == 1 && BS.head bytes <= 0x7f then bytes
else encodeLength 0x80 bytes
List children -> encodeLength 0xc0 (mconcat $ map encode children)
where
encodeLength :: Word8 -> ByteString -> ByteString
encodeLength offset bytes
| len <= 55 = prefix len <> bytes
| otherwise =
let lenBytes = encodeInt len
lenLen = BS.length lenBytes + 55
in prefix lenLen <> lenBytes <> bytes
where
len = BS.length bytes
prefix n = BS.singleton $ offset + fromIntegral n
parseItem :: Parser (Int, Item)
parseItem = do
first <- anyWord8
let typeBits = 0xc0 .&. first
parseString, parseList :: Int -> Parser Item
parseString n = String <$> A.take n
parseList 0 = return $ List []
parseList n = do
(took, ret) <- parseItem
List rest <- parseList (n - took)
return . List $ ret : rest
withSize offset parser
| lenBits <= 55 = do
res <- parser lenBits
return (1 + lenBits, res)
| otherwise = do
let lenLen = lenBits - 55
bytes <- A.take lenLen
len <- justZ $ decodeInt bytes
ret <- parser len
return (1 + lenLen + len, ret)
where lenBits = fromIntegral $ first - offset
case typeBits of
0x80 -> withSize 0x80 parseString
0xc0 -> withSize 0xc0 parseList
_ -> return (1, String $ BS.singleton first)
decode :: ByteString -> Maybe Item
decode bs = hush $ parseOnly (snd <$> parser) bs
where
parser = do
a <- parseItem
A.endOfInput
return a
| null | https://raw.githubusercontent.com/bkirwi/ethereum-haskell/ee995281bad4eed488c174bd8982ed174cfe26af/src/Ethereum/RLP/Item.hs | haskell | module Ethereum.RLP.Item(Item(String, List), encode, decode) where
import Control.Error
import qualified Data.ByteString as BS
import Data.Attoparsec.ByteString as A
import Data.Bits
import Ethereum.Prelude
data Item = String ByteString
| List [Item]
deriving (Eq)
instance Show Item where
show (String str) = show str
show (List list) = show list
encode :: Item -> ByteString
encode x = case x of
String bytes ->
if BS.length bytes == 1 && BS.head bytes <= 0x7f then bytes
else encodeLength 0x80 bytes
List children -> encodeLength 0xc0 (mconcat $ map encode children)
where
encodeLength :: Word8 -> ByteString -> ByteString
encodeLength offset bytes
| len <= 55 = prefix len <> bytes
| otherwise =
let lenBytes = encodeInt len
lenLen = BS.length lenBytes + 55
in prefix lenLen <> lenBytes <> bytes
where
len = BS.length bytes
prefix n = BS.singleton $ offset + fromIntegral n
parseItem :: Parser (Int, Item)
parseItem = do
first <- anyWord8
let typeBits = 0xc0 .&. first
parseString, parseList :: Int -> Parser Item
parseString n = String <$> A.take n
parseList 0 = return $ List []
parseList n = do
(took, ret) <- parseItem
List rest <- parseList (n - took)
return . List $ ret : rest
withSize offset parser
| lenBits <= 55 = do
res <- parser lenBits
return (1 + lenBits, res)
| otherwise = do
let lenLen = lenBits - 55
bytes <- A.take lenLen
len <- justZ $ decodeInt bytes
ret <- parser len
return (1 + lenLen + len, ret)
where lenBits = fromIntegral $ first - offset
case typeBits of
0x80 -> withSize 0x80 parseString
0xc0 -> withSize 0xc0 parseList
_ -> return (1, String $ BS.singleton first)
decode :: ByteString -> Maybe Item
decode bs = hush $ parseOnly (snd <$> parser) bs
where
parser = do
a <- parseItem
A.endOfInput
return a
| |
84e612c7ac268553e6dbf0996fde1208d5875007412dfd2a97a0724eff46c262 | hyraxbio/hyraxAbif | Fasta.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
|
Module : . Abif . Fasta
Description : Read a FASTA file
Copyright : ( c ) HyraxBio , 2018
License : : ,
Stability : beta
Functionality for reading FASTA files
Module : Hyax.Abif.Fasta
Description : Read a FASTA file
Copyright : (c) HyraxBio, 2018
License : BSD3
Maintainer : ,
Stability : beta
Functionality for reading FASTA files
-}
module Hyrax.Abif.Fasta
( Fasta (..)
, parseFasta
) where
import Protolude hiding (lines)
import qualified Data.Text as Txt
-- | FASTA data
data Fasta = Fasta { fastaName :: !Text -- ^ Name
, fastaRead :: !Text -- ^ Data
} deriving (Show, Eq)
| Parse the data for a single FASTA into a list of ' Fasta ' values .
Single and multi - line FASTAs are supported .
-- Used by "Hyrax.Abif.Generate" to read weighted-FASTAs
parseFasta :: Text -> Either Text [Fasta]
parseFasta s =
reverse <$> go (Txt.lines s) Nothing "" []
where
go :: [Text] -> Maybe Text -> Text -> [Fasta] -> Either Text [Fasta]
go (line:lines) (Just name) read acc =
if Txt.take 1 line /= ">"
then go lines (Just name) (read <> line) acc
else go lines (Just $ Txt.drop 1 line) "" (Fasta (Txt.strip name) read : acc)
go (line:lines) Nothing _read acc =
if Txt.take 1 line == ">"
then go lines (Just $ Txt.strip . Txt.drop 1 $ line) "" acc
else Left "Expecting name"
go [] Nothing _ acc =
Right acc
go [] (Just _name) "" _acc =
Left "Expecting read"
go [] (Just name) read acc =
Right $ Fasta (Txt.strip name) read : acc
| null | https://raw.githubusercontent.com/hyraxbio/hyraxAbif/01b188956330057aeba1b62b61ebcc32ce3744d9/src/Hyrax/Abif/Fasta.hs | haskell | # LANGUAGE OverloadedStrings #
| FASTA data
^ Name
^ Data
Used by "Hyrax.Abif.Generate" to read weighted-FASTAs | # LANGUAGE NoImplicitPrelude #
|
Module : . Abif . Fasta
Description : Read a FASTA file
Copyright : ( c ) HyraxBio , 2018
License : : ,
Stability : beta
Functionality for reading FASTA files
Module : Hyax.Abif.Fasta
Description : Read a FASTA file
Copyright : (c) HyraxBio, 2018
License : BSD3
Maintainer : ,
Stability : beta
Functionality for reading FASTA files
-}
module Hyrax.Abif.Fasta
( Fasta (..)
, parseFasta
) where
import Protolude hiding (lines)
import qualified Data.Text as Txt
} deriving (Show, Eq)
| Parse the data for a single FASTA into a list of ' Fasta ' values .
Single and multi - line FASTAs are supported .
parseFasta :: Text -> Either Text [Fasta]
parseFasta s =
reverse <$> go (Txt.lines s) Nothing "" []
where
go :: [Text] -> Maybe Text -> Text -> [Fasta] -> Either Text [Fasta]
go (line:lines) (Just name) read acc =
if Txt.take 1 line /= ">"
then go lines (Just name) (read <> line) acc
else go lines (Just $ Txt.drop 1 line) "" (Fasta (Txt.strip name) read : acc)
go (line:lines) Nothing _read acc =
if Txt.take 1 line == ">"
then go lines (Just $ Txt.strip . Txt.drop 1 $ line) "" acc
else Left "Expecting name"
go [] Nothing _ acc =
Right acc
go [] (Just _name) "" _acc =
Left "Expecting read"
go [] (Just name) read acc =
Right $ Fasta (Txt.strip name) read : acc
|
9def17e696f37ff8addf621b68067d225d7f4200c89ae3a73a82d863f9e2f8bd | karlhof26/gimp-scheme | elsamuko-antique-border.scm | ; The GIMP -- an image manipulation program
Copyright ( C ) 1995 and
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program; if not, write to the Free Software
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
; -3.0.html
;
Copyright ( C ) 2008 elsamuko < >
;
Version 0.1 - Adding Film Grain after :
;
(define (elsamuko-antique-border aimg adraw thicknesspercent radiuspercent color granularity smooth motion resize)
(let* ((img (car (gimp-item-get-image adraw)))
(owidth (car (gimp-image-width img)))
(oheight (car (gimp-image-height img)))
;the border has to reach a little bit into the image, if it's resized:
(thickness (- (* owidth (/ thicknesspercent 100)) granularity))
)
; init
(define (script-fu-antique-border-helper aimg adraw thicknesspercent radiuspercent color granularity smooth motion)
(let* ((img (car (gimp-item-get-image adraw)))
(owidth (car (gimp-image-width img)))
(thickness (* owidth (/ thicknesspercent 100)))
(radius (* owidth (/ radiuspercent 100)))
(oheight (car (gimp-image-height img)))
(borderlayer (car (gimp-layer-new img
owidth
oheight
1
"Border"
100
LAYER-MODE-NORMAL)))
)
;add new layer
(gimp-image-insert-layer img borderlayer 0 -1)
(gimp-drawable-fill borderlayer FILL-TRANSPARENT)
;select rounded rectangle, distress and invert it
(gimp-image-select-round-rectangle img CHANNEL-OP-REPLACE
thickness thickness
(- owidth (* 2 thickness)) (- oheight (* 2 thickness))
radius radius)
(if (> granularity 0)
(begin
(script-fu-distress-selection img borderlayer 127 12 granularity smooth TRUE TRUE)
)
)
(gimp-selection-invert img)
;fill up with border color
(gimp-context-set-foreground color)
(gimp-edit-bucket-fill borderlayer BUCKET-FILL-FG LAYER-MODE-NORMAL 100 0 FALSE 0 0)
(gimp-selection-none img)
;blur border
(if (> motion 0)
(plug-in-mblur 1 img borderlayer 2 motion 0 (/ owidth 2) (/ oheight 2))
)
)
)
(gimp-context-push)
(gimp-image-undo-group-start img)
(if (= (car (gimp-drawable-is-gray adraw )) TRUE)
(gimp-image-convert-rgb img)
)
;resize image
(if (= resize TRUE)
(gimp-image-resize img (+ owidth (* 2 thickness)) (+ oheight (* 2 thickness)) thickness thickness)
)
;call border function
(script-fu-antique-border-helper img adraw thicknesspercent radiuspercent color granularity smooth motion)
;tidy up
(gimp-image-undo-group-end img)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "elsamuko-antique-border"
"Antique Photo Border..."
"Adding an Antique Photo Border
Newest version can be downloaded from \nfile:elsamuko-antique-border.scm"
"elsamuko <>"
"elsamuko"
"15/09/08"
"*"
SF-IMAGE "Input image" 0
SF-DRAWABLE "Input drawable" 0
SF-ADJUSTMENT _"Border Thickness (% of Width)" '(1 0 35 0.1 5 1 1)
SF-ADJUSTMENT _"Edge Radius (% of Width)" '(10 0 50 0.1 5 1 1)
SF-COLOR _"Border Color" '(246 249 240)
SF-ADJUSTMENT _"Distress Granularity" '(15 0 50 1 5 0 1)
SF-ADJUSTMENT _"Smooth Value" '(3 1 25 1 5 0 1)
SF-ADJUSTMENT _"Motion Blur" '(1 0 5 1 5 0 1)
SF-TOGGLE _"Resize" FALSE
)
(script-fu-menu-register "elsamuko-antique-border" "<Image>/Script-Fu/Decor")
;end of script | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/791bcbb719d89ba3751d0555c61ee889b337384c/elsamuko-antique-border.scm | scheme | The GIMP -- an image manipulation program
This program is free software; you can redistribute it and/or modify
either version 3 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
-3.0.html
the border has to reach a little bit into the image, if it's resized:
init
add new layer
select rounded rectangle, distress and invert it
fill up with border color
blur border
resize image
call border function
tidy up
end of script | Copyright ( C ) 1995 and
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
Copyright ( C ) 2008 elsamuko < >
Version 0.1 - Adding Film Grain after :
(define (elsamuko-antique-border aimg adraw thicknesspercent radiuspercent color granularity smooth motion resize)
(let* ((img (car (gimp-item-get-image adraw)))
(owidth (car (gimp-image-width img)))
(oheight (car (gimp-image-height img)))
(thickness (- (* owidth (/ thicknesspercent 100)) granularity))
)
(define (script-fu-antique-border-helper aimg adraw thicknesspercent radiuspercent color granularity smooth motion)
(let* ((img (car (gimp-item-get-image adraw)))
(owidth (car (gimp-image-width img)))
(thickness (* owidth (/ thicknesspercent 100)))
(radius (* owidth (/ radiuspercent 100)))
(oheight (car (gimp-image-height img)))
(borderlayer (car (gimp-layer-new img
owidth
oheight
1
"Border"
100
LAYER-MODE-NORMAL)))
)
(gimp-image-insert-layer img borderlayer 0 -1)
(gimp-drawable-fill borderlayer FILL-TRANSPARENT)
(gimp-image-select-round-rectangle img CHANNEL-OP-REPLACE
thickness thickness
(- owidth (* 2 thickness)) (- oheight (* 2 thickness))
radius radius)
(if (> granularity 0)
(begin
(script-fu-distress-selection img borderlayer 127 12 granularity smooth TRUE TRUE)
)
)
(gimp-selection-invert img)
(gimp-context-set-foreground color)
(gimp-edit-bucket-fill borderlayer BUCKET-FILL-FG LAYER-MODE-NORMAL 100 0 FALSE 0 0)
(gimp-selection-none img)
(if (> motion 0)
(plug-in-mblur 1 img borderlayer 2 motion 0 (/ owidth 2) (/ oheight 2))
)
)
)
(gimp-context-push)
(gimp-image-undo-group-start img)
(if (= (car (gimp-drawable-is-gray adraw )) TRUE)
(gimp-image-convert-rgb img)
)
(if (= resize TRUE)
(gimp-image-resize img (+ owidth (* 2 thickness)) (+ oheight (* 2 thickness)) thickness thickness)
)
(script-fu-antique-border-helper img adraw thicknesspercent radiuspercent color granularity smooth motion)
(gimp-image-undo-group-end img)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "elsamuko-antique-border"
"Antique Photo Border..."
"Adding an Antique Photo Border
Newest version can be downloaded from \nfile:elsamuko-antique-border.scm"
"elsamuko <>"
"elsamuko"
"15/09/08"
"*"
SF-IMAGE "Input image" 0
SF-DRAWABLE "Input drawable" 0
SF-ADJUSTMENT _"Border Thickness (% of Width)" '(1 0 35 0.1 5 1 1)
SF-ADJUSTMENT _"Edge Radius (% of Width)" '(10 0 50 0.1 5 1 1)
SF-COLOR _"Border Color" '(246 249 240)
SF-ADJUSTMENT _"Distress Granularity" '(15 0 50 1 5 0 1)
SF-ADJUSTMENT _"Smooth Value" '(3 1 25 1 5 0 1)
SF-ADJUSTMENT _"Motion Blur" '(1 0 5 1 5 0 1)
SF-TOGGLE _"Resize" FALSE
)
(script-fu-menu-register "elsamuko-antique-border" "<Image>/Script-Fu/Decor")
|
4fabdcdf75488679dfb7dc938d641b72ec7d1854019ca12d6f202e91b6a04fb3 | jellelicht/guix | docbook.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2014 < >
Copyright © 2014 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages docbook)
#:use-module (gnu packages)
#:use-module (gnu packages compression)
#:use-module (gnu packages imagemagick)
#:use-module (gnu packages inkscape)
#:use-module (gnu packages texlive)
#:use-module (gnu packages python)
#:use-module (gnu packages base)
#:use-module (gnu packages xml)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system trivial)
#:use-module (guix build-system python)
#:autoload (gnu packages zip) (unzip))
(define-public docbook-xml
(package
(name "docbook-xml")
(version "4.5")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"1d671lcjckjri28xfbf6dq7y3xnkppa910w1jin8rjc35dx06kjf"))))
(build-system trivial-build-system)
(arguments
'(#:builder (begin
(use-modules (guix build utils))
(let* ((unzip
(string-append (assoc-ref %build-inputs "unzip")
"/bin/unzip"))
(source (assoc-ref %build-inputs "source"))
(out (assoc-ref %outputs "out"))
(dtd (string-append out "/xml/dtd/docbook")))
(mkdir-p dtd)
(with-directory-excursion dtd
(system* unzip source))
(substitute* (string-append out "/xml/dtd/docbook/catalog.xml")
(("uri=\"")
(string-append
"uri=\"file://" dtd "/")))))
#:modules ((guix build utils))))
(native-inputs `(("unzip" ,unzip)))
(home-page "")
(synopsis "DocBook XML DTDs for document authoring")
(description
"DocBook is general purpose XML and SGML document type particularly well
suited to books and papers about computer hardware and software (though it is
by no means limited to these applications.) This package provides XML DTDs.")
(license (x11-style "" "See file headers."))))
(define-public docbook-xml-4.4
(package (inherit docbook-xml)
(version "4.4")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"141h4zsyc71sfi2zzd89v4bb4qqq9ca1ri9ix2als9f4i3mmkw82"))))))
(define-public docbook-xml-4.3
(package (inherit docbook-xml)
(version "4.3")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"0r1l2if1z4wm2v664sqdizm4gak6db1kx9y50jq89m3gxaa8l1i3"))))))
(define-public docbook-xml-4.2
(package (inherit docbook-xml)
(version "4.2")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"18hgwvmywh6a5jh38szjmg3hg2r4v5lb6r3ydc3rd8cp9wg61i5c"))))))
(define-public docbook-xsl
(package
(name "docbook-xsl")
(version "1.78.1")
(source (origin
(method url-fetch)
(uri (string-append "mirror-xsl-"
version ".tar.bz2"))
(sha256
(base32
"0rxl013ncmz1n6ymk2idvx3hix9pdabk8xn01cpcv32wmfb753y9"))))
(build-system trivial-build-system)
(arguments
`(#:builder (let ((name-version (string-append ,name "-" ,version)))
(use-modules (guix build utils))
(let* ((bzip2 (assoc-ref %build-inputs "bzip2"))
(tar (assoc-ref %build-inputs "tar"))
(source (assoc-ref %build-inputs "source"))
(out (assoc-ref %outputs "out"))
(xsl (string-append out "/xml/xsl")))
(setenv "PATH" (string-append bzip2 "/bin"))
(system* (string-append tar "/bin/tar") "xvf" source)
(mkdir-p xsl)
(copy-recursively name-version
(string-append xsl "/" name-version))
(substitute* (string-append xsl "/" name-version "/catalog.xml")
(("rewritePrefix=\"./")
(string-append "rewritePrefix=\"file://" xsl "/"
name-version "/")))))
#:modules ((guix build utils))))
(native-inputs `(("bzip2" ,bzip2)
("tar" ,tar)))
(home-page "")
(synopsis "DocBook XSL style sheets for document authoring")
(description
"This package provides XSL style sheets for DocBook.")
(license (x11-style "" "See 'COPYING' file."))))
(define-public dblatex
(package
(name "dblatex")
(version "0.3.5")
(source (origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.bz2"))
(sha256
(base32
"0h3472n33pabrn8qwggsahkrjx8lybpwlc3zprby3w3w3x5i830f"))))
(build-system python-build-system)
;; TODO: Add xfig/transfig for fig2dev utility
(inputs
`(("python-setuptools" ,python-setuptools)
("texlive" ,texlive)
("imagemagick" ,imagemagick) ;for convert
("inkscape" ,inkscape) ;for svg conversion
("docbook" ,docbook-xml)
for xsltproc
(arguments
`(#:python ,python-2 ;'print' syntax
#:tests? #f ;no 'test' command
#:phases
(alist-cons-after
'wrap 'set-path
(lambda* (#:key inputs outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
;; dblatex executes helper programs at runtime.
(wrap-program (string-append out "/bin/dblatex")
`("PATH" ":" prefix
,(map (lambda (input)
(string-append (assoc-ref inputs input)
"/bin"))
'("libxslt" "texlive"
"imagemagick" "inkscape"))))))
%standard-phases)))
(home-page "")
(synopsis "DocBook to LaTeX Publishing")
(description
"DocBook to LaTeX Publishing transforms your SGML/XML DocBook documents
to DVI, PostScript or PDF by translating them in pure LaTeX as a first
process. MathML 2.0 markups are supported too. It started as a clone of
DB2LaTeX.")
;; lib/contrib/which is under an X11 license
(license gpl2+)))
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/docbook.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
TODO: Add xfig/transfig for fig2dev utility
for convert
for svg conversion
'print' syntax
no 'test' command
dblatex executes helper programs at runtime.
lib/contrib/which is under an X11 license | Copyright © 2014 < >
Copyright © 2014 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages docbook)
#:use-module (gnu packages)
#:use-module (gnu packages compression)
#:use-module (gnu packages imagemagick)
#:use-module (gnu packages inkscape)
#:use-module (gnu packages texlive)
#:use-module (gnu packages python)
#:use-module (gnu packages base)
#:use-module (gnu packages xml)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system trivial)
#:use-module (guix build-system python)
#:autoload (gnu packages zip) (unzip))
(define-public docbook-xml
(package
(name "docbook-xml")
(version "4.5")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"1d671lcjckjri28xfbf6dq7y3xnkppa910w1jin8rjc35dx06kjf"))))
(build-system trivial-build-system)
(arguments
'(#:builder (begin
(use-modules (guix build utils))
(let* ((unzip
(string-append (assoc-ref %build-inputs "unzip")
"/bin/unzip"))
(source (assoc-ref %build-inputs "source"))
(out (assoc-ref %outputs "out"))
(dtd (string-append out "/xml/dtd/docbook")))
(mkdir-p dtd)
(with-directory-excursion dtd
(system* unzip source))
(substitute* (string-append out "/xml/dtd/docbook/catalog.xml")
(("uri=\"")
(string-append
"uri=\"file://" dtd "/")))))
#:modules ((guix build utils))))
(native-inputs `(("unzip" ,unzip)))
(home-page "")
(synopsis "DocBook XML DTDs for document authoring")
(description
"DocBook is general purpose XML and SGML document type particularly well
suited to books and papers about computer hardware and software (though it is
by no means limited to these applications.) This package provides XML DTDs.")
(license (x11-style "" "See file headers."))))
(define-public docbook-xml-4.4
(package (inherit docbook-xml)
(version "4.4")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"141h4zsyc71sfi2zzd89v4bb4qqq9ca1ri9ix2als9f4i3mmkw82"))))))
(define-public docbook-xml-4.3
(package (inherit docbook-xml)
(version "4.3")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"0r1l2if1z4wm2v664sqdizm4gak6db1kx9y50jq89m3gxaa8l1i3"))))))
(define-public docbook-xml-4.2
(package (inherit docbook-xml)
(version "4.2")
(source (origin
(method url-fetch)
(uri (string-append "/" version
"/docbook-xml-" version ".zip"))
(sha256
(base32
"18hgwvmywh6a5jh38szjmg3hg2r4v5lb6r3ydc3rd8cp9wg61i5c"))))))
(define-public docbook-xsl
(package
(name "docbook-xsl")
(version "1.78.1")
(source (origin
(method url-fetch)
(uri (string-append "mirror-xsl-"
version ".tar.bz2"))
(sha256
(base32
"0rxl013ncmz1n6ymk2idvx3hix9pdabk8xn01cpcv32wmfb753y9"))))
(build-system trivial-build-system)
(arguments
`(#:builder (let ((name-version (string-append ,name "-" ,version)))
(use-modules (guix build utils))
(let* ((bzip2 (assoc-ref %build-inputs "bzip2"))
(tar (assoc-ref %build-inputs "tar"))
(source (assoc-ref %build-inputs "source"))
(out (assoc-ref %outputs "out"))
(xsl (string-append out "/xml/xsl")))
(setenv "PATH" (string-append bzip2 "/bin"))
(system* (string-append tar "/bin/tar") "xvf" source)
(mkdir-p xsl)
(copy-recursively name-version
(string-append xsl "/" name-version))
(substitute* (string-append xsl "/" name-version "/catalog.xml")
(("rewritePrefix=\"./")
(string-append "rewritePrefix=\"file://" xsl "/"
name-version "/")))))
#:modules ((guix build utils))))
(native-inputs `(("bzip2" ,bzip2)
("tar" ,tar)))
(home-page "")
(synopsis "DocBook XSL style sheets for document authoring")
(description
"This package provides XSL style sheets for DocBook.")
(license (x11-style "" "See 'COPYING' file."))))
(define-public dblatex
(package
(name "dblatex")
(version "0.3.5")
(source (origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.bz2"))
(sha256
(base32
"0h3472n33pabrn8qwggsahkrjx8lybpwlc3zprby3w3w3x5i830f"))))
(build-system python-build-system)
(inputs
`(("python-setuptools" ,python-setuptools)
("texlive" ,texlive)
("docbook" ,docbook-xml)
for xsltproc
(arguments
#:phases
(alist-cons-after
'wrap 'set-path
(lambda* (#:key inputs outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(wrap-program (string-append out "/bin/dblatex")
`("PATH" ":" prefix
,(map (lambda (input)
(string-append (assoc-ref inputs input)
"/bin"))
'("libxslt" "texlive"
"imagemagick" "inkscape"))))))
%standard-phases)))
(home-page "")
(synopsis "DocBook to LaTeX Publishing")
(description
"DocBook to LaTeX Publishing transforms your SGML/XML DocBook documents
to DVI, PostScript or PDF by translating them in pure LaTeX as a first
process. MathML 2.0 markups are supported too. It started as a clone of
DB2LaTeX.")
(license gpl2+)))
|
399bf7a04d21e95d8346bfc468f7363a02ca098027032a8ebbb2ce5153c81925 | mcgizzle/haxchange | Types.hs | {-# LANGUAGE OverloadedStrings #-}
module <newmodule>.Types where
import Debug.Trace
import Types ( Api
, Ticker(..)
, Currency(..)
, Currency'(..)
, MarketName(..)
, Balance(..) )
import qualified Types as T
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Aeson
class <newmodule>Text a where
toText :: a -> Text
instance <newmodule>Text MarketName where
toText = T.toText
instance <newmodule>Text Currency where
toText = T.toText
instance FromJSON Markets
instance FromJSON Market
instance FromJSON Tickers
instance FromJSON Ticker
instance FromJSON Balance
instance FromJSON OrderId
instance FromJSON ServerTime
| null | https://raw.githubusercontent.com/mcgizzle/haxchange/620a4c93ae28abdd637b7c1fd8b018628b3bd0e9/templates/Types.hs | haskell | # LANGUAGE OverloadedStrings # | module <newmodule>.Types where
import Debug.Trace
import Types ( Api
, Ticker(..)
, Currency(..)
, Currency'(..)
, MarketName(..)
, Balance(..) )
import qualified Types as T
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Aeson
class <newmodule>Text a where
toText :: a -> Text
instance <newmodule>Text MarketName where
toText = T.toText
instance <newmodule>Text Currency where
toText = T.toText
instance FromJSON Markets
instance FromJSON Market
instance FromJSON Tickers
instance FromJSON Ticker
instance FromJSON Balance
instance FromJSON OrderId
instance FromJSON ServerTime
|
3e644c97e2685cc8d7bf77bf9bc30695e66397b5905885e7c0cf5d859bdca348 | tweag/asterius | T16208.hs | # LANGUAGE GADTs , ExplicitForAll #
module Main (main) where
import GHC.Exts
newtype Age a b where
Age :: forall b a. Int -> Age a b
data T a = MkT a
# NOINLINE foo #
foo :: (Int -> Age Bool Char) -> String
foo _ = "bad (RULE should have fired)"
# RULES " foo / coerce " [ 1 ] foo coerce = " good " #
main = putStrLn (foo Age)
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/ghc-testsuite/simplCore/T16208.hs | haskell | # LANGUAGE GADTs , ExplicitForAll #
module Main (main) where
import GHC.Exts
newtype Age a b where
Age :: forall b a. Int -> Age a b
data T a = MkT a
# NOINLINE foo #
foo :: (Int -> Age Bool Char) -> String
foo _ = "bad (RULE should have fired)"
# RULES " foo / coerce " [ 1 ] foo coerce = " good " #
main = putStrLn (foo Age)
| |
5fcdb8eeb95e89ff90484260b697fce75d02bd2ebdca530dc09f66dd0cca3444 | heroku/ranch_proxy_protocol | ranch_proxy_encoder.erl | %% Based off -protocol.txt
note : Erlang 's bit syntax is big - endian by default . So is this protocol
-module(ranch_proxy_encoder).
-export([v1_encode/4, v2_encode/2, v2_encode/5]).
-include("ranch_proxy.hrl").
-type opts() :: [{negotiated_protocol, binary()}
|{protocol, sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'}
|{sni_hostname, iodata()}
|{verify, verify_peer | verify_none}
].
-export_type([opts/0]).
%%%%%%%%%%%%%%%%%%
%%% PUBLIC API %%%
%%%%%%%%%%%%%%%%%%
%%% Proxy v1
-spec v1_encode(proxy, inet, {inet:ip4_address(), inet:port_number()}, {inet:ip4_address(), inet:port_number()}) -> binary()
; (proxy, inet6, {inet:ip6_address(), inet:port_number()}, {inet:ip6_address(), inet:port_number()}) -> binary().
v1_encode(proxy, Proto, {SrcIp, SrcPort}, {DstIp, DstPort}) ->
TCP = case Proto of
inet -> <<"TCP4">>;
inet6 -> <<"TCP6">>
end,
BinSrcIp = list_to_binary(inet:ntoa(SrcIp)),
BinDstIp = list_to_binary(inet:ntoa(DstIp)),
BinSrcPort = list_to_binary(integer_to_list(SrcPort)),
BinDstPort = list_to_binary(integer_to_list(DstPort)),
<<"PROXY ", TCP/binary, " ", BinSrcIp/binary, " ", BinDstIp/binary, " ",
BinSrcPort/binary, " ", BinDstPort/binary, "\r\n">>.
%%% Proxy v2
%% supports connection-oriented IP stuff only, no DGRAM nor unix sockets yet.
-spec v2_encode(local, undefined) -> binary().
v2_encode(local, undefined) ->
Cmd = command(local),
Proto = protocol(undefined),
AddrLen = addr_len(undefined),
%% Header
<<?HEADER, ?VSN:4, Cmd:4, Proto/binary, AddrLen:16>>.
-spec v2_encode(proxy, inet, {inet:ip4_address(), inet:port_number()}, {inet:ip4_address(), inet:port_number()}, opts()) -> binary()
; (proxy, inet6, {inet:ip6_address(), inet:port_number()}, {inet:ip6_address(), inet:port_number()}, opts()) -> binary().
v2_encode(Command, Protocol, Src, Dst, Opt) ->
Cmd = command(Command),
Proto = protocol(Protocol),
AddrLen = addr_len(Protocol),
Addresses = addr(Protocol, Src, Dst),
AdditionalBytes = more(Opt),
HeaderLen = AddrLen + byte_size(AdditionalBytes),
%% Header
<<?HEADER, ?VSN:4, Cmd:4, Proto/binary, HeaderLen:16,
%% Body
Addresses:AddrLen/binary, AdditionalBytes/binary>>.
%%%%%%%%%%%%%%%
%%% PRIVATE %%%
%%%%%%%%%%%%%%%
command(local) -> 16#00;
command(proxy) -> 16#11.
protocol(undefined) -> <<?AF_UNSPEC:4, ?UNSPEC:4>>;
protocol(inet) -> <<?AF_INET:4, ?STREAM:4>>;
protocol(inet6) -> <<?AF_INET6:4, ?STREAM:4>>.
) - > < < ? AF_UNIX:4 , ? STREAM:4 > > .
addr_len(undefined) -> 0;
addr_len(inet) -> 12;
addr_len(inet6) -> 36.
addr_len(unix ) - > 216 .
addr(inet, {{SA,SB,SC,SD}, SP}, {{DA,DB,DC,DD}, DP}) ->
<<SA:8, SB:8, SC:8, SD:8,
DA:8, DB:8, DC:8, DD:8,
SP:16, DP:16>>;
addr(inet6, {{SA,SB,SC,SD,SE,SF,SG,SH}, SP}, {{DA,DB,DC,DD,DE,DF,DG,DH}, DP}) ->
<<SA:16, SB:16, SC:16, SD:16, SE:16, SF:16, SG:16, SH:16,
DA:16, DB:16, DC:16, DD:16, DE:16, DF:16, DG:16, DH:16,
SP:16, DP:16>>.
more(List) ->
iolist_to_binary([check(alpn, List), check(ssl, List)]).
check(alpn, List) ->
case lists:keyfind(negotiated_protocol, 1, List) of
{negotiated_protocol, Proto} ->
<<?PP2_TYPE_ALPN:8, (byte_size(Proto)):16, Proto/binary>>;
false ->
<<>>
end;
check(ssl, List) ->
case lists:keyfind(protocol, 1, List) of
{_, Val} -> ssl_record(Val, List);
_ -> <<>>
end.
ssl_record(Proto, List) ->
ClientCert = case lists:keyfind(verify, 1, List) of
{verify, verify_peer} -> 1; % otherwise the conn would have failed
{verify, verify_none} -> 0;
false -> 0
end,
ClientSSL = 1, % otherwise the conn would have failed
UNASSIGNED
ClientCert:1, % PP2_CLIENT_CERT_SESS
ClientCert:1, % PP2_CLIENT_CERT_CONN
ClientSSL:1>>, % PP2_CLIENT_SSL
Verify = <<(bnot ClientCert):32>>,
VsnStr = case Proto of
ssl3 -> <<"SSL 3.0">>;
tlsv1 -> <<"TLS 1.0">>;
'tlsv1.1' -> <<"TLS 1.1">>;
'tlsv1.2' -> <<"TLS 1.2">>
end,
Vsn = <<?PP2_SUBTYPE_SSL_VERSION:8, (byte_size(VsnStr)):16, VsnStr/binary>>,
CN = case lists:keyfind(sni_hostname, 1, List) of
{_, Name} ->
CNStr = iolist_to_binary(Name),
<<?PP2_SUBTYPE_SSL_CN:8, (byte_size(CNStr)):16, CNStr/binary>>;
_ ->
<<>>
end,
<<?PP2_TYPE_SSL:8, (1+4+byte_size(Vsn)+byte_size(CN)):16,
BitField/binary, Verify/binary, Vsn/binary, CN/binary>>.
| null | https://raw.githubusercontent.com/heroku/ranch_proxy_protocol/59046e5e40f8c573c5b15ae0358e78bd8b211ab6/src/ranch_proxy_encoder.erl | erlang | Based off -protocol.txt
PUBLIC API %%%
Proxy v1
Proxy v2
supports connection-oriented IP stuff only, no DGRAM nor unix sockets yet.
Header
Header
Body
PRIVATE %%%
otherwise the conn would have failed
otherwise the conn would have failed
PP2_CLIENT_CERT_SESS
PP2_CLIENT_CERT_CONN
PP2_CLIENT_SSL | note : Erlang 's bit syntax is big - endian by default . So is this protocol
-module(ranch_proxy_encoder).
-export([v1_encode/4, v2_encode/2, v2_encode/5]).
-include("ranch_proxy.hrl").
-type opts() :: [{negotiated_protocol, binary()}
|{protocol, sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'}
|{sni_hostname, iodata()}
|{verify, verify_peer | verify_none}
].
-export_type([opts/0]).
-spec v1_encode(proxy, inet, {inet:ip4_address(), inet:port_number()}, {inet:ip4_address(), inet:port_number()}) -> binary()
; (proxy, inet6, {inet:ip6_address(), inet:port_number()}, {inet:ip6_address(), inet:port_number()}) -> binary().
v1_encode(proxy, Proto, {SrcIp, SrcPort}, {DstIp, DstPort}) ->
TCP = case Proto of
inet -> <<"TCP4">>;
inet6 -> <<"TCP6">>
end,
BinSrcIp = list_to_binary(inet:ntoa(SrcIp)),
BinDstIp = list_to_binary(inet:ntoa(DstIp)),
BinSrcPort = list_to_binary(integer_to_list(SrcPort)),
BinDstPort = list_to_binary(integer_to_list(DstPort)),
<<"PROXY ", TCP/binary, " ", BinSrcIp/binary, " ", BinDstIp/binary, " ",
BinSrcPort/binary, " ", BinDstPort/binary, "\r\n">>.
-spec v2_encode(local, undefined) -> binary().
v2_encode(local, undefined) ->
Cmd = command(local),
Proto = protocol(undefined),
AddrLen = addr_len(undefined),
<<?HEADER, ?VSN:4, Cmd:4, Proto/binary, AddrLen:16>>.
-spec v2_encode(proxy, inet, {inet:ip4_address(), inet:port_number()}, {inet:ip4_address(), inet:port_number()}, opts()) -> binary()
; (proxy, inet6, {inet:ip6_address(), inet:port_number()}, {inet:ip6_address(), inet:port_number()}, opts()) -> binary().
v2_encode(Command, Protocol, Src, Dst, Opt) ->
Cmd = command(Command),
Proto = protocol(Protocol),
AddrLen = addr_len(Protocol),
Addresses = addr(Protocol, Src, Dst),
AdditionalBytes = more(Opt),
HeaderLen = AddrLen + byte_size(AdditionalBytes),
<<?HEADER, ?VSN:4, Cmd:4, Proto/binary, HeaderLen:16,
Addresses:AddrLen/binary, AdditionalBytes/binary>>.
command(local) -> 16#00;
command(proxy) -> 16#11.
protocol(undefined) -> <<?AF_UNSPEC:4, ?UNSPEC:4>>;
protocol(inet) -> <<?AF_INET:4, ?STREAM:4>>;
protocol(inet6) -> <<?AF_INET6:4, ?STREAM:4>>.
) - > < < ? AF_UNIX:4 , ? STREAM:4 > > .
addr_len(undefined) -> 0;
addr_len(inet) -> 12;
addr_len(inet6) -> 36.
addr_len(unix ) - > 216 .
addr(inet, {{SA,SB,SC,SD}, SP}, {{DA,DB,DC,DD}, DP}) ->
<<SA:8, SB:8, SC:8, SD:8,
DA:8, DB:8, DC:8, DD:8,
SP:16, DP:16>>;
addr(inet6, {{SA,SB,SC,SD,SE,SF,SG,SH}, SP}, {{DA,DB,DC,DD,DE,DF,DG,DH}, DP}) ->
<<SA:16, SB:16, SC:16, SD:16, SE:16, SF:16, SG:16, SH:16,
DA:16, DB:16, DC:16, DD:16, DE:16, DF:16, DG:16, DH:16,
SP:16, DP:16>>.
more(List) ->
iolist_to_binary([check(alpn, List), check(ssl, List)]).
check(alpn, List) ->
case lists:keyfind(negotiated_protocol, 1, List) of
{negotiated_protocol, Proto} ->
<<?PP2_TYPE_ALPN:8, (byte_size(Proto)):16, Proto/binary>>;
false ->
<<>>
end;
check(ssl, List) ->
case lists:keyfind(protocol, 1, List) of
{_, Val} -> ssl_record(Val, List);
_ -> <<>>
end.
ssl_record(Proto, List) ->
ClientCert = case lists:keyfind(verify, 1, List) of
{verify, verify_none} -> 0;
false -> 0
end,
UNASSIGNED
Verify = <<(bnot ClientCert):32>>,
VsnStr = case Proto of
ssl3 -> <<"SSL 3.0">>;
tlsv1 -> <<"TLS 1.0">>;
'tlsv1.1' -> <<"TLS 1.1">>;
'tlsv1.2' -> <<"TLS 1.2">>
end,
Vsn = <<?PP2_SUBTYPE_SSL_VERSION:8, (byte_size(VsnStr)):16, VsnStr/binary>>,
CN = case lists:keyfind(sni_hostname, 1, List) of
{_, Name} ->
CNStr = iolist_to_binary(Name),
<<?PP2_SUBTYPE_SSL_CN:8, (byte_size(CNStr)):16, CNStr/binary>>;
_ ->
<<>>
end,
<<?PP2_TYPE_SSL:8, (1+4+byte_size(Vsn)+byte_size(CN)):16,
BitField/binary, Verify/binary, Vsn/binary, CN/binary>>.
|
9f996736666a6334cc29c73f5e43517af24aa3d18d377c7d99e196c8049222f9 | gfredericks/test.chuck | generators.cljc | (ns com.gfredericks.test.chuck.generators
"Yes this namespace's name has five components."
(:refer-clojure :exclude [double for partition])
(:require [clojure.test.check.generators :as gen]
[#?(:clj clojure.core :cljs cljs.core) :as core]
[#?(:clj clj-time.core :cljs cljs-time.core) :as ct]
#?(:clj [com.gfredericks.test.chuck.regexes :as regexes]))
#?(:cljs
(:require-macros [com.gfredericks.test.chuck.generators :refer [for]])))
Hoping this will be in test.check proper :
;; -15
(defmacro for
"Like clojure.core/for, but builds up a generator using bind, fmap,
and such-that. The right half of each binding pair is a generator,
and the left half is the value it's generating. The body of the for
should be a generated value.
Both :let and :when are available as in clojure.core/for. Using
:when will apply a filter to the previous generator via such-that.
An additional available clause is the :parallel clause, which is an
alternative to tuple, for use when several generators are
independent."
[bindings expr]
The strategy here is to rewrite the expression one clause at
a time using two varieties of recursion :
;;
;; A basic single-clause form expands to fmap:
;;
;; (for [x g] (f x))
;;
;; becomes
;;
;; (fmap (fn [x] (f x)) g)
;;
;; Multiple clauses expand one at a time to a call to bind with
;; a nested for expression:
;;
( for [ x1 g1 , x2 ] ( f x1 x2 ) )
;;
;; becomes
;;
;; (bind g1 (fn [x1] (for [x2 g2] (f x1 x2))))
;;
;; A :let clause gets absorbed into the preceding clause via
;; a transformation with fmap and tuple destructuring:
;;
;; (for [x g, :let [y (f x)]] (h x y))
;;
;; becomes
;;
;; (for [[x y] (fmap (fn [arg]
;; (let [x arg, y (f x)]
;; [arg y]))
;; g)]
;; (h x y))
;;
;; A :when clause gets absorbed into the preceding clause
;; via a transformation with such-that:
;;
;; (for [x g, :when (f x)] (h x))
;;
;; becomes
;;
;; (for [x (such-that (fn [x] (f x)) g)] (h x))
;;
;; A :parallel clause is easily transformed to a call to
gen / tuple :
;;
( for [: parallel [ v1 g1 , v2 ] ] ( f v1 v2 ) )
;;
;; becomes
;;
;; (for [[v1 v2] (gen/tuple g1 g2)] (f v1 v2))
(if-let [[k1 v1 & [k2 v2 & even-more :as more]] (seq bindings)]
(do
(assert (or (= :parallel k1) (not (keyword? k1))))
(cond (= :parallel k1)
(do (assert (even? (count v1))
":parallel clause must have an even number of bindings!")
(let [pairs (core/partition 2 v1)
names (map first pairs)
gens (map second pairs)]
`(for [[~@names] (gen/tuple ~@gens)
~@more]
~expr)))
(empty? more)
;; special case to avoid extra call to fmap
(if (and (symbol? k1) (= k1 expr))
v1
`(gen/fmap (fn [~k1] ~expr) ~v1))
(= k2 :let)
;; This part is complex because we need to watch out for
;; destructuring inside the :let, since the destructuring
;; form can't be used as a value expression.
;;
This loop is constructing three collections :
;;
lettings - The kv pairs for the let inside the fmap fn
;; bindings - The single tuple-destructuring form used
;; in the outer for expression
;; values - The value expressions that go in the vector
that is the return value from the fmap fn
(let [[lettings bindings values]
(loop [lettings []
bindings []
values []
xs (core/partition 2 v2)]
(if-let [[[k v] & xs] (seq xs)]
(if (symbol? k)
(recur (conj lettings k v)
(conj bindings k)
(conj values k)
xs)
(let [k' (gensym)]
(recur (conj lettings k' v k k')
(conj bindings k)
(conj values k')
xs)))
[lettings bindings values]))
k1' (apply vector k1 bindings)
v1' `(gen/fmap (fn [arg#]
(let [~k1 arg#
~@lettings]
[arg# ~@values]))
~v1)]
`(for [~k1' ~v1' ~@even-more] ~expr))
(= k2 :when)
(let [max-tries-meta (-> v2 meta :max-tries)
max-tries-arg (when max-tries-meta
[max-tries-meta])
v1' `(gen/such-that (fn [~k1] ~v2) ~v1 ~@max-tries-arg)]
`(for [~k1 ~v1' ~@even-more] ~expr))
((some-fn symbol? vector? map? #{:parallel}) k2)
`(gen/bind ~v1 (fn [~k1] (for ~more ~expr)))
:else
(throw (ex-info "Unsupported binding form in gen/for!" {:form k2}))))
`(gen/return ~expr)))
(defn subsequence
"Given a collection, generates \"subsequences\" which are sequences
of (not necessarily contiguous) elements from the original
collection, in the same order. For collections of distinct elements
this is effectively a subset generator, with an ordering guarantee."
[elements]
(for [bools (apply gen/tuple (repeat (count elements) gen/boolean))]
(->> (map list bools elements)
(filter first)
(map second))))
(defn subset
"Deprecated variant of subsequence that coerces the result to a set."
[elements]
(gen/fmap set (subsequence elements)))
(defn cap-size
"Wraps the given generator so that it is never called with a size
larger than the max given."
[max-size gen]
(gen/sized (fn [size]
(gen/resize (min size max-size) gen))))
(defn partition
"Generates a collection of collection of the elements in coll, such
that concatting them together gives the original collection. None of
the subcollections will be empty."
([coll] (partition coll 4))
([coll avg-size]
{:pre [(> avg-size 1)]}
(if (empty? coll)
(gen/return [])
(for [bools (apply gen/tuple (repeat (dec (count coll))
(gen/frequency
[[(dec avg-size) (gen/return false)]
[1 (gen/return true)]])))]
(reduce (fn [ret [bool x]]
(if bool
(conj ret [x])
(update-in ret [(dec (count ret))] conj x)))
[[(first coll)]]
(map vector bools (rest coll)))))))
(defn map->hash-map
"Like test.check.generators/hash-map, but takes a single map argument
instead of varargs."
[m]
(apply gen/hash-map (apply concat m)))
;;
;; Numbers!
;;
(defn ^:deprecated bounded-int
"DEPRECATED: see clojure.test.check.generators/large-integer*
Like clojure.test.check.generators/choose, but generates
smallish numbers for small sizes.
Both bounds are inclusive."
[low high]
(gen/sized (fn [size]
(let [exp (apply * (repeat size 2N))
-high-low (- high low)
range-size (min (* 2 exp) -high-low)
low' (- exp)
high' exp]
(cond (<= -high-low range-size)
(gen/choose low high)
(<= low low' high' high)
(gen/choose low' high')
(< low' low)
(gen/choose low (+ low range-size))
(< high high')
(gen/choose (- high range-size) high))))))
(defn ^:private scalb
[x exp]
#?(:clj (Math/scalb ^double x ^int exp)
:cljs (* x (.pow js/Math 2 exp))))
(def ^:deprecated double
"DEPRECATED: see clojure.test.check.generators/double
Generates a Double, which can include Infinity and -Infinity
but not NaN."
(gen/fmap
(fn [[signed-significand exp]]
(scalb (core/double signed-significand) (core/int exp)))
(gen/tuple
(let [bignumber (apply * (repeat 52 2))]
(gen/large-integer* {:min (- bignumber) :max bignumber}))
(gen/large-integer* {:min -1022 :max 1023}))))
#?(:clj
(defn string-from-regex
"Given a regular expression, returns a generator that generates
strings matching that regular expression.
As jvm regular expressions are quite complex, and certain features
are quite challenging to implement as generators, this function does
not support all of their features. However, it tries to at least
accurately recognize features that it doesn't support and throw
helpful exceptions if it is called with a regular expression using
any of those features."
[regex]
(regexes/gen-string-from-regex regex)))
(defn sub-map
"Given a concrete map, randomly selects keys from it to create a
subset of the given map. Note: the generated maps may be empty.
Example:
(gen/sample (sub-map {:a 1 :b 2 :c 3}))
=> ({} {:b 2} {:b 2, :c 3} {:a 1} ...)"
[m]
(gen/fmap (fn [ks]
(select-keys m ks))
(subsequence (keys m))))
(def valid-offset-fns [ct/millis ct/seconds ct/minutes ct/hours ct/days ct/months ct/years])
(def ^:private valid-offset-fn? (set valid-offset-fns))
(def ^:private yr-2000 (ct/date-time 2000))
(defn datetime
"Generates datetime within given range and format.
base-datetime => By default it'll calculate the dates from year 2000.
Generally this is a good idea instead of using (ct/now)
since giving the same seed will generate the same output.
If you would like to generate from a differnt base-datetime,
Pass for example (ct/now) to use current time,
Or pass a specific date-time (ct/date-time 2011 11 24)
offset-min & offset-max => The offset number range
By default it is -1000 to 1000
offset-fns => List of functions which will be used with the given offset.
It randomly picks one of the functions and
applies the random offset with the given range.
Check valid-offset-fns for possible values.
By default its all the values of valid-offset-fns.
For example If you would like to generate datetime
from last 10 months to next 10 months:
(gen/sample (datetime {:offset-fns [clj-time.core/months]
:offset-min -10
:offset-max 10}))
=>
(#<DateTime 1999-11-01T00:00:00.000Z>
#<DateTime 1999-12-01T00:00:00.000Z>
#<DateTime 2000-05-01T00:00:00.000Z>
....)"
([]
(datetime {}))
([{:keys [base-datetime offset-fns offset-min offset-max]
:or {offset-fns valid-offset-fns
offset-min -1000
offset-max 1000
base-datetime yr-2000}}]
{:pre [(<= offset-min offset-max)
(not-empty offset-fns)
(every? valid-offset-fn?
offset-fns)]}
(gen/fmap (fn [[offset-fn offset]]
(->> offset
offset-fn
(ct/plus base-datetime)))
(gen/tuple (gen/elements offset-fns)
(gen/large-integer* {:min offset-min
:max offset-max})))))
(defn- bounded-recursive-helper
[container-gen-fn scalar-gen scalar-size max-breadth curr-height]
(if (neg? curr-height)
(gen/resize scalar-size scalar-gen)
(gen/resize max-breadth
(container-gen-fn
(bounded-recursive-helper container-gen-fn
scalar-gen
scalar-size
max-breadth
(dec curr-height))))))
(defn bounded-recursive-gen
"Same as gen/recursive-gen but allows a bound on both breadth and height.
Height = Number of levels of nesting. Eg:
level of nesting = 0: [15 -4]
level of nesting = 1: [[5 1 -3 -10 -18] [17]]
Breadth = Number of elements in a level (number of elements in each vector,
in the above eg).
Example 1: Breadth=2, Height=10
This means that no vector will contain more than 2 elements.
and there will be at most 10 levels of nested vectors.
(last (gen/sample (bounded-recursive-gen gen/vector
gen/int
2 10) 20))
=> [[[[[] []]
[[[[[[[[-11 1] []]]] [[[[3 10] []]] []]] []] []]
[[[[[[[16 10] []]] []] []] [[] []]] [[[[]]] []]]]]
[[[[]] []]]]]
Example 2: Breadth=10, Height=2 (Opposite of ex 1)
This means that no vector will contain more than 10 elements.
and there will be atmost 2 levels of nested vectors.
(last (gen/sample (bounded-recursive-gen gen/vector
gen/int
10 2) 20))
=> [[[11 5 3 8 15 -19 -12 -2] [7 3 -12 -11 0 -10 19 -19 -1] [16 -15 19 1]]
[[6 -18 -14 -10 -7 -5 5]
[7 10 -5]
[-19 -5 3 -15 15 17 -18]
[16 -15 10 -7]
[14 3 5 9 -2 8 -7 11]
[-5 17 -19 5 -9 7]
[11 -1 -4 5]
[-2 13 -16 -4]
[-3 -12 -1]
[4 15]]]
There are atmost 2 nested levels, and no vector contains more than 10
elements."
[container-gen-fn scalar-gen max-breadth max-height]
(assert (gen/generator? scalar-gen)
"Second arg to recursive-gen must be a generator")
(gen/sized
(fn [size]
(gen/bind
(gen/choose 1 5)
(fn [decay-factor]
(bounded-recursive-helper container-gen-fn
scalar-gen
size
(min max-breadth (Math/pow size (/ 1 decay-factor)))
(min max-height (Math/pow size (/ 1 (inc decay-factor))))))))))
| null | https://raw.githubusercontent.com/gfredericks/test.chuck/9f6f33db6cc1ac8b172f20a45e8f13e34ac3c6f2/src/com/gfredericks/test/chuck/generators.cljc | clojure | -15
A basic single-clause form expands to fmap:
(for [x g] (f x))
becomes
(fmap (fn [x] (f x)) g)
Multiple clauses expand one at a time to a call to bind with
a nested for expression:
becomes
(bind g1 (fn [x1] (for [x2 g2] (f x1 x2))))
A :let clause gets absorbed into the preceding clause via
a transformation with fmap and tuple destructuring:
(for [x g, :let [y (f x)]] (h x y))
becomes
(for [[x y] (fmap (fn [arg]
(let [x arg, y (f x)]
[arg y]))
g)]
(h x y))
A :when clause gets absorbed into the preceding clause
via a transformation with such-that:
(for [x g, :when (f x)] (h x))
becomes
(for [x (such-that (fn [x] (f x)) g)] (h x))
A :parallel clause is easily transformed to a call to
becomes
(for [[v1 v2] (gen/tuple g1 g2)] (f v1 v2))
special case to avoid extra call to fmap
This part is complex because we need to watch out for
destructuring inside the :let, since the destructuring
form can't be used as a value expression.
bindings - The single tuple-destructuring form used
in the outer for expression
values - The value expressions that go in the vector
Numbers!
| (ns com.gfredericks.test.chuck.generators
"Yes this namespace's name has five components."
(:refer-clojure :exclude [double for partition])
(:require [clojure.test.check.generators :as gen]
[#?(:clj clojure.core :cljs cljs.core) :as core]
[#?(:clj clj-time.core :cljs cljs-time.core) :as ct]
#?(:clj [com.gfredericks.test.chuck.regexes :as regexes]))
#?(:cljs
(:require-macros [com.gfredericks.test.chuck.generators :refer [for]])))
Hoping this will be in test.check proper :
(defmacro for
"Like clojure.core/for, but builds up a generator using bind, fmap,
and such-that. The right half of each binding pair is a generator,
and the left half is the value it's generating. The body of the for
should be a generated value.
Both :let and :when are available as in clojure.core/for. Using
:when will apply a filter to the previous generator via such-that.
An additional available clause is the :parallel clause, which is an
alternative to tuple, for use when several generators are
independent."
[bindings expr]
The strategy here is to rewrite the expression one clause at
a time using two varieties of recursion :
( for [ x1 g1 , x2 ] ( f x1 x2 ) )
gen / tuple :
( for [: parallel [ v1 g1 , v2 ] ] ( f v1 v2 ) )
(if-let [[k1 v1 & [k2 v2 & even-more :as more]] (seq bindings)]
(do
(assert (or (= :parallel k1) (not (keyword? k1))))
(cond (= :parallel k1)
(do (assert (even? (count v1))
":parallel clause must have an even number of bindings!")
(let [pairs (core/partition 2 v1)
names (map first pairs)
gens (map second pairs)]
`(for [[~@names] (gen/tuple ~@gens)
~@more]
~expr)))
(empty? more)
(if (and (symbol? k1) (= k1 expr))
v1
`(gen/fmap (fn [~k1] ~expr) ~v1))
(= k2 :let)
This loop is constructing three collections :
lettings - The kv pairs for the let inside the fmap fn
that is the return value from the fmap fn
(let [[lettings bindings values]
(loop [lettings []
bindings []
values []
xs (core/partition 2 v2)]
(if-let [[[k v] & xs] (seq xs)]
(if (symbol? k)
(recur (conj lettings k v)
(conj bindings k)
(conj values k)
xs)
(let [k' (gensym)]
(recur (conj lettings k' v k k')
(conj bindings k)
(conj values k')
xs)))
[lettings bindings values]))
k1' (apply vector k1 bindings)
v1' `(gen/fmap (fn [arg#]
(let [~k1 arg#
~@lettings]
[arg# ~@values]))
~v1)]
`(for [~k1' ~v1' ~@even-more] ~expr))
(= k2 :when)
(let [max-tries-meta (-> v2 meta :max-tries)
max-tries-arg (when max-tries-meta
[max-tries-meta])
v1' `(gen/such-that (fn [~k1] ~v2) ~v1 ~@max-tries-arg)]
`(for [~k1 ~v1' ~@even-more] ~expr))
((some-fn symbol? vector? map? #{:parallel}) k2)
`(gen/bind ~v1 (fn [~k1] (for ~more ~expr)))
:else
(throw (ex-info "Unsupported binding form in gen/for!" {:form k2}))))
`(gen/return ~expr)))
(defn subsequence
"Given a collection, generates \"subsequences\" which are sequences
of (not necessarily contiguous) elements from the original
collection, in the same order. For collections of distinct elements
this is effectively a subset generator, with an ordering guarantee."
[elements]
(for [bools (apply gen/tuple (repeat (count elements) gen/boolean))]
(->> (map list bools elements)
(filter first)
(map second))))
(defn subset
"Deprecated variant of subsequence that coerces the result to a set."
[elements]
(gen/fmap set (subsequence elements)))
(defn cap-size
"Wraps the given generator so that it is never called with a size
larger than the max given."
[max-size gen]
(gen/sized (fn [size]
(gen/resize (min size max-size) gen))))
(defn partition
"Generates a collection of collection of the elements in coll, such
that concatting them together gives the original collection. None of
the subcollections will be empty."
([coll] (partition coll 4))
([coll avg-size]
{:pre [(> avg-size 1)]}
(if (empty? coll)
(gen/return [])
(for [bools (apply gen/tuple (repeat (dec (count coll))
(gen/frequency
[[(dec avg-size) (gen/return false)]
[1 (gen/return true)]])))]
(reduce (fn [ret [bool x]]
(if bool
(conj ret [x])
(update-in ret [(dec (count ret))] conj x)))
[[(first coll)]]
(map vector bools (rest coll)))))))
(defn map->hash-map
"Like test.check.generators/hash-map, but takes a single map argument
instead of varargs."
[m]
(apply gen/hash-map (apply concat m)))
(defn ^:deprecated bounded-int
"DEPRECATED: see clojure.test.check.generators/large-integer*
Like clojure.test.check.generators/choose, but generates
smallish numbers for small sizes.
Both bounds are inclusive."
[low high]
(gen/sized (fn [size]
(let [exp (apply * (repeat size 2N))
-high-low (- high low)
range-size (min (* 2 exp) -high-low)
low' (- exp)
high' exp]
(cond (<= -high-low range-size)
(gen/choose low high)
(<= low low' high' high)
(gen/choose low' high')
(< low' low)
(gen/choose low (+ low range-size))
(< high high')
(gen/choose (- high range-size) high))))))
(defn ^:private scalb
[x exp]
#?(:clj (Math/scalb ^double x ^int exp)
:cljs (* x (.pow js/Math 2 exp))))
(def ^:deprecated double
"DEPRECATED: see clojure.test.check.generators/double
Generates a Double, which can include Infinity and -Infinity
but not NaN."
(gen/fmap
(fn [[signed-significand exp]]
(scalb (core/double signed-significand) (core/int exp)))
(gen/tuple
(let [bignumber (apply * (repeat 52 2))]
(gen/large-integer* {:min (- bignumber) :max bignumber}))
(gen/large-integer* {:min -1022 :max 1023}))))
#?(:clj
(defn string-from-regex
"Given a regular expression, returns a generator that generates
strings matching that regular expression.
As jvm regular expressions are quite complex, and certain features
are quite challenging to implement as generators, this function does
not support all of their features. However, it tries to at least
accurately recognize features that it doesn't support and throw
helpful exceptions if it is called with a regular expression using
any of those features."
[regex]
(regexes/gen-string-from-regex regex)))
(defn sub-map
"Given a concrete map, randomly selects keys from it to create a
subset of the given map. Note: the generated maps may be empty.
Example:
(gen/sample (sub-map {:a 1 :b 2 :c 3}))
=> ({} {:b 2} {:b 2, :c 3} {:a 1} ...)"
[m]
(gen/fmap (fn [ks]
(select-keys m ks))
(subsequence (keys m))))
(def valid-offset-fns [ct/millis ct/seconds ct/minutes ct/hours ct/days ct/months ct/years])
(def ^:private valid-offset-fn? (set valid-offset-fns))
(def ^:private yr-2000 (ct/date-time 2000))
(defn datetime
"Generates datetime within given range and format.
base-datetime => By default it'll calculate the dates from year 2000.
Generally this is a good idea instead of using (ct/now)
since giving the same seed will generate the same output.
If you would like to generate from a differnt base-datetime,
Pass for example (ct/now) to use current time,
Or pass a specific date-time (ct/date-time 2011 11 24)
offset-min & offset-max => The offset number range
By default it is -1000 to 1000
offset-fns => List of functions which will be used with the given offset.
It randomly picks one of the functions and
applies the random offset with the given range.
Check valid-offset-fns for possible values.
By default its all the values of valid-offset-fns.
For example If you would like to generate datetime
from last 10 months to next 10 months:
(gen/sample (datetime {:offset-fns [clj-time.core/months]
:offset-min -10
:offset-max 10}))
=>
(#<DateTime 1999-11-01T00:00:00.000Z>
#<DateTime 1999-12-01T00:00:00.000Z>
#<DateTime 2000-05-01T00:00:00.000Z>
....)"
([]
(datetime {}))
([{:keys [base-datetime offset-fns offset-min offset-max]
:or {offset-fns valid-offset-fns
offset-min -1000
offset-max 1000
base-datetime yr-2000}}]
{:pre [(<= offset-min offset-max)
(not-empty offset-fns)
(every? valid-offset-fn?
offset-fns)]}
(gen/fmap (fn [[offset-fn offset]]
(->> offset
offset-fn
(ct/plus base-datetime)))
(gen/tuple (gen/elements offset-fns)
(gen/large-integer* {:min offset-min
:max offset-max})))))
(defn- bounded-recursive-helper
[container-gen-fn scalar-gen scalar-size max-breadth curr-height]
(if (neg? curr-height)
(gen/resize scalar-size scalar-gen)
(gen/resize max-breadth
(container-gen-fn
(bounded-recursive-helper container-gen-fn
scalar-gen
scalar-size
max-breadth
(dec curr-height))))))
(defn bounded-recursive-gen
"Same as gen/recursive-gen but allows a bound on both breadth and height.
Height = Number of levels of nesting. Eg:
level of nesting = 0: [15 -4]
level of nesting = 1: [[5 1 -3 -10 -18] [17]]
Breadth = Number of elements in a level (number of elements in each vector,
in the above eg).
Example 1: Breadth=2, Height=10
This means that no vector will contain more than 2 elements.
and there will be at most 10 levels of nested vectors.
(last (gen/sample (bounded-recursive-gen gen/vector
gen/int
2 10) 20))
=> [[[[[] []]
[[[[[[[[-11 1] []]]] [[[[3 10] []]] []]] []] []]
[[[[[[[16 10] []]] []] []] [[] []]] [[[[]]] []]]]]
[[[[]] []]]]]
Example 2: Breadth=10, Height=2 (Opposite of ex 1)
This means that no vector will contain more than 10 elements.
and there will be atmost 2 levels of nested vectors.
(last (gen/sample (bounded-recursive-gen gen/vector
gen/int
10 2) 20))
=> [[[11 5 3 8 15 -19 -12 -2] [7 3 -12 -11 0 -10 19 -19 -1] [16 -15 19 1]]
[[6 -18 -14 -10 -7 -5 5]
[7 10 -5]
[-19 -5 3 -15 15 17 -18]
[16 -15 10 -7]
[14 3 5 9 -2 8 -7 11]
[-5 17 -19 5 -9 7]
[11 -1 -4 5]
[-2 13 -16 -4]
[-3 -12 -1]
[4 15]]]
There are atmost 2 nested levels, and no vector contains more than 10
elements."
[container-gen-fn scalar-gen max-breadth max-height]
(assert (gen/generator? scalar-gen)
"Second arg to recursive-gen must be a generator")
(gen/sized
(fn [size]
(gen/bind
(gen/choose 1 5)
(fn [decay-factor]
(bounded-recursive-helper container-gen-fn
scalar-gen
size
(min max-breadth (Math/pow size (/ 1 decay-factor)))
(min max-height (Math/pow size (/ 1 (inc decay-factor))))))))))
|
aa1e077c6bf9271756757d70175e9251e89ece56fabae88701123fe2aab7d008 | ahf/peculium_core | peculium_core_inv.erl | %%%
Copyright ( c ) 2013 .
%%% All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are met:
%%%
%%% * Redistributions of source code must retain the above copyright notice, this
%%% list of conditions and the following disclaimer.
%%%
%%% * Redistributions in binary form must reproduce the above copyright notice,
%%% this list of conditions and the following disclaimer in the documentation
%%% and/or other materials provided with the distribution.
%%%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
%%% WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
%%% OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
%%% OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%%%
%%% ----------------------------------------------------------------------------
@author < >
2013
%%% @end
%%% ----------------------------------------------------------------------------
%%% @doc Bitcoin Inv Utilities.
%%% This module contains utilities for manipulating and using Inv objects.
%%% @end
%%% ----------------------------------------------------------------------------
-module(peculium_core_inv).
%% API.
-export([type/1, hash/1, is_transaction/1, is_block/1, unknown_invs/1, known/1, unknown/1]).
%% Types.
-type inv() :: peculium_core_types:inv().
-type inv_type() :: peculium_core_types:inv_type().
-type hash() :: peculium_core_types:hash().
-include("peculium_core.hrl").
%% @doc Returns the type of a given inv.
-spec type(Inv :: inv()) -> inv_type().
type(#inv { type = Type }) ->
Type.
%% @doc Returns the hash of a given inv.
-spec hash(Inv :: inv()) -> hash().
hash(#inv { hash = Hash }) ->
Hash.
@doc Checks if a given inv is a transaction .
-spec is_transaction(Inv :: inv()) -> boolean().
is_transaction(Inv) ->
type(Inv) =:= transaction.
%% @doc Checks if a given inv is a block.
-spec is_block(Inv :: inv()) -> boolean().
is_block(Inv) ->
type(Inv) =:= block.
%% @doc Returns a list of inv objects that we do not currently have.
-spec unknown_invs(Invs :: [inv()]) -> [inv()].
unknown_invs(Invs) ->
lists:filter(fun unknown/1, Invs).
%% @doc Check if we have the given object.
-spec known(Inv :: inv()) -> boolean().
known(#inv { type = Type, hash = Hash }) ->
case Type of
block ->
FIXME : Create an API in the block_index where we can ask for an entire set of inv 's .
peculium_core_block_index:exists(Hash);
transaction ->
%% FIXME: Once we have a transaction database, this should be changed.
false
end.
%% @doc Check if we do not have the given object.
-spec unknown(Inv :: inv()) -> boolean().
unknown(Inv) ->
not known(Inv).
| null | https://raw.githubusercontent.com/ahf/peculium_core/50259b7ee11ad31316e97c7e88b33cae3f89e5a1/src/peculium_core_inv.erl | erlang |
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------
@end
----------------------------------------------------------------------------
@doc Bitcoin Inv Utilities.
This module contains utilities for manipulating and using Inv objects.
@end
----------------------------------------------------------------------------
API.
Types.
@doc Returns the type of a given inv.
@doc Returns the hash of a given inv.
@doc Checks if a given inv is a block.
@doc Returns a list of inv objects that we do not currently have.
@doc Check if we have the given object.
FIXME: Once we have a transaction database, this should be changed.
@doc Check if we do not have the given object. | Copyright ( c ) 2013 .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
@author < >
2013
-module(peculium_core_inv).
-export([type/1, hash/1, is_transaction/1, is_block/1, unknown_invs/1, known/1, unknown/1]).
-type inv() :: peculium_core_types:inv().
-type inv_type() :: peculium_core_types:inv_type().
-type hash() :: peculium_core_types:hash().
-include("peculium_core.hrl").
-spec type(Inv :: inv()) -> inv_type().
type(#inv { type = Type }) ->
Type.
-spec hash(Inv :: inv()) -> hash().
hash(#inv { hash = Hash }) ->
Hash.
@doc Checks if a given inv is a transaction .
-spec is_transaction(Inv :: inv()) -> boolean().
is_transaction(Inv) ->
type(Inv) =:= transaction.
-spec is_block(Inv :: inv()) -> boolean().
is_block(Inv) ->
type(Inv) =:= block.
-spec unknown_invs(Invs :: [inv()]) -> [inv()].
unknown_invs(Invs) ->
lists:filter(fun unknown/1, Invs).
-spec known(Inv :: inv()) -> boolean().
known(#inv { type = Type, hash = Hash }) ->
case Type of
block ->
FIXME : Create an API in the block_index where we can ask for an entire set of inv 's .
peculium_core_block_index:exists(Hash);
transaction ->
false
end.
-spec unknown(Inv :: inv()) -> boolean().
unknown(Inv) ->
not known(Inv).
|
e99dd3219bad1be08ab81a01efc311034920f11a6717de25c5c5bed021604877 | PrincetonUniversity/lucid | normalizeInts.ml | (* Balance the subexpressions of commutative
operation expression trees, then atomize integer expressions. *)
open CoreSyntax
open InterpHelpers
module DBG = BackendLogging
let silent = ref false;;
let outc = ref None
let dprint_endline = ref DBG.no_printf
exception Error of string
let error s = raise (Error s)
let err msg ex = error (msg ^ " " ^ Printing.exp_to_string ex)
let info str =
if (not !silent)
then (Console.show_message str ANSITerminal.Green "normalizeIntOps")
let dprint_eop exp =
let op, args, _, _ = unpack_eop exp in
let flat_args = CL.map (extract_atomic_opargs op) args |> CL.flatten in
!dprint_endline "[balance_assign_exp] flat args: ";
CL.iter
(fun exp ->
!dprint_endline ("[balance_assign_exp]" ^ Printing.exp_to_string exp))
flat_args;
!dprint_endline "---[balance_assign_exp] flat args--- "
;;
(**** transform operation expressions into balanced expression trees ****)
let balance_assign_exp s =
1 . flatten expression based on op -- >
op [ arg1 ; ; ... ; argn ]
- each arg is an expression _ not _ of type op
2 . build a balanced tree from the list of atoms
op [arg1; arg2; ...; argn]
- each arg is an expression _not_ of type op
2. build a balanced tree from the list of atoms *)
(* extract the expression *)
let exp =
match s with
| SLocal (_, _, exp) -> exp
| SAssign (_, exp) -> exp
| _ -> error "not an assign or local"
in
(* update the exp *)
let new_exp =
match op_of_exp exp with
(* plus is commutative *)
| Some Plus ->
dprint_eop exp;
let new_exp = balance_eop_tree exp in
!dprint_endline
("[balance_assign_exp] original expression:\n"
^ Printing.exp_to_string exp);
!dprint_endline
("[balance_assign_exp] balanced expression:\n"
^ Printing.exp_to_string new_exp);
new_exp
| Some _ -> exp
| None -> exp
in
(* rebuild the statement *)
match s with
| SLocal (id, ty, _) -> SLocal (id, ty, new_exp)
| SAssign (id, _) -> SAssign (id, new_exp)
| _ -> error "not an assign or local"
;;
let balance_assign_exps ds =
let v =
object
inherit [_] s_map as super
method! visit_s ctx s =
(* Question:
is it safe to balance every single expression, not just rhs of assign and local? *)
match s with
| SAssign _ | SLocal _ -> balance_assign_exp s
| _ -> super#visit_s ctx s
end
in
v#visit_decls () ds
;;
(**** transform expressions into atomic expressions ****)
(* convert an expression into a variable whose value is
the evaluation result of an atomic expression *)
let rec to_immediate exp =
match is_immediate exp with
| true -> exp, []
| false ->
(match is_atomic exp with
(* the expression is an atomic op, so we can replace it with a precomputation. *)
| true ->
let ty = exp.ety in
let var_id = Id.fresh "to_immediate_tmp" in
let stmt = slocal var_id ty exp in
let exp = { exp with e = EVar (Cid.id var_id) } in
exp, [stmt]
(* convert the expression into an atomic expression, then an immediate *)
| false ->
let exp, stmts_a = to_atomic exp in
let exp, stmts_b = to_immediate exp in
exp, stmts_a @ stmts_b)
convert an expression into an atomic operation . If
any of the expressions arguments are not immediates ,
convert them to an immediate first .
any of the expressions arguments are not immediates,
convert them to an immediate first. *)
and to_atomic exp =
match is_atomic exp with
| true -> exp, []
| false ->
let args = args_of_exp exp in
(* make all the arguments into immediates *)
let immediate_args, precompute_stmts =
CL.map to_immediate args |> CL.split
in
let precompute_stmts = CL.flatten precompute_stmts in
(* return expression that uses immediates and the precompute statement *)
let exp = replace_args exp immediate_args in
exp, precompute_stmts
;;
let atomize_int_assigns ds =
let v =
object
inherit [_] s_map as super
val mutable precompute_stmts : statement list = []
method precompute_stmts = precompute_stmts
skip !
method! visit_DMemop _ m = DMemop (m)
method! visit_statement ctx stmt =
match stmt.s with
| SAssign _ | SLocal _ ->
precompute_stmts <- [];
(* recurse on statement tree, passing context as true *)
let transformed_stmt = super#visit_statement true stmt in
if there are any precompute statements , place them first
fold_stmts (precompute_stmts @ [transformed_stmt])
(* for other statement kinds, just recurse *)
| _ -> super#visit_statement ctx stmt
method! visit_exp in_assign_or_local exp =
(* We only want to normalize expressions on the rhs of an assignment.
By this point in time, all other complex expressions are removed. *)
match in_assign_or_local with
| true ->
(* transform the expression into an atomic (if not already) *)
!dprint_endline
("[transform_precompute_exps.visit_exp] exp: "
^ Printing.exp_to_string exp);
let atomic_exp, new_precompute_stmts = to_atomic exp in
precompute_stmts <- precompute_stmts @ new_precompute_stmts;
!dprint_endline
("[transform_precompute_exps.visit_exp] atomic exp: "
^ Printing.exp_to_string atomic_exp);
atomic_exp
| false -> exp
end
in
v#visit_decls false ds
;;
let do_passes ds =
if (not !silent)
then (DBG.start_mlog __FILE__ outc dprint_endline);
let orig_ds = ds in
let ds = balance_assign_exps ds in
info "assignments transformed to balanced exps";
let balanced_ds = ds in
let ds = atomize_int_assigns ds in
info "expressions atomized";
exit 1 ;
!dprint_endline "original program: ";
!dprint_endline (Printing.decls_to_string orig_ds);
!dprint_endline "program after exp tree balancing: ";
!dprint_endline (Printing.decls_to_string balanced_ds);
!dprint_endline "program after precomputation pass: ";
!dprint_endline (Printing.decls_to_string ds);
!dprint_endline "compute expressions simplified.";
exit 1 ;
ds
;;
| null | https://raw.githubusercontent.com/PrincetonUniversity/lucid/3a94505efa50e988b4c9ef28048be47cc65b4355/src/lib/midend/transformations/normalizeInts.ml | ocaml | Balance the subexpressions of commutative
operation expression trees, then atomize integer expressions.
*** transform operation expressions into balanced expression trees ***
extract the expression
update the exp
plus is commutative
rebuild the statement
Question:
is it safe to balance every single expression, not just rhs of assign and local?
*** transform expressions into atomic expressions ***
convert an expression into a variable whose value is
the evaluation result of an atomic expression
the expression is an atomic op, so we can replace it with a precomputation.
convert the expression into an atomic expression, then an immediate
make all the arguments into immediates
return expression that uses immediates and the precompute statement
recurse on statement tree, passing context as true
for other statement kinds, just recurse
We only want to normalize expressions on the rhs of an assignment.
By this point in time, all other complex expressions are removed.
transform the expression into an atomic (if not already) | open CoreSyntax
open InterpHelpers
module DBG = BackendLogging
let silent = ref false;;
let outc = ref None
let dprint_endline = ref DBG.no_printf
exception Error of string
let error s = raise (Error s)
let err msg ex = error (msg ^ " " ^ Printing.exp_to_string ex)
let info str =
if (not !silent)
then (Console.show_message str ANSITerminal.Green "normalizeIntOps")
let dprint_eop exp =
let op, args, _, _ = unpack_eop exp in
let flat_args = CL.map (extract_atomic_opargs op) args |> CL.flatten in
!dprint_endline "[balance_assign_exp] flat args: ";
CL.iter
(fun exp ->
!dprint_endline ("[balance_assign_exp]" ^ Printing.exp_to_string exp))
flat_args;
!dprint_endline "---[balance_assign_exp] flat args--- "
;;
let balance_assign_exp s =
1 . flatten expression based on op -- >
op [ arg1 ; ; ... ; argn ]
- each arg is an expression _ not _ of type op
2 . build a balanced tree from the list of atoms
op [arg1; arg2; ...; argn]
- each arg is an expression _not_ of type op
2. build a balanced tree from the list of atoms *)
let exp =
match s with
| SLocal (_, _, exp) -> exp
| SAssign (_, exp) -> exp
| _ -> error "not an assign or local"
in
let new_exp =
match op_of_exp exp with
| Some Plus ->
dprint_eop exp;
let new_exp = balance_eop_tree exp in
!dprint_endline
("[balance_assign_exp] original expression:\n"
^ Printing.exp_to_string exp);
!dprint_endline
("[balance_assign_exp] balanced expression:\n"
^ Printing.exp_to_string new_exp);
new_exp
| Some _ -> exp
| None -> exp
in
match s with
| SLocal (id, ty, _) -> SLocal (id, ty, new_exp)
| SAssign (id, _) -> SAssign (id, new_exp)
| _ -> error "not an assign or local"
;;
let balance_assign_exps ds =
let v =
object
inherit [_] s_map as super
method! visit_s ctx s =
match s with
| SAssign _ | SLocal _ -> balance_assign_exp s
| _ -> super#visit_s ctx s
end
in
v#visit_decls () ds
;;
let rec to_immediate exp =
match is_immediate exp with
| true -> exp, []
| false ->
(match is_atomic exp with
| true ->
let ty = exp.ety in
let var_id = Id.fresh "to_immediate_tmp" in
let stmt = slocal var_id ty exp in
let exp = { exp with e = EVar (Cid.id var_id) } in
exp, [stmt]
| false ->
let exp, stmts_a = to_atomic exp in
let exp, stmts_b = to_immediate exp in
exp, stmts_a @ stmts_b)
convert an expression into an atomic operation . If
any of the expressions arguments are not immediates ,
convert them to an immediate first .
any of the expressions arguments are not immediates,
convert them to an immediate first. *)
and to_atomic exp =
match is_atomic exp with
| true -> exp, []
| false ->
let args = args_of_exp exp in
let immediate_args, precompute_stmts =
CL.map to_immediate args |> CL.split
in
let precompute_stmts = CL.flatten precompute_stmts in
let exp = replace_args exp immediate_args in
exp, precompute_stmts
;;
let atomize_int_assigns ds =
let v =
object
inherit [_] s_map as super
val mutable precompute_stmts : statement list = []
method precompute_stmts = precompute_stmts
skip !
method! visit_DMemop _ m = DMemop (m)
method! visit_statement ctx stmt =
match stmt.s with
| SAssign _ | SLocal _ ->
precompute_stmts <- [];
let transformed_stmt = super#visit_statement true stmt in
if there are any precompute statements , place them first
fold_stmts (precompute_stmts @ [transformed_stmt])
| _ -> super#visit_statement ctx stmt
method! visit_exp in_assign_or_local exp =
match in_assign_or_local with
| true ->
!dprint_endline
("[transform_precompute_exps.visit_exp] exp: "
^ Printing.exp_to_string exp);
let atomic_exp, new_precompute_stmts = to_atomic exp in
precompute_stmts <- precompute_stmts @ new_precompute_stmts;
!dprint_endline
("[transform_precompute_exps.visit_exp] atomic exp: "
^ Printing.exp_to_string atomic_exp);
atomic_exp
| false -> exp
end
in
v#visit_decls false ds
;;
let do_passes ds =
if (not !silent)
then (DBG.start_mlog __FILE__ outc dprint_endline);
let orig_ds = ds in
let ds = balance_assign_exps ds in
info "assignments transformed to balanced exps";
let balanced_ds = ds in
let ds = atomize_int_assigns ds in
info "expressions atomized";
exit 1 ;
!dprint_endline "original program: ";
!dprint_endline (Printing.decls_to_string orig_ds);
!dprint_endline "program after exp tree balancing: ";
!dprint_endline (Printing.decls_to_string balanced_ds);
!dprint_endline "program after precomputation pass: ";
!dprint_endline (Printing.decls_to_string ds);
!dprint_endline "compute expressions simplified.";
exit 1 ;
ds
;;
|
4230ba5f43a689f314e2be850324f2a942a2b3f097930c44e652af15bab5b502 | racket/plot | gui.rkt | #lang racket/base
;; GUI helpers
(require racket/gui/base racket/class mrlib/snip-canvas)
(provide (all-defined-out))
(define snip-frame%
(class frame%
(define/override (on-traverse-char event)
(define key-code (send event get-key-code))
(case key-code
[(escape) (send this show #f)]
[else (super on-traverse-char event)]))
(super-new)))
(define (make-snip-frame make-snip width height label)
(define frame
(new snip-frame% [label label] [width (+ 20 width)] [height (+ 20 height)]))
(new snip-canvas%
[parent frame]
[make-snip make-snip]
[horiz-margin 5] [vert-margin 5]
[horizontal-inset 5] [vertical-inset 5])
frame)
| null | https://raw.githubusercontent.com/racket/plot/c4126001f2c609e36c3aa12f300e9c673ab1a806/plot-gui-lib/plot/private/gui/gui.rkt | racket | GUI helpers | #lang racket/base
(require racket/gui/base racket/class mrlib/snip-canvas)
(provide (all-defined-out))
(define snip-frame%
(class frame%
(define/override (on-traverse-char event)
(define key-code (send event get-key-code))
(case key-code
[(escape) (send this show #f)]
[else (super on-traverse-char event)]))
(super-new)))
(define (make-snip-frame make-snip width height label)
(define frame
(new snip-frame% [label label] [width (+ 20 width)] [height (+ 20 height)]))
(new snip-canvas%
[parent frame]
[make-snip make-snip]
[horiz-margin 5] [vert-margin 5]
[horizontal-inset 5] [vertical-inset 5])
frame)
|
027ae30a9d8bdc125ca8232b083bcd728fb4e2ac0351f388dceb41b5b3603b93 | shayan-najd/NativeMetaprogramming | T2683.hs | # OPTIONS_GHC -fno - warn - redundant - constraints #
# LANGUAGE ExistentialQuantification , MultiParamTypeClasses ,
FunctionalDependencies , RankNTypes #
FunctionalDependencies, RankNTypes #-}
module Q where
class Transformer t a | t -> a where
transform :: t -> l a -> (forall l'. l' a -> b) -> b
data EL a = forall l. EL (l a)
unEL :: EL a -> (forall l. l a -> b) -> b
unEL _ _ = error "unEL"
transform' :: (Transformer t a) => t -> EL a -> EL a
transform' = error "transform'"
data MultiToggleS ts a = MultiToggleS ts
data MultiToggle = MultiToggle
expand :: HList ts a => MultiToggleS ts a -> MultiToggle
expand (MultiToggleS ts) =
resolve ts
(\x mt ->
let g = transform' x in
mt
)
MultiToggle
class HList c a | c -> a where
resolve :: c -> (forall t. (Transformer t a) => t -> b) -> b
| null | https://raw.githubusercontent.com/shayan-najd/NativeMetaprogramming/24e5f85990642d3f0b0044be4327b8f52fce2ba3/testsuite/tests/typecheck/should_compile/T2683.hs | haskell | # OPTIONS_GHC -fno - warn - redundant - constraints #
# LANGUAGE ExistentialQuantification , MultiParamTypeClasses ,
FunctionalDependencies , RankNTypes #
FunctionalDependencies, RankNTypes #-}
module Q where
class Transformer t a | t -> a where
transform :: t -> l a -> (forall l'. l' a -> b) -> b
data EL a = forall l. EL (l a)
unEL :: EL a -> (forall l. l a -> b) -> b
unEL _ _ = error "unEL"
transform' :: (Transformer t a) => t -> EL a -> EL a
transform' = error "transform'"
data MultiToggleS ts a = MultiToggleS ts
data MultiToggle = MultiToggle
expand :: HList ts a => MultiToggleS ts a -> MultiToggle
expand (MultiToggleS ts) =
resolve ts
(\x mt ->
let g = transform' x in
mt
)
MultiToggle
class HList c a | c -> a where
resolve :: c -> (forall t. (Transformer t a) => t -> b) -> b
| |
6469bc24a278251c073231f23da6bb8695480f01e6953c66bf00f0efc2ad93f3 | hakaru-dev/hakaru | Lazy.hs | # LANGUAGE CPP
, GADTs
, MultiParamTypeClasses
, FunctionalDependencies
, ScopedTypeVariables
, FlexibleContexts
, Rank2Types
, TypeSynonymInstances
, FlexibleInstances
#
, GADTs
, DataKinds
, KindSignatures
, MultiParamTypeClasses
, FunctionalDependencies
, ScopedTypeVariables
, FlexibleContexts
, Rank2Types
, TypeSynonymInstances
, FlexibleInstances
#-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2016.04.28
-- |
-- Module : Language.Hakaru.Evaluation.Lazy
Copyright : Copyright ( c ) 2016 the Hakaru team
-- License : BSD3
-- Maintainer :
-- Stability : experimental
Portability : GHC - only
--
-- Lazy partial evaluation.
--
-- BUG: completely gave up on structure sharing. Need to add that
back in . cf . , - lopstr07lncs.pdf@ for an approach much
-- like my old one.
----------------------------------------------------------------
module Language.Hakaru.Evaluation.Lazy
( evaluate
-- ** Helper functions
, evaluateNaryOp
, evaluatePrimOp
, evaluateArrayOp
-- ** Helpers that should really go away
, Interp(..), reifyPair
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Functor ((<$>))
#endif
import Control.Monad ((<=<))
import Control.Monad.Identity (Identity, runIdentity)
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
import Language.Hakaru.Syntax.IClasses
import Data.Number.Nat
import Data.Number.Natural
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing
import Language.Hakaru.Types.Coercion
import Language.Hakaru.Types.HClasses
import Language.Hakaru.Syntax.TypeOf
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Syntax.DatumCase (DatumEvaluator, MatchState(..), matchTopPattern)
import Language.Hakaru.Syntax.ABT
import Language.Hakaru.Evaluation.Types
import qualified Language.Hakaru.Syntax.Prelude as P
-- BUG : ca n't import this because of cyclic dependency
import qualified Language . Hakaru . Expect as E
-- BUG: can't import this because of cyclic dependency
import qualified Language.Hakaru.Expect as E
-}
#ifdef __TRACE_DISINTEGRATE__
import Language.Hakaru.Pretty.Haskell (pretty)
import Debug.Trace (trace)
#endif
----------------------------------------------------------------
----------------------------------------------------------------
-- TODO: (eventually) accept an argument dictating the evaluation
strategy ( HNF , WHNF , full - beta NF , ... ) . The strategy value should
-- probably be a family of singletons, where the type-level strategy
-- @s@ is also an index on the 'Context' and (the renamed) 'Whnf'.
-- That way we don't need to define a bunch of variant 'Context',
-- 'Statement', and 'Whnf' data types; but rather can use indexing
-- to select out subtypes of the generic versions.
-- | Lazy partial evaluation with some given \"perform\" and
\"evaluateCase\ " functions . , if @p ~ ' Pure@ then the
-- \"perform\" function will never be called.
evaluate
:: forall abt m p
. (ABT Term abt, EvaluationMonad abt m p)
=> MeasureEvaluator abt m
-> TermEvaluator abt m
# INLINE evaluate #
evaluate perform = evaluate_
where
evaluateCase_ :: CaseEvaluator abt m
evaluateCase_ = evaluateCase evaluate_
evaluate_ :: TermEvaluator abt m
evaluate_ e0 =
#ifdef __TRACE_DISINTEGRATE__
trace ("-- evaluate_: " ++ show (pretty e0)) $
#endif
caseVarSyn e0 (evaluateVar perform evaluate_) $ \t ->
case t of
-- Things which are already WHNFs
Literal_ v -> return . Head_ $ WLiteral v
Datum_ d -> return . Head_ $ WDatum d
Empty_ typ -> return . Head_ $ WEmpty typ
Array_ e1 e2 -> return . Head_ $ WArray e1 e2
ArrayLiteral_ es -> return . Head_ $ WArrayLiteral es
Lam_ :$ e1 :* End -> return . Head_ $ WLam e1
Dirac :$ e1 :* End -> return . Head_ $ WDirac e1
MBind :$ e1 :* e2 :* End -> return . Head_ $ WMBind e1 e2
Plate :$ e1 :* e2 :* End -> return . Head_ $ WPlate e1 e2
MeasureOp_ o :$ es -> return . Head_ $ WMeasureOp o es
Superpose_ pes -> return . Head_ $ WSuperpose pes
Reject_ typ -> return . Head_ $ WReject typ
-- We don't bother evaluating these, even though we could...
Integrate :$ e1 :* e2 :* e3 :* End ->
return . Head_ $ WIntegrate e1 e2 e3
Summate _ _ :$ _ :* _ :* _ :* End ->
return . Neutral $ syn t
return . Head _ $ WSummate e1 e2 e3
-- Everything else needs some evaluation
App_ :$ e1 :* e2 :* End -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ P.app e1' e2
Head_ v1 -> evaluateApp v1
where
evaluateApp (WLam f) =
-- call-by-name:
caseBind f $ \x f' -> do
i <- getIndices
push (SLet x (Thunk e2) i) f' >>= evaluate_
evaluateApp _ = error "evaluate{App_}: the impossible happened"
Let_ :$ e1 :* e2 :* End -> do
i <- getIndices
caseBind e2 $ \x e2' ->
push (SLet x (Thunk e1) i) e2' >>= evaluate_
CoerceTo_ c :$ e1 :* End -> coerceTo c <$> evaluate_ e1
UnsafeFrom_ c :$ e1 :* End -> coerceFrom c <$> evaluate_ e1
TODO : will maybe clean up the code to map ' evaluate ' over before calling the evaluateFooOp helpers ?
NaryOp_ o es -> evaluateNaryOp evaluate_ o es
ArrayOp_ o :$ es -> evaluateArrayOp evaluate_ o es
PrimOp_ o :$ es -> evaluatePrimOp evaluate_ o es
Transform_ tt :$ _ -> error $
concat ["TODO: evaluate{", show tt, "}"
,": cannot evaluate transforms; expand them first"]
Case_ e bs -> evaluateCase_ e bs
-- Bucket_ _ _ _ _ -> error "What oh what to do with a Bucket here?"
_ :$ _ -> error "evaluate: the impossible happened"
----------------------------------------------------------------
-- BUG: need to improve the types so they can capture polymorphic data types
-- BUG: this is a **really gross** hack. If we can avoid it, we should!!!
class Interp a a' | a -> a' where
reify :: (ABT Term abt) => Head abt a -> a'
reflect :: (ABT Term abt) => a' -> Head abt a
instance Interp 'HNat Natural where
reflect = WLiteral . LNat
reify (WLiteral (LNat n)) = n
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HInt Integer where
reflect = WLiteral . LInt
reify (WLiteral (LInt i)) = i
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HProb NonNegativeRational where
reflect = WLiteral . LProb
reify (WLiteral (LProb p)) = p
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
reify (WIntegrate _ _ _) = error "TODO: reify{WIntegrate}"
reify ( WSummate _ _ _ ) = error " TODO : reify{WSummate } "
instance Interp 'HReal Rational where
reflect = WLiteral . LReal
reify (WLiteral (LReal r)) = r
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
identifyDatum :: (ABT Term abt) => DatumEvaluator (abt '[]) Identity
identifyDatum = return . (viewWhnfDatum <=< toWhnf)
-- HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
-- This instance does seem to work; albeit it's trivial...
instance Interp HUnit () where
reflect () = WDatum dUnit
reify v = runIdentity $ do
match <- matchTopPattern identifyDatum (fromHead v) pUnit Nil1
case match of
Just (Matched_ _ss Nil1) -> return ()
_ -> error "reify{HUnit}: the impossible happened"
-- HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
-- This instance also seems to work...
instance Interp HBool Bool where
reflect = WDatum . (\b -> if b then dTrue else dFalse)
reify v = runIdentity $ do
matchT <- matchTopPattern identifyDatum (fromHead v) pTrue Nil1
case matchT of
Just (Matched_ _ss Nil1) -> return True
Just GotStuck_ -> error "reify{HBool}: the impossible happened"
Nothing -> do
matchF <- matchTopPattern identifyDatum (fromHead v) pFalse Nil1
case matchF of
Just (Matched_ _ss Nil1) -> return False
_ -> error "reify{HBool}: the impossible happened"
-- TODO: can't we just use 'viewHeadDatum' and match on that?
reifyPair
:: (ABT Term abt) => Head abt (HPair a b) -> (abt '[] a, abt '[] b)
reifyPair v =
let impossible = error "reifyPair: the impossible happened"
e0 = fromHead v
n = nextFree e0
(a,b) = sUnPair $ typeOf e0
x = Variable Text.empty n a
y = Variable Text.empty (1 + n) b
in runIdentity $ do
match <- matchTopPattern identifyDatum e0 (pPair PVar PVar) (Cons1 x (Cons1 y Nil1))
case match of
Just (Matched_ ss Nil1) ->
case ss [] of
[Assoc x' e1, Assoc y' e2] ->
maybe impossible id $ do
Refl <- varEq x x'
Refl <- varEq y y'
Just $ return (e1, e2)
_ -> impossible
_ -> impossible
instance Interp ( HPair a b ) ( abt ' [ ] a , abt ' [ ] b ) where
reflect ( a , b ) = P.pair a b
reify = reifyPair
instance Interp ( HEither a b ) ( Either ( abt ' [ ] a ) ( abt ' [ ] b ) ) where
reflect ( Left a ) = P.left a
reflect ( Right b ) = P.right b
reify =
instance Interp ( HMaybe a ) ( Maybe ( abt ' [ ] a ) ) where
reflect Nothing = P.nothing
reflect ( Just a ) = P.just a
reify =
data ListHead ( a : : )
= NilHead
| ( abt ' [ ] a ) ( abt ' [ ] ( HList a ) ) -- modulo scoping of @abt@
instance Interp ( HList a ) ( ListHead a ) where
reflect [ ] = P.nil
reflect ( x : xs ) = P.cons x xs
reify =
instance Interp (HPair a b) (abt '[] a, abt '[] b) where
reflect (a,b) = P.pair a b
reify = reifyPair
instance Interp (HEither a b) (Either (abt '[] a) (abt '[] b)) where
reflect (Left a) = P.left a
reflect (Right b) = P.right b
reify =
instance Interp (HMaybe a) (Maybe (abt '[] a)) where
reflect Nothing = P.nothing
reflect (Just a) = P.just a
reify =
data ListHead (a :: Hakaru)
= NilHead
| ConsHead (abt '[] a) (abt '[] (HList a)) -- modulo scoping of @abt@
instance Interp (HList a) (ListHead a) where
reflect [] = P.nil
reflect (x:xs) = P.cons x xs
reify =
-}
impl, diff, nand, nor :: Bool -> Bool -> Bool
impl x y = not x || y
diff x y = x && not y
nand x y = not (x && y)
nor x y = not (x || y)
BUG : no Floating instance for LogFloat ( nor NonNegativeRational ) , so ca n't actually use this ...
-- natRoot :: (Floating a) => a -> Nat -> a
natRoot x y = x * * recip ( ( fromNat y ) )
----------------------------------------------------------------
evaluateNaryOp
:: (ABT Term abt, EvaluationMonad abt m p)
=> TermEvaluator abt m
-> NaryOp a
-> Seq (abt '[] a)
-> m (Whnf abt a)
evaluateNaryOp evaluate_ = \o es -> mainLoop o (evalOp o) Seq.empty es
where
-- TODO: there's got to be a more efficient way to do this...
mainLoop o op ws es =
case Seq.viewl es of
Seq.EmptyL -> return $
case Seq.viewl ws of
Seq.EmptyL -> identityElement o -- Avoid empty naryOps
w Seq.:< ws'
| Seq.null ws' -> w -- Avoid singleton naryOps
| otherwise ->
Neutral . syn . NaryOp_ o $ fmap fromWhnf ws
e Seq.:< es' -> do
w <- evaluate_ e
case matchNaryOp o w of
Nothing -> mainLoop o op (snocLoop op ws w) es'
Just es2 -> mainLoop o op ws (es2 Seq.>< es')
snocLoop
:: (ABT syn abt)
=> (Head abt a -> Head abt a -> Head abt a)
-> Seq (Whnf abt a)
-> Whnf abt a
-> Seq (Whnf abt a)
snocLoop op ws w1 =
-- TODO: immediately return @ws@ if @w1 == identityElement o@ (whenever identityElement is defined)
case Seq.viewr ws of
Seq.EmptyR -> Seq.singleton w1
ws' Seq.:> w2 ->
case (w1,w2) of
(Head_ v1, Head_ v2) -> snocLoop op ws' (Head_ (op v1 v2))
_ -> ws Seq.|> w1
matchNaryOp
:: (ABT Term abt)
=> NaryOp a
-> Whnf abt a
-> Maybe (Seq (abt '[] a))
matchNaryOp o w =
case w of
Head_ _ -> Nothing
Neutral e ->
caseVarSyn e (const Nothing) $ \t ->
case t of
NaryOp_ o' es | o' == o -> Just es
_ -> Nothing
TODO : move this off to Prelude.hs or somewhere ...
identityElement :: (ABT Term abt) => NaryOp a -> Whnf abt a
identityElement o =
case o of
And -> Head_ (WDatum dTrue)
Or -> Head_ (WDatum dFalse)
Xor -> Head_ (WDatum dFalse)
Iff -> Head_ (WDatum dTrue)
Min _ -> Neutral (syn (NaryOp_ o Seq.empty)) -- no identity in general (but we could do it by cases...)
Max _ -> Neutral (syn (NaryOp_ o Seq.empty)) -- no identity in general (but we could do it by cases...)
TODO : figure out how to reuse ' P.zero _ ' and ' P.one _ ' here ; requires converting @(syn . Literal_)@ into @(Head _ . WLiteral)@. Maybe we should change ' P.zero _ ' and ' P.one _ ' so they just return the ' Literal ' itself rather than the @abt@ ?
Sum HSemiring_Nat -> Head_ (WLiteral (LNat 0))
Sum HSemiring_Int -> Head_ (WLiteral (LInt 0))
Sum HSemiring_Prob -> Head_ (WLiteral (LProb 0))
Sum HSemiring_Real -> Head_ (WLiteral (LReal 0))
Prod HSemiring_Nat -> Head_ (WLiteral (LNat 1))
Prod HSemiring_Int -> Head_ (WLiteral (LInt 1))
Prod HSemiring_Prob -> Head_ (WLiteral (LProb 1))
Prod HSemiring_Real -> Head_ (WLiteral (LReal 1))
| The evaluation interpretation of each NaryOp
evalOp
:: (ABT Term abt)
=> NaryOp a
-> Head abt a
-> Head abt a
-> Head abt a
-- TODO: something more efficient\/direct if we can...
evalOp And = \v1 v2 -> reflect (reify v1 && reify v2)
evalOp Or = \v1 v2 -> reflect (reify v1 || reify v2)
evalOp Xor = \v1 v2 -> reflect (reify v1 /= reify v2)
evalOp Iff = \v1 v2 -> reflect (reify v1 == reify v2)
evalOp (Min _) = error "TODO: evalOp{Min}"
evalOp (Max _) = error "TODO: evalOp{Max}"
evalOp ( ) = \v1 v2 - > reflect ( reify v1 ` min ` reify v2 )
evalOp ( ) = \v1 v2 - > reflect ( reify v1 ` max ` reify v2 )
evalOp ( Sum _ ) = \v1 v2 - > reflect ( reify v1 + reify v2 )
evalOp ( Prod _ ) = \v1 v2 - > reflect ( reify v1 * reify v2 )
evalOp (Min _) = \v1 v2 -> reflect (reify v1 `min` reify v2)
evalOp (Max _) = \v1 v2 -> reflect (reify v1 `max` reify v2)
evalOp (Sum _) = \v1 v2 -> reflect (reify v1 + reify v2)
evalOp (Prod _) = \v1 v2 -> reflect (reify v1 * reify v2)
-}
-- HACK: this is just to have something to test. We really should reduce\/remove all this boilerplate...
evalOp (Sum theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalSum theSemi v1 v2
evalOp (Prod theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalProd theSemi v1 v2
TODO : even if only one of the arguments is a literal , if that literal is zero\/one , then we can still partially evaluate it . ( As is done in the old finally - tagless code )
evalSum, evalProd :: HSemiring a -> Literal a -> Literal a -> Literal a
evalSum HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 + n2)
evalSum HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 + i2)
evalSum HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 + p2)
evalSum HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 + r2)
evalProd HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 * n2)
evalProd HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 * i2)
evalProd HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 * p2)
evalProd HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 * r2)
----------------------------------------------------------------
evaluateArrayOp
:: ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> ArrayOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluateArrayOp evaluate_ = go
where
go o@(Index _) = \(e1 :* e2 :* End) -> do
let -- idxCode :: abt '[] ('HArray a) -> abt '[] 'HNat -> abt '[] a
idxCode a i = Neutral $ syn ( ArrayOp _ o : $ a :* i :* End )
w1 <- evaluate_ e1
case w1 of
Neutral e1' ->
return . Neutral $ syn (ArrayOp_ o :$ e1' :* e2 :* End)
Head_ (WArray _ b) ->
caseBind b $ \x body -> extSubst x e2 body >>= evaluate_
Head_ (WEmpty _) ->
error "TODO: evaluateArrayOp{Index}{Head_ (WEmpty _)}"
Head_ (WArrayLiteral arr) ->
do w2 <- evaluate_ e2
case w2 of
Head_ (WLiteral (LNat n)) -> return . Neutral $
arr !! fromInteger (fromNatural n)
_ -> return . Neutral $
syn (ArrayOp_ o :$ fromWhnf w1 :* fromWhnf w2 :* End)
_ -> error "evaluateArrayOp{Index}: uknown whnf of array type"
go o@(Size _) = \(e1 :* End) -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ syn (ArrayOp_ o :$ e1' :* End)
Head_ (WEmpty _) -> return . Head_ $ WLiteral (LNat 0)
Head_ (WArray e2 _) -> evaluate_ e2
Head_ (WArrayLiteral es) -> return . Head_ . WLiteral .
primCoerceFrom (Signed HRing_Int) .
LInt . toInteger $ length es
Head_ _ -> error "Got something odd when evaluating an array"
go (Reduce _) = \(_ :* _ :* _ :* End) ->
error "TODO: evaluateArrayOp{Reduce}"
----------------------------------------------------------------
TODO : maybe we should adjust ' ' to have a third option for
-- closed terms of the atomic\/literal types, so that we can avoid
-- reducing them just yet. Of course, we'll have to reduce them
-- eventually, but we can leave that for the runtime evaluation or
Maple or whatever . These are called \"annotated\ " terms in
-- et al 2008 (though they allow anything to be annotated, not just
-- closed terms of atomic type).
evaluatePrimOp
:: forall abt m p typs args a
. ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> PrimOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluatePrimOp evaluate_ = go
where
-- HACK: we don't have any way of saying these functions haven't reduced even though it's not actually a neutral term.
neu1 :: forall b c
. (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
neu1 f e = (Neutral . f . fromWhnf) <$> evaluate_ e
neu2 :: forall b c d
. (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
neu2 f e1 e2 = do e1' <- fromWhnf <$> evaluate_ e1
e2' <- fromWhnf <$> evaluate_ e2
return . Neutral $ f e1' e2'
rr1 :: forall b b' c c'
. (Interp b b', Interp c c')
=> (b' -> c')
-> (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
rr1 f' f e = do
w <- evaluate_ e
return $
case w of
Neutral e' -> Neutral $ f e'
Head_ v -> Head_ . reflect $ f' (reify v)
rr2 :: forall b b' c c' d d'
. (Interp b b', Interp c c', Interp d d')
=> (b' -> c' -> d')
-> (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
rr2 f' f e1 e2 = do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
return $
case w1 of
Neutral e1' -> Neutral $ f e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' -> Neutral $ f (fromWhnf w1) e2'
Head_ v2 -> Head_ . reflect $ f' (reify v1) (reify v2)
primOp2_
:: forall b c d
. PrimOp '[ b, c ] d -> abt '[] b -> abt '[] c -> abt '[] d
primOp2_ o e1 e2 = syn (PrimOp_ o :$ e1 :* e2 :* End)
-- TODO: something more efficient\/direct if we can...
go Not (e1 :* End) = rr1 not P.not e1
go Impl (e1 :* e2 :* End) = rr2 impl (primOp2_ Impl) e1 e2
go Diff (e1 :* e2 :* End) = rr2 diff (primOp2_ Diff) e1 e2
go Nand (e1 :* e2 :* End) = rr2 nand P.nand e1 e2
go Nor (e1 :* e2 :* End) = rr2 nor P.nor e1 e2
HACK : we do n't have a way of saying that ' Pi ' ( or ' Infinity ' , ... ) is in fact a head ; so we 're forced to call it neutral which is a lie . We should add constructor(s ) to ' Head ' to cover these magic constants ; probably grouped together under a single constructor called something like @Constant@. Maybe should group them like that in the AST as well ?
go Pi End = return $ Neutral P.pi
-- We treat trig functions as strict, thus forcing their
-- arguments; however, to avoid fuzz issues we don't actually
-- evaluate the trig functions.
--
-- HACK: we might should have some other way to make these
' ' rather than calling them neutral terms ; since they
-- aren't, in fact, neutral!
go Sin (e1 :* End) = neu1 P.sin e1
go Cos (e1 :* End) = neu1 P.cos e1
go Tan (e1 :* End) = neu1 P.tan e1
go Asin (e1 :* End) = neu1 P.asin e1
go Acos (e1 :* End) = neu1 P.acos e1
go Atan (e1 :* End) = neu1 P.atan e1
go Sinh (e1 :* End) = neu1 P.sinh e1
go Cosh (e1 :* End) = neu1 P.cosh e1
go Tanh (e1 :* End) = neu1 P.tanh e1
go Asinh (e1 :* End) = neu1 P.asinh e1
go Acosh (e1 :* End) = neu1 P.acosh e1
go Atanh (e1 :* End) = neu1 P.atanh e1
go Floor (e1 :* End) = neu1 P.floor e1
TODO : deal with how we have better types for these three ops than does ...
go RealPow ( e1 :* e2 :* End ) = rr2 ( * * ) ( P. * * ) e1 e2
go RealPow (e1 :* e2 :* End) = neu2 (P.**) e1 e2
go Choose (e1 :* e2 :* End) = neu2 (P.choose) e1 e2
-- HACK: these aren't actually neutral!
BUG : we should try to cancel out @(exp . log)@ and @(log . exp)@
go Exp (e1 :* End) = neu1 P.exp e1
go Log (e1 :* End) = neu1 P.log e1
-- HACK: these aren't actually neutral!
go (Infinity h) End =
case h of
HIntegrable_Nat -> return . Neutral $ P.primOp0_ (Infinity h)
HIntegrable_Prob -> return $ Neutral P.infinity
go GammaFunc (e1 :* End) = neu1 P.gammaFunc e1
go BetaFunc (e1 :* e2 :* End) = neu2 P.betaFunc e1 e2
go (Equal theEq) (e1 :* e2 :* End) = rrEqual theEq e1 e2
go (Less theOrd) (e1 :* e2 :* End) = rrLess theOrd e1 e2
go (NatPow theSemi) (e1 :* e2 :* End) =
case theSemi of
HSemiring_Nat -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Int -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Prob -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Real -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
go (Negate theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 negate P.negate e1
HRing_Real -> rr1 negate P.negate e1
go (Abs theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 (unsafeNatural . abs) P.abs_ e1
HRing_Real -> rr1 (unsafeNonNegativeRational . abs) P.abs_ e1
go (Signum theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 signum P.signum e1
HRing_Real -> rr1 signum P.signum e1
go (Recip theFractional) (e1 :* End) =
case theFractional of
HFractional_Prob -> rr1 recip P.recip e1
HFractional_Real -> rr1 recip P.recip e1
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> neu2 (flip P.thRootOf) e1 e2
go ( ) ( e1 :* e2 :* End ) =
case of
HRadical_Prob - > rr2 natRoot ( flip P.thRootOf ) e1 e2
go ( ) ( e1 :* End ) =
case theContinuous of
HContinuous_Prob - > rr1 erf P.erf e1
HContinuous_Real - > rr1 erf P.erf e1
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> rr2 natRoot (flip P.thRootOf) e1 e2
go (Erf theContinuous) (e1 :* End) =
case theContinuous of
HContinuous_Prob -> rr1 erf P.erf e1
HContinuous_Real -> rr1 erf P.erf e1
-}
go op _ = error $ "TODO: evaluatePrimOp{" ++ show op ++ "}"
    -- | Partially evaluate an equality test at the type witnessed by the
    -- 'HEq' dictionary.
    --
    -- Primitive types are decided by reifying both arguments to Haskell
    -- values; pairs are compared component-wise and the results conjoined.
    -- If either argument evaluates to a neutral term, a residual 'Equal'
    -- primop is rebuilt instead.
    rrEqual
        :: forall b. HEq b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
    rrEqual theEq =
        case theEq of
        HEq_Nat -> rr2 (==) (P.==)
        HEq_Int -> rr2 (==) (P.==)
        HEq_Prob -> rr2 (==) (P.==)
        HEq_Real -> rr2 (==) (P.==)
        HEq_Array _ -> error "TODO: rrEqual{HEq_Array}"
        HEq_Bool -> rr2 (==) (P.==)
        HEq_Unit -> rr2 (==) (P.==)
        HEq_Pair aEq bEq ->
            \e1 e2 -> do
                w1 <- evaluate_ e1
                w2 <- evaluate_ e2
                case w1 of
                    -- Left argument is stuck: rebuild the comparison.
                    Neutral e1' ->
                        return . Neutral
                            $ P.primOp2_ (Equal theEq) e1' (fromWhnf w2)
                    Head_ v1 ->
                        case w2 of
                            -- Right argument is stuck: rebuild the comparison.
                            Neutral e2' ->
                                return . Neutral
                                    $ P.primOp2_ (Equal theEq) (fromHead v1) e2'
                            -- Both sides are heads: compare the pair
                            -- components recursively and conjoin the results.
                            Head_ v2 -> do
                                let (v1a, v1b) = reifyPair v1
                                let (v2a, v2b) = reifyPair v2
                                wa <- rrEqual aEq v1a v2a
                                wb <- rrEqual bEq v1b v2b
                                return $
                                    case wa of
                                    Neutral ea ->
                                        case wb of
                                        Neutral eb -> Neutral (ea P.&& eb)
                                        -- A literal True is the identity of
                                        -- conjunction; a literal False
                                        -- annihilates it.
                                        Head_ vb
                                            | reify vb -> wa
                                            | otherwise -> Head_ $ WDatum dFalse
                                    Head_ va
                                        | reify va -> wb
                                        | otherwise -> Head_ $ WDatum dFalse
        HEq_Either _ _ -> error "TODO: rrEqual{HEq_Either}"
rrLess
:: forall b. HOrd b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
rrLess theOrd =
case theOrd of
HOrd_Nat -> rr2 (<) (P.<)
HOrd_Int -> rr2 (<) (P.<)
HOrd_Prob -> rr2 (<) (P.<)
HOrd_Real -> rr2 (<) (P.<)
HOrd_Array _ -> error "TODO: rrLess{HOrd_Array}"
HOrd_Bool -> rr2 (<) (P.<)
HOrd_Unit -> rr2 (<) (P.<)
HOrd_Pair _ _ ->
\e1 e2 -> do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
case w1 of
Neutral e1' ->
return . Neutral
$ P.primOp2_ (Less theOrd) e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' ->
return . Neutral
$ P.primOp2_ (Less theOrd) (fromHead v1) e2'
Head_ v2 -> do
let (_, _) = reifyPair v1
let (_, _) = reifyPair v2
error "TODO: rrLess{HOrd_Pair}"
BUG : The obvious recursion wo n't work because we need to know when the first components are equal before recursing ( to implement lexicographic ordering ) . We really need a ternary comparison operator like ' compare ' .
HOrd_Either _ _ -> error "TODO: rrLess{HOrd_Either}"
----------------------------------------------------------------
----------------------------------------------------------- fin.
| null | https://raw.githubusercontent.com/hakaru-dev/hakaru/94157c89ea136c3b654a85cce51f19351245a490/haskell/Language/Hakaru/Evaluation/Lazy.hs | haskell | # OPTIONS_GHC -Wall -fwarn-tabs #
--------------------------------------------------------------
2016.04.28
|
Module : Language.Hakaru.Evaluation.Lazy
License : BSD3
Maintainer :
Stability : experimental
Lazy partial evaluation.
BUG: completely gave up on structure sharing. Need to add that
like my old one.
--------------------------------------------------------------
** Helper functions
** Helpers that should really go away
BUG : ca n't import this because of cyclic dependency
BUG: can't import this because of cyclic dependency
--------------------------------------------------------------
--------------------------------------------------------------
TODO: (eventually) accept an argument dictating the evaluation
probably be a family of singletons, where the type-level strategy
@s@ is also an index on the 'Context' and (the renamed) 'Whnf'.
That way we don't need to define a bunch of variant 'Context',
'Statement', and 'Whnf' data types; but rather can use indexing
to select out subtypes of the generic versions.
| Lazy partial evaluation with some given \"perform\" and
\"perform\" function will never be called.
Things which are already WHNFs
We don't bother evaluating these, even though we could...
Everything else needs some evaluation
call-by-name:
Bucket_ _ _ _ _ -> error "What oh what to do with a Bucket here?"
--------------------------------------------------------------
BUG: need to improve the types so they can capture polymorphic data types
BUG: this is a **really gross** hack. If we can avoid it, we should!!!
HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
This instance does seem to work; albeit it's trivial...
HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
This instance also seems to work...
TODO: can't we just use 'viewHeadDatum' and match on that?
modulo scoping of @abt@
modulo scoping of @abt@
natRoot :: (Floating a) => a -> Nat -> a
--------------------------------------------------------------
TODO: there's got to be a more efficient way to do this...
Avoid empty naryOps
Avoid singleton naryOps
TODO: immediately return @ws@ if @w1 == identityElement o@ (whenever identityElement is defined)
no identity in general (but we could do it by cases...)
no identity in general (but we could do it by cases...)
TODO: something more efficient\/direct if we can...
HACK: this is just to have something to test. We really should reduce\/remove all this boilerplate...
--------------------------------------------------------------
idxCode :: abt '[] ('HArray a) -> abt '[] 'HNat -> abt '[] a
--------------------------------------------------------------
closed terms of the atomic\/literal types, so that we can avoid
reducing them just yet. Of course, we'll have to reduce them
eventually, but we can leave that for the runtime evaluation or
et al 2008 (though they allow anything to be annotated, not just
closed terms of atomic type).
HACK: we don't have any way of saying these functions haven't reduced even though it's not actually a neutral term.
TODO: something more efficient\/direct if we can...
We treat trig functions as strict, thus forcing their
arguments; however, to avoid fuzz issues we don't actually
evaluate the trig functions.
HACK: we might should have some other way to make these
aren't, in fact, neutral!
HACK: these aren't actually neutral!
HACK: these aren't actually neutral!
--------------------------------------------------------------
--------------------------------------------------------- fin. | # LANGUAGE CPP
, GADTs
, MultiParamTypeClasses
, FunctionalDependencies
, ScopedTypeVariables
, FlexibleContexts
, Rank2Types
, TypeSynonymInstances
, FlexibleInstances
#
, GADTs
, DataKinds
, KindSignatures
, MultiParamTypeClasses
, FunctionalDependencies
, ScopedTypeVariables
, FlexibleContexts
, Rank2Types
, TypeSynonymInstances
, FlexibleInstances
#-}
Copyright : Copyright ( c ) 2016 the Hakaru team
Portability : GHC - only
back in . cf . , - lopstr07lncs.pdf@ for an approach much
module Language.Hakaru.Evaluation.Lazy
( evaluate
, evaluateNaryOp
, evaluatePrimOp
, evaluateArrayOp
, Interp(..), reifyPair
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Functor ((<$>))
#endif
import Control.Monad ((<=<))
import Control.Monad.Identity (Identity, runIdentity)
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
import Language.Hakaru.Syntax.IClasses
import Data.Number.Nat
import Data.Number.Natural
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing
import Language.Hakaru.Types.Coercion
import Language.Hakaru.Types.HClasses
import Language.Hakaru.Syntax.TypeOf
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Syntax.DatumCase (DatumEvaluator, MatchState(..), matchTopPattern)
import Language.Hakaru.Syntax.ABT
import Language.Hakaru.Evaluation.Types
import qualified Language.Hakaru.Syntax.Prelude as P
import qualified Language . Hakaru . Expect as E
import qualified Language.Hakaru.Expect as E
-}
#ifdef __TRACE_DISINTEGRATE__
import Language.Hakaru.Pretty.Haskell (pretty)
import Debug.Trace (trace)
#endif
strategy ( HNF , WHNF , full - beta NF , ... ) . The strategy value should
\"evaluateCase\ " functions . , if @p ~ ' Pure@ then the
evaluate
:: forall abt m p
. (ABT Term abt, EvaluationMonad abt m p)
=> MeasureEvaluator abt m
-> TermEvaluator abt m
# INLINE evaluate #
evaluate perform = evaluate_
where
evaluateCase_ :: CaseEvaluator abt m
evaluateCase_ = evaluateCase evaluate_
evaluate_ :: TermEvaluator abt m
evaluate_ e0 =
#ifdef __TRACE_DISINTEGRATE__
trace ("-- evaluate_: " ++ show (pretty e0)) $
#endif
caseVarSyn e0 (evaluateVar perform evaluate_) $ \t ->
case t of
Literal_ v -> return . Head_ $ WLiteral v
Datum_ d -> return . Head_ $ WDatum d
Empty_ typ -> return . Head_ $ WEmpty typ
Array_ e1 e2 -> return . Head_ $ WArray e1 e2
ArrayLiteral_ es -> return . Head_ $ WArrayLiteral es
Lam_ :$ e1 :* End -> return . Head_ $ WLam e1
Dirac :$ e1 :* End -> return . Head_ $ WDirac e1
MBind :$ e1 :* e2 :* End -> return . Head_ $ WMBind e1 e2
Plate :$ e1 :* e2 :* End -> return . Head_ $ WPlate e1 e2
MeasureOp_ o :$ es -> return . Head_ $ WMeasureOp o es
Superpose_ pes -> return . Head_ $ WSuperpose pes
Reject_ typ -> return . Head_ $ WReject typ
Integrate :$ e1 :* e2 :* e3 :* End ->
return . Head_ $ WIntegrate e1 e2 e3
Summate _ _ :$ _ :* _ :* _ :* End ->
return . Neutral $ syn t
return . Head _ $ WSummate e1 e2 e3
App_ :$ e1 :* e2 :* End -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ P.app e1' e2
Head_ v1 -> evaluateApp v1
where
evaluateApp (WLam f) =
caseBind f $ \x f' -> do
i <- getIndices
push (SLet x (Thunk e2) i) f' >>= evaluate_
evaluateApp _ = error "evaluate{App_}: the impossible happened"
Let_ :$ e1 :* e2 :* End -> do
i <- getIndices
caseBind e2 $ \x e2' ->
push (SLet x (Thunk e1) i) e2' >>= evaluate_
CoerceTo_ c :$ e1 :* End -> coerceTo c <$> evaluate_ e1
UnsafeFrom_ c :$ e1 :* End -> coerceFrom c <$> evaluate_ e1
TODO : will maybe clean up the code to map ' evaluate ' over before calling the evaluateFooOp helpers ?
NaryOp_ o es -> evaluateNaryOp evaluate_ o es
ArrayOp_ o :$ es -> evaluateArrayOp evaluate_ o es
PrimOp_ o :$ es -> evaluatePrimOp evaluate_ o es
Transform_ tt :$ _ -> error $
concat ["TODO: evaluate{", show tt, "}"
,": cannot evaluate transforms; expand them first"]
Case_ e bs -> evaluateCase_ e bs
_ :$ _ -> error "evaluate: the impossible happened"
class Interp a a' | a -> a' where
reify :: (ABT Term abt) => Head abt a -> a'
reflect :: (ABT Term abt) => a' -> Head abt a
instance Interp 'HNat Natural where
reflect = WLiteral . LNat
reify (WLiteral (LNat n)) = n
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HInt Integer where
reflect = WLiteral . LInt
reify (WLiteral (LInt i)) = i
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HProb NonNegativeRational where
reflect = WLiteral . LProb
reify (WLiteral (LProb p)) = p
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
reify (WIntegrate _ _ _) = error "TODO: reify{WIntegrate}"
reify ( WSummate _ _ _ ) = error " TODO : reify{WSummate } "
instance Interp 'HReal Rational where
reflect = WLiteral . LReal
reify (WLiteral (LReal r)) = r
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
identifyDatum :: (ABT Term abt) => DatumEvaluator (abt '[]) Identity
identifyDatum = return . (viewWhnfDatum <=< toWhnf)
instance Interp HUnit () where
reflect () = WDatum dUnit
reify v = runIdentity $ do
match <- matchTopPattern identifyDatum (fromHead v) pUnit Nil1
case match of
Just (Matched_ _ss Nil1) -> return ()
_ -> error "reify{HUnit}: the impossible happened"
instance Interp HBool Bool where
reflect = WDatum . (\b -> if b then dTrue else dFalse)
reify v = runIdentity $ do
matchT <- matchTopPattern identifyDatum (fromHead v) pTrue Nil1
case matchT of
Just (Matched_ _ss Nil1) -> return True
Just GotStuck_ -> error "reify{HBool}: the impossible happened"
Nothing -> do
matchF <- matchTopPattern identifyDatum (fromHead v) pFalse Nil1
case matchF of
Just (Matched_ _ss Nil1) -> return False
_ -> error "reify{HBool}: the impossible happened"
reifyPair
:: (ABT Term abt) => Head abt (HPair a b) -> (abt '[] a, abt '[] b)
reifyPair v =
let impossible = error "reifyPair: the impossible happened"
e0 = fromHead v
n = nextFree e0
(a,b) = sUnPair $ typeOf e0
x = Variable Text.empty n a
y = Variable Text.empty (1 + n) b
in runIdentity $ do
match <- matchTopPattern identifyDatum e0 (pPair PVar PVar) (Cons1 x (Cons1 y Nil1))
case match of
Just (Matched_ ss Nil1) ->
case ss [] of
[Assoc x' e1, Assoc y' e2] ->
maybe impossible id $ do
Refl <- varEq x x'
Refl <- varEq y y'
Just $ return (e1, e2)
_ -> impossible
_ -> impossible
instance Interp ( HPair a b ) ( abt ' [ ] a , abt ' [ ] b ) where
reflect ( a , b ) = P.pair a b
reify = reifyPair
instance Interp ( HEither a b ) ( Either ( abt ' [ ] a ) ( abt ' [ ] b ) ) where
reflect ( Left a ) = P.left a
reflect ( Right b ) = P.right b
reify =
instance Interp ( HMaybe a ) ( Maybe ( abt ' [ ] a ) ) where
reflect Nothing = P.nothing
reflect ( Just a ) = P.just a
reify =
data ListHead ( a : : )
= NilHead
instance Interp ( HList a ) ( ListHead a ) where
reflect [ ] = P.nil
reflect ( x : xs ) = P.cons x xs
reify =
instance Interp (HPair a b) (abt '[] a, abt '[] b) where
reflect (a,b) = P.pair a b
reify = reifyPair
instance Interp (HEither a b) (Either (abt '[] a) (abt '[] b)) where
reflect (Left a) = P.left a
reflect (Right b) = P.right b
reify =
instance Interp (HMaybe a) (Maybe (abt '[] a)) where
reflect Nothing = P.nothing
reflect (Just a) = P.just a
reify =
data ListHead (a :: Hakaru)
= NilHead
instance Interp (HList a) (ListHead a) where
reflect [] = P.nil
reflect (x:xs) = P.cons x xs
reify =
-}
impl, diff, nand, nor :: Bool -> Bool -> Bool
impl x y = not x || y
diff x y = x && not y
nand x y = not (x && y)
nor x y = not (x || y)
BUG : no Floating instance for LogFloat ( nor NonNegativeRational ) , so ca n't actually use this ...
natRoot x y = x * * recip ( ( fromNat y ) )
evaluateNaryOp
:: (ABT Term abt, EvaluationMonad abt m p)
=> TermEvaluator abt m
-> NaryOp a
-> Seq (abt '[] a)
-> m (Whnf abt a)
evaluateNaryOp evaluate_ = \o es -> mainLoop o (evalOp o) Seq.empty es
where
mainLoop o op ws es =
case Seq.viewl es of
Seq.EmptyL -> return $
case Seq.viewl ws of
w Seq.:< ws'
| otherwise ->
Neutral . syn . NaryOp_ o $ fmap fromWhnf ws
e Seq.:< es' -> do
w <- evaluate_ e
case matchNaryOp o w of
Nothing -> mainLoop o op (snocLoop op ws w) es'
Just es2 -> mainLoop o op ws (es2 Seq.>< es')
snocLoop
:: (ABT syn abt)
=> (Head abt a -> Head abt a -> Head abt a)
-> Seq (Whnf abt a)
-> Whnf abt a
-> Seq (Whnf abt a)
snocLoop op ws w1 =
case Seq.viewr ws of
Seq.EmptyR -> Seq.singleton w1
ws' Seq.:> w2 ->
case (w1,w2) of
(Head_ v1, Head_ v2) -> snocLoop op ws' (Head_ (op v1 v2))
_ -> ws Seq.|> w1
matchNaryOp
:: (ABT Term abt)
=> NaryOp a
-> Whnf abt a
-> Maybe (Seq (abt '[] a))
matchNaryOp o w =
case w of
Head_ _ -> Nothing
Neutral e ->
caseVarSyn e (const Nothing) $ \t ->
case t of
NaryOp_ o' es | o' == o -> Just es
_ -> Nothing
TODO : move this off to Prelude.hs or somewhere ...
identityElement :: (ABT Term abt) => NaryOp a -> Whnf abt a
identityElement o =
case o of
And -> Head_ (WDatum dTrue)
Or -> Head_ (WDatum dFalse)
Xor -> Head_ (WDatum dFalse)
Iff -> Head_ (WDatum dTrue)
TODO : figure out how to reuse ' P.zero _ ' and ' P.one _ ' here ; requires converting @(syn . Literal_)@ into @(Head _ . WLiteral)@. Maybe we should change ' P.zero _ ' and ' P.one _ ' so they just return the ' Literal ' itself rather than the @abt@ ?
Sum HSemiring_Nat -> Head_ (WLiteral (LNat 0))
Sum HSemiring_Int -> Head_ (WLiteral (LInt 0))
Sum HSemiring_Prob -> Head_ (WLiteral (LProb 0))
Sum HSemiring_Real -> Head_ (WLiteral (LReal 0))
Prod HSemiring_Nat -> Head_ (WLiteral (LNat 1))
Prod HSemiring_Int -> Head_ (WLiteral (LInt 1))
Prod HSemiring_Prob -> Head_ (WLiteral (LProb 1))
Prod HSemiring_Real -> Head_ (WLiteral (LReal 1))
| The evaluation interpretation of each NaryOp
evalOp
:: (ABT Term abt)
=> NaryOp a
-> Head abt a
-> Head abt a
-> Head abt a
evalOp And = \v1 v2 -> reflect (reify v1 && reify v2)
evalOp Or = \v1 v2 -> reflect (reify v1 || reify v2)
evalOp Xor = \v1 v2 -> reflect (reify v1 /= reify v2)
evalOp Iff = \v1 v2 -> reflect (reify v1 == reify v2)
evalOp (Min _) = error "TODO: evalOp{Min}"
evalOp (Max _) = error "TODO: evalOp{Max}"
evalOp ( ) = \v1 v2 - > reflect ( reify v1 ` min ` reify v2 )
evalOp ( ) = \v1 v2 - > reflect ( reify v1 ` max ` reify v2 )
evalOp ( Sum _ ) = \v1 v2 - > reflect ( reify v1 + reify v2 )
evalOp ( Prod _ ) = \v1 v2 - > reflect ( reify v1 * reify v2 )
evalOp (Min _) = \v1 v2 -> reflect (reify v1 `min` reify v2)
evalOp (Max _) = \v1 v2 -> reflect (reify v1 `max` reify v2)
evalOp (Sum _) = \v1 v2 -> reflect (reify v1 + reify v2)
evalOp (Prod _) = \v1 v2 -> reflect (reify v1 * reify v2)
-}
evalOp (Sum theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalSum theSemi v1 v2
evalOp (Prod theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalProd theSemi v1 v2
TODO : even if only one of the arguments is a literal , if that literal is zero\/one , then we can still partially evaluate it . ( As is done in the old finally - tagless code )
evalSum, evalProd :: HSemiring a -> Literal a -> Literal a -> Literal a
evalSum HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 + n2)
evalSum HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 + i2)
evalSum HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 + p2)
evalSum HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 + r2)
evalProd HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 * n2)
evalProd HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 * i2)
evalProd HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 * p2)
evalProd HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 * r2)
evaluateArrayOp
:: ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> ArrayOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluateArrayOp evaluate_ = go
where
go o@(Index _) = \(e1 :* e2 :* End) -> do
idxCode a i = Neutral $ syn ( ArrayOp _ o : $ a :* i :* End )
w1 <- evaluate_ e1
case w1 of
Neutral e1' ->
return . Neutral $ syn (ArrayOp_ o :$ e1' :* e2 :* End)
Head_ (WArray _ b) ->
caseBind b $ \x body -> extSubst x e2 body >>= evaluate_
Head_ (WEmpty _) ->
error "TODO: evaluateArrayOp{Index}{Head_ (WEmpty _)}"
Head_ (WArrayLiteral arr) ->
do w2 <- evaluate_ e2
case w2 of
Head_ (WLiteral (LNat n)) -> return . Neutral $
arr !! fromInteger (fromNatural n)
_ -> return . Neutral $
syn (ArrayOp_ o :$ fromWhnf w1 :* fromWhnf w2 :* End)
_ -> error "evaluateArrayOp{Index}: uknown whnf of array type"
go o@(Size _) = \(e1 :* End) -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ syn (ArrayOp_ o :$ e1' :* End)
Head_ (WEmpty _) -> return . Head_ $ WLiteral (LNat 0)
Head_ (WArray e2 _) -> evaluate_ e2
Head_ (WArrayLiteral es) -> return . Head_ . WLiteral .
primCoerceFrom (Signed HRing_Int) .
LInt . toInteger $ length es
Head_ _ -> error "Got something odd when evaluating an array"
go (Reduce _) = \(_ :* _ :* _ :* End) ->
error "TODO: evaluateArrayOp{Reduce}"
TODO : maybe we should adjust ' ' to have a third option for
Maple or whatever . These are called \"annotated\ " terms in
evaluatePrimOp
:: forall abt m p typs args a
. ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> PrimOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluatePrimOp evaluate_ = go
where
neu1 :: forall b c
. (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
neu1 f e = (Neutral . f . fromWhnf) <$> evaluate_ e
neu2 :: forall b c d
. (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
neu2 f e1 e2 = do e1' <- fromWhnf <$> evaluate_ e1
e2' <- fromWhnf <$> evaluate_ e2
return . Neutral $ f e1' e2'
rr1 :: forall b b' c c'
. (Interp b b', Interp c c')
=> (b' -> c')
-> (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
rr1 f' f e = do
w <- evaluate_ e
return $
case w of
Neutral e' -> Neutral $ f e'
Head_ v -> Head_ . reflect $ f' (reify v)
rr2 :: forall b b' c c' d d'
. (Interp b b', Interp c c', Interp d d')
=> (b' -> c' -> d')
-> (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
rr2 f' f e1 e2 = do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
return $
case w1 of
Neutral e1' -> Neutral $ f e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' -> Neutral $ f (fromWhnf w1) e2'
Head_ v2 -> Head_ . reflect $ f' (reify v1) (reify v2)
primOp2_
:: forall b c d
. PrimOp '[ b, c ] d -> abt '[] b -> abt '[] c -> abt '[] d
primOp2_ o e1 e2 = syn (PrimOp_ o :$ e1 :* e2 :* End)
go Not (e1 :* End) = rr1 not P.not e1
go Impl (e1 :* e2 :* End) = rr2 impl (primOp2_ Impl) e1 e2
go Diff (e1 :* e2 :* End) = rr2 diff (primOp2_ Diff) e1 e2
go Nand (e1 :* e2 :* End) = rr2 nand P.nand e1 e2
go Nor (e1 :* e2 :* End) = rr2 nor P.nor e1 e2
HACK : we do n't have a way of saying that ' Pi ' ( or ' Infinity ' , ... ) is in fact a head ; so we 're forced to call it neutral which is a lie . We should add constructor(s ) to ' Head ' to cover these magic constants ; probably grouped together under a single constructor called something like @Constant@. Maybe should group them like that in the AST as well ?
go Pi End = return $ Neutral P.pi
' ' rather than calling them neutral terms ; since they
go Sin (e1 :* End) = neu1 P.sin e1
go Cos (e1 :* End) = neu1 P.cos e1
go Tan (e1 :* End) = neu1 P.tan e1
go Asin (e1 :* End) = neu1 P.asin e1
go Acos (e1 :* End) = neu1 P.acos e1
go Atan (e1 :* End) = neu1 P.atan e1
go Sinh (e1 :* End) = neu1 P.sinh e1
go Cosh (e1 :* End) = neu1 P.cosh e1
go Tanh (e1 :* End) = neu1 P.tanh e1
go Asinh (e1 :* End) = neu1 P.asinh e1
go Acosh (e1 :* End) = neu1 P.acosh e1
go Atanh (e1 :* End) = neu1 P.atanh e1
go Floor (e1 :* End) = neu1 P.floor e1
TODO : deal with how we have better types for these three ops than does ...
go RealPow ( e1 :* e2 :* End ) = rr2 ( * * ) ( P. * * ) e1 e2
go RealPow (e1 :* e2 :* End) = neu2 (P.**) e1 e2
go Choose (e1 :* e2 :* End) = neu2 (P.choose) e1 e2
BUG : we should try to cancel out @(exp . log)@ and @(log . exp)@
go Exp (e1 :* End) = neu1 P.exp e1
go Log (e1 :* End) = neu1 P.log e1
go (Infinity h) End =
case h of
HIntegrable_Nat -> return . Neutral $ P.primOp0_ (Infinity h)
HIntegrable_Prob -> return $ Neutral P.infinity
go GammaFunc (e1 :* End) = neu1 P.gammaFunc e1
go BetaFunc (e1 :* e2 :* End) = neu2 P.betaFunc e1 e2
go (Equal theEq) (e1 :* e2 :* End) = rrEqual theEq e1 e2
go (Less theOrd) (e1 :* e2 :* End) = rrLess theOrd e1 e2
go (NatPow theSemi) (e1 :* e2 :* End) =
case theSemi of
HSemiring_Nat -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Int -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Prob -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Real -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
go (Negate theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 negate P.negate e1
HRing_Real -> rr1 negate P.negate e1
go (Abs theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 (unsafeNatural . abs) P.abs_ e1
HRing_Real -> rr1 (unsafeNonNegativeRational . abs) P.abs_ e1
go (Signum theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 signum P.signum e1
HRing_Real -> rr1 signum P.signum e1
go (Recip theFractional) (e1 :* End) =
case theFractional of
HFractional_Prob -> rr1 recip P.recip e1
HFractional_Real -> rr1 recip P.recip e1
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> neu2 (flip P.thRootOf) e1 e2
go ( ) ( e1 :* e2 :* End ) =
case of
HRadical_Prob - > rr2 natRoot ( flip P.thRootOf ) e1 e2
go ( ) ( e1 :* End ) =
case theContinuous of
HContinuous_Prob - > rr1 erf P.erf e1
HContinuous_Real - > rr1 erf P.erf e1
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> rr2 natRoot (flip P.thRootOf) e1 e2
go (Erf theContinuous) (e1 :* End) =
case theContinuous of
HContinuous_Prob -> rr1 erf P.erf e1
HContinuous_Real -> rr1 erf P.erf e1
-}
go op _ = error $ "TODO: evaluatePrimOp{" ++ show op ++ "}"
rrEqual
:: forall b. HEq b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
rrEqual theEq =
case theEq of
HEq_Nat -> rr2 (==) (P.==)
HEq_Int -> rr2 (==) (P.==)
HEq_Prob -> rr2 (==) (P.==)
HEq_Real -> rr2 (==) (P.==)
HEq_Array _ -> error "TODO: rrEqual{HEq_Array}"
HEq_Bool -> rr2 (==) (P.==)
HEq_Unit -> rr2 (==) (P.==)
HEq_Pair aEq bEq ->
\e1 e2 -> do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
case w1 of
Neutral e1' ->
return . Neutral
$ P.primOp2_ (Equal theEq) e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' ->
return . Neutral
$ P.primOp2_ (Equal theEq) (fromHead v1) e2'
Head_ v2 -> do
let (v1a, v1b) = reifyPair v1
let (v2a, v2b) = reifyPair v2
wa <- rrEqual aEq v1a v2a
wb <- rrEqual bEq v1b v2b
return $
case wa of
Neutral ea ->
case wb of
Neutral eb -> Neutral (ea P.&& eb)
Head_ vb
| reify vb -> wa
| otherwise -> Head_ $ WDatum dFalse
Head_ va
| reify va -> wb
| otherwise -> Head_ $ WDatum dFalse
HEq_Either _ _ -> error "TODO: rrEqual{HEq_Either}"
rrLess
:: forall b. HOrd b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
rrLess theOrd =
case theOrd of
HOrd_Nat -> rr2 (<) (P.<)
HOrd_Int -> rr2 (<) (P.<)
HOrd_Prob -> rr2 (<) (P.<)
HOrd_Real -> rr2 (<) (P.<)
HOrd_Array _ -> error "TODO: rrLess{HOrd_Array}"
HOrd_Bool -> rr2 (<) (P.<)
HOrd_Unit -> rr2 (<) (P.<)
HOrd_Pair _ _ ->
\e1 e2 -> do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
case w1 of
Neutral e1' ->
return . Neutral
$ P.primOp2_ (Less theOrd) e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' ->
return . Neutral
$ P.primOp2_ (Less theOrd) (fromHead v1) e2'
Head_ v2 -> do
let (_, _) = reifyPair v1
let (_, _) = reifyPair v2
error "TODO: rrLess{HOrd_Pair}"
BUG : The obvious recursion wo n't work because we need to know when the first components are equal before recursing ( to implement lexicographic ordering ) . We really need a ternary comparison operator like ' compare ' .
HOrd_Either _ _ -> error "TODO: rrLess{HOrd_Either}"
|
f0b59dbb331f01bcfa66a47353717adebd7708839fb02112c8eef515231a93bb | EarnestResearch/honeycomb-haskell | Core.hs | # LANGUAGE ScopedTypeVariables #
module Honeycomb.Core
( honeyOptionsFromEnv,
withHoney,
withHoney',
newHoney,
newHoney',
withHoneyOptions,
-- * Forcing event sending
flush,
)
where
import Control.Monad.Reader (MonadReader, local, void)
import Data.Maybe (isJust)
import qualified Data.Text as T
import Honeycomb.Core.Internal.Types
import Honeycomb.Core.Types
import Honeycomb.Transport
import Lens.Micro (over, (&), (.~))
import Lens.Micro.Mtl (view)
import System.Environment (lookupEnv)
import UnliftIO
-- | Waits until every event queued before this call has been dequeued and
-- processed by the background sender.
--
-- This can be useful in a system which suspends processing when idle, where
-- the user wants a guarantee that the queue has fully drained.
--
-- Only events queued before this call are covered; events added afterwards
-- carry no such guarantee.
flush ::
  ( MonadUnliftIO m,
    MonadReader env m,
    HasHoney env
  ) =>
  -- | Length of time to wait before giving up (in microseconds)
  Int ->
  m ()
flush waitMicros = do
  queue <- view (honeyL . honeyTransportStateL . transportFlushQueueL)
  ack <- newEmptyTMVarIO
  -- The background worker fills the TMVar once everything queued ahead of
  -- it has been processed.
  atomically (writeTBQueue queue ack)
  -- Block on the acknowledgement, abandoning the wait after the timeout.
  void . timeout waitMicros . atomically $ takeTMVar ack
-- | Creates a new Honey library instance.
--
-- A background thread is started up, which will dequeue events that need to
-- be sent. On shutdown, the event queue is shut down, and the background
-- thread stops once all messages are processed.
--
-- Options are discovered from the process environment via
-- 'honeyOptionsFromEnv'; to supply them explicitly, use 'newHoney''.
newHoney ::
  ( MonadUnliftIO n,
    MonadIO m
  ) =>
  n (Honey, m ())
newHoney = do
  serverOpts <- defaultHoneyServerOptions
  clientOpts <- honeyOptionsFromEnv
  (transport, stopSender) <- newTransport serverOpts
  -- Return the library handle together with the action that shuts the
  -- background sender down.
  pure (mkHoney clientOpts transport, stopSender)
-- | Creates a new Honey library instance with explicitly supplied options.
--
-- A background thread is started up, which will dequeue events that need to
-- be sent. On shutdown, the event queue is shut down, and the background
-- thread stops once all messages are processed.
newHoney' ::
  ( MonadUnliftIO n,
    MonadIO m
  ) =>
  -- | Options for how event handling is performed
  HoneyServerOptions n ->
  -- | Options for client library behaviour
  HoneyOptions ->
  n (Honey, m ())
newHoney' serverOpts clientOpts = do
  (transport, stopSender) <- newTransport serverOpts
  -- Return the library handle together with the action that shuts the
  -- background sender down.
  pure (mkHoney clientOpts transport, stopSender)
-- |
-- Creates a Honey environment and runs the supplied program against it,
-- guaranteeing that the background sender is shut down afterwards.
--
-- Options are discovered from the process environment; to set them
-- manually, use 'withHoney'' or 'withHoneyOptions'.
withHoney ::
  MonadUnliftIO m =>
  -- | The program to run
  (Honey -> m a) ->
  m a
withHoney program = withRunInIO $ \runInIO ->
  -- The pair's second component is the shutdown action; bracket guarantees
  -- it runs even if the program throws.
  bracket newHoney snd $ \(honey, _shutdown) ->
    runInIO (program honey)
-- |
-- Creates a Honey environment with explicitly supplied options and runs the
-- supplied program against it, guaranteeing that the background sender is
-- shut down afterwards.
withHoney' ::
  MonadUnliftIO m =>
  -- | Options for how event handling is performed
  HoneyServerOptions m ->
  -- | Options for client library behaviour
  HoneyOptions ->
  -- | The program to run
  (Honey -> m a) ->
  m a
withHoney' serverOpts clientOpts program =
  bracket acquire release use
  where
    -- Start the library (and its background sender thread).
    acquire = newHoney' serverOpts clientOpts
    -- The pair's second component is the shutdown action.
    release = snd
    -- Hand the Honey handle (first component) to the caller's program.
    use = program . fst
-- | Modifies the HoneyOptions value for the provided program.
--
-- This allows a program to be run with a @HoneyOptions@ value which is
-- different to the one configured when setting up the library.
withHoneyOptions ::
  ( MonadReader env m,
    HasHoney env
  ) =>
  -- | The function to modify the current options value
  (HoneyOptions -> HoneyOptions) ->
  -- | The program to run
  m a ->
  m a
withHoneyOptions modifyOpts program = local adjust program
  where
    -- Apply the caller's modification to the options stored in the
    -- environment's Honey value.
    adjust = over (honeyL . honeyOptionsL) modifyOpts
-- | Gets options for the library from the process environment.
--
-- This reads the default API Key from @HONEYCOMB_API_KEY@.
--
-- It reads the default dataset from @HONEYCOMB_DATASET@.
--
-- In addition, if @HONEYCOMB_DISABLED@ is set to any value, no
-- Honeycomb events are queued or sent (but no errors are raised).
honeyOptionsFromEnv ::
  MonadIO m =>
  m HoneyOptions
honeyOptionsFromEnv = liftIO $ do
  mApiKey <- wrapped (ApiKey . T.pack) "HONEYCOMB_API_KEY"
  mDataset <- wrapped (Dataset . T.pack) "HONEYCOMB_DATASET"
  disabled <- isJust <$> lookupEnv "HONEYCOMB_DISABLED"
  pure $
    defaultHoneyOptions
      & apiKeyL .~ mApiKey
      & datasetL .~ mDataset
      & disabledL .~ disabled
  where
    -- Look up an environment variable, wrapping any value found with the
    -- given constructor.
    wrapped wrap name = fmap wrap <$> lookupEnv name
| null | https://raw.githubusercontent.com/EarnestResearch/honeycomb-haskell/0cb643a637e1b23eecded845970e6a973f8385cf/honeycomb/src/Honeycomb/Core.hs | haskell | * Forcing event sending
| Waits until all currently sent events have been dequeued and processed.
This may be useful in a system which suspends processing when idle; the user
may want to guarantee that all queued events have been sent.
This only guarantees that events queued before this call will be sent. A
user may add more events afterwards, and this does not guarantee that those
events have been sent.
| Length of time to wait before giving up (in microseconds)
A background thread is started up, which will dequeue events that
need to be sent. On shutdown, the event queue is shut down, and
the background thread stops once all messages are processed.
Discovers Honey options from the environment, using 'honeyOptionsFromEnv';
if you wish to set the options manually, use 'newHoney''
A background thread is started up, which will dequeue events that
need to be sent. On shutdown, the event queue is shut down, and
the background thread stops once all messages are processed.
| Options for how event handling is performed
| Options for client library behaviour
|
Creates a Honey environment, and if given a program that uses this,
will run the program with an environment, correctly shutting everything
down afterwards.
Discovers Honey options from the environment; if you wish to set the
options manually, use 'withHoney'' or 'withHoneyOptions'
| The program to run
|
Creates a Honey environment, and if given a program that uses this,
will run the program with an environment, correctly shutting everything
down afterwards.
| Options for how event handling is performed
| Options for client library behaviour
| The program to run
| Modifies the HoneyOptions value for the provided program.
| The function to modify the current options value
| The program to run
| Gets options for the library from the process environment.
This reads the default API Key from @HONEYCOMB_API_KEY@.
It reads the default dataset from @HONEYCOMB_DATASET@.
In addition, if @HONEYCOMB_DISABLED@ is set to any value, no | # LANGUAGE ScopedTypeVariables #
module Honeycomb.Core
( honeyOptionsFromEnv,
withHoney,
withHoney',
newHoney,
newHoney',
withHoneyOptions,
flush,
)
where
import Control.Monad.Reader (MonadReader, local, void)
import Data.Maybe (isJust)
import qualified Data.Text as T
import Honeycomb.Core.Internal.Types
import Honeycomb.Core.Types
import Honeycomb.Transport
import Lens.Micro (over, (&), (.~))
import Lens.Micro.Mtl (view)
import System.Environment (lookupEnv)
import UnliftIO
flush ::
( MonadUnliftIO m,
MonadReader env m,
HasHoney env
) =>
Int ->
m ()
flush timeout_us = do
flushQueue <- view $ honeyL . honeyTransportStateL . transportFlushQueueL
mvar <- newEmptyTMVarIO
atomically $ writeTBQueue flushQueue mvar
void $ timeout timeout_us $ atomically $ takeTMVar mvar
| Creates a new Honey library instance .
newHoney ::
( MonadUnliftIO n,
MonadIO m
) =>
n (Honey, m ())
newHoney = do
honeyServerOptions <- defaultHoneyServerOptions
honeyOptions <- honeyOptionsFromEnv
(transportState, shutdown) <- newTransport honeyServerOptions
pure (mkHoney honeyOptions transportState, shutdown)
| Creates a new Honey library instance .
newHoney' ::
( MonadUnliftIO n,
MonadIO m
) =>
HoneyServerOptions n ->
HoneyOptions ->
n (Honey, m ())
newHoney' honeyServerOptions honeyOptions = do
(transportState, shutdown) <- newTransport honeyServerOptions
pure (mkHoney honeyOptions transportState, shutdown)
withHoney ::
MonadUnliftIO m =>
(Honey -> m a) ->
m a
withHoney inner = withRunInIO $ \run ->
bracket
newHoney
snd
(run . inner . fst)
withHoney' ::
MonadUnliftIO m =>
HoneyServerOptions m ->
HoneyOptions ->
(Honey -> m a) ->
m a
withHoney' honeyServerOptions honeyOptions inner =
bracket
(newHoney' honeyServerOptions honeyOptions)
snd
(inner . fst)
This allows a program to be run , with a @HoneyOptions@ value which is different
to the one configured when setting up the library .
withHoneyOptions ::
( MonadReader env m,
HasHoney env
) =>
(HoneyOptions -> HoneyOptions) ->
m a ->
m a
withHoneyOptions f = local (over (honeyL . honeyOptionsL) f)
Honeycomb events are queued or sent ( but no errors are raised ) .
honeyOptionsFromEnv ::
MonadIO m =>
m HoneyOptions
honeyOptionsFromEnv = do
apiKeyEnv <- liftIO $ fmap (ApiKey . T.pack) <$> lookupEnv "HONEYCOMB_API_KEY"
datasetEnv <- liftIO $ fmap (Dataset . T.pack) <$> lookupEnv "HONEYCOMB_DATASET"
disabledEnv <- liftIO $ lookupEnv "HONEYCOMB_DISABLED"
pure $
defaultHoneyOptions
& apiKeyL .~ apiKeyEnv
& datasetL .~ datasetEnv
& disabledL .~ isJust disabledEnv
|
a30956a6a9ef6aba2bdac418e9f2cc7de73ec05d994a3345075e1b7883b79f35 | onedata/op-worker | provider_rest_translator.erl | %%%-------------------------------------------------------------------
@author
( C ) 2019 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module handles translation of middleware results concerning
%%% provider entities into REST responses.
%%% @end
%%%-------------------------------------------------------------------
-module(provider_rest_translator).
-author("Bartosz Walkowicz").
-include("http/rest.hrl").
-include("middleware/middleware.hrl").
-export([get_response/2]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% {@link rest_translator_behaviour} callback get_response/2.
%% @end
%%--------------------------------------------------------------------
-spec get_response(gri:gri(), Resource :: term()) -> #rest_resp{}.
get_response(#gri{aspect = As}, Result) when
As =:= configuration;
As =:= test_image;
As =:= health
->
?OK_REPLY(Result).
| null | https://raw.githubusercontent.com/onedata/op-worker/57579e74cfb93981c55e620af9bf9772ee0a991f/src/http/rest/translators/provider_rest_translator.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc
This module handles translation of middleware results concerning
provider entities into REST responses.
@end
-------------------------------------------------------------------
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
{@link rest_translator_behaviour} callback get_response/2.
@end
-------------------------------------------------------------------- | @author
( C ) 2019 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(provider_rest_translator).
-author("Bartosz Walkowicz").
-include("http/rest.hrl").
-include("middleware/middleware.hrl").
-export([get_response/2]).
-spec get_response(gri:gri(), Resource :: term()) -> #rest_resp{}.
get_response(#gri{aspect = As}, Result) when
As =:= configuration;
As =:= test_image;
As =:= health
->
?OK_REPLY(Result).
|
18206a3bcfc97b1bec09fd3943ebffc1cbca1e16b378b1e9c11173969ffb28cf | eval/deps-try | core.clj | (ns rebel-readline.core
(:refer-clojure :exclude [read-line])
(:require
[clojure.string :as string]
[rebel-readline.commands :as commands]
[rebel-readline.io.callback-reader]
[rebel-readline.jline-api :as api]
[rebel-readline.tools :as tools]
[rebel-readline.utils :as utils])
(:import
[org.jline.reader
UserInterruptException
EndOfFileException]))
(defmacro ensure-terminal
"Bind the rebel-readline.jline-api/*terminal* var to a new Jline
terminal if needed, otherwise use the currently bound one.
Will throw a clojure.lang.ExceptionInfo with a data payload of
`{:type :rebel-readline.jline-api/bad-terminal}` if JVM wasn't
launched from a terminal process.
There should really only be one instance of a Jline terminal as it
represents a \"connection\" to the terminal that launched JVM
process.
--------------------------------------------------------------------
IMPORTANT NOTE:
--------------------------------------------------------------------
This function will attempt to manipulate the terminal that initiated
the JVM process. For this reason it is important to start your JVM
in a terminal.
That means launching your process using the
- the java command
- the Clojure `clojure` tool
- lein trampoline
- boot - would need to run in boot's worker pod
Launching from a process initiated by lein will not work and
launching from a boot pod will not cut it either.
The underlying Terminal manipulation code is Jline3 and it makes
every effort to be compatible with a wide array of terminals. It is
entirely possible that your terminal is not well supported."
[& body]
`(binding [rebel-readline.jline-api/*terminal*
(or rebel-readline.jline-api/*terminal* (rebel-readline.jline-api/create-terminal))]
~@body))
(defmacro with-line-reader
"This macro take a line-reader and binds it. It is one of the
primary ways to utilize this library. You can think of the
rebel-readline.jline-api/*line-reader* binding as an alternative in
source that the rebel-readline.core/read-line function reads from.
Example:
(require '[rebel-readline.core :as rebel])
(rebel/with-line-reader
(rebel-readline.clojure.line-reader/create
(rebel-readline.clojure.service.local/create))
;; optionally bind the output directly to the jline terminal
;; in such a way so that output won't corrupt the terminal
;; this is optional
(binding [*out* (rebel-readline.jline-api/safe-terminal-writer)]
(clojure.main/repl
;; this will create a fn that reads from the *line-reader*
:read (rebel-readline.clojure.main/create-repl-read)
:prompt (fn []))))"
[line-reader & body]
`(ensure-terminal
(binding [rebel-readline.jline-api/*line-reader* ~line-reader]
~@body)))
(defn help-message
"Returns a help message to print before enguaging the
readline. Helpful for repl development."
[]
"[Rebel readline] Type :repl/help for online help info")
(defn read-line-opts
"Like read-line, but allows overriding of the LineReader prompt, buffer, and mask parameters.
:prompt
Allows overriding with a cusom prompt
:buffer
The default value presented to the user to edit, may be null.
:mask
Should be set to a single character used by jline to bit-mask.
Characters will not be echoed if they mask to 0
Might do crazy stuff with rebel-readline, use with caution.
defaults to nil (no mask)
:command-executed
sentinal value to be returned when a repl command is executed, otherwise a
blank string will be returned when a repl command is executed.
"
[ & {prompt :prompt
mask :mask
buffer :buffer
command-executed :command-executed
:or {prompt nil buffer nil mask nil command-executed ""}}]
(let [redirect-output? (:redirect-output @api/*line-reader*)
save-out (volatile! *out*)
redirect-print-writer (api/safe-terminal-writer api/*line-reader*)]
(when redirect-output?
(alter-var-root
#'*out*
(fn [root-out]
(vreset! save-out root-out)
redirect-print-writer)))
(try
(binding [*out* redirect-print-writer]
;; this is intensely disatisfying
;; but we are blocking concurrent redisplays while the
;; readline prompt is initially drawn
(api/block-redisplay-millis 100)
(let [res' (.readLine api/*line-reader* (or prompt (tools/prompt)) mask buffer)]
(if-not (commands/handle-command res')
res'
command-executed)))
(finally
(when redirect-output?
(flush)
(alter-var-root #'*out* (fn [_] @save-out)))))))
(defn read-line
"Reads a line from the currently bound
rebel-readline.jline-api/*line-reader*. If you supply the optional
`command-executed` sentinal value, it will be returned when a repl
command is executed, otherwise a blank string will be returned when
a repl command is executed.
This function activates the rebel line reader which, in turn, will put
the terminal that launched the jvm process into \"raw mode\" during the
readline operation.
You can think of the readline operation as a launching of an editor
for the brief period that the line is read.
If readline service value of :redirect-output is truthy (the default
value) in the supplied rebel line reader service config this
function will alter the root binding of the *out* var to prevent
extraneous output from corrupting the read line editors output.
Once the reading is done it returns the terminal to its original
settings."
;; much of this code is intended to protect the prompt. If the
;; prompt gets corrupted by extraneous output it can lead to the
;; horrible condition of the readline program thinking the cursor is
;; in a different position than it is. We try to prevent this by
;; creating a safe writer that will print the output and redraw the
;; readline, while ensuring that the printed output has a newline at
;; the end.
;; We then expand the scope of this print-writer by temorarily
;; redefining the root binding of *out* to it.
;; The idea being that we want to catch as much concurrant output as
;; possible while the readline is enguaged.
[& [command-executed]]
(read-line-opts :command-executed (or command-executed "")))
(defn repl-read-line
"A readline function that converts the Exceptions normally thrown by
org.jline.reader.impl.LineReaderImpl that signal user interrupt or
the end of the parent stream into concrete sentinal objects that one
can act on.
This follows the pattern established by `clojure.main/repl-read`
This function either returns the string read by this readline or the
request-exit or request-prompt sentinal objects."
[request-prompt request-exit]
(try
(read-line request-prompt)
(catch UserInterruptException e
request-prompt)
(catch EndOfFileException e
request-exit)))
(defn has-remaining?
"Takes a PushbackReader and returns true if the next character is not negative.
i.e not the end of the readers stream."
[pbr]
(let [x (.read pbr)]
(and (not (neg? x))
(do (.unread pbr x) true))))
(defn create-buffered-repl-reader-fn [create-buffered-reader-fn has-remaining-pred repl-read-fn]
(fn []
(let [reader-buffer (atom (create-buffered-reader-fn ""))]
(fn [request-prompt request-exit]
(if (has-remaining-pred @reader-buffer)
(binding [*in* @reader-buffer]
(repl-read-fn request-prompt request-exit))
(let [possible-forms (repl-read-line request-prompt request-exit)]
(if (#{request-prompt request-exit} possible-forms)
possible-forms
(if-not (string/blank? possible-forms)
(do
(reset! reader-buffer (create-buffered-reader-fn (str possible-forms "\n")))
(binding [*in* @reader-buffer]
(repl-read-fn request-prompt request-exit)))
request-prompt))))))))
(defn stream-read-line
"This function reads lines and returns them ready to be read by a
java.io.Reader. This basically adds newlines at the end of readline
results.
This function returns `nil` if it is end of the supplied readlines
parent input stream or if a process exit is requested.
This function was designed to be supplied to a `rebel-readline.io.calback-reader`
Example:
this will create an input stream to be read from by a Clojure / Script REPL
(rebel-readline.io.calback-reader/callback-reader #(stream-read-line))"
[]
(let [request-prompt (Object.)
request-exit (Object.)
possible-result (repl-read-line request-prompt request-exit)]
(cond
(= request-prompt possible-result) "\n"
(= request-exit possible-result) nil
:else (str possible-result "\n"))))
(defmacro with-readline-in
"This macro takes a rebel readline service and binds *in* to an a
`clojure.lang.LineNumberingPushbackReader` that is backed by the
readline.
This is perhaps the easiest way to utilize this readline library.
The downside to using this method is if you are working in a REPL on
something that reads from the *in* that wouldn't benefit from the
features of this readline lib. In that case I would look at
`clj-repl-read` where the readline is only engaged during the read
portion of the REPL.
Examples:
(with-readline-in
(rebel-readline.clojure.line-reader/create
(rebel-readline.clojure.service.local/create {:prompt clojure.main/repl-prompt} ))
(clojure.main/repl :prompt (fn[])))"
[line-reader & body]
`(with-line-reader ~line-reader
(binding [*in* (clojure.lang.LineNumberingPushbackReader.
(rebel-readline.io.callback-reader/callback-reader
stream-read-line))]
~@body)))
(defn basic-line-reader [& opts]
(api/create-line-reader api/*terminal* nil (apply hash-map opts)))
| null | https://raw.githubusercontent.com/eval/deps-try/da691c68b527ad5f9e770dbad82cce6cbbe16fb4/vendor/rebel-readline/rebel-readline/src/rebel_readline/core.clj | clojure | optionally bind the output directly to the jline terminal
in such a way so that output won't corrupt the terminal
this is optional
this will create a fn that reads from the *line-reader*
this is intensely disatisfying
but we are blocking concurrent redisplays while the
readline prompt is initially drawn
much of this code is intended to protect the prompt. If the
prompt gets corrupted by extraneous output it can lead to the
horrible condition of the readline program thinking the cursor is
in a different position than it is. We try to prevent this by
creating a safe writer that will print the output and redraw the
readline, while ensuring that the printed output has a newline at
the end.
We then expand the scope of this print-writer by temorarily
redefining the root binding of *out* to it.
The idea being that we want to catch as much concurrant output as
possible while the readline is enguaged. | (ns rebel-readline.core
(:refer-clojure :exclude [read-line])
(:require
[clojure.string :as string]
[rebel-readline.commands :as commands]
[rebel-readline.io.callback-reader]
[rebel-readline.jline-api :as api]
[rebel-readline.tools :as tools]
[rebel-readline.utils :as utils])
(:import
[org.jline.reader
UserInterruptException
EndOfFileException]))
(defmacro ensure-terminal
"Bind the rebel-readline.jline-api/*terminal* var to a new Jline
terminal if needed, otherwise use the currently bound one.
Will throw a clojure.lang.ExceptionInfo with a data payload of
`{:type :rebel-readline.jline-api/bad-terminal}` if JVM wasn't
launched from a terminal process.
There should really only be one instance of a Jline terminal as it
represents a \"connection\" to the terminal that launched JVM
process.
--------------------------------------------------------------------
IMPORTANT NOTE:
--------------------------------------------------------------------
This function will attempt to manipulate the terminal that initiated
the JVM process. For this reason it is important to start your JVM
in a terminal.
That means launching your process using the
- the java command
- the Clojure `clojure` tool
- lein trampoline
- boot - would need to run in boot's worker pod
Launching from a process initiated by lein will not work and
launching from a boot pod will not cut it either.
The underlying Terminal manipulation code is Jline3 and it makes
every effort to be compatible with a wide array of terminals. It is
entirely possible that your terminal is not well supported."
[& body]
`(binding [rebel-readline.jline-api/*terminal*
(or rebel-readline.jline-api/*terminal* (rebel-readline.jline-api/create-terminal))]
~@body))
(defmacro with-line-reader
"This macro take a line-reader and binds it. It is one of the
primary ways to utilize this library. You can think of the
rebel-readline.jline-api/*line-reader* binding as an alternative in
source that the rebel-readline.core/read-line function reads from.
Example:
(require '[rebel-readline.core :as rebel])
(rebel/with-line-reader
(rebel-readline.clojure.line-reader/create
(rebel-readline.clojure.service.local/create))
(binding [*out* (rebel-readline.jline-api/safe-terminal-writer)]
(clojure.main/repl
:read (rebel-readline.clojure.main/create-repl-read)
:prompt (fn []))))"
[line-reader & body]
`(ensure-terminal
(binding [rebel-readline.jline-api/*line-reader* ~line-reader]
~@body)))
(defn help-message
"Returns a help message to print before enguaging the
readline. Helpful for repl development."
[]
"[Rebel readline] Type :repl/help for online help info")
(defn read-line-opts
"Like read-line, but allows overriding of the LineReader prompt, buffer, and mask parameters.
:prompt
Allows overriding with a cusom prompt
:buffer
The default value presented to the user to edit, may be null.
:mask
Should be set to a single character used by jline to bit-mask.
Characters will not be echoed if they mask to 0
Might do crazy stuff with rebel-readline, use with caution.
defaults to nil (no mask)
:command-executed
sentinal value to be returned when a repl command is executed, otherwise a
blank string will be returned when a repl command is executed.
"
[ & {prompt :prompt
mask :mask
buffer :buffer
command-executed :command-executed
:or {prompt nil buffer nil mask nil command-executed ""}}]
(let [redirect-output? (:redirect-output @api/*line-reader*)
save-out (volatile! *out*)
redirect-print-writer (api/safe-terminal-writer api/*line-reader*)]
(when redirect-output?
(alter-var-root
#'*out*
(fn [root-out]
(vreset! save-out root-out)
redirect-print-writer)))
(try
(binding [*out* redirect-print-writer]
(api/block-redisplay-millis 100)
(let [res' (.readLine api/*line-reader* (or prompt (tools/prompt)) mask buffer)]
(if-not (commands/handle-command res')
res'
command-executed)))
(finally
(when redirect-output?
(flush)
(alter-var-root #'*out* (fn [_] @save-out)))))))
(defn read-line
"Reads a line from the currently bound
rebel-readline.jline-api/*line-reader*. If you supply the optional
`command-executed` sentinal value, it will be returned when a repl
command is executed, otherwise a blank string will be returned when
a repl command is executed.
This function activates the rebel line reader which, in turn, will put
the terminal that launched the jvm process into \"raw mode\" during the
readline operation.
You can think of the readline operation as a launching of an editor
for the brief period that the line is read.
If readline service value of :redirect-output is truthy (the default
value) in the supplied rebel line reader service config this
function will alter the root binding of the *out* var to prevent
extraneous output from corrupting the read line editors output.
Once the reading is done it returns the terminal to its original
settings."
[& [command-executed]]
(read-line-opts :command-executed (or command-executed "")))
(defn repl-read-line
"A readline function that converts the Exceptions normally thrown by
org.jline.reader.impl.LineReaderImpl that signal user interrupt or
the end of the parent stream into concrete sentinal objects that one
can act on.
This follows the pattern established by `clojure.main/repl-read`
This function either returns the string read by this readline or the
request-exit or request-prompt sentinal objects."
[request-prompt request-exit]
(try
(read-line request-prompt)
(catch UserInterruptException e
request-prompt)
(catch EndOfFileException e
request-exit)))
(defn has-remaining?
"Takes a PushbackReader and returns true if the next character is not negative.
i.e not the end of the readers stream."
[pbr]
(let [x (.read pbr)]
(and (not (neg? x))
(do (.unread pbr x) true))))
(defn create-buffered-repl-reader-fn [create-buffered-reader-fn has-remaining-pred repl-read-fn]
(fn []
(let [reader-buffer (atom (create-buffered-reader-fn ""))]
(fn [request-prompt request-exit]
(if (has-remaining-pred @reader-buffer)
(binding [*in* @reader-buffer]
(repl-read-fn request-prompt request-exit))
(let [possible-forms (repl-read-line request-prompt request-exit)]
(if (#{request-prompt request-exit} possible-forms)
possible-forms
(if-not (string/blank? possible-forms)
(do
(reset! reader-buffer (create-buffered-reader-fn (str possible-forms "\n")))
(binding [*in* @reader-buffer]
(repl-read-fn request-prompt request-exit)))
request-prompt))))))))
(defn stream-read-line
"This function reads lines and returns them ready to be read by a
java.io.Reader. This basically adds newlines at the end of readline
results.
This function returns `nil` if it is end of the supplied readlines
parent input stream or if a process exit is requested.
This function was designed to be supplied to a `rebel-readline.io.calback-reader`
Example:
this will create an input stream to be read from by a Clojure / Script REPL
(rebel-readline.io.calback-reader/callback-reader #(stream-read-line))"
[]
(let [request-prompt (Object.)
request-exit (Object.)
possible-result (repl-read-line request-prompt request-exit)]
(cond
(= request-prompt possible-result) "\n"
(= request-exit possible-result) nil
:else (str possible-result "\n"))))
(defmacro with-readline-in
"This macro takes a rebel readline service and binds *in* to an a
`clojure.lang.LineNumberingPushbackReader` that is backed by the
readline.
This is perhaps the easiest way to utilize this readline library.
The downside to using this method is if you are working in a REPL on
something that reads from the *in* that wouldn't benefit from the
features of this readline lib. In that case I would look at
`clj-repl-read` where the readline is only engaged during the read
portion of the REPL.
Examples:
(with-readline-in
(rebel-readline.clojure.line-reader/create
(rebel-readline.clojure.service.local/create {:prompt clojure.main/repl-prompt} ))
(clojure.main/repl :prompt (fn[])))"
[line-reader & body]
`(with-line-reader ~line-reader
(binding [*in* (clojure.lang.LineNumberingPushbackReader.
(rebel-readline.io.callback-reader/callback-reader
stream-read-line))]
~@body)))
(defn basic-line-reader [& opts]
(api/create-line-reader api/*terminal* nil (apply hash-map opts)))
|
ff76f591f1c87f49c6274a547b4704eeadcc94a16f9be64d8d3f98dfc0aec96e | spawnfest/eep49ers | ex_canvas_paint.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 - 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
-module(ex_canvas_paint).
-behaviour(wx_object).
%% Client API
-export([start/1]).
%% wx_object callbacks
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_cast/2, handle_event/2, handle_sync_event/3]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
canvas,
pen,
brush,
old_pos,
bitmap
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
%% Setup sizers
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxDC"}]),
%% Create the window to paint on and make it repaint the whole window on resize
Canvas = wxPanel:new(Panel, [{style, ?wxFULL_REPAINT_ON_RESIZE}]),
wxPanel:setToolTip(Canvas,
"Left-click and hold to draw something - release to stop drawing.\n"
"Middle-click to fill with pink\n"
"Middle-dclick to fill with white.\n"
"Right-click to clear."),
%% Create a wxPen and a WxBrush and set its colors to draw with
Brush = wxBrush:new(?wxWHITE),
Pen = wxPen:new(?wxBLACK, [{width, 2}]),
PrintButton = wxButton:new(Panel, ?wxID_ANY, [{label, "Print"}]),
Bitmap = wxBitmap:new(30,30),
%% Add to sizers
wxSizer:add(Sizer, Canvas, [{flag, ?wxEXPAND},
{proportion, 1}]),
wxSizer:add(MainSizer, PrintButton, []),
wxSizer:add(MainSizer, Sizer, [{flag, ?wxEXPAND},
{proportion, 1}]),
wxPanel:connect(PrintButton, command_button_clicked),
wxPanel:connect(Canvas, paint, [callback]),
wxPanel:connect(Canvas, size),
wxPanel:connect(Canvas, left_down),
wxPanel:connect(Canvas, left_dclick),
wxPanel:connect(Canvas, left_up),
wxPanel:connect(Canvas, right_down),
wxPanel:connect(Canvas, middle_down),
wxPanel:connect(Canvas, middle_dclick),
wxPanel:setSizer(Panel, MainSizer),
wxSizer:layout(MainSizer),
{Panel, #state{parent=Panel, config=Config,
canvas = Canvas, pen = Pen,
brush = Brush, bitmap = Bitmap}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Sync event from callback events , paint event must be handled in callbacks
%% otherwise nothing will be drawn on windows.
handle_sync_event(#wx{event = #wxPaint{}}, _wxObj, #state{canvas=Canvas, bitmap=Bitmap}) ->
DC = wxPaintDC:new(Canvas),
redraw(DC, Bitmap),
wxPaintDC:destroy(DC),
ok.
%% Print what's drawn
handle_event(#wx{event = #wxCommand{type = command_button_clicked}},
State = #state{bitmap=Bitmap}) ->
PD = wxPrintData:new(),
PDD = wxPrintDialogData:new(PD),
PSDD = wxPageSetupDialogData:new(PD),
Fun =
fun(This,_Page) ->
MX = MY = 500,
wxPrintout:fitThisSizeToPageMargins(This, {MX,MY}, PSDD),
{_X,_Y,W,H} = wxPrintout:getLogicalPageMarginsRect(This, PSDD),
wxPrintout:offsetLogicalOrigin(This,(W-MX) div 2, (H-MY) div 2),
DC = wxPrintout:getDC(This),
redraw(DC, Bitmap),
true
end,
Printout1 = wxPrintout:new("Print", Fun,
[{getPageInfo, fun getPageInfo/1}]),
Printout2 = wxPrintout:new("Print", Fun,
[{getPageInfo, fun getPageInfo/1}]),
Preview = wxPrintPreview:new(Printout1, [{printoutForPrinting,Printout2},{data,PDD}]),
case wxPrintPreview:isOk(Preview) of
true ->
Env = wx:get_env(),
spawn_link(fun() ->
wx:set_env(Env),
PF = wxPreviewFrame:new(Preview, State#state.parent, []),
wxPreviewFrame:centre(PF, [{dir, ?wxBOTH}]),
wxPreviewFrame:initialize(PF),
wxPreviewFrame:centre(PF),
wxPreviewFrame:show(PF)
end);
false ->
io:format("Could not create preview window.\n"
"Perhaps your current printer is not set correctly?~n", []),
wxPrintPreview:destroy(Preview)
end,
{noreply, State#state{}};
%% Draw a line
handle_event(#wx{event = #wxMouse{type = motion, x = X, y = Y}},
State = #state{canvas = Canvas, pen = Pen, brush = Brush}) ->
Fun =
fun(DC) ->
wxDC:setPen(DC, Pen),
wxBrush:setColour(Brush, ?wxBLACK),
wxDC:setBrush(DC, Brush),
wxDC:drawLine(DC, {X,Y}, State#state.old_pos)
end,
draw(Canvas,State#state.bitmap, Fun),
{noreply, State#state{old_pos = {X,Y}}};
%% Resize event
handle_event(#wx{event = #wxSize{size = {W,H}}}, State = #state{bitmap=Prev}) ->
case W > 0 andalso H > 0 of
true ->
wxBitmap:destroy(Prev),
Bitmap = wxBitmap:new(W,H),
draw(State#state.canvas, Bitmap, fun(DC) -> wxDC:clear(DC) end),
{noreply, State#state{bitmap=Bitmap}};
false ->
{noreply, State}
end;
handle_event(#wx{event = #wxMouse{type = left_dclick,x = X,y = Y}}, State = #state{}) ->
wxPanel:connect(State#state.canvas, motion),
{noreply, State#state{old_pos = {X,Y}}};
handle_event(#wx{event = #wxMouse{type = left_down,x = X,y = Y}}, State = #state{}) ->
wxPanel:connect(State#state.canvas, motion),
{noreply, State#state{old_pos = {X,Y}}};
%% Fill with pink color
handle_event(#wx{event = #wxMouse{type = middle_down,x = X, y =Y}}, State = #state{}) ->
case os:type() of
{_, darwin} ->
io:format("Fill doesn't work on Darwin ~n",[]);
_ ->
ok
end,
Fun =
fun(DC) ->
wxBrush:setColour(State#state.brush, {255,125,255,255}),
wxDC:setBrush(DC, State#state.brush),
wxDC:floodFill(DC, {X,Y}, ?wxBLACK, [{style, ?wxFLOOD_BORDER}])
end,
draw(State#state.canvas, State#state.bitmap, Fun),
{noreply, State};
%% Fill with white color
handle_event(#wx{event = #wxMouse{type = middle_dclick,x = X, y =Y}}, State = #state{}) ->
Fun =
fun(DC) ->
wxBrush:setColour(State#state.brush, ?wxWHITE),
wxDC:setBrush(DC, State#state.brush),
wxDC:floodFill(DC, {X,Y}, ?wxBLACK, [{style, ?wxFLOOD_BORDER}])
end,
draw(State#state.canvas, State#state.bitmap,Fun),
{noreply, State};
handle_event(#wx{event = #wxMouse{type = left_up}}, State = #state{}) ->
wxPanel:disconnect(State#state.canvas, motion),
{noreply, State};
%% Clear the DC
handle_event(#wx{event = #wxMouse{type = right_down}}, State = #state{}) ->
draw(State#state.canvas, State#state.bitmap, fun(DC) -> wxDC:clear(DC) end),
{noreply, State}.
%% Callbacks handled as normal gen_server callbacks
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(shutdown, _From, State=#state{parent=Panel}) ->
wxPanel:destroy(Panel),
{stop, normal, ok, State};
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
handle_cast(Msg, State) ->
io:format("Got cast ~p~n",[Msg]),
{noreply,State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Local functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
draw(Canvas, Bitmap, Fun) ->
MemoryDC = wxMemoryDC:new(Bitmap),
CDC = wxClientDC:new(Canvas),
Fun(MemoryDC),
wxDC:blit(CDC, {0,0},
{wxBitmap:getWidth(Bitmap), wxBitmap:getHeight(Bitmap)},
MemoryDC, {0,0}),
wxClientDC:destroy(CDC),
wxMemoryDC:destroy(MemoryDC).
redraw(DC, Bitmap) ->
try
MemoryDC = wxMemoryDC:new(Bitmap),
wxDC:blit(DC, {0,0},
{wxBitmap:getWidth(Bitmap), wxBitmap:getHeight(Bitmap)},
MemoryDC, {0,0}),
wxMemoryDC:destroy(MemoryDC)
catch error:{{badarg,_},_} -> %% Bitmap have been deleted
ok
end.
getPageInfo(_This) ->
{1,1,1,1}.
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/wx/examples/demo/ex_canvas_paint.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Client API
wx_object callbacks
Setup sizers
Create the window to paint on and make it repaint the whole window on resize
Create a wxPen and a WxBrush and set its colors to draw with
Add to sizers
otherwise nothing will be drawn on windows.
Print what's drawn
Draw a line
Resize event
Fill with pink color
Fill with white color
Clear the DC
Callbacks handled as normal gen_server callbacks
Local functions
Bitmap have been deleted | Copyright Ericsson AB 2009 - 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(ex_canvas_paint).
-behaviour(wx_object).
-export([start/1]).
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_cast/2, handle_event/2, handle_sync_event/3]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
canvas,
pen,
brush,
old_pos,
bitmap
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxDC"}]),
Canvas = wxPanel:new(Panel, [{style, ?wxFULL_REPAINT_ON_RESIZE}]),
wxPanel:setToolTip(Canvas,
"Left-click and hold to draw something - release to stop drawing.\n"
"Middle-click to fill with pink\n"
"Middle-dclick to fill with white.\n"
"Right-click to clear."),
Brush = wxBrush:new(?wxWHITE),
Pen = wxPen:new(?wxBLACK, [{width, 2}]),
PrintButton = wxButton:new(Panel, ?wxID_ANY, [{label, "Print"}]),
Bitmap = wxBitmap:new(30,30),
wxSizer:add(Sizer, Canvas, [{flag, ?wxEXPAND},
{proportion, 1}]),
wxSizer:add(MainSizer, PrintButton, []),
wxSizer:add(MainSizer, Sizer, [{flag, ?wxEXPAND},
{proportion, 1}]),
wxPanel:connect(PrintButton, command_button_clicked),
wxPanel:connect(Canvas, paint, [callback]),
wxPanel:connect(Canvas, size),
wxPanel:connect(Canvas, left_down),
wxPanel:connect(Canvas, left_dclick),
wxPanel:connect(Canvas, left_up),
wxPanel:connect(Canvas, right_down),
wxPanel:connect(Canvas, middle_down),
wxPanel:connect(Canvas, middle_dclick),
wxPanel:setSizer(Panel, MainSizer),
wxSizer:layout(MainSizer),
{Panel, #state{parent=Panel, config=Config,
canvas = Canvas, pen = Pen,
brush = Brush, bitmap = Bitmap}}.
Sync event from callback events , paint event must be handled in callbacks
handle_sync_event(#wx{event = #wxPaint{}}, _wxObj, #state{canvas=Canvas, bitmap=Bitmap}) ->
DC = wxPaintDC:new(Canvas),
redraw(DC, Bitmap),
wxPaintDC:destroy(DC),
ok.
handle_event(#wx{event = #wxCommand{type = command_button_clicked}},
State = #state{bitmap=Bitmap}) ->
PD = wxPrintData:new(),
PDD = wxPrintDialogData:new(PD),
PSDD = wxPageSetupDialogData:new(PD),
Fun =
fun(This,_Page) ->
MX = MY = 500,
wxPrintout:fitThisSizeToPageMargins(This, {MX,MY}, PSDD),
{_X,_Y,W,H} = wxPrintout:getLogicalPageMarginsRect(This, PSDD),
wxPrintout:offsetLogicalOrigin(This,(W-MX) div 2, (H-MY) div 2),
DC = wxPrintout:getDC(This),
redraw(DC, Bitmap),
true
end,
Printout1 = wxPrintout:new("Print", Fun,
[{getPageInfo, fun getPageInfo/1}]),
Printout2 = wxPrintout:new("Print", Fun,
[{getPageInfo, fun getPageInfo/1}]),
Preview = wxPrintPreview:new(Printout1, [{printoutForPrinting,Printout2},{data,PDD}]),
case wxPrintPreview:isOk(Preview) of
true ->
Env = wx:get_env(),
spawn_link(fun() ->
wx:set_env(Env),
PF = wxPreviewFrame:new(Preview, State#state.parent, []),
wxPreviewFrame:centre(PF, [{dir, ?wxBOTH}]),
wxPreviewFrame:initialize(PF),
wxPreviewFrame:centre(PF),
wxPreviewFrame:show(PF)
end);
false ->
io:format("Could not create preview window.\n"
"Perhaps your current printer is not set correctly?~n", []),
wxPrintPreview:destroy(Preview)
end,
{noreply, State#state{}};
handle_event(#wx{event = #wxMouse{type = motion, x = X, y = Y}},
State = #state{canvas = Canvas, pen = Pen, brush = Brush}) ->
Fun =
fun(DC) ->
wxDC:setPen(DC, Pen),
wxBrush:setColour(Brush, ?wxBLACK),
wxDC:setBrush(DC, Brush),
wxDC:drawLine(DC, {X,Y}, State#state.old_pos)
end,
draw(Canvas,State#state.bitmap, Fun),
{noreply, State#state{old_pos = {X,Y}}};
handle_event(#wx{event = #wxSize{size = {W,H}}}, State = #state{bitmap=Prev}) ->
case W > 0 andalso H > 0 of
true ->
wxBitmap:destroy(Prev),
Bitmap = wxBitmap:new(W,H),
draw(State#state.canvas, Bitmap, fun(DC) -> wxDC:clear(DC) end),
{noreply, State#state{bitmap=Bitmap}};
false ->
{noreply, State}
end;
handle_event(#wx{event = #wxMouse{type = left_dclick,x = X,y = Y}}, State = #state{}) ->
wxPanel:connect(State#state.canvas, motion),
{noreply, State#state{old_pos = {X,Y}}};
handle_event(#wx{event = #wxMouse{type = left_down,x = X,y = Y}}, State = #state{}) ->
wxPanel:connect(State#state.canvas, motion),
{noreply, State#state{old_pos = {X,Y}}};
handle_event(#wx{event = #wxMouse{type = middle_down,x = X, y =Y}}, State = #state{}) ->
case os:type() of
{_, darwin} ->
io:format("Fill doesn't work on Darwin ~n",[]);
_ ->
ok
end,
Fun =
fun(DC) ->
wxBrush:setColour(State#state.brush, {255,125,255,255}),
wxDC:setBrush(DC, State#state.brush),
wxDC:floodFill(DC, {X,Y}, ?wxBLACK, [{style, ?wxFLOOD_BORDER}])
end,
draw(State#state.canvas, State#state.bitmap, Fun),
{noreply, State};
handle_event(#wx{event = #wxMouse{type = middle_dclick,x = X, y =Y}}, State = #state{}) ->
Fun =
fun(DC) ->
wxBrush:setColour(State#state.brush, ?wxWHITE),
wxDC:setBrush(DC, State#state.brush),
wxDC:floodFill(DC, {X,Y}, ?wxBLACK, [{style, ?wxFLOOD_BORDER}])
end,
draw(State#state.canvas, State#state.bitmap,Fun),
{noreply, State};
handle_event(#wx{event = #wxMouse{type = left_up}}, State = #state{}) ->
wxPanel:disconnect(State#state.canvas, motion),
{noreply, State};
handle_event(#wx{event = #wxMouse{type = right_down}}, State = #state{}) ->
draw(State#state.canvas, State#state.bitmap, fun(DC) -> wxDC:clear(DC) end),
{noreply, State}.
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(shutdown, _From, State=#state{parent=Panel}) ->
wxPanel:destroy(Panel),
{stop, normal, ok, State};
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
handle_cast(Msg, State) ->
io:format("Got cast ~p~n",[Msg]),
{noreply,State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _) ->
ok.
draw(Canvas, Bitmap, Fun) ->
MemoryDC = wxMemoryDC:new(Bitmap),
CDC = wxClientDC:new(Canvas),
Fun(MemoryDC),
wxDC:blit(CDC, {0,0},
{wxBitmap:getWidth(Bitmap), wxBitmap:getHeight(Bitmap)},
MemoryDC, {0,0}),
wxClientDC:destroy(CDC),
wxMemoryDC:destroy(MemoryDC).
redraw(DC, Bitmap) ->
try
MemoryDC = wxMemoryDC:new(Bitmap),
wxDC:blit(DC, {0,0},
{wxBitmap:getWidth(Bitmap), wxBitmap:getHeight(Bitmap)},
MemoryDC, {0,0}),
wxMemoryDC:destroy(MemoryDC)
ok
end.
getPageInfo(_This) ->
{1,1,1,1}.
|
c8fd7c99cec83833bbb5af99bbebcad8ac0a416e712e524a09f723e29f8b8068 | abdulapopoola/SICPBook | Ex2.23.scm | #lang planet neil/sicp
(define (for-each proc items)
(cond ((null? items) true)
(else (proc (car items))
(for-each proc (cdr items)))))
(for-each (lambda (x) (newline) (display x))
(list 57 321 88))
| null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%202/2.2/Ex2.23.scm | scheme | #lang planet neil/sicp
(define (for-each proc items)
(cond ((null? items) true)
(else (proc (car items))
(for-each proc (cdr items)))))
(for-each (lambda (x) (newline) (display x))
(list 57 321 88))
| |
b25ef2aa7a66a58b139fe3bc60c3faf064c3271d04be385b8f835f1781155686 | janestreet/bonsai | data.mli | open! Core
type t =
| Option_A
| Option_B
| Option_C
[@@deriving variants, enumerate, sexp, equal, compare]
val to_string : t -> string
include Comparable.S with type t := t
| null | https://raw.githubusercontent.com/janestreet/bonsai/c202b961f41e20e0b735c4e33c2bbac57a590941/web_ui/typeahead/test/data.mli | ocaml | open! Core
type t =
| Option_A
| Option_B
| Option_C
[@@deriving variants, enumerate, sexp, equal, compare]
val to_string : t -> string
include Comparable.S with type t := t
| |
221a665b1334ec6a670ef76b6ec6ba577fd1ceaadc649dcb51557602676b59ce | DeepSec-prover/deepsec | types_ui.mli | (**************************************************************************)
(* *)
DeepSec
(* *)
, project PESTO ,
, project PESTO ,
, project PESTO ,
(* *)
Copyright ( C ) INRIA 2017 - 2020
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU General Public License version 3.0 as described in the
(* file LICENSE *)
(* *)
(**************************************************************************)
open Types
(* Configuration *)
type json_position =
{
js_index : int;
js_args : int list
}
type json_pattern =
| JPVar of variable * int
| JPTuple of symbol * json_pattern list
| JPEquality of term
type json_process =
| JNil
| JOutput of term * term * json_process * json_position
| JInput of term * json_pattern * json_process * json_position
| JIfThenElse of term * term * json_process * json_process * json_position
| JLet of json_pattern * term * json_process * json_process * json_position
| JNew of name * int * json_process * json_position
| JPar of json_process list
| JBang of int * json_process * json_position
| JChoice of json_process * json_process * json_position
type configuration =
{
size_frame : int;
frame : term list;
process : json_process
}
(* Traces *)
type json_transition =
| JAOutput of recipe * json_position
| JAInput of recipe * recipe * json_position
| JAEaves of recipe * json_position (* out *) * json_position (* in *)
| JAComm of json_position (* out *) * json_position (* in *)
| JABang of int * json_position
| JATau of json_position
| JAChoice of json_position * bool (* True when the left process is chosen *)
type json_attack_trace =
{
id_proc : int;
transitions : json_transition list
}
type json_selected_transition =
| JSAOutput of string option * json_position
| JSAInput of string option * string option * json_position
| JSAEaves of string option * json_position (* out *) * json_position (* in *)
| JSAComm of json_position (* out *) * json_position (* in *)
| JSABang of int * json_position
| JSATau of json_position
| JSAChoice of json_position * bool (* True when the left process is chosen *)
(* Association table *)
type association =
{
size : int;
symbols : (symbol * int) list;
names : (name * int) list;
variables : (variable * int) list
}
type replicated_association =
{
repl_names : (name * (int * int list)) list;
repl_variables : (variable * (int * int list)) list
}
type full_association =
{
std : association;
repl : replicated_association
}
(* JSON data *)
type json =
| JString of string
| JBool of bool
| JInt of int
| JNull
| JObject of (string * json) list
| JList of json list
type json_atomic =
| JAtomVar of variable
| JAtomName of name
| JAtomSymbol of symbol
(* Query result *)
type progression =
| PVerif of int (* Percent *) * int (* Job remaining *)
| PGeneration of int (* Job generated *) * int (* Minimum nb of jobs *)
type query_progression =
| PDistributed of int (* Round *) * progression
| PSingleCore of progression
| PNot_defined
type query_status =
| QCompleted of json_attack_trace option
| QWaiting
| QIn_progress
| QCanceled
| QInternal_error of string
type query_settings =
{
var_set : int; (* Indicate the largest index created for a variable *)
name_set : int; (* Indicate the largest index created for a name *)
symbol_set : Term.Symbol.setting
}
(* We assume that the association contains all variables, names and symbols
occurring in the signature, processes and traces. *)
type query_result =
{
name_query : string;
q_index : int;
q_status : query_status;
q_batch_file : string;
q_run_file : string;
q_start_time : int option;
q_end_time : int option;
association : association;
semantics : semantics;
query_type : equivalence;
processes : json_process list;
settings : query_settings;
progression : query_progression;
memory : int
}
(* Run result *)
type run_batch_status =
| RBInternal_error of string
| RBWaiting
| RBCompleted
| RBIn_progress
| RBCanceled
type run_result =
{
name_run : string;
r_batch_file : string;
r_status : run_batch_status;
input_file : string option;
input_str : string option;
r_start_time : int option;
r_end_time : int option;
query_result_files : string list option;
query_results : query_result list option;
warnings : string list
}
(* Batch result *)
type batch_options =
| Nb_jobs of int option
| Round_timer of int
| Default_semantics of semantics
| Distant_workers of (string * string * int option) list
| Distributed of bool option
| Local_workers of int option
| Quiet
| ShowTrace
| POR of bool
| Title of string
type batch_result =
{
pid : int;
name_batch : string;
b_status : run_batch_status;
b_start_time : int option;
b_end_time : int option;
deepsec_version : string;
git_branch : string;
git_hash : string;
run_result_files : string list option;
run_results : run_result list option;
import_date : int option;
command_options : batch_options list;
command_options_cmp : batch_options list;
ocaml_version : string;
debug : bool
}
(* Simulator types *)
type detail_trace_display =
| DTFull
| DTStandard
| DTIO_only
type available_transition =
| AVDirect of recipe * recipe option * bool (* Indicate whether the recipes are lock or not. *)
| AVComm
| AVEavesdrop of recipe
type available_action =
| AV_output of json_position (* output *) * term * json_position list (* tau actions *) * available_transition list
| AV_input of json_position (* input *) * term * json_position list (* tau actions *) * available_transition list
| AV_bang of json_position (* bang *) * int (* max nb of unfolding *) * json_position list (* tau actions *)
| AV_choice of json_position (* choice *) * json_position list (* tau actions *)
| AV_tau of json_position
type status_static_equivalence =
| Static_equivalent
| Witness_message of recipe * term * int
| Witness_equality of recipe * recipe * term * term * term * int
(* Input Command *)
type input_command =
| Start_run of string list * batch_options list
| Cancel_run of string
| Cancel_query of string
| Cancel_batch
| Get_config
(* Simulator generic command *)
| Die
| Goto_step of int (* id process *) * int (* id step *)
| Next_step_user of json_selected_transition
| Next_steps of json_transition list
(* Simulator: Display of traces *)
| Display_trace of string * int (* Json of query result *)
(* Simulator: Attack_simulator *)
| Attack_simulator of string (* Json of query result *)
(* Simulator: Equivalence_simulator *)
| Equivalence_simulator of string * int (* process Id *)
| ESSelect_trace of int
| ESFind_equivalent_trace
type output_command =
(* Errors *)
| Init_internal_error of string * bool
| Batch_internal_error of string
| User_error of
(string (* Error msg*) * string (* file *) * string list (* warnings *)) list *
(string (* host *) * string (* Error msg *) list) list
| Query_internal_error of string (* Error msg*) * string (* file *)
(* Config *)
| Send_Configuration
(* Started *)
| Batch_started of string * (string * string * string list) list
| Run_started of string * string (* Dps file *)
| Query_started of string * int (* Index of query *)
(* Ended *)
| Batch_ended of string * run_batch_status
| Run_ended of string * run_batch_status
| Query_ended of
string *
query_status *
int (* Index of query *) *
int (* Running time *) *
int (* Memory used. = 0 when not defined *) *
equivalence
(* Exit *)
| Run_canceled of string
| Query_canceled of string
| Batch_canceled of string
(* Progression *)
| Progression of int (* Index of query *) * int (* execution time *) * query_progression * string (* json_file *)
(* Simulator: Generic command *)
Process i d
Process i d
| SFound_equivalent_trace of full_association * json_transition list
| SUser_error of string
| null | https://raw.githubusercontent.com/DeepSec-prover/deepsec/8ddc45ec79de5ec49810302ea7da32d3dc9f46e4/Source/interface/types_ui.mli | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of
file LICENSE
************************************************************************
Configuration
Traces
out
in
out
in
True when the left process is chosen
out
in
out
in
True when the left process is chosen
Association table
JSON data
Query result
Percent
Job remaining
Job generated
Minimum nb of jobs
Round
Indicate the largest index created for a variable
Indicate the largest index created for a name
We assume that the association contains all variables, names and symbols
occurring in the signature, processes and traces.
Run result
Batch result
Simulator types
Indicate whether the recipes are lock or not.
output
tau actions
input
tau actions
bang
max nb of unfolding
tau actions
choice
tau actions
Input Command
Simulator generic command
id process
id step
Simulator: Display of traces
Json of query result
Simulator: Attack_simulator
Json of query result
Simulator: Equivalence_simulator
process Id
Errors
Error msg
file
warnings
host
Error msg
Error msg
file
Config
Started
Dps file
Index of query
Ended
Index of query
Running time
Memory used. = 0 when not defined
Exit
Progression
Index of query
execution time
json_file
Simulator: Generic command | DeepSec
, project PESTO ,
, project PESTO ,
, project PESTO ,
Copyright ( C ) INRIA 2017 - 2020
the GNU General Public License version 3.0 as described in the
open Types
type json_position =
{
js_index : int;
js_args : int list
}
type json_pattern =
| JPVar of variable * int
| JPTuple of symbol * json_pattern list
| JPEquality of term
type json_process =
| JNil
| JOutput of term * term * json_process * json_position
| JInput of term * json_pattern * json_process * json_position
| JIfThenElse of term * term * json_process * json_process * json_position
| JLet of json_pattern * term * json_process * json_process * json_position
| JNew of name * int * json_process * json_position
| JPar of json_process list
| JBang of int * json_process * json_position
| JChoice of json_process * json_process * json_position
type configuration =
{
size_frame : int;
frame : term list;
process : json_process
}
type json_transition =
| JAOutput of recipe * json_position
| JAInput of recipe * recipe * json_position
| JABang of int * json_position
| JATau of json_position
type json_attack_trace =
{
id_proc : int;
transitions : json_transition list
}
type json_selected_transition =
| JSAOutput of string option * json_position
| JSAInput of string option * string option * json_position
| JSABang of int * json_position
| JSATau of json_position
type association =
{
size : int;
symbols : (symbol * int) list;
names : (name * int) list;
variables : (variable * int) list
}
type replicated_association =
{
repl_names : (name * (int * int list)) list;
repl_variables : (variable * (int * int list)) list
}
type full_association =
{
std : association;
repl : replicated_association
}
type json =
| JString of string
| JBool of bool
| JInt of int
| JNull
| JObject of (string * json) list
| JList of json list
type json_atomic =
| JAtomVar of variable
| JAtomName of name
| JAtomSymbol of symbol
type progression =
type query_progression =
| PSingleCore of progression
| PNot_defined
type query_status =
| QCompleted of json_attack_trace option
| QWaiting
| QIn_progress
| QCanceled
| QInternal_error of string
type query_settings =
{
symbol_set : Term.Symbol.setting
}
type query_result =
{
name_query : string;
q_index : int;
q_status : query_status;
q_batch_file : string;
q_run_file : string;
q_start_time : int option;
q_end_time : int option;
association : association;
semantics : semantics;
query_type : equivalence;
processes : json_process list;
settings : query_settings;
progression : query_progression;
memory : int
}
type run_batch_status =
| RBInternal_error of string
| RBWaiting
| RBCompleted
| RBIn_progress
| RBCanceled
type run_result =
{
name_run : string;
r_batch_file : string;
r_status : run_batch_status;
input_file : string option;
input_str : string option;
r_start_time : int option;
r_end_time : int option;
query_result_files : string list option;
query_results : query_result list option;
warnings : string list
}
type batch_options =
| Nb_jobs of int option
| Round_timer of int
| Default_semantics of semantics
| Distant_workers of (string * string * int option) list
| Distributed of bool option
| Local_workers of int option
| Quiet
| ShowTrace
| POR of bool
| Title of string
type batch_result =
{
pid : int;
name_batch : string;
b_status : run_batch_status;
b_start_time : int option;
b_end_time : int option;
deepsec_version : string;
git_branch : string;
git_hash : string;
run_result_files : string list option;
run_results : run_result list option;
import_date : int option;
command_options : batch_options list;
command_options_cmp : batch_options list;
ocaml_version : string;
debug : bool
}
type detail_trace_display =
| DTFull
| DTStandard
| DTIO_only
type available_transition =
| AVComm
| AVEavesdrop of recipe
type available_action =
| AV_tau of json_position
type status_static_equivalence =
| Static_equivalent
| Witness_message of recipe * term * int
| Witness_equality of recipe * recipe * term * term * term * int
type input_command =
| Start_run of string list * batch_options list
| Cancel_run of string
| Cancel_query of string
| Cancel_batch
| Get_config
| Die
| Next_step_user of json_selected_transition
| Next_steps of json_transition list
| ESSelect_trace of int
| ESFind_equivalent_trace
type output_command =
| Init_internal_error of string * bool
| Batch_internal_error of string
| User_error of
| Send_Configuration
| Batch_started of string * (string * string * string list) list
| Batch_ended of string * run_batch_status
| Run_ended of string * run_batch_status
| Query_ended of
string *
query_status *
equivalence
| Run_canceled of string
| Query_canceled of string
| Batch_canceled of string
Process i d
Process i d
| SFound_equivalent_trace of full_association * json_transition list
| SUser_error of string
|
7fe4d85d8c73c12b15af592797696ab42dde0ca0571355969d044129283d8bea | graninas/Functional-Design-and-Architecture | ScenarioDSL5-Listing.hs | module ScenarioDSL4 where
import Data.Time
type Name = String
type Duration = DiffTime
-- service function:
seconds s = secondsToDiffTime s
data Value = FloatValue Float
| IntValue Int
| StringValue String
deriving (Show, Read)
data Controller = Controller Name
deriving (Show, Read)
type Temperature = Float
type Power = Float
type Script = [Procedure]
data Status = Online | Offline
deriving (Eq, Show, Read)
data Procedure
= ReadTemperature Controller (Temperature -> Script)
| Report Value
| Store Value
| AskStatus Controller (Status -> Script)
| InitBoosters (Controller -> Script)
| HeatUpBoosters Power Duration
temperatureToValue :: Temperature -> Value
temperatureToValue t = FloatValue t
heatUp :: Controller -> Script
heatUp controller = [
ReadTemperature controller (\t1 ->
[ Report (temperatureToValue t1)
, Store (temperatureToValue t1)
, HeatUpBoosters 1.0 (seconds 10)
, ReadTemperature controller (\t2 ->
[ Report (temperatureToValue t2)
, Store (temperatureToValue t2) ])
])
]
tryHeatUp :: Controller -> Script
tryHeatUp controller = [
AskStatus controller (\status ->
if (status == Online)
then heatUp controller
else []
)]
boostersHeatingUp :: Script
boostersHeatingUp =
[ InitBoosters tryHeatUp
, Report (StringValue "Script finished.") ]
someController = Controller "a00"
scriptInterpreter :: Script -> IO ()
scriptInterpreter [] = return ()
scriptInterpreter (p:ps) = do
interpret p
scriptInterpreter ps
interpret :: Procedure -> IO ()
interpret (ReadTemperature _ f) = do
print "Read temperature"
scriptInterpreter (f 0.0)
interpret (Report v) = print $ "Report: " ++ show v
interpret (Store v) = print $ "Store: " ++ show v
interpret (AskStatus _ f) = do
print "Ask status"
scriptInterpreter (f Online)
interpret (InitBoosters f) = do
print "Init boosters"
scriptInterpreter (f (Controller "a00"))
interpret (HeatUpBoosters _ _) = print "Heat up boosters"
mock :: Procedure -> IO ()
mock (ReadTemperature _ f) = do
print "Read temperature"
eval (f 0.0)
mock (Report _) = print "Report value"
mock (Store _) = print "Store value"
mock (AskStatus _ f) = do
print "Ask status"
eval (f Online)
mock (InitBoosters f) = do
print "Init boosters"
eval (f (Controller "a00"))
mock (HeatUpBoosters _ _) = print "Heat up boosters"
main = interpretScript boostersHeatingUp
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/1736abc16d3e4917fc466010dcc182746af2fd0e/First-Edition/BookSamples/CH02/ScenarioDSL5-Listing.hs | haskell | service function: | module ScenarioDSL4 where
import Data.Time
type Name = String
type Duration = DiffTime
seconds s = secondsToDiffTime s
data Value = FloatValue Float
| IntValue Int
| StringValue String
deriving (Show, Read)
data Controller = Controller Name
deriving (Show, Read)
type Temperature = Float
type Power = Float
type Script = [Procedure]
data Status = Online | Offline
deriving (Eq, Show, Read)
data Procedure
= ReadTemperature Controller (Temperature -> Script)
| Report Value
| Store Value
| AskStatus Controller (Status -> Script)
| InitBoosters (Controller -> Script)
| HeatUpBoosters Power Duration
temperatureToValue :: Temperature -> Value
temperatureToValue t = FloatValue t
heatUp :: Controller -> Script
heatUp controller = [
ReadTemperature controller (\t1 ->
[ Report (temperatureToValue t1)
, Store (temperatureToValue t1)
, HeatUpBoosters 1.0 (seconds 10)
, ReadTemperature controller (\t2 ->
[ Report (temperatureToValue t2)
, Store (temperatureToValue t2) ])
])
]
tryHeatUp :: Controller -> Script
tryHeatUp controller = [
AskStatus controller (\status ->
if (status == Online)
then heatUp controller
else []
)]
boostersHeatingUp :: Script
boostersHeatingUp =
[ InitBoosters tryHeatUp
, Report (StringValue "Script finished.") ]
someController = Controller "a00"
scriptInterpreter :: Script -> IO ()
scriptInterpreter [] = return ()
scriptInterpreter (p:ps) = do
interpret p
scriptInterpreter ps
interpret :: Procedure -> IO ()
interpret (ReadTemperature _ f) = do
print "Read temperature"
scriptInterpreter (f 0.0)
interpret (Report v) = print $ "Report: " ++ show v
interpret (Store v) = print $ "Store: " ++ show v
interpret (AskStatus _ f) = do
print "Ask status"
scriptInterpreter (f Online)
interpret (InitBoosters f) = do
print "Init boosters"
scriptInterpreter (f (Controller "a00"))
interpret (HeatUpBoosters _ _) = print "Heat up boosters"
mock :: Procedure -> IO ()
mock (ReadTemperature _ f) = do
print "Read temperature"
eval (f 0.0)
mock (Report _) = print "Report value"
mock (Store _) = print "Store value"
mock (AskStatus _ f) = do
print "Ask status"
eval (f Online)
mock (InitBoosters f) = do
print "Init boosters"
eval (f (Controller "a00"))
mock (HeatUpBoosters _ _) = print "Heat up boosters"
main = interpretScript boostersHeatingUp
|
19877dd62ea2f0f8927789875f595f80bb0c576271421788af36fd95b069b8cc | scrintal/heroicons-reagent | ellipsis_vertical.cljs | (ns com.scrintal.heroicons.outline.ellipsis-vertical)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M12 6.75a.75.75 0 110-1.5.75.75 0 010 1.5zM12 12.75a.75.75 0 110-1.5.75.75 0 010 1.5zM12 18.75a.75.75 0 110-1.5.75.75 0 010 1.5z"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/outline/ellipsis_vertical.cljs | clojure | (ns com.scrintal.heroicons.outline.ellipsis-vertical)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M12 6.75a.75.75 0 110-1.5.75.75 0 010 1.5zM12 12.75a.75.75 0 110-1.5.75.75 0 010 1.5zM12 18.75a.75.75 0 110-1.5.75.75 0 010 1.5z"}]]) | |
690cd0b0aa53b4d3fa9af4ffd36a9f4153a44761fddc3c7bc08e33dc4c25bd7d | erlang/otp | mnesia_test_lib.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2023 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
Author :
%%% Purpose: Test case support library
%%%
This test suite may be run as a part of the Grand Test Suite
of Erlang . The test suite is structured in a hierarchy .
Each test case is implemented as an exported function with arity 1 .
%%% Test case identifiers must have the following syntax: {Module, Function}.
%%%
The driver of the test suite runs in two passes as follows :
first the test case function is invoked with the atom ' suite ' as
%%% single argument. The returned value is treated as a list of sub
%%% test cases. If the list of sub test cases is [] the test case
%%% function is invoked again, this time with a list of nodes as
%%% argument. If the list of sub test cases is not empty, the test
%%% case driver applies the algorithm recursively on each element
%%% in the list.
%%%
%%% All test cases are written in such a manner
that they start to invoke ? , Config )
%%% in order to prepare the test case execution. When that is
%%% done, the test machinery ensures that at least X number
%%% of nodes are connected to each other. If too few nodes was
%%% specified in the Config, the test case is skipped. If there
%%% was enough node names in the Config, X of them are selected
%%% and if some of them happens to be down they are restarted
%%% via the peer module. When all nodes are up and running a
disk resident schema is created on all nodes and is
%%% started a on all nodes. This means that all test cases may
assume that is up and running on all acquired nodes .
%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%
%%% doc(TestCases)
%%%
%%% Generates a test spec from parts of the test case structure
%%%
%%% struct(TestCases)
%%%
%%% Prints out the test case structure
%%%
%%% test(TestCases)
%%%
%%% Run parts of the test suite. Uses test/2.
%%% Reads Config from mnesia_test.config and starts them if necessary.
and wipes out the directories as a starter .
%%%
%%% test(TestCases, Config)
%%%
%%% Run parts of the test suite on the given Nodes,
%%% assuming that the nodes are up and running.
and wipes out the directories as a starter .
%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-module(mnesia_test_lib).
-author('').
-export([
log/2,
log/4,
verbose/4,
default_config/0,
diskless/1,
eval_test_case/3,
test_driver/2,
test_case_evaluator/3,
activity_evaluator/1,
flush/0,
pick_msg/0,
start_activities/1,
start_transactions/1,
start_transactions/2,
start_sync_transactions/1,
start_sync_transactions/2,
sync_trans_tid_serial/1,
prepare_test_case/5,
select_nodes/4,
init_nodes/3,
error/4,
node_sup/0,
start_mnesia/1,
start_mnesia/2,
start_appls/2,
start_appls/3,
start_wait/2,
storage_type/2,
stop_mnesia/1,
stop_appls/2,
sort/1,
kill_mnesia/1,
kill_appls/2,
verify_mnesia/4,
shutdown/0,
verify_replica_location/5,
lookup_config/2,
sync_tables/2,
remote_start/3,
remote_stop/1,
remote_kill/1,
reload_appls/2,
remote_activate_debug_fun/6,
do_remote_activate_debug_fun/6,
test/1,
test/2,
doc/1,
struct/1,
init_per_testcase/2,
end_per_testcase/2,
kill_tc/2
]).
-include("mnesia_test_lib.hrl").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% included for test server compatibility
%% assume that all test cases only takes Config as sole argument
%% Test-server compatibility hook run before every test case.
%% Clears all mnesia application environment variables (except
%% included_applications) so no configuration leaks between cases, and
%% registers this group leader globally so remote nodes can log here.
init_per_testcase(_Func, Config) ->
    Env = application:get_all_env(mnesia),
    %% unset_env/3 option: wait indefinitely for the application controller.
    [application:unset_env(mnesia, Key, [{timeout, infinity}]) ||
        {Key, _} <- Env, Key /= included_applications],
    global:register_name(mnesia_global_logger, group_leader()),
    Config.
%% Test-server compatibility hook run after every test case: undo the
%% global logger registration made in init_per_testcase/2.
%% The kill_mnesia multicall below is intentionally disabled (it had
%% lost its comment markers and was syntactically invalid); kept as a
%% comment for reference -- NOTE(review): reconstructed wording, confirm
%% against upstream mnesia_test_lib.erl.
end_per_testcase(_Func, Config) ->
    global:unregister_name(mnesia_global_logger),
    %% Nodes = select_nodes(all, Config, ?FILE, ?LINE),
    %% rpc:multicall(Nodes, ?MODULE, kill_mnesia, [Nodes]),
    Config.
Use ? log(Format , ) as wrapper
%% Prefix the format string with "File(Line): " (basename only) and
%% forward the message to log/2.
log(Format, Args, LongFile, Line) ->
    Base = filename:basename(LongFile),
    Prefix = lists:concat([Base, "(", Line, ")", ": "]),
    log(Prefix ++ Format, Args).
%% Emit a formatted message to the globally registered test logger when
%% one exists (set up by init_per_testcase/2); otherwise fall back to
%% the local 'user' device.
log(Format, Args) ->
    case global:whereis_name(mnesia_global_logger) of
        undefined ->
            io:format(user, Format, Args);
        Pid ->
            io:format(Pid, Format, Args)
    end.
verbose(Format, Args, File, Line) ->
Arg = mnesia_test_verbose,
case get(Arg) of
false ->
ok;
true ->
log(Format, Args, File, Line);
undefined ->
case init:get_argument(Arg) of
{ok, List} when is_list(List) ->
case lists:last(List) of
["true"] ->
put(Arg, true),
log(Format, Args, File, Line);
_ ->
put(Arg, false),
ok
end;
_ ->
put(Arg, false),
ok
end
end.
-record('REASON', {file, line, desc}).
error(Format, Args, File, Line) ->
global:send(mnesia_global_logger, {failed, File, Line}),
Fail = #'REASON'{file = filename:basename(File),
line = Line,
desc = Args},
case global:whereis_name(mnesia_test_case_sup) of
undefined ->
ignore;
Pid ->
Pid ! Fail
%% global:send(mnesia_test_case_sup, Fail),
end,
log("<>ERROR<>~n" ++ Format, Args, File, Line).
%% Choose the table storage type for the current run: diskless test
%% configurations always use ram_copies, otherwise the caller-supplied
%% Default is used unchanged.
storage_type(Default, Config) ->
    DisklessRun = diskless(Config),
    if
        DisklessRun -> ram_copies;
        true -> Default
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
default_config() ->
[{nodes, default_nodes()}].
default_nodes() ->
mk_nodes(3, []).
%% Extend Nodes with N generated node names. The names are derived from
%% the first node in the list (or node() when the list is empty):
%% name1@host .. nameN@host are appended after the existing entries.
mk_nodes(0, Nodes) ->
    Nodes;
mk_nodes(N, []) ->
    mk_nodes(N - 1, [node()]);
mk_nodes(N, Nodes) when N > 0 ->
    Head = hd(Nodes),
    [Name, Host] = node_to_name_and_host(Head),
    Nodes ++ [mk_node(I, Name, Host) || I <- lists:seq(1, N)].
%% Build the node name atom "Name<N>@Host", e.g. mk_node(2, "a", "h")
%% yields 'a2@h'. (The original wrapped the already-concatenated string
%% in a redundant lists:concat/1 call.)
mk_node(N, Name, Host) ->
    list_to_atom(Name ++ integer_to_list(N) ++ "@" ++ Host).
node_start_link(Host, Name) ->
node_start_link(Host, Name, 10).
node_start_link(Host, Name, Retries) ->
Debug = atom_to_list(mnesia:system_info(debug)),
Args = ["-mnesia", "debug", Debug,
"-pa", filename:dirname(code:which(?MODULE)),
"-pa", filename:dirname(code:which(mnesia))],
case starter(Host, Name, Args) of
{ok, NewNode} ->
?match(pong, net_adm:ping(NewNode)),
{ok, Cwd} = file:get_cwd(),
Path = code:get_path(),
ok = rpc:call(NewNode, file, set_cwd, [Cwd]),
true = rpc:call(NewNode, code, set_path, [Path]),
ok = rpc:call(NewNode, error_logger, tty, [false]),
spawn_link(NewNode, ?MODULE, node_sup, []),
rpc:multicall([node() | nodes()], global, sync, []),
{ok, NewNode};
{error, Reason} when Retries == 0->
{error, Reason};
{error, Reason} ->
io:format("Could not start node ~p ~p retrying~n",
[{Host, Name, Args}, Reason]),
timer:sleep(500),
node_start_link(Host, Name, Retries - 1)
end.
%% Launch a fresh peer node Name@Host with the given extra command line
%% arguments. Returns {ok, Node}; a start failure crashes with badmatch,
%% which the caller (node_start_link/3) treats via its retry logic.
starter(Host, Name, Args) ->
    {ok, _, Node} = peer:start(#{host => Host, name => Name, args => Args}),
    {ok, Node}.
%% Minimal watchdog process spawned on every slave node: traps exits
%% and terminates quietly on the first 'EXIT' message, so the linked
%% node chain is torn down in a controlled way when a test dies.
node_sup() ->
    process_flag(trap_exit, true),
    receive
        {'EXIT', _, _} ->
            ignore
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Index the test case structure
doc(TestCases) when is_list(TestCases) ->
test(TestCases, suite),
SuiteFname = "index.html",
io:format("Generating HTML test specification to file: ~s~n",
[SuiteFname]),
{ok, Fd} = file:open(SuiteFname, [write]),
io:format(Fd, "<TITLE>Test specification for ~p</TITLE>.~n", [TestCases]),
io:format(Fd, "<H1>Test specification for ~p</H1>~n", [TestCases]),
io:format(Fd, "Test cases which not are implemented yet are written in <B>bold face</B>.~n~n", []),
io:format(Fd, "<BR><BR>~n", []),
io:format(Fd, "~n<DL>~n", []),
do_doc(Fd, TestCases, []),
io:format(Fd, "</DL>~n", []),
file:close(Fd);
doc(TestCases) ->
doc([TestCases]).
do_doc(Fd, [H | T], List) ->
case H of
{Module, TestCase} when is_atom(Module), is_atom(TestCase) ->
do_doc(Fd, Module, TestCase, List);
TestCase when is_atom(TestCase), List == [] ->
do_doc(Fd, mnesia_SUITE, TestCase, List);
TestCase when is_atom(TestCase) ->
do_doc(Fd, hd(List), TestCase, List)
end,
do_doc(Fd, T, List);
do_doc(_, [], _) ->
ok.
do_doc(Fd, Module, TestCase, List) ->
case get_suite(Module, TestCase) of
[] ->
%% Implemented leaf test case
Head = ?flat_format("<A HREF=~p.html#~p_1>{~p, ~p}</A>}",
[Module, TestCase, Module, TestCase]),
print_doc(Fd, Module, TestCase, Head);
Suite when is_list(Suite) ->
%% Test suite
Head = ?flat_format("{~p, ~p}", [Module, TestCase]),
print_doc(Fd, Module, TestCase, Head),
io:format(Fd, "~n<DL>~n", []),
do_doc(Fd, Suite, [Module | List]),
io:format(Fd, "</DL>~n", []);
'NYI' ->
%% Not yet implemented
Head = ?flat_format("<B>{~p, ~p}</B>", [Module, TestCase]),
print_doc(Fd, Module, TestCase, Head)
end.
print_doc(Fd, Mod, Fun, Head) ->
case catch (apply(Mod, Fun, [doc])) of
{'EXIT', _} ->
io:format(Fd, "<DT>~s</DT>~n", [Head]);
Doc when is_list(Doc) ->
io:format(Fd, "<DT><U>~s</U><BR><DD>~n", [Head]),
print_rows(Fd, Doc),
io:format(Fd, "</DD><BR><BR>~n", [])
end.
print_rows(_Fd, []) ->
ok;
print_rows(Fd, [H | T]) when is_list(H) ->
io:format(Fd, "~s~n", [H]),
print_rows(Fd, T);
print_rows(Fd, [H | T]) when is_integer(H) ->
io:format(Fd, "~s~n", [[H | T]]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Show the test case structure
struct(TestCases) ->
T = test(TestCases, suite),
struct(T, "").
struct({Module, TestCase}, Indentation)
when is_atom(Module), is_atom(TestCase) ->
log("~s{~p, ~p} ...~n", [Indentation, Module, TestCase]);
struct({Module, TestCase, Other}, Indentation)
when is_atom(Module), is_atom(TestCase) ->
log("~s{~p, ~p} ~p~n", [Indentation, Module, TestCase, Other]);
struct([], _) ->
ok;
struct([TestCase | TestCases], Indentation) ->
struct(TestCase, Indentation),
struct(TestCases, Indentation);
struct({TestCase, []}, Indentation) ->
struct(TestCase, Indentation);
struct({TestCase, SubTestCases}, Indentation) when is_list(SubTestCases) ->
struct(TestCase, Indentation),
struct(SubTestCases, Indentation ++ " ").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Execute the test cases
%% Run TestCases with an empty configuration. See test/2.
test(TestCases) ->
    test(TestCases, []).
test(TestCases, suite) when is_list(TestCases) ->
test_driver(TestCases, suite);
test(TestCases, Config) when is_list(TestCases) ->
D1 = lists:duplicate(10, $=),
D2 = lists:duplicate(10, $ ),
log("~n~s TEST CASES: ~p~n ~sCONFIG: ~p~n~n", [D1, TestCases, D2, Config]),
test_driver(TestCases, Config);
test(TestCase, Config) ->
test([TestCase], Config).
test_driver([], _Config) ->
[];
test_driver([T|TestCases], Config) ->
L1 = test_driver(T, Config),
L2 = test_driver(TestCases, Config),
[L1|L2];
test_driver({Module, TestCases}, Config) when is_list(TestCases)->
test_driver(default_module(Module, TestCases), Config);
test_driver({Module, all}, Config) ->
get_suite(Module, all, Config);
test_driver({Module, G={group, _}}, Config) ->
get_suite(Module, G, Config);
test_driver({_, {group, Module, Group}}, Config) ->
get_suite(Module, {group, Group}, Config);
test_driver({Module, TestCase}, Config) ->
Sec = timer:seconds(1) * 1000,
case Config of
suite ->
{Module, TestCase, 'IMPL'};
_ ->
log("Eval test case: ~w~n", [{Module, TestCase}]),
try timer:tc(?MODULE, eval_test_case, [Module, TestCase, Config]) of
{T, Res} ->
log("Tested ~w in ~w sec~n", [TestCase, T div Sec]),
{T div Sec, Res}
catch error:function_clause ->
log("<WARNING> Test case ~w NYI~n", [{Module, TestCase}]),
{0, {skip, {Module, TestCase}, "NYI"}}
end
end;
test_driver(TestCase, Config) ->
DefaultModule = mnesia_SUITE,
log("<>WARNING<> Missing module in test case identifier. "
"{~w, ~w} assumed~n", [DefaultModule, TestCase]),
test_driver({DefaultModule, TestCase}, Config).
%% Qualify bare test case identifiers with DefaultModule. Group markers
%% {group, G} are qualified as {DefaultModule, {group, G}}; any other
%% two-tuple is assumed to already be {Module, Case} and kept as-is.
default_module(DefaultModule, TestCases) when is_list(TestCases) ->
    Qualify = fun({group, _} = G) -> {true, {DefaultModule, G}};
                 ({_, _})         -> true;
                 (Case)           -> {true, {DefaultModule, Case}}
              end,
    lists:zf(Qualify, TestCases).
get_suite(Module, TestCase, Config) ->
case get_suite(Module, TestCase) of
Suite when is_list(Suite), Config == suite ->
Res = test_driver(default_module(Module, Suite), Config),
{{Module, TestCase}, Res};
Suite when is_list(Suite) ->
log("Expand test case ~w~n", [{Module, TestCase}]),
Def = default_module(Module, Suite),
{T, Res} = timer:tc(?MODULE, test_driver, [Def, Config]),
Sec = timer:seconds(1) * 1000,
{T div Sec, {{Module, TestCase}, Res}};
'NYI' when Config == suite ->
{Module, TestCase, 'NYI'};
'NYI' ->
log("<WARNING> Test case ~w NYI~n", [{Module, TestCase}]),
{0, {skip, {Module, TestCase}, "NYI"}}
end.
Returns a list ( possibly empty ) or the atom ' NYI '
%% Expand a test case identifier into its list of sub test cases.
%% Returns a (possibly empty) list, or the atom 'NYI' when the group or
%% suite is not implemented in Mod.
get_suite(Mod, {group, Suite}) ->
    try
        Groups = Mod:groups(),
        {_, _, TCList} = lists:keyfind(Suite, 1, Groups),
        TCList
    catch
        %% Missing groups/0 or unknown group name: report and mark NYI.
        _:Reason:Stacktrace ->
            io:format("Not implemented ~p ~p (~p ~p)~n",
                      [Mod,Suite,Reason,Stacktrace]),
            'NYI'
    end;
get_suite(Mod, all) ->
    case catch (apply(Mod, all, [])) of
        {'EXIT', _} -> 'NYI';
        List when is_list(List) -> List
    end;
%% Leaf test case: nothing to expand.
get_suite(_Mod, _Fun) ->
    [].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Evaluate one test case in a fresh linked process while this process
%% acts as mnesia_test_case_sup, so error/4 reports from any node reach
%% us. The mailbox is drained first, and the trap_exit flag is restored
%% before the evaluator's verdict is returned.
eval_test_case(Mod, Fun, Config) ->
    flush(),
    global:register_name(mnesia_test_case_sup, self()),
    Flag = process_flag(trap_exit, true),
    Pid = spawn_link(?MODULE, test_case_evaluator, [Mod, Fun, [Config]]),
    R = wait_for_evaluator(Pid, Mod, Fun, Config),
    global:unregister_name(mnesia_test_case_sup),
    process_flag(trap_exit, Flag),
    R.
%% Drain the calling process' mailbox, returning all pending messages
%% in arrival order without blocking.
flush() ->
    flush_acc([]).

%% Tail-recursive worker for flush/0; accumulates in reverse.
flush_acc(Acc) ->
    receive
        Msg -> flush_acc([Msg | Acc])
    after 0 ->
        lists:reverse(Acc)
    end.
wait_for_evaluator(Pid, Mod, Fun, Config) ->
receive
{'EXIT', Pid, {test_case_ok, _PidRes}} ->
Errors = flush(),
Res =
case Errors of
[] -> ok;
Errors -> failed
end,
{Res, {Mod, Fun}, Errors};
{'EXIT', Pid, {skipped, Reason}} ->
log("<WARNING> Test case ~w skipped, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:end_per_testcase(Fun, Config),
{skip, {Mod, Fun}, Reason};
{'EXIT', Pid, Reason} ->
log("<>ERROR<> Eval process ~w exited, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:end_per_testcase(Fun, Config),
{crash, {Mod, Fun}, Reason}
end.
test_case_evaluator(Mod, Fun, [Config]) ->
NewConfig = Mod:init_per_testcase(Fun, Config),
try
R = apply(Mod, Fun, [NewConfig]),
Mod:end_per_testcase(Fun, NewConfig),
exit({test_case_ok, R})
catch error:function_clause ->
exit({skipped, 'NYI'})
end.
%% Entry point for an activity evaluator process (spawned by
%% start_activities/1): run the command loop on behalf of Coordinator
%% and always exit normally afterwards.
activity_evaluator(Coordinator) ->
    activity_evaluator_loop(Coordinator),
    exit(normal).
%% Command loop of an activity evaluator process. Reacts to messages
%% from the coordinating test process:
%%   begin_trans / {begin_trans, MaxRetries} - enter a mnesia transaction,
%%   end_trans                               - leave the current scope,
%%   Fun (a fun of arity 0)                  - evaluate, reply with result.
%% Anything else is fatal and terminates the process with that term.
%% (The two disabled clauses and the ?error call below had lost their
%% comment markers / been garbled; restored here -- NOTE(review):
%% reconstructed format string, confirm against upstream.)
activity_evaluator_loop(Coordinator) ->
    receive
        begin_trans ->
            transaction(Coordinator, 0);
        {begin_trans, MaxRetries} ->
            transaction(Coordinator, MaxRetries);
        end_trans ->
            end_trans;
        Fun when is_function(Fun) ->
            Coordinator ! {self(), Fun()},
            activity_evaluator_loop(Coordinator);
%%      {'EXIT', Coordinator, Reason} ->
%%          Reason;
        ExitExpr ->
            ?error("activity_evaluator_loop ~p ~p: ~p~n",
                   [Coordinator, self(), ExitExpr]),
            exit(ExitExpr)
    end.
%% Run a mnesia transaction whose body first announces begin_trans to
%% the Coordinator and then re-enters the command loop, so the
%% coordinator can drive arbitrary work inside the transaction context.
%% The transaction result is reported back before resuming the outer
%% command loop.
transaction(Coordinator, MaxRetries) ->
    Fun = fun() ->
              Coordinator ! {self(), begin_trans},
              activity_evaluator_loop(Coordinator)
          end,
    Coordinator ! {self(), mnesia:transaction(Fun, MaxRetries)},
    activity_evaluator_loop(Coordinator).
%% Wait up to four seconds for the next mailbox message; return the
%% atom 'timeout' if none arrives in time.
pick_msg() ->
    receive
        Any -> Any
    after timer:seconds(4) ->
        timeout
    end.
%% Spawn one linked activity evaluator process on each node in Nodes.
%% Returns {success, Pids} with the pids in the same order as Nodes.
start_activities(Nodes) ->
    Fun = fun(N) -> spawn_link(N, ?MODULE, activity_evaluator, [self()]) end,
    Pids = mapl(Fun, Nodes),
    {success, Pids}.
%% Apply Fun to every element of List left-to-right, like lists:map/2.
mapl(Fun, List) ->
    [Fun(Elem) || Elem <- List].
%% True when the test Config explicitly marks this run as diskless
%% (ram-only schema); false otherwise.
diskless(Config) ->
    case lists:keyfind(diskless, 1, Config) of
        {diskless, true} -> true;
        _Other -> false
    end.
start_transactions(Pids) ->
Fun = fun(Pid) ->
Pid ! begin_trans,
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
start_sync_transactions(Pids) ->
Nodes = [node(Pid) || Pid <- Pids],
Fun = fun(Pid) ->
sync_trans_tid_serial(Nodes),
Pid ! begin_trans,
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
start_transactions(Pids, MaxRetries) ->
Fun = fun(Pid) ->
Pid ! {begin_trans, MaxRetries},
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
start_sync_transactions(Pids, MaxRetries) ->
Nodes = [node(Pid) || Pid <- Pids],
Fun = fun(Pid) ->
sync_trans_tid_serial(Nodes),
Pid ! {begin_trans, MaxRetries},
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
%% Synchronize the transaction tid serial on all Nodes by taking a
%% write lock on the schema table inside a dummy transaction.
sync_trans_tid_serial(Nodes) ->
    Fun = fun() -> mnesia:write_lock_table(schema) end,
    rpc:multicall(Nodes, mnesia, transaction, [Fun]).
%% Pick N nodes from Config without performing any preparation actions.
select_nodes(N, Config, File, Line) ->
    prepare_test_case([], N, Config, File, Line).
prepare_test_case(Actions, N, Config, File, Line) ->
NodeList1 = lookup_config(nodes, Config),
NodeList2 = lookup_config(nodenames, Config), %% For testserver
NodeList3 = append_unique(NodeList1, NodeList2),
This = node(),
All = [This | lists:delete(This, NodeList3)],
Selected = pick_nodes(N, All, File, Line),
case diskless(Config) of
true ->
ok;
false ->
rpc:multicall(Selected, application, set_env,[mnesia, schema_location, opt_disc])
end,
do_prepare(Actions, Selected, All, Config, File, Line).
do_prepare([], Selected, _All, _Config, _File, _Line) ->
Selected;
do_prepare([{init_test_case, Appls} | Actions], Selected, All, Config, File, Line) ->
set_kill_timer(Config),
Started = init_nodes(Selected, File, Line),
All2 = append_unique(Started, All),
Alive = mnesia_lib:intersect(nodes() ++ [node()], All2),
kill_appls(Appls, Alive),
process_flag(trap_exit, true),
do_prepare(Actions, Started, All2, Config, File, Line);
do_prepare([delete_schema | Actions], Selected, All, Config, File, Line) ->
Alive = mnesia_lib:intersect(nodes() ++ [node()], All),
case diskless(Config) of
true ->
skip;
false ->
Del = fun(Node) ->
case mnesia:delete_schema([Node]) of
ok -> ok;
{error, {"All nodes not running",_}} ->
ok;
Else ->
?log("Delete schema error ~p ~n", [Else])
end
end,
lists:foreach(Del, Alive)
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([create_schema | Actions], Selected, All, Config, File, Line) ->
Ext = proplists:get_value(default_properties, Config, ?BACKEND),
case diskless(Config) of
true ->
rpc:multicall(Selected, application, set_env, [mnesia, schema, Ext]),
skip;
_Else ->
case mnesia:create_schema(Selected, Ext) of
ok ->
ignore;
BadNodes ->
?fatal("Cannot create Mnesia schema on ~p~n", [BadNodes])
end
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([{start_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
case start_appls(Appls, Selected, Config) of
[] -> ok;
Bad -> ?fatal("Cannot start appls ~p: ~p~n", [Appls, Bad])
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([{reload_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
reload_appls(Appls, Selected),
do_prepare(Actions, Selected, All, Config, File, Line).
set_kill_timer(Config) ->
case init:get_argument(mnesia_test_timeout) of
{ok, _ } -> ok;
_ ->
Time0 =
case lookup_config(tc_timeout, Config) of
[] -> timer:minutes(5);
ConfigTime when is_integer(ConfigTime) -> ConfigTime
end,
Mul = try
test_server:timetrap_scale_factor()
catch _:_ -> 1 end,
(catch test_server:timetrap(Mul*Time0 + 1000)),
spawn_link(?MODULE, kill_tc, [self(),Time0*Mul])
end.
%% Watchdog companion spawned by set_kill_timer/1: sleep for Time ms
%% and, if the test case process Pid is still alive, log the timeout,
%% collect core dumps from all nodes and kill the test case.
%% (The crash-dump generation code below had lost its comment markers
%% and was syntactically invalid; restored as the disabled comment it
%% was in the original -- NOTE(review): confirm against upstream.)
kill_tc(Pid, Time) ->
    receive
    after Time ->
        case process_info(Pid) of
            undefined ->  ok;
            _ ->
                ?error("Watchdog in test case timed out "
                       "in ~p min~n", [Time div (1000*60)]),
                Files = mnesia_lib:dist_coredump(),
                ?log("Cores dumped to:~n ~p~n", [Files]),
                %% Generate erlang crashdumps.
                %% GenDump = fun(Node) ->
                %%     File = "CRASH_" ++ atom_to_list(Node) ++ ".dump",
                %%     rpc:call(Node, os, putenv, ["ERL_CRASH_DUMP", File]),
                %%     rpc:cast(Node, erlang, halt, ["RemoteTimeTrap"])
                %% end,
                %% [GenDump(Node) || Node <- nodes()],
                %% erlang:halt("DebugTimeTrap"),
                exit(Pid, kill)
        end
    end.
%% Prepend the elements of New that are not already members of List,
%% preserving their relative order; List itself forms the tail.
append_unique(New, List) ->
    [H || H <- New, not lists:member(H, List)] ++ List.
pick_nodes(all, Nodes, File, Line) ->
pick_nodes(length(Nodes), Nodes, File, Line);
pick_nodes(N, [H | T], File, Line) when N > 0 ->
[H | pick_nodes(N - 1, T, File, Line)];
pick_nodes(0, _Nodes, _File, _Line) ->
[];
pick_nodes(N, [], File, Line) ->
?skip("Test case (~p(~p)) ignored: ~p nodes missing~n",
[File, Line, N]).
init_nodes([Node | Nodes], File, Line) ->
case net_adm:ping(Node) of
pong ->
[Node | init_nodes(Nodes, File, Line)];
pang ->
[Name, Host] = node_to_name_and_host(Node),
case node_start_link(Host, Name) of
{ok, Node1} ->
Path = code:get_path(),
true = rpc:call(Node1, code, set_path, [Path]),
[Node1 | init_nodes(Nodes, File, Line)];
Other ->
?skip("Test case (~p(~p)) ignored: cannot start node ~p: ~p~n",
[File, Line, Node, Other])
end
end;
init_nodes([], _File, _Line) ->
[].
%% Returns [Name, Host]
%% Split a node name atom 'name@host' into ["name", "host"].
node_to_name_and_host(Node) ->
    string:lexemes(atom_to_list(Node), "@").
%% Fetch the value stored under Key in the Config proplist; return []
%% when the key is absent.
lookup_config(Key, Config) ->
    case lists:keyfind(Key, 1, Config) of
        {Key, Val} -> Val;
        false -> []
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
start_appls(Appls, Nodes) ->
start_appls(Appls, Nodes, [], [schema]).
start_appls(Appls, Nodes, Config) ->
start_appls(Appls, Nodes, Config, [schema]).
start_appls([Appl | Appls], Nodes, Config, Tabs) ->
{Started, BadStarters} =
rpc:multicall(Nodes, ?MODULE, remote_start, [Appl, Config, Nodes]),
BadS = [{Node, Appl, Res} || {Node, Res} <- Started, Res /= ok],
BadN = [{BadNode, Appl, bad_start} || BadNode <- BadStarters],
Bad = BadS ++ BadN,
case Appl of
mnesia when Bad == [] ->
sync_tables(Nodes, Tabs);
_ ->
ignore
end,
Bad ++ start_appls(Appls, Nodes, Config, Tabs);
start_appls([], _Nodes, _Config, _Tabs) ->
[].
remote_start(mnesia, Config, Nodes) ->
case diskless(Config) of
true ->
application_controller:set_env(mnesia,
extra_db_nodes,
Nodes -- [node()]),
application_controller:set_env(mnesia,
schema_location,
ram);
false ->
application_controller:set_env(mnesia,
schema_location,
opt_disc),
ignore
end,
{node(), mnesia:start()};
remote_start(Appl, _Config, _Nodes) ->
Res =
case application:start(Appl) of
{error, {already_started, Appl}} ->
ok;
Other ->
Other
end,
{node(), Res}.
%% Start Mnesia on all given nodes and wait for specified
%% tables to be accessible on each node. The atom all means
%% that we should wait for all tables to be loaded
%%
Returns a list of error tuples { BadNode , , Reason }
%% Start Mnesia on all Nodes. Returns a list of {BadNode, mnesia, Reason}
%% tuples for nodes where the start failed.
start_mnesia(Nodes) ->
    start_appls([mnesia], Nodes).

%% As start_mnesia/1, but additionally wait for Tabs to become
%% accessible on every node.
start_mnesia(Nodes, Tabs) when is_list(Nodes) ->
    start_appls([mnesia], Nodes, [], Tabs).
%% Wait for the tables to be accessible from all nodes in the list
%% and that all nodes are aware of that the other nodes also ...
sync_tables(Nodes, Tabs) ->
Res = send_wait(Nodes, Tabs, []),
if
Res == [] ->
mnesia:transaction(fun() -> mnesia:write_lock_table(schema) end),
Res;
true ->
Res
end.
send_wait([Node | Nodes], Tabs, Pids) ->
Pid = spawn_link(Node, ?MODULE, start_wait, [self(), Tabs]),
send_wait(Nodes, Tabs, [Pid | Pids]);
send_wait([], _Tabs, Pids) ->
rec_wait(Pids, []).
rec_wait([Pid | Pids], BadRes) ->
receive
{'EXIT', Pid, R} ->
rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes]);
{Pid, ok} ->
rec_wait(Pids, BadRes);
{Pid, {error, R}} ->
rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes])
end;
rec_wait([], BadRes) ->
BadRes.
start_wait(Coord, Tabs) ->
process_flag(trap_exit, true),
Mon = whereis(mnesia_monitor),
case catch link(Mon) of
{'EXIT', _} ->
unlink(Coord),
Coord ! {self(), {error, {node_not_running, node()}}};
_ ->
Res = start_wait_loop(Tabs),
unlink(Mon),
unlink(Coord),
Coord ! {self(), Res}
end.
start_wait_loop(Tabs) ->
receive
{'EXIT', Pid, Reason} ->
{error, {start_wait, Pid, Reason}}
after 0 ->
case mnesia:wait_for_tables(Tabs, timer:seconds(30)) of
ok ->
verify_nodes(Tabs);
{timeout, BadTabs} ->
log("<>WARNING<> Wait for tables ~p: ~p~n", [node(), Tabs]),
start_wait_loop(BadTabs);
{error, Reason} ->
{error, {start_wait, Reason}}
end
end.
verify_nodes(Tabs) ->
verify_nodes(Tabs, 0).
verify_nodes([], _) ->
ok;
verify_nodes([Tab| Tabs], N) ->
?match(X when is_atom(X), mnesia_lib:val({Tab, where_to_read})),
Nodes = mnesia:table_info(Tab, where_to_write),
Copies =
mnesia:table_info(Tab, disc_copies) ++
mnesia:table_info(Tab, disc_only_copies) ++
mnesia:table_info(Tab, ram_copies),
Local = mnesia:table_info(Tab, local_content),
case Copies -- Nodes of
[] ->
verify_nodes(Tabs, 0);
_Else when Local == true, Nodes /= [] ->
verify_nodes(Tabs, 0);
Else ->
N2 =
if
N > 20 ->
log("<>WARNING<> ~w Waiting for table: ~p on ~p ~n",
[node(), Tab, Else]),
0;
true -> N+1
end,
timer:sleep(500),
verify_nodes([Tab| Tabs], N2)
end.
Nicely stop on all given nodes
%%
%% Returns a list of error tuples {BadNode, Reason}
%% Stop Mnesia nicely on all Nodes; returns error tuples for failures.
stop_mnesia(Nodes) when is_list(Nodes) ->
    stop_appls([mnesia], Nodes).
stop_appls([Appl | Appls], Nodes) when is_list(Nodes) ->
{Stopped, BadNodes} = rpc:multicall(Nodes, ?MODULE, remote_stop, [Appl]),
BadS =[{Node, Appl, Res} || {Node, Res} <- Stopped, Res /= stopped],
BadN =[{BadNode, Appl, bad_node} || BadNode <- BadNodes],
BadS ++ BadN ++ stop_appls(Appls, Nodes);
stop_appls([], _Nodes) ->
[].
%% Executed via rpc on the target node: stop one application and return
%% the result tagged with the local node name.
remote_stop(mnesia) ->
    {node(), mnesia:stop()};
remote_stop(Appl) ->
    {node(), application:stop(Appl)}.
%% Executed via rpc on the target node: brutally kill each application
%% by calling its lkill/0 (errors ignored via catch) and then stopping
%% it normally as cleanup.
remote_kill([Appl | Appls]) ->
    catch Appl:lkill(),
    application:stop(Appl),
    remote_kill(Appls);
remote_kill([]) ->
    ok.
Abruptly kill on all given nodes
%% Returns []
kill_appls(Appls, Nodes) when is_list(Nodes) ->
verbose("<>WARNING<> Intentionally killing ~p: ~w...~n",
[Appls, Nodes], ?FILE, ?LINE),
rpc:multicall(Nodes, ?MODULE, remote_kill, [Appls]),
[].
%% Abruptly kill Mnesia on all given Nodes. Returns [].
kill_mnesia(Nodes) when is_list(Nodes) ->
    kill_appls([mnesia], Nodes).
reload_appls([Appl | Appls], Selected) ->
kill_appls([Appl], Selected),
timer:sleep(1000),
Ok = {[ok || _N <- Selected], []},
{Ok2temp, Empty} = rpc:multicall(Selected, application, unload, [Appl]),
Conv = fun({error,{not_loaded,mnesia}}) -> ok; (Else) -> Else end,
Ok2 = {lists:map(Conv, Ok2temp), Empty},
Ok3 = rpc:multicall(Selected, application, load, [Appl]),
if
Ok /= Ok2 ->
?fatal("Cannot unload appl ~p: ~p~n", [Appl, Ok2]);
Ok /= Ok3 ->
?fatal("Cannot load appl ~p: ~p~n", [Appl, Ok3]);
true ->
ok
end,
reload_appls(Appls, Selected);
reload_appls([], _Selected) ->
ok.
%% Halt every connected node and finally this one. Irreversible;
%% intended as a last-resort cleanup for the whole test cluster.
shutdown() ->
    log("<>WARNING<> Intentionally shutting down all nodes... ~p~n",
        [nodes() ++ [node()]]),
    rpc:multicall(nodes(), erlang, halt, []),
    erlang:halt().
verify_mnesia(Ups, Downs, File, Line) when is_list(Ups), is_list(Downs) ->
BadUps =
[N || N <- Ups, rpc:call(N, mnesia, system_info, [is_running]) /= yes],
BadDowns =
[N || N <- Downs, rpc:call(N, mnesia, system_info, [is_running]) == yes],
if
BadUps == [] ->
ignore;
true ->
error("Mnesia is not running as expected: ~p~n",
[BadUps], File, Line)
end,
if
BadDowns == [] ->
ignore;
true ->
error("Mnesia is not stopped as expected: ~p~n",
[BadDowns], File, Line)
end,
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
verify_replica_location(Tab, [], [], [], _) ->
?match({'EXIT', _}, mnesia:table_info(Tab, ram_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, disc_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, disc_only_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, where_to_write)),
?match({'EXIT', _}, mnesia:table_info(Tab, where_to_read)),
[];
verify_replica_location(Tab, DiscOnly0, Ram0, Disc0, AliveNodes0) ->
%% sync_tables(AliveNodes0, [Tab]),
AliveNodes = lists:sort(AliveNodes0),
DiscOnly = lists:sort(DiscOnly0),
Ram = lists:sort(Ram0),
Disc = lists:sort(Disc0),
Write = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
Read = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
This = node(),
timer:sleep(100),
S1 = ?match(AliveNodes, lists:sort(mnesia:system_info(running_db_nodes))),
S2 = ?match(DiscOnly, lists:sort(mnesia:table_info(Tab, disc_only_copies))),
S3 = ?match(Ram, lists:sort(mnesia:table_info(Tab, ram_copies) ++
mnesia:table_info(Tab, ext_ets))),
S4 = ?match(Disc, lists:sort(mnesia:table_info(Tab, disc_copies))),
S5 = ?match(Write, lists:sort(mnesia:table_info(Tab, where_to_write))),
S6 = case lists:member(This, Read) of
true ->
?match(This, mnesia:table_info(Tab, where_to_read));
false ->
?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read))
end,
lists:filter(fun({success,_}) -> false; (_) -> true end, [S1,S2,S3,S4,S5,S6]).
%% Keep only the nodes that appear in AliveNodes; result is sorted.
ignore_dead(Nodes, AliveNodes) ->
    lists:sort([Node || Node <- Nodes, lists:member(Node, AliveNodes)]).
%% Activate a mnesia debug function on node N via a linked helper
%% process and wait until it confirms activation.
%% Returns ok, or {error, Reason} if the helper dies first.
remote_activate_debug_fun(N, I, F, C, File, Line) ->
    Pid = spawn_link(N, ?MODULE, do_remote_activate_debug_fun, [self(), I, F, C, File, Line]),
    receive
        {activated, Pid} -> ok;
        {'EXIT', Pid, Reason} -> {error, Reason}
    end.
%% Runs on the remote node: install the debug fun, confirm to the
%% caller, then sleep forever so the activation stays in force; the
%% link tears this process down when the test process dies.
do_remote_activate_debug_fun(From, I, F, C, File, Line) ->
    mnesia_lib:activate_debug_fun(I, F, C, File, Line),
    From ! {activated, self()},
    timer:sleep(infinity). % Dies whenever the test process dies !!
%% Sort a list, also when wrapped as {atomic, L} or {ok, L} (the usual
%% mnesia result shapes); any other term passes through unchanged.
sort({atomic, L}) when is_list(L) -> {atomic, lists:sort(L)};
sort({ok, L})     when is_list(L) -> {ok, lists:sort(L)};
sort(L)           when is_list(L) -> lists:sort(L);
sort(Other)                       -> Other.
| null | https://raw.githubusercontent.com/erlang/otp/2b397d7e5580480dc32fa9751db95f4b89ff029e/lib/mnesia/test/mnesia_test_lib.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Purpose: Test case support library
Test case identifiers must have the following syntax: {Module, Function}.
single argument. The returned value is treated as a list of sub
test cases. If the list of sub test cases is [] the test case
function is invoked again, this time with a list of nodes as
argument. If the list of sub test cases is not empty, the test
case driver applies the algorithm recursively on each element
in the list.
All test cases are written in such a manner
in order to prepare the test case execution. When that is
done, the test machinery ensures that at least X number
of nodes are connected to each other. If too few nodes was
specified in the Config, the test case is skipped. If there
was enough node names in the Config, X of them are selected
and if some of them happens to be down they are restarted
via the peer module. When all nodes are up and running a
started a on all nodes. This means that all test cases may
doc(TestCases)
Generates a test spec from parts of the test case structure
struct(TestCases)
Prints out the test case structure
test(TestCases)
Run parts of the test suite. Uses test/2.
Reads Config from mnesia_test.config and starts them if necessary.
test(TestCases, Config)
Run parts of the test suite on the given Nodes,
assuming that the nodes are up and running.
included for test server compatibility
assume that all test cases only takes Config as sole argument
Nodes = select_nodes(all, Config, ?FILE, ?LINE),
global:send(mnesia_test_case_sup, Fail),
Index the test case structure
Implemented leaf test case
Test suite
Not yet implemented
Show the test case structure
Execute the test cases
Reason;
For testserver
Generate erlang crashdumps.
GenDump = fun(Node) ->
File = "CRASH_" ++ atom_to_list(Node) ++ ".dump",
end,
erlang:halt("DebugTimeTrap"),
Returns [Name, Host]
Start Mnesia on all given nodes and wait for specified
tables to be accessible on each node. The atom all means
that we should wait for all tables to be loaded
Wait for the tables to be accessible from all nodes in the list
and that all nodes are aware of that the other nodes also ...
Returns a list of error tuples {BadNode, Reason}
Returns []
sync_tables(AliveNodes0, [Tab]),
Dies whenever the test process dies !! | Copyright Ericsson AB 1996 - 2023 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
Author :
This test suite may be run as a part of the Grand Test Suite
of Erlang . The test suite is structured in a hierarchy .
Each test case is implemented as an exported function with arity 1 .
The driver of the test suite runs in two passes as follows :
first the test case function is invoked with the atom ' suite ' as
that they start to invoke ? , Config )
disk resident schema is created on all nodes and is
assume that is up and running on all acquired nodes .
and wipes out the directories as a starter .
and wipes out the directories as a starter .
-module(mnesia_test_lib).
-author('').
-export([
log/2,
log/4,
verbose/4,
default_config/0,
diskless/1,
eval_test_case/3,
test_driver/2,
test_case_evaluator/3,
activity_evaluator/1,
flush/0,
pick_msg/0,
start_activities/1,
start_transactions/1,
start_transactions/2,
start_sync_transactions/1,
start_sync_transactions/2,
sync_trans_tid_serial/1,
prepare_test_case/5,
select_nodes/4,
init_nodes/3,
error/4,
node_sup/0,
start_mnesia/1,
start_mnesia/2,
start_appls/2,
start_appls/3,
start_wait/2,
storage_type/2,
stop_mnesia/1,
stop_appls/2,
sort/1,
kill_mnesia/1,
kill_appls/2,
verify_mnesia/4,
shutdown/0,
verify_replica_location/5,
lookup_config/2,
sync_tables/2,
remote_start/3,
remote_stop/1,
remote_kill/1,
reload_appls/2,
remote_activate_debug_fun/6,
do_remote_activate_debug_fun/6,
test/1,
test/2,
doc/1,
struct/1,
init_per_testcase/2,
end_per_testcase/2,
kill_tc/2
]).
-include("mnesia_test_lib.hrl").
init_per_testcase(_Func, Config) ->
Env = application:get_all_env(mnesia),
[application:unset_env(mnesia, Key, [{timeout, infinity}]) ||
{Key, _} <- Env, Key /= included_applications],
global:register_name(mnesia_global_logger, group_leader()),
Config.
%% Test-server compatibility hook run after every test case: undo the
%% global logger registration made in init_per_testcase/2.
%% The kill_mnesia multicall below is intentionally disabled (it had
%% lost its comment markers and was syntactically invalid); kept as a
%% comment for reference -- NOTE(review): reconstructed wording, confirm
%% against upstream mnesia_test_lib.erl.
end_per_testcase(_Func, Config) ->
    global:unregister_name(mnesia_global_logger),
    %% Nodes = select_nodes(all, Config, ?FILE, ?LINE),
    %% rpc:multicall(Nodes, ?MODULE, kill_mnesia, [Nodes]),
    Config.
Use ? log(Format , ) as wrapper
log(Format, Args, LongFile, Line) ->
File = filename:basename(LongFile),
Format2 = lists:concat([File, "(", Line, ")", ": ", Format]),
log(Format2, Args).
log(Format, Args) ->
case global:whereis_name(mnesia_global_logger) of
undefined ->
io:format(user, Format, Args);
Pid ->
io:format(Pid, Format, Args)
end.
verbose(Format, Args, File, Line) ->
Arg = mnesia_test_verbose,
case get(Arg) of
false ->
ok;
true ->
log(Format, Args, File, Line);
undefined ->
case init:get_argument(Arg) of
{ok, List} when is_list(List) ->
case lists:last(List) of
["true"] ->
put(Arg, true),
log(Format, Args, File, Line);
_ ->
put(Arg, false),
ok
end;
_ ->
put(Arg, false),
ok
end
end.
-record('REASON', {file, line, desc}).
error(Format, Args, File, Line) ->
global:send(mnesia_global_logger, {failed, File, Line}),
Fail = #'REASON'{file = filename:basename(File),
line = Line,
desc = Args},
case global:whereis_name(mnesia_test_case_sup) of
undefined ->
ignore;
Pid ->
Pid ! Fail
end,
log("<>ERROR<>~n" ++ Format, Args, File, Line).
storage_type(Default, Config) ->
case diskless(Config) of
true ->
ram_copies;
false ->
Default
end.
%% Default test configuration: just the default node list.
default_config() ->
[{nodes, default_nodes()}].
%% Default node set: this node plus two derived node names.
default_nodes() ->
mk_nodes(3, []).
%% Build a list of N node names derived from the first node in Nodes
%% (or from node() when Nodes is empty).
mk_nodes(0, Nodes) ->
Nodes;
mk_nodes(N, []) ->
mk_nodes(N - 1, [node()]);
mk_nodes(N, Nodes) when N > 0 ->
Head = hd(Nodes),
[Name, Host] = node_to_name_and_host(Head),
Nodes ++ [mk_node(I, Name, Host) || I <- lists:seq(1, N)].
%% Construct the atom 'NameN@Host'.
mk_node(N, Name, Host) ->
list_to_atom(lists:concat([Name ++ integer_to_list(N) ++ "@" ++ Host])).
%% Start a peer node, defaulting to 10 retries.
node_start_link(Host, Name) ->
node_start_link(Host, Name, 10).
%% Start a peer node carrying mnesia's code path and debug level, then
%% mirror cwd/code path, silence tty logging, install a supervisor
%% process on it and sync global names across all connected nodes.
node_start_link(Host, Name, Retries) ->
Debug = atom_to_list(mnesia:system_info(debug)),
Args = ["-mnesia", "debug", Debug,
"-pa", filename:dirname(code:which(?MODULE)),
"-pa", filename:dirname(code:which(mnesia))],
case starter(Host, Name, Args) of
{ok, NewNode} ->
?match(pong, net_adm:ping(NewNode)),
{ok, Cwd} = file:get_cwd(),
Path = code:get_path(),
ok = rpc:call(NewNode, file, set_cwd, [Cwd]),
true = rpc:call(NewNode, code, set_path, [Path]),
ok = rpc:call(NewNode, error_logger, tty, [false]),
spawn_link(NewNode, ?MODULE, node_sup, []),
rpc:multicall([node() | nodes()], global, sync, []),
{ok, NewNode};
{error, Reason} when Retries == 0->
{error, Reason};
{error, Reason} ->
io:format("Could not start node ~p ~p retrying~n",
[{Host, Name, Args}, Reason]),
timer:sleep(500),
node_start_link(Host, Name, Retries - 1)
end.
%% Actually boot the remote node via the peer module.
starter(Host, Name, Args) ->
{ok, _, Node} = peer:start(#{host => Host, name => Name, args => Args}),
{ok, Node}.
%% Runs on the started node: blocks until any linked process exits.
node_sup() ->
process_flag(trap_exit, true),
receive
{'EXIT', _, _} ->
ignore
end.
%% Generate an HTML test specification (index.html) for the test cases.
%% Not-yet-implemented cases are rendered in bold.
doc(TestCases) when is_list(TestCases) ->
test(TestCases, suite),
SuiteFname = "index.html",
io:format("Generating HTML test specification to file: ~s~n",
[SuiteFname]),
{ok, Fd} = file:open(SuiteFname, [write]),
io:format(Fd, "<TITLE>Test specification for ~p</TITLE>.~n", [TestCases]),
io:format(Fd, "<H1>Test specification for ~p</H1>~n", [TestCases]),
io:format(Fd, "Test cases which not are implemented yet are written in <B>bold face</B>.~n~n", []),
io:format(Fd, "<BR><BR>~n", []),
io:format(Fd, "~n<DL>~n", []),
do_doc(Fd, TestCases, []),
io:format(Fd, "</DL>~n", []),
file:close(Fd);
doc(TestCases) ->
doc([TestCases]).
%% Walk a test case list, defaulting the module (mnesia_SUITE, or the
%% innermost enclosing module) for unqualified case names.
do_doc(Fd, [H | T], List) ->
case H of
{Module, TestCase} when is_atom(Module), is_atom(TestCase) ->
do_doc(Fd, Module, TestCase, List);
TestCase when is_atom(TestCase), List == [] ->
do_doc(Fd, mnesia_SUITE, TestCase, List);
TestCase when is_atom(TestCase) ->
do_doc(Fd, hd(List), TestCase, List)
end,
do_doc(Fd, T, List);
do_doc(_, [], _) ->
ok.
%% Emit documentation for one test case, recursing into sub-suites;
%% 'NYI' cases are printed in bold.
do_doc(Fd, Module, TestCase, List) ->
case get_suite(Module, TestCase) of
[] ->
Head = ?flat_format("<A HREF=~p.html#~p_1>{~p, ~p}</A>}",
[Module, TestCase, Module, TestCase]),
print_doc(Fd, Module, TestCase, Head);
Suite when is_list(Suite) ->
Head = ?flat_format("{~p, ~p}", [Module, TestCase]),
print_doc(Fd, Module, TestCase, Head),
io:format(Fd, "~n<DL>~n", []),
do_doc(Fd, Suite, [Module | List]),
io:format(Fd, "</DL>~n", []);
'NYI' ->
Head = ?flat_format("<B>{~p, ~p}</B>", [Module, TestCase]),
print_doc(Fd, Module, TestCase, Head)
end.
%% Print one entry; uses Mod:Fun(doc) for the description when the
%% callback exists, otherwise just the heading.
print_doc(Fd, Mod, Fun, Head) ->
case catch (apply(Mod, Fun, [doc])) of
{'EXIT', _} ->
io:format(Fd, "<DT>~s</DT>~n", [Head]);
Doc when is_list(Doc) ->
io:format(Fd, "<DT><U>~s</U><BR><DD>~n", [Head]),
print_rows(Fd, Doc),
io:format(Fd, "</DD><BR><BR>~n", [])
end.
%% Print doc rows; handles both a list of strings and a plain string.
print_rows(_Fd, []) ->
ok;
print_rows(Fd, [H | T]) when is_list(H) ->
io:format(Fd, "~s~n", [H]),
print_rows(Fd, T);
print_rows(Fd, [H | T]) when is_integer(H) ->
io:format(Fd, "~s~n", [[H | T]]).
%% Log the tree structure of the given test cases (no execution).
struct(TestCases) ->
T = test(TestCases, suite),
struct(T, "").
struct({Module, TestCase}, Indentation)
when is_atom(Module), is_atom(TestCase) ->
log("~s{~p, ~p} ...~n", [Indentation, Module, TestCase]);
struct({Module, TestCase, Other}, Indentation)
when is_atom(Module), is_atom(TestCase) ->
log("~s{~p, ~p} ~p~n", [Indentation, Module, TestCase, Other]);
struct([], _) ->
ok;
struct([TestCase | TestCases], Indentation) ->
struct(TestCase, Indentation),
struct(TestCases, Indentation);
struct({TestCase, []}, Indentation) ->
struct(TestCase, Indentation);
struct({TestCase, SubTestCases}, Indentation) when is_list(SubTestCases) ->
struct(TestCase, Indentation),
struct(SubTestCases, Indentation ++ " ").
%% Run test cases with an empty config.
test(TestCases) ->
test(TestCases, []).
%% Run test cases; Config == suite only expands the suite structure
%% without executing anything.
test(TestCases, suite) when is_list(TestCases) ->
test_driver(TestCases, suite);
test(TestCases, Config) when is_list(TestCases) ->
D1 = lists:duplicate(10, $=),
D2 = lists:duplicate(10, $ ),
log("~n~s TEST CASES: ~p~n ~sCONFIG: ~p~n~n", [D1, TestCases, D2, Config]),
test_driver(TestCases, Config);
test(TestCase, Config) ->
test([TestCase], Config).
%% Drive execution (or suite expansion) of a test case tree; returns a
%% nested list of {Seconds, Result} tuples, timing each case.
test_driver([], _Config) ->
[];
test_driver([T|TestCases], Config) ->
L1 = test_driver(T, Config),
L2 = test_driver(TestCases, Config),
[L1|L2];
test_driver({Module, TestCases}, Config) when is_list(TestCases)->
test_driver(default_module(Module, TestCases), Config);
test_driver({Module, all}, Config) ->
get_suite(Module, all, Config);
test_driver({Module, G={group, _}}, Config) ->
get_suite(Module, G, Config);
test_driver({_, {group, Module, Group}}, Config) ->
get_suite(Module, {group, Group}, Config);
test_driver({Module, TestCase}, Config) ->
Sec = timer:seconds(1) * 1000,
case Config of
suite ->
{Module, TestCase, 'IMPL'};
_ ->
log("Eval test case: ~w~n", [{Module, TestCase}]),
try timer:tc(?MODULE, eval_test_case, [Module, TestCase, Config]) of
{T, Res} ->
log("Tested ~w in ~w sec~n", [TestCase, T div Sec]),
{T div Sec, Res}
catch error:function_clause ->
log("<WARNING> Test case ~w NYI~n", [{Module, TestCase}]),
{0, {skip, {Module, TestCase}, "NYI"}}
end
end;
test_driver(TestCase, Config) ->
DefaultModule = mnesia_SUITE,
log("<>WARNING<> Missing module in test case identifier. "
"{~w, ~w} assumed~n", [DefaultModule, TestCase]),
test_driver({DefaultModule, TestCase}, Config).
%% Qualify unqualified test case identifiers with DefaultModule;
%% already-qualified {Mod, Case} tuples pass through untouched, while
%% {group, _} tags and bare names are paired with DefaultModule.
default_module(DefaultModule, TestCases) when is_list(TestCases) ->
    Qualify =
        fun({group, _} = Group) -> {DefaultModule, Group};
           ({_, _} = Qualified) -> Qualified;
           (TestCase) -> {DefaultModule, TestCase}
        end,
    lists:map(Qualify, TestCases).
%% Expand a suite/group into its member cases and run (or expand) them,
%% timing the whole expansion; 'NYI' suites are reported as skipped.
get_suite(Module, TestCase, Config) ->
case get_suite(Module, TestCase) of
Suite when is_list(Suite), Config == suite ->
Res = test_driver(default_module(Module, Suite), Config),
{{Module, TestCase}, Res};
Suite when is_list(Suite) ->
log("Expand test case ~w~n", [{Module, TestCase}]),
Def = default_module(Module, Suite),
{T, Res} = timer:tc(?MODULE, test_driver, [Def, Config]),
Sec = timer:seconds(1) * 1000,
{T div Sec, {{Module, TestCase}, Res}};
'NYI' when Config == suite ->
{Module, TestCase, 'NYI'};
'NYI' ->
log("<WARNING> Test case ~w NYI~n", [{Module, TestCase}]),
{0, {skip, {Module, TestCase}, "NYI"}}
end.
%% Returns a list (possibly empty) or the atom 'NYI'
%% Look up the member test cases of a group or of the whole suite;
%% returns a list, or 'NYI' when the callback is missing or fails.
get_suite(Mod, {group, Suite}) ->
try
Groups = Mod:groups(),
{_, _, TCList} = lists:keyfind(Suite, 1, Groups),
TCList
catch
_:Reason:Stacktrace ->
io:format("Not implemented ~p ~p (~p ~p)~n",
[Mod,Suite,Reason,Stacktrace]),
'NYI'
end;
get_suite(Mod, all) ->
case catch (apply(Mod, all, [])) of
{'EXIT', _} -> 'NYI';
List when is_list(List) -> List
end;
get_suite(_Mod, _Fun) ->
[].
%% Run one test case in a fresh linked process, registering ourselves
%% as mnesia_test_case_sup so error/4 reports reach us.
eval_test_case(Mod, Fun, Config) ->
flush(),
global:register_name(mnesia_test_case_sup, self()),
Flag = process_flag(trap_exit, true),
Pid = spawn_link(?MODULE, test_case_evaluator, [Mod, Fun, [Config]]),
R = wait_for_evaluator(Pid, Mod, Fun, Config),
global:unregister_name(mnesia_test_case_sup),
process_flag(trap_exit, Flag),
R.
%% Drain and return all messages currently in the mailbox.
flush() ->
receive Msg -> [Msg | flush()]
after 0 -> []
end.
%% Await the evaluator's exit; any messages accumulated while it ran
%% (e.g. #'REASON'{} failures) mark the case as failed.
wait_for_evaluator(Pid, Mod, Fun, Config) ->
receive
{'EXIT', Pid, {test_case_ok, _PidRes}} ->
Errors = flush(),
Res =
case Errors of
[] -> ok;
Errors -> failed
end,
{Res, {Mod, Fun}, Errors};
{'EXIT', Pid, {skipped, Reason}} ->
log("<WARNING> Test case ~w skipped, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:end_per_testcase(Fun, Config),
{skip, {Mod, Fun}, Reason};
{'EXIT', Pid, Reason} ->
log("<>ERROR<> Eval process ~w exited, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:end_per_testcase(Fun, Config),
{crash, {Mod, Fun}, Reason}
end.
%% Body of the spawned evaluator: init, run, cleanup, then exit with a
%% tagged result; an unimplemented case (function_clause) is skipped.
test_case_evaluator(Mod, Fun, [Config]) ->
NewConfig = Mod:init_per_testcase(Fun, Config),
try
R = apply(Mod, Fun, [NewConfig]),
Mod:end_per_testcase(Fun, NewConfig),
exit({test_case_ok, R})
catch error:function_clause ->
exit({skipped, 'NYI'})
end.
%% Worker body used by start_activities/1: serve requests from the
%% coordinator process until told to stop, then exit normally.
activity_evaluator(Coordinator) ->
    activity_evaluator_loop(Coordinator),
    exit(normal).

%% Serve one request at a time: begin a (possibly retried) transaction,
%% evaluate a fun and send its result back, stop on end_trans, and
%% propagate any other term as an exit reason.
activity_evaluator_loop(Coordinator) ->
    receive
        begin_trans ->
            transaction(Coordinator, 0);
        {begin_trans, MaxRetries} ->
            transaction(Coordinator, MaxRetries);
        end_trans ->
            end_trans;
        Fun when is_function(Fun) ->
            Coordinator ! {self(), Fun()},
            activity_evaluator_loop(Coordinator);
        %% {'EXIT', Coordinator, Reason} ->
        %%     exit(Reason);
        ExitExpr ->
            %% ?error("activity_evaluator_loop ~p ~p: exit(~p)~n",
            %%        [Coordinator, self(), ExitExpr]),
            exit(ExitExpr)
    end.
%% Run a mnesia transaction whose body announces begin_trans to the
%% coordinator and then keeps serving requests; the transaction result
%% is sent back to the coordinator afterwards.
transaction(Coordinator, MaxRetries) ->
Fun = fun() ->
Coordinator ! {self(), begin_trans},
activity_evaluator_loop(Coordinator)
end,
Coordinator ! {self(), mnesia:transaction(Fun, MaxRetries)},
activity_evaluator_loop(Coordinator).
%% Receive any message, or the atom timeout after 4 seconds.
pick_msg() ->
receive
Message -> Message
after 4000 -> timeout
end.
%% Spawn one linked activity_evaluator worker on each node.
start_activities(Nodes) ->
Fun = fun(N) -> spawn_link(N, ?MODULE, activity_evaluator, [self()]) end,
Pids = mapl(Fun, Nodes),
{success, Pids}.
%% Map Fun over List, applying Fun to the head before recursing on the
%% tail (same left-to-right evaluation order as lists:map/2 would use).
mapl(Fun, List) ->
    case List of
        [Head | Tail] ->
            Result = Fun(Head),
            [Result | mapl(Fun, Tail)];
        [] ->
            []
    end.
%% True iff the test config contains {diskless, true}.
diskless(Config) ->
    case lists:keyfind(diskless, 1, Config) of
        {diskless, true} -> true;
        _ -> false
    end.
%% Tell each worker to begin a transaction and await its begin_trans ack.
start_transactions(Pids) ->
Fun = fun(Pid) ->
Pid ! begin_trans,
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
%% Like start_transactions/1, but serializes transaction ids across the
%% workers' nodes before each start.
start_sync_transactions(Pids) ->
Nodes = [node(Pid) || Pid <- Pids],
Fun = fun(Pid) ->
sync_trans_tid_serial(Nodes),
Pid ! begin_trans,
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
%% Begin transactions with an explicit retry limit.
start_transactions(Pids, MaxRetries) ->
Fun = fun(Pid) ->
Pid ! {begin_trans, MaxRetries},
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
%% Synchronized variant of start_transactions/2.
start_sync_transactions(Pids, MaxRetries) ->
Nodes = [node(Pid) || Pid <- Pids],
Fun = fun(Pid) ->
sync_trans_tid_serial(Nodes),
Pid ! {begin_trans, MaxRetries},
?match_receive({Pid, begin_trans})
end,
mapl(Fun, Pids).
%% Force a schema write lock on all nodes to serialize transaction ids.
sync_trans_tid_serial(Nodes) ->
Fun = fun() -> mnesia:write_lock_table(schema) end,
rpc:multicall(Nodes, mnesia, transaction, [Fun]).
%% Select N nodes from the config without any preparation actions.
select_nodes(N, Config, File, Line) ->
prepare_test_case([], N, Config, File, Line).
%% Select N nodes (from the configured node list plus any test-server
%% supplied node names) and run the given preparation Actions on them.
%% Non-diskless runs get schema_location set to opt_disc first.
prepare_test_case(Actions, N, Config, File, Line) ->
    NodeList1 = lookup_config(nodes, Config),
    NodeList2 = lookup_config(nodenames, Config), %% For testserver
    NodeList3 = append_unique(NodeList1, NodeList2),
    This = node(),
    All = [This | lists:delete(This, NodeList3)],
    Selected = pick_nodes(N, All, File, Line),
    case diskless(Config) of
        true ->
            ok;
        false ->
            rpc:multicall(Selected, application, set_env,
                          [mnesia, schema_location, opt_disc])
    end,
    do_prepare(Actions, Selected, All, Config, File, Line).
%% Interpret the preparation action list for a test case; returns the
%% selected node list once every action has been applied.
do_prepare([], Selected, _All, _Config, _File, _Line) ->
Selected;
do_prepare([{init_test_case, Appls} | Actions], Selected, All, Config, File, Line) ->
set_kill_timer(Config),
Started = init_nodes(Selected, File, Line),
All2 = append_unique(Started, All),
Alive = mnesia_lib:intersect(nodes() ++ [node()], All2),
kill_appls(Appls, Alive),
process_flag(trap_exit, true),
do_prepare(Actions, Started, All2, Config, File, Line);
do_prepare([delete_schema | Actions], Selected, All, Config, File, Line) ->
Alive = mnesia_lib:intersect(nodes() ++ [node()], All),
case diskless(Config) of
true ->
skip;
false ->
Del = fun(Node) ->
case mnesia:delete_schema([Node]) of
ok -> ok;
{error, {"All nodes not running",_}} ->
ok;
Else ->
?log("Delete schema error ~p ~n", [Else])
end
end,
lists:foreach(Del, Alive)
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([create_schema | Actions], Selected, All, Config, File, Line) ->
Ext = proplists:get_value(default_properties, Config, ?BACKEND),
case diskless(Config) of
true ->
rpc:multicall(Selected, application, set_env, [mnesia, schema, Ext]),
skip;
_Else ->
case mnesia:create_schema(Selected, Ext) of
ok ->
ignore;
BadNodes ->
?fatal("Cannot create Mnesia schema on ~p~n", [BadNodes])
end
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([{start_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
case start_appls(Appls, Selected, Config) of
[] -> ok;
Bad -> ?fatal("Cannot start appls ~p: ~p~n", [Appls, Bad])
end,
do_prepare(Actions, Selected, All, Config, File, Line);
do_prepare([{reload_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
reload_appls(Appls, Selected),
do_prepare(Actions, Selected, All, Config, File, Line).
%% Arm a watchdog that kills the test case after its timeout (scaled by
%% the test server's timetrap factor), unless overridden via the
%% mnesia_test_timeout command line flag.
set_kill_timer(Config) ->
case init:get_argument(mnesia_test_timeout) of
{ok, _ } -> ok;
_ ->
Time0 =
case lookup_config(tc_timeout, Config) of
[] -> timer:minutes(5);
ConfigTime when is_integer(ConfigTime) -> ConfigTime
end,
Mul = try
test_server:timetrap_scale_factor()
catch _:_ -> 1 end,
(catch test_server:timetrap(Mul*Time0 + 1000)),
spawn_link(?MODULE, kill_tc, [self(),Time0*Mul])
end.
%% Watchdog body spawned by set_kill_timer/1: sleep Time ms and, if the
%% test case process Pid is still alive, dump cores on all nodes and
%% kill it.
kill_tc(Pid, Time) ->
    receive
    after Time ->
            case process_info(Pid) of
                undefined ->
                    ok; % Test case already finished.
                _ ->
                    ?error("Watchdog in test case timed out "
                           "in ~p min~n", [Time div (1000*60)]),
                    Files = mnesia_lib:dist_coredump(),
                    ?log("Cores dumped to:~n ~p~n", [Files]),
                    %% Optionally generate remote crash dumps:
                    %% [begin
                    %%      rpc:call(Node, os, putenv, ["ERL_CRASH_DUMP", File]),
                    %%      rpc:cast(Node, erlang, halt, ["RemoteTimeTrap"])
                    %%  end || Node <- nodes()],
                    exit(Pid, kill)
            end
    end.
%% Prepend to List every element of New that is not already a member of
%% List (duplicates inside New itself are preserved).
append_unique(New, List) ->
    case New of
        [] ->
            List;
        [Head | Rest] ->
            case lists:member(Head, List) of
                true -> append_unique(Rest, List);
                false -> [Head | append_unique(Rest, List)]
            end
    end.
%% Take the first N nodes from the candidate list; skip the whole test
%% case when fewer than N are available.
pick_nodes(all, Nodes, File, Line) ->
pick_nodes(length(Nodes), Nodes, File, Line);
pick_nodes(N, [H | T], File, Line) when N > 0 ->
[H | pick_nodes(N - 1, T, File, Line)];
pick_nodes(0, _Nodes, _File, _Line) ->
[];
pick_nodes(N, [], File, Line) ->
?skip("Test case (~p(~p)) ignored: ~p nodes missing~n",
[File, Line, N]).
%% Ensure every node is reachable, starting missing nodes on demand;
%% skips the test case when a node cannot be started.
init_nodes([Node | Nodes], File, Line) ->
case net_adm:ping(Node) of
pong ->
[Node | init_nodes(Nodes, File, Line)];
pang ->
[Name, Host] = node_to_name_and_host(Node),
case node_start_link(Host, Name) of
{ok, Node1} ->
Path = code:get_path(),
true = rpc:call(Node1, code, set_path, [Path]),
[Node1 | init_nodes(Nodes, File, Line)];
Other ->
?skip("Test case (~p(~p)) ignored: cannot start node ~p: ~p~n",
[File, Line, Node, Other])
end
end;
init_nodes([], _File, _Line) ->
[].
%% Split a node atom 'name@host' into ["name", "host"].
node_to_name_and_host(Node) ->
    NodeString = atom_to_list(Node),
    string:lexemes(NodeString, "@").
%% Fetch the value stored under Key in the config proplist; returns []
%% when the key is absent (or its entry is not a 2-tuple).
lookup_config(Key, Config) ->
    case lists:keyfind(Key, 1, Config) of
        {Key, Val} -> Val;
        _ -> []
    end.
%% Start applications on nodes with an empty config and schema sync.
start_appls(Appls, Nodes) ->
start_appls(Appls, Nodes, [], [schema]).
%% Start applications, waiting for the schema table after mnesia starts.
start_appls(Appls, Nodes, Config) ->
start_appls(Appls, Nodes, Config, [schema]).
%% Start each application on all nodes via rpc; collects error tuples
%% for nodes that failed. After a clean mnesia start, waits for Tabs.
start_appls([Appl | Appls], Nodes, Config, Tabs) ->
{Started, BadStarters} =
rpc:multicall(Nodes, ?MODULE, remote_start, [Appl, Config, Nodes]),
BadS = [{Node, Appl, Res} || {Node, Res} <- Started, Res /= ok],
BadN = [{BadNode, Appl, bad_start} || BadNode <- BadStarters],
Bad = BadS ++ BadN,
case Appl of
mnesia when Bad == [] ->
sync_tables(Nodes, Tabs);
_ ->
ignore
end,
Bad ++ start_appls(Appls, Nodes, Config, Tabs);
start_appls([], _Nodes, _Config, _Tabs) ->
[].
%% Runs on each node: configure mnesia for diskless/disc operation and
%% start it (or any other application), returning {Node, Result}.
remote_start(mnesia, Config, Nodes) ->
case diskless(Config) of
true ->
application_controller:set_env(mnesia,
extra_db_nodes,
Nodes -- [node()]),
application_controller:set_env(mnesia,
schema_location,
ram);
false ->
application_controller:set_env(mnesia,
schema_location,
opt_disc),
ignore
end,
{node(), mnesia:start()};
remote_start(Appl, _Config, _Nodes) ->
Res =
case application:start(Appl) of
{error, {already_started, Appl}} ->
ok;
Other ->
Other
end,
{node(), Res}.
%% Returns a list of error tuples {BadNode, Appl, Reason}
%% Start mnesia on the given nodes.
start_mnesia(Nodes) ->
start_appls([mnesia], Nodes).
%% Start mnesia on the given nodes and wait for the given tables.
start_mnesia(Nodes, Tabs) when is_list(Nodes) ->
start_appls([mnesia], Nodes, [], Tabs).
%% Wait until Tabs are loaded on all nodes; on success, force a schema
%% write lock to serialize with ongoing schema transactions.
sync_tables(Nodes, Tabs) ->
Res = send_wait(Nodes, Tabs, []),
if
Res == [] ->
mnesia:transaction(fun() -> mnesia:write_lock_table(schema) end),
Res;
true ->
Res
end.
%% Spawn a waiter process per node, then collect their results.
send_wait([Node | Nodes], Tabs, Pids) ->
Pid = spawn_link(Node, ?MODULE, start_wait, [self(), Tabs]),
send_wait(Nodes, Tabs, [Pid | Pids]);
send_wait([], _Tabs, Pids) ->
rec_wait(Pids, []).
%% Gather replies (or EXIT signals) from the waiter processes.
rec_wait([Pid | Pids], BadRes) ->
receive
{'EXIT', Pid, R} ->
rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes]);
{Pid, ok} ->
rec_wait(Pids, BadRes);
{Pid, {error, R}} ->
rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes])
end;
rec_wait([], BadRes) ->
BadRes.
%% Runs on each node: link to mnesia_monitor (to notice mnesia dying),
%% wait for the tables and report the outcome to the coordinator.
start_wait(Coord, Tabs) ->
process_flag(trap_exit, true),
Mon = whereis(mnesia_monitor),
case catch link(Mon) of
{'EXIT', _} ->
unlink(Coord),
Coord ! {self(), {error, {node_not_running, node()}}};
_ ->
Res = start_wait_loop(Tabs),
unlink(Mon),
unlink(Coord),
Coord ! {self(), Res}
end.
%% Wait (retrying on timeout) until all tables are loaded, then verify
%% their replica placement.
start_wait_loop(Tabs) ->
receive
{'EXIT', Pid, Reason} ->
{error, {start_wait, Pid, Reason}}
after 0 ->
case mnesia:wait_for_tables(Tabs, timer:seconds(30)) of
ok ->
verify_nodes(Tabs);
{timeout, BadTabs} ->
log("<>WARNING<> Wait for tables ~p: ~p~n", [node(), Tabs]),
start_wait_loop(BadTabs);
{error, Reason} ->
{error, {start_wait, Reason}}
end
end.
verify_nodes(Tabs) ->
verify_nodes(Tabs, 0).
%% Poll each table until every configured replica node is available for
%% writing (local_content tables only need some writer), logging a
%% warning every ~10s of waiting.
verify_nodes([], _) ->
ok;
verify_nodes([Tab| Tabs], N) ->
?match(X when is_atom(X), mnesia_lib:val({Tab, where_to_read})),
Nodes = mnesia:table_info(Tab, where_to_write),
Copies =
mnesia:table_info(Tab, disc_copies) ++
mnesia:table_info(Tab, disc_only_copies) ++
mnesia:table_info(Tab, ram_copies),
Local = mnesia:table_info(Tab, local_content),
case Copies -- Nodes of
[] ->
verify_nodes(Tabs, 0);
_Else when Local == true, Nodes /= [] ->
verify_nodes(Tabs, 0);
Else ->
N2 =
if
N > 20 ->
log("<>WARNING<> ~w Waiting for table: ~p on ~p ~n",
[node(), Tab, Else]),
0;
true -> N+1
end,
timer:sleep(500),
verify_nodes([Tab| Tabs], N2)
end.
%% Nicely stop Mnesia on all given nodes
%% Stop mnesia gracefully on the given nodes.
stop_mnesia(Nodes) when is_list(Nodes) ->
stop_appls([mnesia], Nodes).
%% Stop applications on all nodes via rpc; collects error tuples for
%% nodes where the stop did not succeed.
stop_appls([Appl | Appls], Nodes) when is_list(Nodes) ->
{Stopped, BadNodes} = rpc:multicall(Nodes, ?MODULE, remote_stop, [Appl]),
BadS =[{Node, Appl, Res} || {Node, Res} <- Stopped, Res /= stopped],
BadN =[{BadNode, Appl, bad_node} || BadNode <- BadNodes],
BadS ++ BadN ++ stop_appls(Appls, Nodes);
stop_appls([], _Nodes) ->
[].
%% Runs on each node: stop the application, returning {Node, Result}.
remote_stop(mnesia) ->
{node(), mnesia:stop()};
remote_stop(Appl) ->
{node(), application:stop(Appl)}.
%% Runs on each node: brutally kill applications (Appl:lkill/0 first,
%% then a regular application:stop/1).
remote_kill([Appl | Appls]) ->
catch Appl:lkill(),
application:stop(Appl),
remote_kill(Appls);
remote_kill([]) ->
ok.
%% Abruptly kill Mnesia on all given nodes
%% Kill the given applications on all nodes (intentional, logged).
kill_appls(Appls, Nodes) when is_list(Nodes) ->
verbose("<>WARNING<> Intentionally killing ~p: ~w...~n",
[Appls, Nodes], ?FILE, ?LINE),
rpc:multicall(Nodes, ?MODULE, remote_kill, [Appls]),
[].
%% Kill mnesia on the given nodes.
kill_mnesia(Nodes) when is_list(Nodes) ->
kill_appls([mnesia], Nodes).
%% Kill, unload and reload each application on the selected nodes,
%% failing the test case if unload/load does not succeed everywhere.
reload_appls([Appl | Appls], Selected) ->
kill_appls([Appl], Selected),
timer:sleep(1000),
Ok = {[ok || _N <- Selected], []},
{Ok2temp, Empty} = rpc:multicall(Selected, application, unload, [Appl]),
Conv = fun({error,{not_loaded,mnesia}}) -> ok; (Else) -> Else end,
Ok2 = {lists:map(Conv, Ok2temp), Empty},
Ok3 = rpc:multicall(Selected, application, load, [Appl]),
if
Ok /= Ok2 ->
?fatal("Cannot unload appl ~p: ~p~n", [Appl, Ok2]);
Ok /= Ok3 ->
?fatal("Cannot load appl ~p: ~p~n", [Appl, Ok3]);
true ->
ok
end,
reload_appls(Appls, Selected);
reload_appls([], _Selected) ->
ok.
%% Halt every node in the cluster, this one last.
shutdown() ->
log("<>WARNING<> Intentionally shutting down all nodes... ~p~n",
[nodes() ++ [node()]]),
rpc:multicall(nodes(), erlang, halt, []),
erlang:halt().
%% Assert that mnesia is running on every node in Ups and stopped on
%% every node in Downs; reports failures through error/4.
verify_mnesia(Ups, Downs, File, Line) when is_list(Ups), is_list(Downs) ->
BadUps =
[N || N <- Ups, rpc:call(N, mnesia, system_info, [is_running]) /= yes],
BadDowns =
[N || N <- Downs, rpc:call(N, mnesia, system_info, [is_running]) == yes],
if
BadUps == [] ->
ignore;
true ->
error("Mnesia is not running as expected: ~p~n",
[BadUps], File, Line)
end,
if
BadDowns == [] ->
ignore;
true ->
error("Mnesia is not stopped as expected: ~p~n",
[BadDowns], File, Line)
end,
ok.
%% Check a table's replica placement against the expected disc_only,
%% ram and disc node lists, ignoring nodes that are not alive; returns
%% the list of failed ?match results (empty when all checks pass).
verify_replica_location(Tab, [], [], [], _) ->
?match({'EXIT', _}, mnesia:table_info(Tab, ram_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, disc_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, disc_only_copies)),
?match({'EXIT', _}, mnesia:table_info(Tab, where_to_write)),
?match({'EXIT', _}, mnesia:table_info(Tab, where_to_read)),
[];
verify_replica_location(Tab, DiscOnly0, Ram0, Disc0, AliveNodes0) ->
AliveNodes = lists:sort(AliveNodes0),
DiscOnly = lists:sort(DiscOnly0),
Ram = lists:sort(Ram0),
Disc = lists:sort(Disc0),
Write = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
Read = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
This = node(),
timer:sleep(100),
S1 = ?match(AliveNodes, lists:sort(mnesia:system_info(running_db_nodes))),
S2 = ?match(DiscOnly, lists:sort(mnesia:table_info(Tab, disc_only_copies))),
S3 = ?match(Ram, lists:sort(mnesia:table_info(Tab, ram_copies) ++
mnesia:table_info(Tab, ext_ets))),
S4 = ?match(Disc, lists:sort(mnesia:table_info(Tab, disc_copies))),
S5 = ?match(Write, lists:sort(mnesia:table_info(Tab, where_to_write))),
S6 = case lists:member(This, Read) of
true ->
?match(This, mnesia:table_info(Tab, where_to_read));
false ->
?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read))
end,
lists:filter(fun({success,_}) -> false; (_) -> true end, [S1,S2,S3,S4,S5,S6]).
%% Keep only the nodes that are alive, sorted.
ignore_dead(Nodes, AliveNodes) ->
Filter = fun(Node) -> lists:member(Node, AliveNodes) end,
lists:sort(lists:zf(Filter, Nodes)).
%% Install a mnesia debug fun on node N; waits for the remote ack.
remote_activate_debug_fun(N, I, F, C, File, Line) ->
Pid = spawn_link(N, ?MODULE, do_remote_activate_debug_fun, [self(), I, F, C, File, Line]),
receive
{activated, Pid} -> ok;
{'EXIT', Pid, Reason} -> {error, Reason}
end.
%% Runs on the remote node: install the debug fun, acknowledge to the
%% caller, then sleep forever so the fun stays active; the process dies
%% (and the fun is dropped) when the linked test process dies.
do_remote_activate_debug_fun(From, I, F, C, File, Line) ->
    mnesia_lib:activate_debug_fun(I, F, C, File, Line),
    From ! {activated, self()},
    timer:sleep(infinity). % Dies whenever the test process dies!

%% Sort a plain list, or the payload of an {atomic, List} / {ok, List}
%% wrapper; any other term is returned untouched.
sort(L) when is_list(L) ->
    lists:sort(L);
sort({atomic, L}) when is_list(L) ->
    {atomic, lists:sort(L)};
sort({ok, L}) when is_list(L) ->
    {ok, lists:sort(L)};
sort(W) ->
    W.
|
50f31ebb28a137948a837531e03fce0fe22e11e969951bf2c1cc9132ac09cb64 | dQuadrant/kuber | Text.hs | # LANGUAGE FlexibleContexts #
module Cardano.Kuber.Utility.Text where
import Data.Text.Conversions (FromText (fromText), ToText (toText), Base16 (Base16, unBase16), convertText)
import Data.ByteString ( ByteString )
import Data.Functor ((<&>))
import qualified Data.ByteString.Lazy as LBS
-- | Hex-encode a value via its 'Base16' text representation.
toHexString :: (FromText a1, ToText (Base16 a2)) => a2 -> a1
toHexString bs = fromText (toText (Base16 bs))

-- | Decode a hex string to a lazy 'LBS.ByteString'; 'Nothing' on invalid hex.
unHexLazy :: ToText a => a -> Maybe LBS.ByteString
unHexLazy v = fmap unBase16 (convertText (toText v))

-- | Decode a hex string to a strict 'ByteString'; 'Nothing' on invalid hex.
unHexStrict :: ToText a => a -> Maybe ByteString
unHexStrict v = fmap unBase16 (convertText (toText v))

-- | Generic hex decoder: works for any functorial 'FromText' target.
unHex :: (Functor f, FromText (f (Base16 b)), ToText a) => a -> f b
unHex v = fmap unBase16 (convertText (toText v))
module Cardano.Kuber.Utility.Text where
import Data.Text.Conversions (FromText (fromText), ToText (toText), Base16 (Base16, unBase16), convertText)
import Data.ByteString ( ByteString )
import Data.Functor ((<&>))
import qualified Data.ByteString.Lazy as LBS
toHexString :: (FromText a1, ToText (Base16 a2)) => a2 -> a1
toHexString bs = fromText $ toText (Base16 bs )
unHexLazy :: ToText a => a -> Maybe LBS.ByteString
unHexLazy v = convertText (toText v) <&> unBase16
unHexStrict :: ToText a => a -> Maybe ByteString
unHexStrict v = convertText (toText v) <&> unBase16
unHex :: (Functor f, FromText (f (Base16 b)), ToText a) => a -> f b
unHex v = convertText (toText v) <&> unBase16 | |
db4a02f9bbcaaf8950de709157ea585199fe2ec9af7756dd00fcf8dd0c6edadb | jeromesimeon/Galax | print_type.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : print_type.ml , v 1.5 2007/02/01 22:08:45 simeon Exp $
(* Module: Print_type
Description:
This module implements pretty-printing for the XQuery type
system.
*)
open Format
open Occurrence
open Xquery_type_ast
open Print_common
(***********************)
(* Print a simple type *)
(***********************)
(* Pretty-print a simple type specifier: either a reference to a named
   simple type or an inline (anonymous) derivation. *)
let rec print_stype_spec ff spec =
match spec.pstype_specifier_desc with
| STypeRef tname -> fprintf ff "of simple type %a@;<0 -2>" print_uqname tname
| SAnonymous sderiv -> print_stype_derivation ff sderiv
(* Same as above, but in the nested (braced) form used inside derivations. *)
and print_nested_stype_spec ff spec =
match spec.pstype_specifier_desc with
| STypeRef tname -> fprintf ff "%a@;<0 -2>" print_uqname tname
| SAnonymous sderiv -> fprintf ff "{@,%a@;<0 -2>}" print_stype_derivation sderiv
(* Pretty-print a simple type derivation: restriction, list or union. *)
and print_stype_derivation ff = function
| SRestriction sspec ->
fprintf ff "@[<hv 2>restricts %a@]" print_nested_stype_spec sspec
| SList sspec ->
fprintf ff "@[<hv 2>list of %a@]" print_nested_stype_spec sspec
| SUnion sspecs ->
fprintf ff "@[<hv 2>union of {@,%a@;<0 -2>}@]" print_stype_union sspecs
(* Print the members of a simple-type union separated by '|'. *)
and print_stype_union ff = function
| [] -> ()
| [s] -> print_nested_stype_spec ff s
| s :: ss ->
fprintf ff "@[%a |@,@ %a@]" print_nested_stype_spec s print_stype_union ss
(***************************)
(* Print a type derivation *)
(***************************)
(* Print a derivation head: 'restricts'/'extends' a named type. *)
let print_deriv ff = function
| TRestriction tname -> fprintf ff "restricts %a@ " print_uqname tname
| TExtension tname -> fprintf ff "extends %a@ " print_uqname tname
(* Optional derivation head: print nothing when absent. *)
let print_deriv_opt ff = function
| Some dr -> print_deriv ff dr
| None -> ()
(******************************)
(* Print a substitution group *)
(******************************)
(* Print a substitution-group membership clause, if any. *)
let print_substitutes_for ff = function
| TSubstitutesFor ename ->
fprintf ff "substitutes for %a@ " print_uqname ename
| TNonSubstitutesFor -> ()
(****************)
(* Print a type *)
(****************)
(* Note:
     Here is, for each type constructor, the code used for precedence
     in the pretty-printer.

       4 - TAttributeRef, TAttributeLocal, TElementRef, TElementLocal,
           TGroupRef, TEmpty, TNone, TAtomicRef, TDocument, TText
       3 - TBound
       2 - TInterleave
       1 - TSequence
       0 - TChoice

     Higher number indicates higher precedence.

   - Jerome and Phil
*)
(* Entry point: print an XQuery type at the loosest precedence level. *)
let rec print_xtype ff m =
print_xtype_prec 0 ff m
(* Print a type at precedence p, parenthesizing any construct whose own
   precedence is lower than p (see the precedence table above). *)
and print_xtype_prec p ff xt =
match xt.pxtype_desc with
| TAtomicRef tname ->
fprintf ff "%a" print_uqname tname
| TElementRef ename ->
fprintf ff "element %a" print_uqname ename
| TElementLocal(ename,nillable,xtypespec) ->
fprintf ff "@[<hv 2>element %a%a%a@]"
print_uqname ename print_nillable nillable print_xtype_spec xtypespec
| TAttributeRef aname ->
fprintf ff "attribute %a"
print_uqname aname
| TAttributeLocal(aname,stypespec) ->
fprintf ff "@[<hv 2>attribute %a %a@]"
print_uqname aname print_stype_spec stypespec
| TDocument xtype ->
fprintf ff "@[<hv 2>document {@,%a@;<0 -2>}@]"
(print_xtype_prec 0) xtype
| TText ->
fprintf ff "text"
| TComment ->
fprintf ff "comment"
| TProcessingInstruction ->
fprintf ff "processing-instruction"
| TGroupRef gname ->
fprintf ff "group %a" print_uqname gname
| TAttrGroupRef gname ->
fprintf ff "attrGroup %a" print_uqname gname
| TBound(xtype,minocc,maxocc) ->
(* Occurrence bounds: use the compact '*', '+', '?' forms where possible. *)
begin
match (minocc,maxocc) with
| (UP_INT 0, UNBOUNDED) ->
fprintf ff "%a*" (print_xtype_prec 3) xtype
| (UP_INT 1, UNBOUNDED) ->
fprintf ff "%a+" (print_xtype_prec 3) xtype
| (UP_INT 0, UP_INT 1) ->
fprintf ff "%a?" (print_xtype_prec 3) xtype
| (UNBOUNDED, _) ->
fprintf ff "none"
| _ ->
fprintf ff "%a (minoccurs %s) (maxoccurs %s)" (print_xtype_prec 3) xtype (string_of_occurs minocc) (string_of_occurs maxocc)
end
| TSequence(xtype1,xtype2) ->
if p > 1 then
fprintf ff "@[<hv 1>(%a,@ %a)@]"
(print_xtype_prec 1) xtype1 (print_xtype_prec 1) xtype2
else
fprintf ff "%a,@ %a"
(print_xtype_prec 1) xtype1 (print_xtype_prec 1) xtype2
| TEmpty ->
fprintf ff "()"
| TChoice(xtype1,xtype2) ->
if p > 0 then
fprintf ff "@[<hv 1>(%a |@ %a)@]"
(print_xtype_prec 0) xtype1 (print_xtype_prec 0) xtype2
else
fprintf ff "%a |@ %a"
(print_xtype_prec 0) xtype1 (print_xtype_prec 0) xtype2
| TNone ->
fprintf ff "none"
| TInterleave(xtype1,xtype2) ->
if p > 2 then
fprintf ff "@[<hv 1>(%a &@ %a)@]"
(print_xtype_prec 2) xtype1 (print_xtype_prec 2) xtype2
else
fprintf ff "%a &@ %a"
(print_xtype_prec 2) xtype1 (print_xtype_prec 2) xtype2
(**************************************)
(* Print a complex type specification *)
(**************************************)
(* Pretty-print a complex type specifier: a named reference or an
   anonymous derivation. *)
and print_ctype_spec ff ctypespec =
match ctypespec.pctype_specifier_desc with
| TTypeRef tname ->
fprintf ff " of type %a@;<0 -2>" print_uqname tname
| TAnonymous xtderiv ->
fprintf ff " %a" print_ctype_derivation xtderiv
(******************************)
(* Print a type specification *)
(******************************)
(* Dispatch on simple vs. complex type specification. *)
and print_xtype_spec ff = function
| TSpecSimple sspec -> fprintf ff " %a" print_stype_spec sspec
| TSpecComplex cspec -> print_ctype_spec ff cspec
(* Print a complex type derivation: optional derivation head, mixed
   flag, content model and (optionally) attribute uses. *)
and print_ctype_derivation ff = function
| (der, None, mixed, cxtype) ->
fprintf ff "%a%a{%a@;<0 -2>}"
print_deriv_opt der
print_mixed mixed
print_xtype cxtype
| (der, Some axtype, mixed, cxtype) ->
fprintf ff "%a%a{%a@,;%a@;<0 -2>}"
print_deriv_opt der
print_mixed mixed
print_xtype cxtype
print_xtype axtype
(* Dispatch between simple and complex type derivations. *)
let print_xtype_derivation ff = function
| TSimpleDerivation sder -> print_stype_derivation ff sder
| TComplexDerivation cder -> print_ctype_derivation ff cder
(* Print an element derivation: substitution group, nillability and
   the type specification. *)
let print_xelem_derivation ff (substfor, nillable, xtypespec) =
fprintf ff
"%a%a%a"
print_substitutes_for substfor print_nillable nillable
print_xtype_spec xtypespec
(****************************)
(* Print a type declaration *)
(****************************)
(* Print one schema declaration (attribute, element, simple/complex
   type, group or attribute group) in XQuery type syntax. *)
let print_type_decl ff td =
match td.pxtype_declaration_desc with
| TAttributeDecl(aname,stypspec) ->
fprintf ff
"@[<hv 2>declare attribute %a @,%a;@]"
print_uqname aname
print_stype_spec stypspec
| TElementDecl(ename, elemder) ->
fprintf ff
"@[<hv 2>declare element %a%a;@]"
print_uqname ename
print_xelem_derivation elemder
| TTypeDecl(tname,TSimpleDerivation sder) ->
fprintf ff "@[<hv 2>declare simple type %a %a;@]"
print_uqname tname
print_stype_derivation sder
| TTypeDecl(tname,TComplexDerivation cder) ->
fprintf ff "@[<hv 2>declare complex type %a %a;@]"
print_uqname tname
print_ctype_derivation cder
| TGroupDecl(gname,xtype) ->
fprintf ff "@[<hv 2>declare group %a {@,%a@;<0 -2>};@]"
print_uqname gname
print_xtype xtype
| TAttrGroupDecl(gname,xtype) ->
fprintf ff "@[<hv 2>declare attrGroup %a {@,%a@;<0 -2>};@]"
print_uqname gname
print_xtype xtype
(************************************)
(* Print a set of type declarations *)
(************************************)
(* Print a list of type declarations, one per line. *)
let print_issds ff typedecls =
List.iter (fun td -> fprintf ff "%a@\n" print_type_decl td) typedecls
(************************)
(* Print a whole schema *)
(************************)
(* Print namespace declarations; the empty prefix becomes the default
   element namespace declaration. *)
let print_namespace_decls ff nsdecls =
let print_one ff = function
| ("", uri) ->
fprintf ff "declare default element namespace = %s;@\n" (Namespace_names.quoted_string_of_uri uri)
| (ncname, uri) ->
fprintf ff "declare namespace %s = %s;@\n" ncname (Namespace_names.quoted_string_of_uri uri)
in
let rec print_all ff = function
| nsd :: [] -> fprintf ff "%a@\n" print_one nsd
| nsd :: rest -> fprintf ff "%a%a" print_one nsd print_all rest
| [] -> ()
in print_all ff nsdecls
(* Print a whole schema: namespaces, imported schemas (recursively),
   then the type declarations. *)
let rec print_xschema ff xs =
fprintf ff "@[<hv 2>declare schema {@,%a%a%a@;<0 -2>};@]\n@?"
print_namespace_decls xs.xschema_namespace_declarations
print_xschemas xs.xschema_imported_schemas
print_issds xs.xschema_type_declarations
and print_xschemas ff xss =
match xss with
| [] -> ()
| xs :: xss' ->
print_xschema ff xs;
print_xschemas ff xss'
(* Render a schema into a string buffer. *)
let bprintf_xschema s xs =
Gmisc.bprintf_stub s print_xschema xs
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/ast_printer/print_type.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Print_type
Description:
This module implements pretty-printing for the XQuery type
system.
*********************
Print a simple type
*********************
*************************
Print a type derivation
*************************
****************************
Print a substitution group
****************************
**************
Print a type
**************
************************************
Print a complex type specification
************************************
****************************
Print a type specification
****************************
**************************
Print a type declaration
**************************
**********************************
Print a set of type declarations
**********************************
**********************
Print a whole schema
********************** | Copyright 2001 - 2007 .
$ I d : print_type.ml , v 1.5 2007/02/01 22:08:45 simeon Exp $
open Format
open Occurrence
open Xquery_type_ast
open Print_common
let rec print_stype_spec ff spec =
match spec.pstype_specifier_desc with
| STypeRef tname -> fprintf ff "of simple type %a@;<0 -2>" print_uqname tname
| SAnonymous sderiv -> print_stype_derivation ff sderiv
and print_nested_stype_spec ff spec =
match spec.pstype_specifier_desc with
| STypeRef tname -> fprintf ff "%a@;<0 -2>" print_uqname tname
| SAnonymous sderiv -> fprintf ff "{@,%a@;<0 -2>}" print_stype_derivation sderiv
and print_stype_derivation ff = function
| SRestriction sspec ->
fprintf ff "@[<hv 2>restricts %a@]" print_nested_stype_spec sspec
| SList sspec ->
fprintf ff "@[<hv 2>list of %a@]" print_nested_stype_spec sspec
| SUnion sspecs ->
fprintf ff "@[<hv 2>union of {@,%a@;<0 -2>}@]" print_stype_union sspecs
and print_stype_union ff = function
| [] -> ()
| [s] -> print_nested_stype_spec ff s
| s :: ss ->
fprintf ff "@[%a |@,@ %a@]" print_nested_stype_spec s print_stype_union ss
let print_deriv ff = function
| TRestriction tname -> fprintf ff "restricts %a@ " print_uqname tname
| TExtension tname -> fprintf ff "extends %a@ " print_uqname tname
let print_deriv_opt ff = function
| Some dr -> print_deriv ff dr
| None -> ()
let print_substitutes_for ff = function
| TSubstitutesFor ename ->
fprintf ff "substitutes for %a@ " print_uqname ename
| TNonSubstitutesFor -> ()
Note :
Here is , for each type constructor , the code used for precedence
in the pretty - printer .
4 - TAttributeRef , TAttributeLocal , TElementRef , TElementLocal ,
TGroupRef , TEmpty , TNone , TAtomicRef , TDocument , TText
3 - TBound
2 - TInterleave
1 - TSequence
0 - TChoice
Higher number indicates higher precedence .
- and
Here is, for each type constructor, the code used for precedence
in the pretty-printer.
4 - TAttributeRef, TAttributeLocal, TElementRef, TElementLocal,
TGroupRef, TEmpty, TNone, TAtomicRef, TDocument, TText
3 - TBound
2 - TInterleave
1 - TSequence
0 - TChoice
Higher number indicates higher precedence.
- Jerome and Phil
*)
let rec print_xtype ff m =
print_xtype_prec 0 ff m
and print_xtype_prec p ff xt =
match xt.pxtype_desc with
| TAtomicRef tname ->
fprintf ff "%a" print_uqname tname
| TElementRef ename ->
fprintf ff "element %a" print_uqname ename
| TElementLocal(ename,nillable,xtypespec) ->
fprintf ff "@[<hv 2>element %a%a%a@]"
print_uqname ename print_nillable nillable print_xtype_spec xtypespec
| TAttributeRef aname ->
fprintf ff "attribute %a"
print_uqname aname
| TAttributeLocal(aname,stypespec) ->
fprintf ff "@[<hv 2>attribute %a %a@]"
print_uqname aname print_stype_spec stypespec
| TDocument xtype ->
fprintf ff "@[<hv 2>document {@,%a@;<0 -2>}@]"
(print_xtype_prec 0) xtype
| TText ->
fprintf ff "text"
| TComment ->
fprintf ff "comment"
| TProcessingInstruction ->
fprintf ff "processing-instruction"
| TGroupRef gname ->
fprintf ff "group %a" print_uqname gname
| TAttrGroupRef gname ->
fprintf ff "attrGroup %a" print_uqname gname
| TBound(xtype,minocc,maxocc) ->
begin
match (minocc,maxocc) with
| (UP_INT 0, UNBOUNDED) ->
fprintf ff "%a*" (print_xtype_prec 3) xtype
| (UP_INT 1, UNBOUNDED) ->
fprintf ff "%a+" (print_xtype_prec 3) xtype
| (UP_INT 0, UP_INT 1) ->
fprintf ff "%a?" (print_xtype_prec 3) xtype
| (UNBOUNDED, _) ->
fprintf ff "none"
| _ ->
fprintf ff "%a (minoccurs %s) (maxoccurs %s)" (print_xtype_prec 3) xtype (string_of_occurs minocc) (string_of_occurs maxocc)
end
| TSequence(xtype1,xtype2) ->
if p > 1 then
fprintf ff "@[<hv 1>(%a,@ %a)@]"
(print_xtype_prec 1) xtype1 (print_xtype_prec 1) xtype2
else
fprintf ff "%a,@ %a"
(print_xtype_prec 1) xtype1 (print_xtype_prec 1) xtype2
| TEmpty ->
fprintf ff "()"
| TChoice(xtype1,xtype2) ->
if p > 0 then
fprintf ff "@[<hv 1>(%a |@ %a)@]"
(print_xtype_prec 0) xtype1 (print_xtype_prec 0) xtype2
else
fprintf ff "%a |@ %a"
(print_xtype_prec 0) xtype1 (print_xtype_prec 0) xtype2
| TNone ->
fprintf ff "none"
| TInterleave(xtype1,xtype2) ->
if p > 2 then
fprintf ff "@[<hv 1>(%a &@ %a)@]"
(print_xtype_prec 2) xtype1 (print_xtype_prec 2) xtype2
else
fprintf ff "%a &@ %a"
(print_xtype_prec 2) xtype1 (print_xtype_prec 2) xtype2
and print_ctype_spec ff ctypespec =
match ctypespec.pctype_specifier_desc with
| TTypeRef tname ->
fprintf ff " of type %a@;<0 -2>" print_uqname tname
| TAnonymous xtderiv ->
fprintf ff " %a" print_ctype_derivation xtderiv
and print_xtype_spec ff = function
| TSpecSimple sspec -> fprintf ff " %a" print_stype_spec sspec
| TSpecComplex cspec -> print_ctype_spec ff cspec
and print_ctype_derivation ff = function
| (der, None, mixed, cxtype) ->
fprintf ff "%a%a{%a@;<0 -2>}"
print_deriv_opt der
print_mixed mixed
print_xtype cxtype
| (der, Some axtype, mixed, cxtype) ->
fprintf ff "%a%a{%a@,;%a@;<0 -2>}"
print_deriv_opt der
print_mixed mixed
print_xtype cxtype
print_xtype axtype
let print_xtype_derivation ff = function
| TSimpleDerivation sder -> print_stype_derivation ff sder
| TComplexDerivation cder -> print_ctype_derivation ff cder
let print_xelem_derivation ff (substfor, nillable, xtypespec) =
fprintf ff
"%a%a%a"
print_substitutes_for substfor print_nillable nillable
print_xtype_spec xtypespec
let print_type_decl ff td =
match td.pxtype_declaration_desc with
| TAttributeDecl(aname,stypspec) ->
fprintf ff
"@[<hv 2>declare attribute %a @,%a;@]"
print_uqname aname
print_stype_spec stypspec
| TElementDecl(ename, elemder) ->
fprintf ff
"@[<hv 2>declare element %a%a;@]"
print_uqname ename
print_xelem_derivation elemder
| TTypeDecl(tname,TSimpleDerivation sder) ->
fprintf ff "@[<hv 2>declare simple type %a %a;@]"
print_uqname tname
print_stype_derivation sder
| TTypeDecl(tname,TComplexDerivation cder) ->
fprintf ff "@[<hv 2>declare complex type %a %a;@]"
print_uqname tname
print_ctype_derivation cder
| TGroupDecl(gname,xtype) ->
fprintf ff "@[<hv 2>declare group %a {@,%a@;<0 -2>};@]"
print_uqname gname
print_xtype xtype
| TAttrGroupDecl(gname,xtype) ->
fprintf ff "@[<hv 2>declare attrGroup %a {@,%a@;<0 -2>};@]"
print_uqname gname
print_xtype xtype
let print_issds ff typedecls =
List.iter (fun td -> fprintf ff "%a@\n" print_type_decl td) typedecls
let print_namespace_decls ff nsdecls =
let print_one ff = function
| ("", uri) ->
fprintf ff "declare default element namespace = %s;@\n" (Namespace_names.quoted_string_of_uri uri)
| (ncname, uri) ->
fprintf ff "declare namespace %s = %s;@\n" ncname (Namespace_names.quoted_string_of_uri uri)
in
let rec print_all ff = function
| nsd :: [] -> fprintf ff "%a@\n" print_one nsd
| nsd :: rest -> fprintf ff "%a%a" print_one nsd print_all rest
| [] -> ()
in print_all ff nsdecls
let rec print_xschema ff xs =
fprintf ff "@[<hv 2>declare schema {@,%a%a%a@;<0 -2>};@]\n@?"
print_namespace_decls xs.xschema_namespace_declarations
print_xschemas xs.xschema_imported_schemas
print_issds xs.xschema_type_declarations
and print_xschemas ff xss =
match xss with
| [] -> ()
| xs :: xss' ->
print_xschema ff xs;
print_xschemas ff xss'
let bprintf_xschema s xs =
Gmisc.bprintf_stub s print_xschema xs
|
ba875b8f0294dfe741ce51349d47ece7b49077fea83d188dd5594a6e877a0d18 | fission-codes/fission | Types.hs | -- | JOSE @"cty"@ (Content Type) Header Parameter
module Web.UCAN.Header.Cty.Types (Cty (..)) where
import Data.Aeson
import RIO
import Test.QuickCheck
|
RFC 7519
5.2 . " cty " ( Content Type ) Header Parameter
The " cty " ( content type ) Header Parameter defined by [ ] and [ JWE ]
is used by this specification to convey structural information about
the JWT .
In the normal case in which nested signing or encryption operations
are not employed , the use of this Header Parameter is NOT
RECOMMENDED . In the case that nested signing or encryption is
employed , this Header Parameter MUST be present ; in this case , the
value MUST be " JWT " , to indicate that a Nested JWT is carried in this
JWT . While media type names are not case sensitive , it is
RECOMMENDED that " JWT " always be spelled using uppercase characters
for compatibility with legacy implementations . See Appendix A.2 for
an example of a Nested JWT .
RFC 7519
5.2. "cty" (Content Type) Header Parameter
The "cty" (content type) Header Parameter defined by [JWS] and [JWE]
is used by this specification to convey structural information about
the JWT.
In the normal case in which nested signing or encryption operations
are not employed, the use of this Header Parameter is NOT
RECOMMENDED. In the case that nested signing or encryption is
employed, this Header Parameter MUST be present; in this case, the
value MUST be "JWT", to indicate that a Nested JWT is carried in this
JWT. While media type names are not case sensitive, it is
RECOMMENDED that "JWT" always be spelled using uppercase characters
for compatibility with legacy implementations. See Appendix A.2 for
an example of a Nested JWT.
-}
data Cty
= JWT
deriving (Eq, Show, Read)
instance Arbitrary Cty where
arbitrary = return JWT
instance ToJSON Cty where
toJSON JWT = String "JWT"
instance FromJSON Cty where
parseJSON = withText "JWT.Header.Cty" \case
"JWT" -> return JWT
"jwt" -> return JWT
other -> fail $ show other <> "is not a valid JWT 'cty' value for Fission"
| null | https://raw.githubusercontent.com/fission-codes/fission/ae177407dccc20be67948a901956b99f40d37ac8/hs-ucan/library/Web/UCAN/Header/Cty/Types.hs | haskell | | JOSE @"cty"@ (Content Type) Header Parameter |
module Web.UCAN.Header.Cty.Types (Cty (..)) where
import Data.Aeson
import RIO
import Test.QuickCheck
|
RFC 7519
5.2 . " cty " ( Content Type ) Header Parameter
The " cty " ( content type ) Header Parameter defined by [ ] and [ JWE ]
is used by this specification to convey structural information about
the JWT .
In the normal case in which nested signing or encryption operations
are not employed , the use of this Header Parameter is NOT
RECOMMENDED . In the case that nested signing or encryption is
employed , this Header Parameter MUST be present ; in this case , the
value MUST be " JWT " , to indicate that a Nested JWT is carried in this
JWT . While media type names are not case sensitive , it is
RECOMMENDED that " JWT " always be spelled using uppercase characters
for compatibility with legacy implementations . See Appendix A.2 for
an example of a Nested JWT .
RFC 7519
5.2. "cty" (Content Type) Header Parameter
The "cty" (content type) Header Parameter defined by [JWS] and [JWE]
is used by this specification to convey structural information about
the JWT.
In the normal case in which nested signing or encryption operations
are not employed, the use of this Header Parameter is NOT
RECOMMENDED. In the case that nested signing or encryption is
employed, this Header Parameter MUST be present; in this case, the
value MUST be "JWT", to indicate that a Nested JWT is carried in this
JWT. While media type names are not case sensitive, it is
RECOMMENDED that "JWT" always be spelled using uppercase characters
for compatibility with legacy implementations. See Appendix A.2 for
an example of a Nested JWT.
-}
data Cty
= JWT
deriving (Eq, Show, Read)
instance Arbitrary Cty where
arbitrary = return JWT
instance ToJSON Cty where
toJSON JWT = String "JWT"
instance FromJSON Cty where
parseJSON = withText "JWT.Header.Cty" \case
"JWT" -> return JWT
"jwt" -> return JWT
other -> fail $ show other <> "is not a valid JWT 'cty' value for Fission"
|
b9575017050770ddaab330a0135a916f96300c32bc49df505a21d7324041249b | nuvla/api-server | binding.clj | (ns sixsq.nuvla.db.es.binding
"Binding protocol implemented for an Elasticsearch database that makes use
of the Elasticsearch REST API."
(:require
[clojure.tools.logging :as log]
[qbits.spandex :as spandex]
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.db.binding :refer [Binding]]
[sixsq.nuvla.db.es.acl :as acl]
[sixsq.nuvla.db.es.aggregation :as aggregation]
[sixsq.nuvla.db.es.common.es-mapping :as mapping]
[sixsq.nuvla.db.es.common.utils :as escu]
[sixsq.nuvla.db.es.filter :as filter]
[sixsq.nuvla.db.es.order :as order]
[sixsq.nuvla.db.es.pagination :as paging]
[sixsq.nuvla.db.es.select :as select]
[sixsq.nuvla.db.utils.common :as cu]
[sixsq.nuvla.server.util.response :as r])
(:import
(java.io Closeable)))
;; FIXME: Need to understand why the refresh parameter must be used to make unit test pass.
(def ^:const sniff-interval-mills 5000)
(def ^:const sniff-after-failure-delay-mills 1000)
(defn create-client
[options]
(spandex/client options))
(defn create-sniffer
[client options]
(spandex/sniffer client (or options {})))
(defn create-index
[client index]
(try
(let [{:keys [status]} (spandex/request client {:url [index], :method :head})]
(if (= 200 status)
(log/debug index "index already exists")
(log/error "unexpected status code when checking" index "index (" status ")")))
(catch Exception e
(let [{:keys [status body]} (ex-data e)]
(try
(if (= 404 status)
(let [{{:keys [acknowledged shards_acknowledged]} :body} (spandex/request
client
{:url [index], :method :put})]
(if (and acknowledged shards_acknowledged)
(log/info index "index created")
(log/warn index "index may or may not have been created")))
(log/error "unexpected status code when checking" index "index (" status "). " body))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(log/error "unexpected status code when creating" index "index (" status "). " (or error e)))))))))
(defn set-index-mapping
[client index mapping]
(try
(let [{:keys [body status]} (spandex/request client {:url [index :_mapping]
:method :put
:body mapping})]
(if (= 200 status)
(log/info index "mapping updated")
(log/warn index "mapping could not be updated (" status "). " body)))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(log/warn index "mapping could not be updated (" status "). " (or error e))))))
(defn add-data
[client {:keys [id] :as data}]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
updated-doc (-> data
(acl-utils/force-admin-role-right-all)
(acl-utils/normalize-acl-for-resource))
response (spandex/request client {:url [index :_doc uuid :_create]
:query-string {:refresh true}
:method :put
:body updated-doc})
success? (pos? (get-in response [:body :_shards :successful]))]
(if success?
(r/response-created id)
(r/response-conflict id)))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(if (= 409 status)
(r/response-conflict id)
(r/response-error (str "unexpected exception: " (or error e))))))))
(defn update-data
[client {:keys [id] :as data}]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
updated-doc (-> data
(acl-utils/force-admin-role-right-all)
(acl-utils/normalize-acl-for-resource))
response (spandex/request client {:url [index :_doc uuid]
:query-string {:refresh true}
:method :put
:body updated-doc})
success? (pos? (get-in response [:body :_shards :successful]))]
(if success?
(r/json-response data)
(r/response-conflict id)))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)]
(r/response-error (str "unexpected exception updating " id ": " (or error e)))))))
(defn find-data
[client id]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
response (spandex/request client {:url [index :_doc uuid]
:method :get})
found? (get-in response [:body :found])]
(if found?
(-> response :body :_source)
(throw (r/ex-not-found id))))
(catch Exception e
(let [{:keys [status] :as _response} (ex-data e)]
(if (= 404 status)
(throw (r/ex-not-found id))
(throw e))))))
(defn delete-data
[client id]
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
response (spandex/request client {:url [index :_doc uuid]
:query-string {:refresh true}
:method :delete})
success? (pos? (get-in response [:body :_shards :successful]))
deleted? (= "deleted" (get-in response [:body :result]))]
(if (and success? deleted?)
(r/response-deleted id)
(r/response-error (str "could not delete document " id)))))
(defn query-data
[client collection-id {:keys [cimi-params] :as options}]
(try
(let [index (escu/collection-id->index collection-id)
paging (paging/paging cimi-params)
orderby (order/sorters cimi-params)
aggregation (aggregation/aggregators cimi-params)
selected (select/select cimi-params)
query {:query (acl/and-acl-query (filter/filter cimi-params) options)}
body (merge paging orderby selected query aggregation)
response (spandex/request client {:url [index :_search]
:method :post
:body body})
success? (-> response :body :_shards :successful pos?)
count-before-pagination (-> response :body :hits :total :value)
aggregations (-> response :body :aggregations)
meta (cond-> {:count count-before-pagination}
aggregations (assoc :aggregations aggregations))
hits (->> response :body :hits :hits (map :_source))]
(if success?
[meta hits]
(let [msg (str "error when querying: " (:body response))]
(throw (r/ex-response msg 500)))))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)
msg (str "unexpected exception querying: " (or error e))]
(throw (r/ex-response msg 500))))))
(defn bulk-delete-data
[client collection-id {:keys [cimi-params] :as options}]
(try
(let [index (escu/collection-id->index collection-id)
query {:query (acl/and-acl-delete (filter/filter cimi-params) options)}
response (spandex/request client {:url [index :_delete_by_query]
:query-string {:refresh true}
:method :post
:body query})
body-response (:body response)
success? (-> body-response :failures empty?)]
(if success?
body-response
(let [msg (str "error when deleting by query: " body-response)]
(throw (r/ex-response msg 500)))))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)
msg (str "unexpected exception delete by query: " (or error e))]
(throw (r/ex-response msg 500))))))
(deftype ElasticsearchRestBinding [client sniffer]
Binding
(initialize [_ collection-id {:keys [spec] :as _options}]
(let [index (escu/collection-id->index collection-id)
mapping (mapping/mapping spec)]
(create-index client index)
(set-index-mapping client index mapping)))
(add [_ data _options]
(add-data client data))
(add [_ _collection-id data _options]
(add-data client data))
(retrieve [_ id _options]
(find-data client id))
(delete [_ {:keys [id]} _options]
(delete-data client id))
(edit [_ data _options]
(update-data client data))
(query [_ collection-id options]
(query-data client collection-id options))
(bulk-delete [_ collection-id options]
(bulk-delete-data client collection-id options))
Closeable
(close [_]
(spandex/close! sniffer)
(spandex/close! client)))
| null | https://raw.githubusercontent.com/nuvla/api-server/595f7dab27c165b9377dd9849f524c09ddc9d84b/code/src/sixsq/nuvla/db/es/binding.clj | clojure | FIXME: Need to understand why the refresh parameter must be used to make unit test pass. | (ns sixsq.nuvla.db.es.binding
"Binding protocol implemented for an Elasticsearch database that makes use
of the Elasticsearch REST API."
(:require
[clojure.tools.logging :as log]
[qbits.spandex :as spandex]
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.db.binding :refer [Binding]]
[sixsq.nuvla.db.es.acl :as acl]
[sixsq.nuvla.db.es.aggregation :as aggregation]
[sixsq.nuvla.db.es.common.es-mapping :as mapping]
[sixsq.nuvla.db.es.common.utils :as escu]
[sixsq.nuvla.db.es.filter :as filter]
[sixsq.nuvla.db.es.order :as order]
[sixsq.nuvla.db.es.pagination :as paging]
[sixsq.nuvla.db.es.select :as select]
[sixsq.nuvla.db.utils.common :as cu]
[sixsq.nuvla.server.util.response :as r])
(:import
(java.io Closeable)))
(def ^:const sniff-interval-mills 5000)
(def ^:const sniff-after-failure-delay-mills 1000)
(defn create-client
[options]
(spandex/client options))
(defn create-sniffer
[client options]
(spandex/sniffer client (or options {})))
(defn create-index
[client index]
(try
(let [{:keys [status]} (spandex/request client {:url [index], :method :head})]
(if (= 200 status)
(log/debug index "index already exists")
(log/error "unexpected status code when checking" index "index (" status ")")))
(catch Exception e
(let [{:keys [status body]} (ex-data e)]
(try
(if (= 404 status)
(let [{{:keys [acknowledged shards_acknowledged]} :body} (spandex/request
client
{:url [index], :method :put})]
(if (and acknowledged shards_acknowledged)
(log/info index "index created")
(log/warn index "index may or may not have been created")))
(log/error "unexpected status code when checking" index "index (" status "). " body))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(log/error "unexpected status code when creating" index "index (" status "). " (or error e)))))))))
(defn set-index-mapping
[client index mapping]
(try
(let [{:keys [body status]} (spandex/request client {:url [index :_mapping]
:method :put
:body mapping})]
(if (= 200 status)
(log/info index "mapping updated")
(log/warn index "mapping could not be updated (" status "). " body)))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(log/warn index "mapping could not be updated (" status "). " (or error e))))))
(defn add-data
[client {:keys [id] :as data}]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
updated-doc (-> data
(acl-utils/force-admin-role-right-all)
(acl-utils/normalize-acl-for-resource))
response (spandex/request client {:url [index :_doc uuid :_create]
:query-string {:refresh true}
:method :put
:body updated-doc})
success? (pos? (get-in response [:body :_shards :successful]))]
(if success?
(r/response-created id)
(r/response-conflict id)))
(catch Exception e
(let [{:keys [status body] :as _response} (ex-data e)
error (:error body)]
(if (= 409 status)
(r/response-conflict id)
(r/response-error (str "unexpected exception: " (or error e))))))))
(defn update-data
[client {:keys [id] :as data}]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
updated-doc (-> data
(acl-utils/force-admin-role-right-all)
(acl-utils/normalize-acl-for-resource))
response (spandex/request client {:url [index :_doc uuid]
:query-string {:refresh true}
:method :put
:body updated-doc})
success? (pos? (get-in response [:body :_shards :successful]))]
(if success?
(r/json-response data)
(r/response-conflict id)))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)]
(r/response-error (str "unexpected exception updating " id ": " (or error e)))))))
(defn find-data
[client id]
(try
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
response (spandex/request client {:url [index :_doc uuid]
:method :get})
found? (get-in response [:body :found])]
(if found?
(-> response :body :_source)
(throw (r/ex-not-found id))))
(catch Exception e
(let [{:keys [status] :as _response} (ex-data e)]
(if (= 404 status)
(throw (r/ex-not-found id))
(throw e))))))
(defn delete-data
[client id]
(let [[collection-id uuid] (cu/split-id id)
index (escu/collection-id->index collection-id)
response (spandex/request client {:url [index :_doc uuid]
:query-string {:refresh true}
:method :delete})
success? (pos? (get-in response [:body :_shards :successful]))
deleted? (= "deleted" (get-in response [:body :result]))]
(if (and success? deleted?)
(r/response-deleted id)
(r/response-error (str "could not delete document " id)))))
(defn query-data
[client collection-id {:keys [cimi-params] :as options}]
(try
(let [index (escu/collection-id->index collection-id)
paging (paging/paging cimi-params)
orderby (order/sorters cimi-params)
aggregation (aggregation/aggregators cimi-params)
selected (select/select cimi-params)
query {:query (acl/and-acl-query (filter/filter cimi-params) options)}
body (merge paging orderby selected query aggregation)
response (spandex/request client {:url [index :_search]
:method :post
:body body})
success? (-> response :body :_shards :successful pos?)
count-before-pagination (-> response :body :hits :total :value)
aggregations (-> response :body :aggregations)
meta (cond-> {:count count-before-pagination}
aggregations (assoc :aggregations aggregations))
hits (->> response :body :hits :hits (map :_source))]
(if success?
[meta hits]
(let [msg (str "error when querying: " (:body response))]
(throw (r/ex-response msg 500)))))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)
msg (str "unexpected exception querying: " (or error e))]
(throw (r/ex-response msg 500))))))
(defn bulk-delete-data
[client collection-id {:keys [cimi-params] :as options}]
(try
(let [index (escu/collection-id->index collection-id)
query {:query (acl/and-acl-delete (filter/filter cimi-params) options)}
response (spandex/request client {:url [index :_delete_by_query]
:query-string {:refresh true}
:method :post
:body query})
body-response (:body response)
success? (-> body-response :failures empty?)]
(if success?
body-response
(let [msg (str "error when deleting by query: " body-response)]
(throw (r/ex-response msg 500)))))
(catch Exception e
(let [{:keys [body] :as _response} (ex-data e)
error (:error body)
msg (str "unexpected exception delete by query: " (or error e))]
(throw (r/ex-response msg 500))))))
(deftype ElasticsearchRestBinding [client sniffer]
Binding
(initialize [_ collection-id {:keys [spec] :as _options}]
(let [index (escu/collection-id->index collection-id)
mapping (mapping/mapping spec)]
(create-index client index)
(set-index-mapping client index mapping)))
(add [_ data _options]
(add-data client data))
(add [_ _collection-id data _options]
(add-data client data))
(retrieve [_ id _options]
(find-data client id))
(delete [_ {:keys [id]} _options]
(delete-data client id))
(edit [_ data _options]
(update-data client data))
(query [_ collection-id options]
(query-data client collection-id options))
(bulk-delete [_ collection-id options]
(bulk-delete-data client collection-id options))
Closeable
(close [_]
(spandex/close! sniffer)
(spandex/close! client)))
|
ba04272f0a2023b391957997c6dd096c4e841a58ec3a0edab749407fac05b75e | denisshevchenko/breadu.info | CLI.hs | # LANGUAGE ApplicativeDo #
# LANGUAGE RecordWildCards #
{-|
Module : CLI
Description : CLI options
Stability : experimental
Portability : POSIX
Work with CLI options.
-}
module CLI
( optionsParser
, makeSureOptionsAreValid
) where
import Options.Applicative.Simple
import Data.Monoid ( (<>) )
import Control.Monad ( when )
import Control.Monad.Extra ( unlessM )
import System.Directory ( doesDirectoryExist )
import System.Exit ( die )
| Type that represents CLI options , we use it just for ' ' .
data Options = Options
{ commonFood :: FilePath -- ^ Path to directory with .csv-files with common localized lists of food.
, port :: Int -- ^ Port that server will listen.
}
-- | Parser parses actual CLI-arguments into a value of 'Option' type.
optionsParser :: Parser Options
optionsParser = do
commonFood <- strOption $
long "food"
<> short 'f'
<> metavar "PATH_TO_CSV_DIR"
<> showDefault
<> value "./food/common" -- Option's default value.
<> help "Path to a directory containing .csv-files with common lists of food"
port <- option auto $
long "port"
<> short 'p'
<> metavar "PORT"
<> showDefault
<> value 3000 -- Option's default value.
<> help "Port that server will listen"
-- 'commonFood' and 'port' fields are already here, so thanks to 'RecordWildCards'. ;-)
return Options{..}
{-|
Just checks if options are valid, exit with error otherwise.
Note that we have a tuple as a final result, not an 'Options' type,
because we don't want unnecessary dependencies outside this module.
-}
makeSureOptionsAreValid :: (Options, ()) -> IO (FilePath, Int)
makeSureOptionsAreValid (Options {..}, ()) =
makeSurePortIsValid
>> makeSureCSVExists
>> return (commonFood, port)
where
-- | Checks if specified value is valid registered port,
please see for details .
makeSurePortIsValid :: IO ()
makeSurePortIsValid =
when (port < minPort || port > maxPort) reportAboutWrongPort
where
minPort = 1024 -- There's no reasons to run this service on a privileged port. ;-)
maxPort = 49151
reportAboutWrongPort = die $
"Please specify valid registered port, integer from "
<> show minPort <> " to " <> show maxPort <> "."
makeSureCSVExists :: IO ()
makeSureCSVExists =
unlessM (doesDirectoryExist commonFood) reportAboutCSVMissing
where
reportAboutCSVMissing = die $
"No such directory '"
<> commonFood
<> "', you can specify path to a directory with .csv-file via '--food' option."
| null | https://raw.githubusercontent.com/denisshevchenko/breadu.info/b613fa28a4b527f1459876348a72e7748f16cd88/src/app/CLI.hs | haskell | |
Module : CLI
Description : CLI options
Stability : experimental
Portability : POSIX
Work with CLI options.
^ Path to directory with .csv-files with common localized lists of food.
^ Port that server will listen.
| Parser parses actual CLI-arguments into a value of 'Option' type.
Option's default value.
Option's default value.
'commonFood' and 'port' fields are already here, so thanks to 'RecordWildCards'. ;-)
|
Just checks if options are valid, exit with error otherwise.
Note that we have a tuple as a final result, not an 'Options' type,
because we don't want unnecessary dependencies outside this module.
| Checks if specified value is valid registered port,
There's no reasons to run this service on a privileged port. ;-) | # LANGUAGE ApplicativeDo #
# LANGUAGE RecordWildCards #
module CLI
( optionsParser
, makeSureOptionsAreValid
) where
import Options.Applicative.Simple
import Data.Monoid ( (<>) )
import Control.Monad ( when )
import Control.Monad.Extra ( unlessM )
import System.Directory ( doesDirectoryExist )
import System.Exit ( die )
| Type that represents CLI options , we use it just for ' ' .
data Options = Options
}
optionsParser :: Parser Options
optionsParser = do
commonFood <- strOption $
long "food"
<> short 'f'
<> metavar "PATH_TO_CSV_DIR"
<> showDefault
<> help "Path to a directory containing .csv-files with common lists of food"
port <- option auto $
long "port"
<> short 'p'
<> metavar "PORT"
<> showDefault
<> help "Port that server will listen"
return Options{..}
makeSureOptionsAreValid :: (Options, ()) -> IO (FilePath, Int)
makeSureOptionsAreValid (Options {..}, ()) =
makeSurePortIsValid
>> makeSureCSVExists
>> return (commonFood, port)
where
please see for details .
makeSurePortIsValid :: IO ()
makeSurePortIsValid =
when (port < minPort || port > maxPort) reportAboutWrongPort
where
maxPort = 49151
reportAboutWrongPort = die $
"Please specify valid registered port, integer from "
<> show minPort <> " to " <> show maxPort <> "."
makeSureCSVExists :: IO ()
makeSureCSVExists =
unlessM (doesDirectoryExist commonFood) reportAboutCSVMissing
where
reportAboutCSVMissing = die $
"No such directory '"
<> commonFood
<> "', you can specify path to a directory with .csv-file via '--food' option."
|
a6840f1a6f4c74953536d6ef94f4138ab5bd8e68b0b0e3a98d513b5ce1b49cc1 | mmottl/pomap | po_examples.ml |
po_examples.ml - implementations of partial order examples
Copyright ( C ) 2001 - 2002 ( OEFAI )
email :
WWW :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
po_examples.ml - implementations of partial order examples
Copyright (C) 2001-2002 Markus Mottl (OEFAI)
email:
WWW:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
open Format
module MakePONTuple (Spec : sig val choices : int end) = struct
type el = int * int
type ord = Unknown | Lower | Equal | Greater
let compare (x1, x2) (y1, y2) =
if x1 < y1 then
if x2 <= y2 then Lower
else Unknown
else if x1 > y1 then
if x2 < y2 then Unknown
else Greater
else (* x1 = y1 *)
if x2 < y2 then Lower
else if x2 > y2 then Greater
else Equal
let rand_el () = Random.int Spec.choices, Random.int Spec.choices
let pretty_print ppf (x, y) = fprintf ppf "(%d, %d)" x y
end
module MakePONList (Spec : sig val len : int val choices : int end) = struct
type el = int list
type ord = Unknown | Lower | Equal | Greater
let rec compare l1 l2 acc = match l1, l2 with
| h1 :: t1, h2 :: t2 ->
if h1 < h2 then
if acc = Greater then Unknown
else compare t1 t2 Lower
else if h1 > h2 then
if acc = Lower then Unknown
else compare t1 t2 Greater
else compare t1 t2 acc
| [], [] -> acc
| _ -> failwith "PONList.compare: lists have different length"
let compare l1 l2 = compare l1 l2 Equal
let rand_el () =
let l = ref [] in
for _i = 1 to Spec.len do l := Random.int Spec.choices :: !l done;
!l
let pretty_print ppf l =
let rec aux ppf = function
| [] -> ()
| [x] -> fprintf ppf "%d" x
| h :: t -> fprintf ppf "%d, %a" h aux t in
fprintf ppf "[%a]" aux l
end
module MakePOBList (Spec : sig val len : int end) = struct
type el = bool list
type ord = Unknown | Lower | Equal | Greater
let rec compare_loop l1 l2 acc =
match l1, l2 with
| h1 :: t1, h2 :: t2 ->
if h1 then
if h2 then compare_loop t1 t2 acc
else if acc = Lower then Unknown
else compare_loop t1 t2 Greater
else if h2 then
if acc = Greater then Unknown
else compare_loop t1 t2 Lower
else compare_loop t1 t2 acc
| [], [] -> acc
| _ -> failwith "POBList.compare_loop: lists have different length"
let compare l1 l2 = compare_loop l1 l2 Equal
let rand_el () =
let l = ref [] in
for _i = 1 to Spec.len do l := Random.bool () :: !l done;
!l
let int_of_bool b = if b then 1 else 0
let pretty_print ppf l =
let rec loop ppf = function
| [] -> ()
| [x] -> pp_print_int ppf (int_of_bool x)
| h :: t -> fprintf ppf "%d, %a" (int_of_bool h) loop t in
fprintf ppf "[%a]" loop l
end
| null | https://raw.githubusercontent.com/mmottl/pomap/9a1647ea2cf1ed8cf3ab8eedb4547c3b86a5efc6/examples/hasse/po_examples.ml | ocaml | x1 = y1 |
po_examples.ml - implementations of partial order examples
Copyright ( C ) 2001 - 2002 ( OEFAI )
email :
WWW :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
po_examples.ml - implementations of partial order examples
Copyright (C) 2001-2002 Markus Mottl (OEFAI)
email:
WWW:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
open Format
module MakePONTuple (Spec : sig val choices : int end) = struct
type el = int * int
type ord = Unknown | Lower | Equal | Greater
let compare (x1, x2) (y1, y2) =
if x1 < y1 then
if x2 <= y2 then Lower
else Unknown
else if x1 > y1 then
if x2 < y2 then Unknown
else Greater
if x2 < y2 then Lower
else if x2 > y2 then Greater
else Equal
let rand_el () = Random.int Spec.choices, Random.int Spec.choices
let pretty_print ppf (x, y) = fprintf ppf "(%d, %d)" x y
end
module MakePONList (Spec : sig val len : int val choices : int end) = struct
type el = int list
type ord = Unknown | Lower | Equal | Greater
let rec compare l1 l2 acc = match l1, l2 with
| h1 :: t1, h2 :: t2 ->
if h1 < h2 then
if acc = Greater then Unknown
else compare t1 t2 Lower
else if h1 > h2 then
if acc = Lower then Unknown
else compare t1 t2 Greater
else compare t1 t2 acc
| [], [] -> acc
| _ -> failwith "PONList.compare: lists have different length"
let compare l1 l2 = compare l1 l2 Equal
let rand_el () =
let l = ref [] in
for _i = 1 to Spec.len do l := Random.int Spec.choices :: !l done;
!l
let pretty_print ppf l =
let rec aux ppf = function
| [] -> ()
| [x] -> fprintf ppf "%d" x
| h :: t -> fprintf ppf "%d, %a" h aux t in
fprintf ppf "[%a]" aux l
end
module MakePOBList (Spec : sig val len : int end) = struct
type el = bool list
type ord = Unknown | Lower | Equal | Greater
let rec compare_loop l1 l2 acc =
match l1, l2 with
| h1 :: t1, h2 :: t2 ->
if h1 then
if h2 then compare_loop t1 t2 acc
else if acc = Lower then Unknown
else compare_loop t1 t2 Greater
else if h2 then
if acc = Greater then Unknown
else compare_loop t1 t2 Lower
else compare_loop t1 t2 acc
| [], [] -> acc
| _ -> failwith "POBList.compare_loop: lists have different length"
let compare l1 l2 = compare_loop l1 l2 Equal
let rand_el () =
let l = ref [] in
for _i = 1 to Spec.len do l := Random.bool () :: !l done;
!l
let int_of_bool b = if b then 1 else 0
let pretty_print ppf l =
let rec loop ppf = function
| [] -> ()
| [x] -> pp_print_int ppf (int_of_bool x)
| h :: t -> fprintf ppf "%d, %a" (int_of_bool h) loop t in
fprintf ppf "[%a]" loop l
end
|
29be39e2961e21d3883ae4ddaf141d71822268390157eb4f57489ae52b909e4a | rudymatela/conjure | u-conjure.hs | -- u-conjure.hs -- u-Conjure
--
This is a prototype for Conjure , a library for conjuring code
-- out of partially implemented functions.
--
--
Copyright ( C ) 2021
-- Distributed under the 3-Clause BSD licence (see the file LICENSE).
--
--
To run this you need to have both LeanCheck and Express installed :
--
$ cabal install
-- $ cabal install express
--
-- If installation fails, use v1-install:
--
$ cabal v1 - install
-- $ cabal v1-install express
import Data.List
import Data.Maybe
import Data.Express
import Data.Typeable
import Test.LeanCheck.Error
square :: Int -> Int
square 0 = 0
square 1 = 1
square 2 = 4
square 3 = 9
square 4 = 16
add :: Int -> Int -> Int
add 0 0 = 0
add 0 1 = 1
add 1 0 = 1
add 1 1 = 2
factorial :: Int -> Int
factorial 0 = 1
factorial 1 = 1
factorial 2 = 2
factorial 3 = 6
factorial 4 = 24
second :: [Int] -> Int
second [x,y] = y
second [x,y,z] = y
second [x,y,z,w] = y
-- reverse
reverse' :: [Int] -> [Int]
reverse' [x,y] = [y,x]
reverse' [x,y,z] = [z,y,x]
-- ++
(+++) :: [Int] -> [Int] -> [Int]
[x] +++ [y] = [x,y]
[x,y] +++ [z,w] = [x,y,z,w]
main :: IO ()
main = do
conjure "square" square primitives
conjure "add" add primitives
conjure "factorial" factorial primitives
conjure "factorial" factorial
[ val (0 :: Int)
, val (1 :: Int)
, value "+" ((+) :: Int -> Int -> Int)
, value "*" ((*) :: Int -> Int -> Int)
, value "foldr" (foldr :: (Int -> Int -> Int) -> Int -> [Int] -> Int)
, value "enumFromTo" (enumFromTo :: Int -> Int -> [Int])
]
conjure "second" second listPrimitives
conjure "++" (+++) listPrimitives
conjure "reverse" reverse' listPrimitives
-- even by using fold and some cheating,
-- this function is out of reach
reverse xs = foldr ( \x xs - > xs + + [ x ] ) [ ] xs
-- reverse xs = foldr (flip (++) . unit) [] xs
conjure "reverse" reverse' $ listPrimitives ++
[ value "unit" ((:[]) :: Int -> [Int])
, value "++" ((++) :: [Int] -> [Int] -> [Int])
these last two are cheats :
, value "flip" (flip :: ([Int]->[Int]->[Int]) -> [Int] -> [Int] -> [Int])
, value "." ((.) :: ([Int]->[Int]->[Int]) -> (Int->[Int]) -> Int -> [Int] -> [Int])
]
where
primitives :: [Expr]
primitives =
[ val (0 :: Int)
, val (1 :: Int)
, val (2 :: Int)
, val (3 :: Int)
, value "+" ((+) :: Int -> Int -> Int)
, value "*" ((*) :: Int -> Int -> Int)
, value "-" ((-) :: Int -> Int -> Int)
]
listPrimitives :: [Expr]
listPrimitives =
[ val (0 :: Int)
, val (1 :: Int)
, val ([] :: [Int])
, value "head" (head :: [Int] -> Int)
, value "tail" (tail :: [Int] -> [Int])
, value ":" ((:) :: Int -> [Int] -> [Int])
, value "foldr" (foldr :: (Int -> [Int] -> [Int]) -> [Int] -> [Int] -> [Int])
]
conjure :: Typeable f => String -> f -> [Expr] -> IO ()
conjure nm f primitives = do
print (value nm f) -- prints the type signature
case conjureImplementations nm f primitives of
[] -> putStrLn $ "cannot conjure"
es - > putStrLn $ unlines $ map showEq es -- uncomment to show all found variations
(e:_) -> putStrLn $ showEq e
putStrLn ""
where
showEq eq = showExpr (lhs eq) ++ " = " ++ showExpr (rhs eq)
conjureImplementations :: Typeable f => String -> f -> [Expr] -> [Expr]
conjureImplementations nm f primitives =
[ appn -==- e
| e <- candidateExprsFrom $ exs ++ primitives
, isTrue (appn -==- e)
]
where
appn = application nm f primitives
(ef:exs) = unfoldApp appn
isTrue e = all (errorToFalse . eval False) . map (e //-) $ definedBinds appn
definedBinds :: Expr -> [[(Expr,Expr)]]
definedBinds ffxx = [bs | bs <- bss, errorToFalse . eval False $ e //- bs]
where
e = ffxx -==- ffxx
bss = take 360 $ groundBinds ffxx
application :: Typeable f => String -> f -> [Expr] -> Expr
application nm f es = mostGeneralCanonicalVariation $ appn (value nm f)
where
appn ff | isFun ff = case [e | Just (_ :$ e) <- (map (ff $$)) es] of
[] -> error "application: could not find type representative"
(e:_) -> appn (ff :$ holeAsTypeOf e)
| otherwise = ff
candidateExprsFrom :: [Expr] -> [Expr]
candidateExprsFrom = concat . take 7 . expressionsT
where
expressionsT ds = [ds] \/ (delay $ productMaybeWith ($$) es es)
where
es = expressionsT ds
(-==-) :: Expr -> Expr -> Expr
ex -==- ey = headOr (val False) . map (:$ ey) $ mapMaybe ($$ ex)
[ value "==" ((==) :: Int -> Int -> Bool)
, value "==" ((==) :: Bool -> Bool -> Bool)
, value "==" ((==) :: [Int] -> [Int] -> Bool)
, value "==" ((==) :: [Bool] -> [Bool] -> Bool)
]
where
headOr x [] = x
headOr _ (x:_) = x
lhs, rhs :: Expr -> Expr
lhs (((Value "==" _) :$ e) :$ _) = e
rhs (((Value "==" _) :$ _) :$ e) = e
groundBinds :: Expr -> [[(Expr,Expr)]]
groundBinds e = concat $ products [mapT ((,) v) (tiersFor v) | v <- nubVars e]
tiersFor :: Expr -> [[Expr]]
tiersFor e = case show (typ e) of
"Int" -> mapT val (tiers `asTypeOf` [[undefined :: Int]])
"Bool" -> mapT val (tiers `asTypeOf` [[undefined :: Bool]])
"[Int]" -> mapT val (tiers `asTypeOf` [[undefined :: [Int]]])
"[Bool]" -> mapT val (tiers `asTypeOf` [[undefined :: [Bool]]])
_ -> []
| null | https://raw.githubusercontent.com/rudymatela/conjure/e2f6a9ebe397babc527be5b040640315c9dbe7d5/proto/u-conjure.hs | haskell | u-conjure.hs -- u-Conjure
out of partially implemented functions.
Distributed under the 3-Clause BSD licence (see the file LICENSE).
$ cabal install express
If installation fails, use v1-install:
$ cabal v1-install express
reverse
++
even by using fold and some cheating,
this function is out of reach
reverse xs = foldr (flip (++) . unit) [] xs
prints the type signature
uncomment to show all found variations | This is a prototype for Conjure , a library for conjuring code
Copyright ( C ) 2021
To run this you need to have both LeanCheck and Express installed :
$ cabal install
$ cabal v1 - install
import Data.List
import Data.Maybe
import Data.Express
import Data.Typeable
import Test.LeanCheck.Error
square :: Int -> Int
square 0 = 0
square 1 = 1
square 2 = 4
square 3 = 9
square 4 = 16
add :: Int -> Int -> Int
add 0 0 = 0
add 0 1 = 1
add 1 0 = 1
add 1 1 = 2
factorial :: Int -> Int
factorial 0 = 1
factorial 1 = 1
factorial 2 = 2
factorial 3 = 6
factorial 4 = 24
second :: [Int] -> Int
second [x,y] = y
second [x,y,z] = y
second [x,y,z,w] = y
reverse' :: [Int] -> [Int]
reverse' [x,y] = [y,x]
reverse' [x,y,z] = [z,y,x]
(+++) :: [Int] -> [Int] -> [Int]
[x] +++ [y] = [x,y]
[x,y] +++ [z,w] = [x,y,z,w]
main :: IO ()
main = do
conjure "square" square primitives
conjure "add" add primitives
conjure "factorial" factorial primitives
conjure "factorial" factorial
[ val (0 :: Int)
, val (1 :: Int)
, value "+" ((+) :: Int -> Int -> Int)
, value "*" ((*) :: Int -> Int -> Int)
, value "foldr" (foldr :: (Int -> Int -> Int) -> Int -> [Int] -> Int)
, value "enumFromTo" (enumFromTo :: Int -> Int -> [Int])
]
conjure "second" second listPrimitives
conjure "++" (+++) listPrimitives
conjure "reverse" reverse' listPrimitives
reverse xs = foldr ( \x xs - > xs + + [ x ] ) [ ] xs
conjure "reverse" reverse' $ listPrimitives ++
[ value "unit" ((:[]) :: Int -> [Int])
, value "++" ((++) :: [Int] -> [Int] -> [Int])
these last two are cheats :
, value "flip" (flip :: ([Int]->[Int]->[Int]) -> [Int] -> [Int] -> [Int])
, value "." ((.) :: ([Int]->[Int]->[Int]) -> (Int->[Int]) -> Int -> [Int] -> [Int])
]
where
primitives :: [Expr]
primitives =
[ val (0 :: Int)
, val (1 :: Int)
, val (2 :: Int)
, val (3 :: Int)
, value "+" ((+) :: Int -> Int -> Int)
, value "*" ((*) :: Int -> Int -> Int)
, value "-" ((-) :: Int -> Int -> Int)
]
listPrimitives :: [Expr]
listPrimitives =
[ val (0 :: Int)
, val (1 :: Int)
, val ([] :: [Int])
, value "head" (head :: [Int] -> Int)
, value "tail" (tail :: [Int] -> [Int])
, value ":" ((:) :: Int -> [Int] -> [Int])
, value "foldr" (foldr :: (Int -> [Int] -> [Int]) -> [Int] -> [Int] -> [Int])
]
conjure :: Typeable f => String -> f -> [Expr] -> IO ()
conjure nm f primitives = do
case conjureImplementations nm f primitives of
[] -> putStrLn $ "cannot conjure"
(e:_) -> putStrLn $ showEq e
putStrLn ""
where
showEq eq = showExpr (lhs eq) ++ " = " ++ showExpr (rhs eq)
conjureImplementations :: Typeable f => String -> f -> [Expr] -> [Expr]
conjureImplementations nm f primitives =
[ appn -==- e
| e <- candidateExprsFrom $ exs ++ primitives
, isTrue (appn -==- e)
]
where
appn = application nm f primitives
(ef:exs) = unfoldApp appn
isTrue e = all (errorToFalse . eval False) . map (e //-) $ definedBinds appn
definedBinds :: Expr -> [[(Expr,Expr)]]
definedBinds ffxx = [bs | bs <- bss, errorToFalse . eval False $ e //- bs]
where
e = ffxx -==- ffxx
bss = take 360 $ groundBinds ffxx
application :: Typeable f => String -> f -> [Expr] -> Expr
application nm f es = mostGeneralCanonicalVariation $ appn (value nm f)
where
appn ff | isFun ff = case [e | Just (_ :$ e) <- (map (ff $$)) es] of
[] -> error "application: could not find type representative"
(e:_) -> appn (ff :$ holeAsTypeOf e)
| otherwise = ff
candidateExprsFrom :: [Expr] -> [Expr]
candidateExprsFrom = concat . take 7 . expressionsT
where
expressionsT ds = [ds] \/ (delay $ productMaybeWith ($$) es es)
where
es = expressionsT ds
(-==-) :: Expr -> Expr -> Expr
ex -==- ey = headOr (val False) . map (:$ ey) $ mapMaybe ($$ ex)
[ value "==" ((==) :: Int -> Int -> Bool)
, value "==" ((==) :: Bool -> Bool -> Bool)
, value "==" ((==) :: [Int] -> [Int] -> Bool)
, value "==" ((==) :: [Bool] -> [Bool] -> Bool)
]
where
headOr x [] = x
headOr _ (x:_) = x
lhs, rhs :: Expr -> Expr
lhs (((Value "==" _) :$ e) :$ _) = e
rhs (((Value "==" _) :$ _) :$ e) = e
groundBinds :: Expr -> [[(Expr,Expr)]]
groundBinds e = concat $ products [mapT ((,) v) (tiersFor v) | v <- nubVars e]
tiersFor :: Expr -> [[Expr]]
tiersFor e = case show (typ e) of
"Int" -> mapT val (tiers `asTypeOf` [[undefined :: Int]])
"Bool" -> mapT val (tiers `asTypeOf` [[undefined :: Bool]])
"[Int]" -> mapT val (tiers `asTypeOf` [[undefined :: [Int]]])
"[Bool]" -> mapT val (tiers `asTypeOf` [[undefined :: [Bool]]])
_ -> []
|
093ba3a700a299546225692426be479fa3ed01ae70371e489d409ca488826734 | kompendium-ano/factom-haskell-client | Api.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
module Factom.RPC.Api
( runTCPClient
, reqHeights
, reqCurrentMinute
, reqDBlockByHeight
) where
import Control.Concurrent
import Control.Exception (bracket)
import Control.Monad.IO.Class
import Control.Remote.Monad.JSON
import Control.Remote.Monad.JSON.Client
import Control.Remote.Monad.JSON.Router
import Control.Remote.Monad.JSON.Trace
import Data.Aeson
import Data.Aeson.Types
import Data.Text as T
import Network.Socket (HostName, ServiceName,
SocketType (Stream),
addrAddress, addrFamily,
addrProtocol,
addrSocketType, close,
connect, defaultHints,
getAddrInfo, socket)
import Factom.RPC.JsonRpc (JsonRpcT, runJsonRpcT)
import Factom.RPC.Types.AdminBlock
import Factom.RPC.Types.DirectoryBlock
import Factom.RPC.Types.DirectoryBlockHeader
import Factom.RPC.Types.Heights
--------------------------------------------------------------------------------
endpoint = ":8088/v2"
endpointRemote = "" -- ""
runTCPClient :: HostName -> ServiceName -> JsonRpcT IO a -> IO a
runTCPClient host port f = do
addr <- resolve host port
bracket (open addr) close talk
where
resolve host' port' = do
let hints = defaultHints {addrSocketType = Stream}
addr:_ <- getAddrInfo (Just hints) (Just host') (Just port')
return addr
open addr = do
sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
connect sock $ addrAddress addr
return sock
talk sock = runJsonRpcT sock f
-- | "ablock-by-height" - Retrieve administrative blocks for any given height.
--
reqAblockByHeight :: Int -> RPC ()
reqAblockByHeight height = method "adblock-by-height" $ List [toJSON height]
-- | "ack" - Find the status of a transaction
--
reqAck :: RPC ()
reqAck = method "ack" None
-- | Get all information about Admin Block
based on Merkle Root Tree
reqAdminBlock :: Text -> RPC Ablock
reqAdminBlock mqr = method "admin-block" $ List [String mqr]
| Retrieve information about the directory block anchors that have been confirmed on Bitcoin and Ethereum .
--
reqAnchors :: RPC ()
reqAnchors = method "anchors" None
| Retrieve information about the directory block anchors that have been confirmed on Bitcoin and Ethereum .
--
reqChainHead :: RPC ()
reqChainHead = method "chain-head" None
| Send a Chain Commit Message to factomd to create a new Chain .
--
reqCommitChain :: RPC ()
reqCommitChain = method "commit-chain" None
-- | Send an Entry Commit Message to factom to create a new Entry
--
reqCommitEntry :: RPC ()
reqCommitEntry = method "commit-entry" None
-- | Return statistic for current call
--
reqCurrentMinute :: RPC ()
reqCurrentMinute = method "current-minute" None
-- | Send an Entry Commit Message to factom to create a new Entry
--
reqDBlockByHeight :: Int -> RPC DirectoryBlock
reqDBlockByHeight height =
method "dblock-by-height" $ Named [("height", Number (fromIntegral height))]
-- | Retrieve basic system information along with a description of the node’s current perception of the network
--
reqDiagnostics :: RPC ()
reqDiagnostics = method "diagnostics" None
-- | Get information about directory block
--
reqDirectoryBlock :: Text -> RPC DirectoryBlock
reqDirectoryBlock keymr = method "directory-block" $ List [toJSON keymr]
-- | Get the most recently recorded block.
The directory block head is the last known directory block by factom .
-- This can be used to grab the latest block and the information required to traverse the entire blockchain.
reqDirectoryBlockHead :: RPC DirectoryBlockHeader
reqDirectoryBlockHead = method "directory-block-head" $ None
-- | Retrieve the entry credit block for any given height
--
reqEcblockByHeight :: Int -> RPC ()
reqEcblockByHeight height = method "ecblok-by-height" $ List [toJSON height]
-- | Get an Entry from factomd specified by the Entry Hash.
--
reqEntry :: Text -> RPC ()
reqEntry hash = method "entry" $ List [String hash]
-- |
--
reqEntryAck :: RPC ()
reqEntryAck = method "entry-ack" None
-- |
--
reqEntryBlock :: RPC ()
reqEntryBlock = method "entry-block" None
-- |
--
reqEntryCreditBalance :: RPC ()
reqEntryCreditBalance = method "entry-credit-balane" None
-- |
--
reqEntryCreditBlock :: RPC ()
reqEntryCreditBlock = method "entry-credit-block" None
-- |
--
reqEntryCreditRate :: RPC ()
reqEntryCreditRate = method "entry-credit-rate" None
-- |
--
reqFactoidAck :: RPC ()
reqFactoidAck = method "factoid-ack" None
-- |
--
reqFactoidBalance :: RPC ()
reqFactoidBalance = method "factoid-balance" None
-- |
--
reqFactoidBlock :: RPC ()
reqFactoidBlock = method "factoid-blok" None
-- |
--
reqFactoidSubmit :: RPC ()
reqFactoidSubmit = method "factoid-submit" None
-- |
--
reqFBlockByHeight :: RPC ()
reqFBlockByHeight = method "fblock-by-height" None
-- |
--
reqHeights :: RPC Heights
reqHeights = method "heights" None
$ Named [ ( " jsonrpc " , String " 2.0 " ) , ( " i d " , ( 0::Int ) ) ]
--------------------------------------------------------------------------------
main = do
let s = weakSession (traceSendAPI "" $ clientSendAPI endpointRemote)
h <-
send s $ do
ablockByHeight 1000
return h
print h
| null | https://raw.githubusercontent.com/kompendium-ano/factom-haskell-client/87a73bb9079859f6223f8259da0dc9da568d1233/src/Factom/RPC/Api.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators #
------------------------------------------------------------------------------
""
| "ablock-by-height" - Retrieve administrative blocks for any given height.
| "ack" - Find the status of a transaction
| Get all information about Admin Block
| Send an Entry Commit Message to factom to create a new Entry
| Return statistic for current call
| Send an Entry Commit Message to factom to create a new Entry
| Retrieve basic system information along with a description of the node’s current perception of the network
| Get information about directory block
| Get the most recently recorded block.
This can be used to grab the latest block and the information required to traverse the entire blockchain.
| Retrieve the entry credit block for any given height
| Get an Entry from factomd specified by the Entry Hash.
|
|
|
|
|
|
|
|
|
|
|
------------------------------------------------------------------------------ | # LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Factom.RPC.Api
( runTCPClient
, reqHeights
, reqCurrentMinute
, reqDBlockByHeight
) where
import Control.Concurrent
import Control.Exception (bracket)
import Control.Monad.IO.Class
import Control.Remote.Monad.JSON
import Control.Remote.Monad.JSON.Client
import Control.Remote.Monad.JSON.Router
import Control.Remote.Monad.JSON.Trace
import Data.Aeson
import Data.Aeson.Types
import Data.Text as T
import Network.Socket (HostName, ServiceName,
SocketType (Stream),
addrAddress, addrFamily,
addrProtocol,
addrSocketType, close,
connect, defaultHints,
getAddrInfo, socket)
import Factom.RPC.JsonRpc (JsonRpcT, runJsonRpcT)
import Factom.RPC.Types.AdminBlock
import Factom.RPC.Types.DirectoryBlock
import Factom.RPC.Types.DirectoryBlockHeader
import Factom.RPC.Types.Heights
endpoint = ":8088/v2"
runTCPClient :: HostName -> ServiceName -> JsonRpcT IO a -> IO a
runTCPClient host port f = do
addr <- resolve host port
bracket (open addr) close talk
where
resolve host' port' = do
let hints = defaultHints {addrSocketType = Stream}
addr:_ <- getAddrInfo (Just hints) (Just host') (Just port')
return addr
open addr = do
sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
connect sock $ addrAddress addr
return sock
talk sock = runJsonRpcT sock f
reqAblockByHeight :: Int -> RPC ()
reqAblockByHeight height = method "adblock-by-height" $ List [toJSON height]
reqAck :: RPC ()
reqAck = method "ack" None
based on Merkle Root Tree
reqAdminBlock :: Text -> RPC Ablock
reqAdminBlock mqr = method "admin-block" $ List [String mqr]
| Retrieve information about the directory block anchors that have been confirmed on Bitcoin and Ethereum .
reqAnchors :: RPC ()
reqAnchors = method "anchors" None
| Retrieve information about the directory block anchors that have been confirmed on Bitcoin and Ethereum .
reqChainHead :: RPC ()
reqChainHead = method "chain-head" None
| Send a Chain Commit Message to factomd to create a new Chain .
reqCommitChain :: RPC ()
reqCommitChain = method "commit-chain" None
reqCommitEntry :: RPC ()
reqCommitEntry = method "commit-entry" None
reqCurrentMinute :: RPC ()
reqCurrentMinute = method "current-minute" None
reqDBlockByHeight :: Int -> RPC DirectoryBlock
reqDBlockByHeight height =
method "dblock-by-height" $ Named [("height", Number (fromIntegral height))]
reqDiagnostics :: RPC ()
reqDiagnostics = method "diagnostics" None
reqDirectoryBlock :: Text -> RPC DirectoryBlock
reqDirectoryBlock keymr = method "directory-block" $ List [toJSON keymr]
The directory block head is the last known directory block by factom .
reqDirectoryBlockHead :: RPC DirectoryBlockHeader
reqDirectoryBlockHead = method "directory-block-head" $ None
reqEcblockByHeight :: Int -> RPC ()
reqEcblockByHeight height = method "ecblok-by-height" $ List [toJSON height]
reqEntry :: Text -> RPC ()
reqEntry hash = method "entry" $ List [String hash]
reqEntryAck :: RPC ()
reqEntryAck = method "entry-ack" None
reqEntryBlock :: RPC ()
reqEntryBlock = method "entry-block" None
reqEntryCreditBalance :: RPC ()
reqEntryCreditBalance = method "entry-credit-balane" None
reqEntryCreditBlock :: RPC ()
reqEntryCreditBlock = method "entry-credit-block" None
reqEntryCreditRate :: RPC ()
reqEntryCreditRate = method "entry-credit-rate" None
reqFactoidAck :: RPC ()
reqFactoidAck = method "factoid-ack" None
reqFactoidBalance :: RPC ()
reqFactoidBalance = method "factoid-balance" None
reqFactoidBlock :: RPC ()
reqFactoidBlock = method "factoid-blok" None
reqFactoidSubmit :: RPC ()
reqFactoidSubmit = method "factoid-submit" None
reqFBlockByHeight :: RPC ()
reqFBlockByHeight = method "fblock-by-height" None
reqHeights :: RPC Heights
reqHeights = method "heights" None
$ Named [ ( " jsonrpc " , String " 2.0 " ) , ( " i d " , ( 0::Int ) ) ]
main = do
let s = weakSession (traceSendAPI "" $ clientSendAPI endpointRemote)
h <-
send s $ do
ablockByHeight 1000
return h
print h
|
ccb2fa8bfe1f195ceec1426ea3ff1bc6aa9b311946688452ecffaa98618ff91b | yuriy-chumak/ol | glx.scm | ; based on
Mesa 3 - D graphics library
(define-library (lib glx)
(export
GLX_VERSION_1_1
GLX_VERSION_1_2
GLX_VERSION_1_3
GLX_VERSION_1_4
GLX_EXTENSION_NAME
; Tokens for glXChooseVisual and glXGetConfig:
GLX_USE_GL
GLX_BUFFER_SIZE
GLX_LEVEL
GLX_RGBA
GLX_DOUBLEBUFFER
GLX_STEREO
GLX_AUX_BUFFERS
GLX_RED_SIZE
GLX_GREEN_SIZE
GLX_BLUE_SIZE
GLX_ALPHA_SIZE
GLX_DEPTH_SIZE
GLX_STENCIL_SIZE
GLX_ACCUM_RED_SIZE
GLX_ACCUM_GREEN_SIZE
GLX_ACCUM_BLUE_SIZE
GLX_ACCUM_ALPHA_SIZE
; Error codes returned by glXGetConfig:
GLX_BAD_SCREEN
GLX_BAD_ATTRIBUTE
GLX_NO_EXTENSION
GLX_BAD_VISUAL
GLX_BAD_CONTEXT
GLX_BAD_VALUE
GLX_BAD_ENUM
;; /*
* GLX 1.1 and later :
;; */
# define GLX_VENDOR 1
# define GLX_VERSION 2
# define GLX_EXTENSIONS 3
;; /*
* GLX 1.3 and later :
;; */
# define GLX_CONFIG_CAVEAT 0x20
# define GLX_DONT_CARE 0xFFFFFFFF
;; #define GLX_X_VISUAL_TYPE 0x22
;; #define GLX_TRANSPARENT_TYPE 0x23
;; #define GLX_TRANSPARENT_INDEX_VALUE 0x24
# define GLX_TRANSPARENT_RED_VALUE 0x25
# define GLX_TRANSPARENT_GREEN_VALUE 0x26
# define GLX_TRANSPARENT_BLUE_VALUE 0x27
;; #define GLX_TRANSPARENT_ALPHA_VALUE 0x28
# define GLX_WINDOW_BIT 0x00000001
;; #define GLX_PIXMAP_BIT 0x00000002
# define GLX_PBUFFER_BIT 0x00000004
# define GLX_AUX_BUFFERS_BIT 0x00000010
;; #define GLX_FRONT_LEFT_BUFFER_BIT 0x00000001
# define
;; #define GLX_BACK_LEFT_BUFFER_BIT 0x00000004
;; #define GLX_BACK_RIGHT_BUFFER_BIT 0x00000008
;; #define GLX_DEPTH_BUFFER_BIT 0x00000020
# define GLX_STENCIL_BUFFER_BIT 0x00000040
;; #define GLX_ACCUM_BUFFER_BIT 0x00000080
;; #define GLX_NONE 0x8000
;; #define GLX_SLOW_CONFIG 0x8001
# define GLX_TRUE_COLOR 0x8002
# define GLX_DIRECT_COLOR 0x8003
;; #define GLX_PSEUDO_COLOR 0x8004
# define GLX_STATIC_COLOR 0x8005
;; #define GLX_GRAY_SCALE 0x8006
;; #define GLX_STATIC_GRAY 0x8007
;; #define GLX_TRANSPARENT_RGB 0x8008
;; #define GLX_TRANSPARENT_INDEX 0x8009
# define GLX_VISUAL_ID 0x800B
# define GLX_SCREEN 0x800C
# define GLX_NON_CONFORMANT_CONFIG 0x800D
# define GLX_DRAWABLE_TYPE 0x8010
;; #define GLX_RENDER_TYPE 0x8011
# define GLX_X_RENDERABLE 0x8012
;; #define GLX_FBCONFIG_ID 0x8013
# define GLX_RGBA_TYPE
;; #define GLX_COLOR_INDEX_TYPE 0x8015
# define GLX_MAX_PBUFFER_WIDTH 0x8016
# define GLX_MAX_PBUFFER_HEIGHT 0x8017
# define GLX_MAX_PBUFFER_PIXELS 0x8018
# define GLX_PRESERVED_CONTENTS 0x801B
# define GLX_LARGEST_PBUFFER 0x801C
# define GLX_WIDTH 0x801D
# define GLX_HEIGHT 0x801E
# define GLX_EVENT_MASK 0x801F
;; #define GLX_DAMAGED 0x8020
;; #define GLX_SAVED 0x8021
;; #define GLX_WINDOW 0x8022
;; #define GLX_PBUFFER 0x8023
;; #define GLX_PBUFFER_HEIGHT 0x8040
# define GLX_PBUFFER_WIDTH 0x8041
;; #define GLX_RGBA_BIT 0x00000001
# define
# define GLX_PBUFFER_CLOBBER_MASK 0x08000000
;; /*
* GLX 1.4 and later :
;; */
;; #define GLX_SAMPLE_BUFFERS 0x186a0 /*100000*/
# define GLX_SAMPLES 0x186a1 /*100001*/
typedef struct _ _ GLXcontextRec * GLXContext ;
typedef ;
typedef ;
;; /* GLX 1.3 and later */
typedef struct _ _ ;
typedef ;
typedef ;
typedef ;
typedef ;
GLX ( WGL : Windows , CGL : , EGL )
glXQueryVersion
glXChooseVisual glXCreateContext glXMakeCurrent glXSwapBuffers
glXChooseFBConfig glXGetVisualFromFBConfig ; glXCreateContextAttribs
;; True False None
;; GLX_RGBA
;; GLX_DOUBLEBUFFER
;; GLX_RED_SIZE GLX_GREEN_SIZE GLX_BLUE_SIZE GLX_DEPTH_SIZE
GLX_CONFIG_CAVEAT
;; GLX_DONT_CARE
;; GLX_X_VISUAL_TYPE
;; GLX_TRANSPARENT_TYPE
;; GLX_TRANSPARENT_INDEX_VALUE
GLX_TRANSPARENT_RED_VALUE
;; GLX_TRANSPARENT_GREEN_VALUE
GLX_TRANSPARENT_BLUE_VALUE
;; GLX_TRANSPARENT_ALPHA_VALUE
;; GLX_WINDOW_BIT
;; GLX_PIXMAP_BIT
;; GLX_PBUFFER_BIT
;; GLX_AUX_BUFFERS_BIT
;; GLX_FRONT_LEFT_BUFFER_BIT
GLX_FRONT_RIGHT_BUFFER_BIT
;; GLX_BACK_LEFT_BUFFER_BIT
;; GLX_BACK_RIGHT_BUFFER_BIT
;; GLX_DEPTH_BUFFER_BIT
;; GLX_STENCIL_BUFFER_BIT
;; GLX_ACCUM_BUFFER_BIT
;; GLX_NONE
;; GLX_SLOW_CONFIG
;; GLX_TRUE_COLOR
;; GLX_DIRECT_COLOR
;; GLX_PSEUDO_COLOR
;; GLX_STATIC_COLOR
;; GLX_GRAY_SCALE
;; GLX_STATIC_GRAY
;; GLX_TRANSPARENT_RGB
;; GLX_TRANSPARENT_INDEX
GLX_VISUAL_ID
;; GLX_SCREEN
GLX_NON_CONFORMANT_CONFIG
;; GLX_DRAWABLE_TYPE
;; GLX_RENDER_TYPE
GLX_X_RENDERABLE
GLX_FBCONFIG_ID
GLX_RGBA_TYPE
;; GLX_COLOR_INDEX_TYPE
;; GLX_MAX_PBUFFER_WIDTH
GLX_MAX_PBUFFER_HEIGHT
GLX_MAX_PBUFFER_PIXELS
;; GLX_PRESERVED_CONTENTS
;; GLX_LARGEST_PBUFFER
;; GLX_WIDTH
GLX_HEIGHT
GLX_EVENT_MASK
;; GLX_DAMAGED
;; GLX_SAVED
;; GLX_WINDOW
;; GLX_PBUFFER
;; GLX_PBUFFER_HEIGHT
;; GLX_PBUFFER_WIDTH
;; GLX_RGBA_BIT
;; GLX_COLOR_INDEX_BIT
GLX_PBUFFER_CLOBBER_MASK
)
(import
(otus lisp)
(otus ffi))
(begin
(setq Display* fft-void*)
(setq X11 (or
(load-dynamic-library "libX11.so")
(load-dynamic-library "libX11.so.6")
(lambda args #false)))
(setq XOpenDisplay (if X11 (X11 Display* "XOpenDisplay" type-string)))
(setq GLX (or
(load-dynamic-library "libGL.so.1")
(lambda args #false)))
(setq glXQueryVersion (GLX fft-int "glXQueryVersion" Display* fft-int& fft-int&))
(setq major (box 0))
(setq minor (box 0))
(if glXQueryVersion (glXQueryVersion (XOpenDisplay #false) major minor))
(setq major (unbox major))
(setq minor (unbox minor))
(define GLX_VERSION_1_1 (>= (+ minor (* major 10)) 11))
(define GLX_VERSION_1_2 (>= (+ minor (* major 10)) 12))
(define GLX_VERSION_1_3 (>= (+ minor (* major 10)) 13))
(define GLX_VERSION_1_4 (>= (+ minor (* major 10)) 14))
(define GLX_EXTENSION_NAME "GLX")
; Tokens for glXChooseVisual and glXGetConfig:
(define GLX_USE_GL 1)
(define GLX_BUFFER_SIZE 2)
(define GLX_LEVEL 3)
(define GLX_RGBA 4)
(define GLX_DOUBLEBUFFER 5)
(define GLX_STEREO 6)
(define GLX_AUX_BUFFERS 7)
(define GLX_RED_SIZE 8)
(define GLX_GREEN_SIZE 9)
(define GLX_BLUE_SIZE 10)
(define GLX_ALPHA_SIZE 11)
(define GLX_DEPTH_SIZE 12)
(define GLX_STENCIL_SIZE 13)
(define GLX_ACCUM_RED_SIZE 14)
(define GLX_ACCUM_GREEN_SIZE 15)
(define GLX_ACCUM_BLUE_SIZE 16)
(define GLX_ACCUM_ALPHA_SIZE 17)
; Error codes returned by glXGetConfig:
(define GLX_BAD_SCREEN 1)
(define GLX_BAD_ATTRIBUTE 2)
(define GLX_NO_EXTENSION 3)
(define GLX_BAD_VISUAL 4)
(define GLX_BAD_CONTEXT 5)
(define GLX_BAD_VALUE 6)
(define GLX_BAD_ENUM 7)
;; (define Window fft-void*)
;; (define Visual* fft-void*)
;; (define XSetWindowAttributes* fft-void*)
;; (define XEvent* fft-void*)
;; (define X11 (load-dynamic-library "libX11.so"))
;; ; functions
;; (define XOpenDisplay (X11 Display* "XOpenDisplay" type-string))
( define fft - int " XDefaultScreen " Display * ) )
;; (define XRootWindow (X11 Window "XRootWindow" Display* fft-int))
( define XFree ( X11 fft - int " " fft - void * ) )
;; (define XBlackPixel (X11 fft-unsigned-long "XBlackPixel" Display* fft-int))
;; (define XWhitePixel (X11 fft-unsigned-long "XWhitePixel" Display* fft-int))
( define XCreateWindow ( X11 Window " XCreateWindow "
;; Display* ; display
;; Window ; parent Window
;; fft-int fft-int fft-unsigned-int fft-unsigned-int ; x y width height
;; fft-unsigned-int ; border width
;; fft-int ; depth
;; fft-unsigned-int ; class
;; Visual* ; visual
fft - unsigned - long ; valuemask
;; XSetWindowAttributes* ; attributes
;; ))
;; (define XCreateSimpleWindow (X11 fft-void* "XCreateSimpleWindow"
Display * Window ; display , parent Window
;; fft-int fft-int fft-unsigned-int fft-unsigned-int ; x y width height
;; fft-unsigned-int ; border width
;; fft-unsigned-long ; border
;; fft-unsigned-long ; background
;; ))
;
( define ExposureMask ( < < 1 15 ) )
( define KeyPressMask ( < < 1 0 ) )
( define XSelectInput ( X11 fft - int " XSelectInput " Display * Window fft - long ) )
( define XMapWindow ( X11 fft - int " XMapWindow " Display * Window ) )
;; (define XPending (X11 fft-int "XPending" Display*))
( define XStoreName ( X11 fft - int " XStoreName " Display * Window type - string ) )
;; ; events
;; ; event types: (X.h)
( setq KeyPress 2 )
( setq KeyRelease 3 )
;; ;#define ButtonPress<---><---->4
; # define ButtonRelease<-><---->5
;; ;#define MotionNotify<--><---->6
; # define EnterNotify<---><---->7
; # define LeaveNotify<---><---->8
; # define FocusIn<-><----><---->9
; # define FocusOut<><---->10
; # define KeymapNotify<--><---->11
; # define Expose<--><----><---->12
;; ;#define GraphicsExpose<><---->13
; # define NoExpose<><---->14
; # define
;; ;#define CreateNotify<--><---->16
;; ;#define DestroyNotify<-><---->17
; # define UnmapNotify<---><---->18
; # define MapNotify><---->19
; # define MapRequest<----><---->20
; # define
; # define ConfigureNotify><---->22
; # define ConfigureRequest<---->23
;; ;#define GravityNotify<-><---->24
; # define ResizeRequest<-><---->25
; # define CirculateNotify><---->26
; # define CirculateRequest<---->27
; # define PropertyNotify<><---->28
; # define SelectionClear<><---->29
; # define
;; ;#define SelectionNotify><---->31
; # define ColormapNotify<><---->32
;; ;#define ClientMessage<-><---->33
; # define MappingNotify<-><---->34
; # define
;; ;#define LASTEvent><---->36<-->/* must be bigger than any event # */
;; (define XNextEvent (X11 fft-int "XNextEvent" Display* XEvent*))
; ( define Colormap fft - void * )
; ( define XCreateColormap ( X11 Colormap " XCreateColormap " fft - void * fft - void * fft - void * fft - int ) )
;; ; -wm-delete-window-on-x11
; ( define XInternAtom ( X11 fft - void * " XInternAtom " fft - void * type - string fft - int ) )
;; ;(define XSetWMProtocols (
;; (define int fft-int)
;; (define int* (fft* fft-int))
( define int )
;; ; -=( wgl )=------------------------------------------------------------
; opengl :
( define glXChooseVisual ( GLX fft - void * " glXChooseVisual " fft - void * int int * ) )
( define GLX_RGBA 4 )
;; (define GLX_DOUBLEBUFFER 5)
;; (define GLX_RED_SIZE 8)
( define GLX_GREEN_SIZE 9 )
( define GLX_BLUE_SIZE 10 )
( define GLX_DEPTH_SIZE 12 )
( define ( GLX fft - void * " glXCreateContext " fft - void * fft - void * fft - void * bool ) )
( define glXMakeCurrent ( GLX bool " glXMakeCurrent " fft - void * fft - void * fft - void * ) )
( define glXSwapBuffers ( GLX fft - int " glXSwapBuffers " fft - void * fft - void * ) )
( define glXChooseFBConfig(GLX fft - void * " glXChooseFBConfig " fft - void * fft - int type - vptr type - vptr ) ) ; minimal 1.3
( define glXGetVisualFromFBConfig ( GLX fft - void * " glXGetVisualFromFBConfig " fft - void * fft - void * ) )
))
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/83dd03d311339763682eab02cbe0c1321daa25bc/libraries/lib/glx.scm | scheme | based on
Tokens for glXChooseVisual and glXGetConfig:
Error codes returned by glXGetConfig:
/*
*/
/*
*/
#define GLX_X_VISUAL_TYPE 0x22
#define GLX_TRANSPARENT_TYPE 0x23
#define GLX_TRANSPARENT_INDEX_VALUE 0x24
#define GLX_TRANSPARENT_ALPHA_VALUE 0x28
#define GLX_PIXMAP_BIT 0x00000002
#define GLX_FRONT_LEFT_BUFFER_BIT 0x00000001
#define GLX_BACK_LEFT_BUFFER_BIT 0x00000004
#define GLX_BACK_RIGHT_BUFFER_BIT 0x00000008
#define GLX_DEPTH_BUFFER_BIT 0x00000020
#define GLX_ACCUM_BUFFER_BIT 0x00000080
#define GLX_NONE 0x8000
#define GLX_SLOW_CONFIG 0x8001
#define GLX_PSEUDO_COLOR 0x8004
#define GLX_GRAY_SCALE 0x8006
#define GLX_STATIC_GRAY 0x8007
#define GLX_TRANSPARENT_RGB 0x8008
#define GLX_TRANSPARENT_INDEX 0x8009
#define GLX_RENDER_TYPE 0x8011
#define GLX_FBCONFIG_ID 0x8013
#define GLX_COLOR_INDEX_TYPE 0x8015
#define GLX_DAMAGED 0x8020
#define GLX_SAVED 0x8021
#define GLX_WINDOW 0x8022
#define GLX_PBUFFER 0x8023
#define GLX_PBUFFER_HEIGHT 0x8040
#define GLX_RGBA_BIT 0x00000001
/*
*/
#define GLX_SAMPLE_BUFFERS 0x186a0 /*100000*/
/* GLX 1.3 and later */
glXCreateContextAttribs
True False None
GLX_RGBA
GLX_DOUBLEBUFFER
GLX_RED_SIZE GLX_GREEN_SIZE GLX_BLUE_SIZE GLX_DEPTH_SIZE
GLX_DONT_CARE
GLX_X_VISUAL_TYPE
GLX_TRANSPARENT_TYPE
GLX_TRANSPARENT_INDEX_VALUE
GLX_TRANSPARENT_GREEN_VALUE
GLX_TRANSPARENT_ALPHA_VALUE
GLX_WINDOW_BIT
GLX_PIXMAP_BIT
GLX_PBUFFER_BIT
GLX_AUX_BUFFERS_BIT
GLX_FRONT_LEFT_BUFFER_BIT
GLX_BACK_LEFT_BUFFER_BIT
GLX_BACK_RIGHT_BUFFER_BIT
GLX_DEPTH_BUFFER_BIT
GLX_STENCIL_BUFFER_BIT
GLX_ACCUM_BUFFER_BIT
GLX_NONE
GLX_SLOW_CONFIG
GLX_TRUE_COLOR
GLX_DIRECT_COLOR
GLX_PSEUDO_COLOR
GLX_STATIC_COLOR
GLX_GRAY_SCALE
GLX_STATIC_GRAY
GLX_TRANSPARENT_RGB
GLX_TRANSPARENT_INDEX
GLX_SCREEN
GLX_DRAWABLE_TYPE
GLX_RENDER_TYPE
GLX_COLOR_INDEX_TYPE
GLX_MAX_PBUFFER_WIDTH
GLX_PRESERVED_CONTENTS
GLX_LARGEST_PBUFFER
GLX_WIDTH
GLX_DAMAGED
GLX_SAVED
GLX_WINDOW
GLX_PBUFFER
GLX_PBUFFER_HEIGHT
GLX_PBUFFER_WIDTH
GLX_RGBA_BIT
GLX_COLOR_INDEX_BIT
Tokens for glXChooseVisual and glXGetConfig:
Error codes returned by glXGetConfig:
(define Window fft-void*)
(define Visual* fft-void*)
(define XSetWindowAttributes* fft-void*)
(define XEvent* fft-void*)
(define X11 (load-dynamic-library "libX11.so"))
; functions
(define XOpenDisplay (X11 Display* "XOpenDisplay" type-string))
(define XRootWindow (X11 Window "XRootWindow" Display* fft-int))
(define XBlackPixel (X11 fft-unsigned-long "XBlackPixel" Display* fft-int))
(define XWhitePixel (X11 fft-unsigned-long "XWhitePixel" Display* fft-int))
Display* ; display
Window ; parent Window
fft-int fft-int fft-unsigned-int fft-unsigned-int ; x y width height
fft-unsigned-int ; border width
fft-int ; depth
fft-unsigned-int ; class
Visual* ; visual
valuemask
XSetWindowAttributes* ; attributes
))
(define XCreateSimpleWindow (X11 fft-void* "XCreateSimpleWindow"
display , parent Window
fft-int fft-int fft-unsigned-int fft-unsigned-int ; x y width height
fft-unsigned-int ; border width
fft-unsigned-long ; border
fft-unsigned-long ; background
))
(define XPending (X11 fft-int "XPending" Display*))
; events
; event types: (X.h)
;#define ButtonPress<---><---->4
# define ButtonRelease<-><---->5
;#define MotionNotify<--><---->6
# define EnterNotify<---><---->7
# define LeaveNotify<---><---->8
# define FocusIn<-><----><---->9
# define FocusOut<><---->10
# define KeymapNotify<--><---->11
# define Expose<--><----><---->12
;#define GraphicsExpose<><---->13
# define NoExpose<><---->14
# define
;#define CreateNotify<--><---->16
;#define DestroyNotify<-><---->17
# define UnmapNotify<---><---->18
# define MapNotify><---->19
# define MapRequest<----><---->20
# define
# define ConfigureNotify><---->22
# define ConfigureRequest<---->23
;#define GravityNotify<-><---->24
# define ResizeRequest<-><---->25
# define CirculateNotify><---->26
# define CirculateRequest<---->27
# define PropertyNotify<><---->28
# define SelectionClear<><---->29
# define
;#define SelectionNotify><---->31
# define ColormapNotify<><---->32
;#define ClientMessage<-><---->33
# define MappingNotify<-><---->34
# define
;#define LASTEvent><---->36<-->/* must be bigger than any event # */
(define XNextEvent (X11 fft-int "XNextEvent" Display* XEvent*))
( define Colormap fft - void * )
( define XCreateColormap ( X11 Colormap " XCreateColormap " fft - void * fft - void * fft - void * fft - int ) )
; -wm-delete-window-on-x11
( define XInternAtom ( X11 fft - void * " XInternAtom " fft - void * type - string fft - int ) )
;(define XSetWMProtocols (
(define int fft-int)
(define int* (fft* fft-int))
; -=( wgl )=------------------------------------------------------------
opengl :
(define GLX_DOUBLEBUFFER 5)
(define GLX_RED_SIZE 8)
minimal 1.3 | Mesa 3 - D graphics library
(define-library (lib glx)
(export
GLX_VERSION_1_1
GLX_VERSION_1_2
GLX_VERSION_1_3
GLX_VERSION_1_4
GLX_EXTENSION_NAME
GLX_USE_GL
GLX_BUFFER_SIZE
GLX_LEVEL
GLX_RGBA
GLX_DOUBLEBUFFER
GLX_STEREO
GLX_AUX_BUFFERS
GLX_RED_SIZE
GLX_GREEN_SIZE
GLX_BLUE_SIZE
GLX_ALPHA_SIZE
GLX_DEPTH_SIZE
GLX_STENCIL_SIZE
GLX_ACCUM_RED_SIZE
GLX_ACCUM_GREEN_SIZE
GLX_ACCUM_BLUE_SIZE
GLX_ACCUM_ALPHA_SIZE
GLX_BAD_SCREEN
GLX_BAD_ATTRIBUTE
GLX_NO_EXTENSION
GLX_BAD_VISUAL
GLX_BAD_CONTEXT
GLX_BAD_VALUE
GLX_BAD_ENUM
* GLX 1.1 and later :
# define GLX_VENDOR 1
# define GLX_VERSION 2
# define GLX_EXTENSIONS 3
* GLX 1.3 and later :
# define GLX_CONFIG_CAVEAT 0x20
# define GLX_DONT_CARE 0xFFFFFFFF
# define GLX_TRANSPARENT_RED_VALUE 0x25
# define GLX_TRANSPARENT_GREEN_VALUE 0x26
# define GLX_TRANSPARENT_BLUE_VALUE 0x27
# define GLX_WINDOW_BIT 0x00000001
# define GLX_PBUFFER_BIT 0x00000004
# define GLX_AUX_BUFFERS_BIT 0x00000010
# define
# define GLX_STENCIL_BUFFER_BIT 0x00000040
# define GLX_TRUE_COLOR 0x8002
# define GLX_DIRECT_COLOR 0x8003
# define GLX_STATIC_COLOR 0x8005
# define GLX_VISUAL_ID 0x800B
# define GLX_SCREEN 0x800C
# define GLX_NON_CONFORMANT_CONFIG 0x800D
# define GLX_DRAWABLE_TYPE 0x8010
# define GLX_X_RENDERABLE 0x8012
# define GLX_RGBA_TYPE
# define GLX_MAX_PBUFFER_WIDTH 0x8016
# define GLX_MAX_PBUFFER_HEIGHT 0x8017
# define GLX_MAX_PBUFFER_PIXELS 0x8018
# define GLX_PRESERVED_CONTENTS 0x801B
# define GLX_LARGEST_PBUFFER 0x801C
# define GLX_WIDTH 0x801D
# define GLX_HEIGHT 0x801E
# define GLX_EVENT_MASK 0x801F
# define GLX_PBUFFER_WIDTH 0x8041
# define
# define GLX_PBUFFER_CLOBBER_MASK 0x08000000
* GLX 1.4 and later :
# define GLX_SAMPLES 0x186a1 /*100001*/
GLX ( WGL : Windows , CGL : , EGL )
glXQueryVersion
glXChooseVisual glXCreateContext glXMakeCurrent glXSwapBuffers
GLX_CONFIG_CAVEAT
GLX_TRANSPARENT_RED_VALUE
GLX_TRANSPARENT_BLUE_VALUE
GLX_FRONT_RIGHT_BUFFER_BIT
GLX_VISUAL_ID
GLX_NON_CONFORMANT_CONFIG
GLX_X_RENDERABLE
GLX_FBCONFIG_ID
GLX_RGBA_TYPE
GLX_MAX_PBUFFER_HEIGHT
GLX_MAX_PBUFFER_PIXELS
GLX_HEIGHT
GLX_EVENT_MASK
GLX_PBUFFER_CLOBBER_MASK
)
(import
(otus lisp)
(otus ffi))
(begin
(setq Display* fft-void*)
(setq X11 (or
(load-dynamic-library "libX11.so")
(load-dynamic-library "libX11.so.6")
(lambda args #false)))
(setq XOpenDisplay (if X11 (X11 Display* "XOpenDisplay" type-string)))
(setq GLX (or
(load-dynamic-library "libGL.so.1")
(lambda args #false)))
(setq glXQueryVersion (GLX fft-int "glXQueryVersion" Display* fft-int& fft-int&))
(setq major (box 0))
(setq minor (box 0))
(if glXQueryVersion (glXQueryVersion (XOpenDisplay #false) major minor))
(setq major (unbox major))
(setq minor (unbox minor))
(define GLX_VERSION_1_1 (>= (+ minor (* major 10)) 11))
(define GLX_VERSION_1_2 (>= (+ minor (* major 10)) 12))
(define GLX_VERSION_1_3 (>= (+ minor (* major 10)) 13))
(define GLX_VERSION_1_4 (>= (+ minor (* major 10)) 14))
(define GLX_EXTENSION_NAME "GLX")
(define GLX_USE_GL 1)
(define GLX_BUFFER_SIZE 2)
(define GLX_LEVEL 3)
(define GLX_RGBA 4)
(define GLX_DOUBLEBUFFER 5)
(define GLX_STEREO 6)
(define GLX_AUX_BUFFERS 7)
(define GLX_RED_SIZE 8)
(define GLX_GREEN_SIZE 9)
(define GLX_BLUE_SIZE 10)
(define GLX_ALPHA_SIZE 11)
(define GLX_DEPTH_SIZE 12)
(define GLX_STENCIL_SIZE 13)
(define GLX_ACCUM_RED_SIZE 14)
(define GLX_ACCUM_GREEN_SIZE 15)
(define GLX_ACCUM_BLUE_SIZE 16)
(define GLX_ACCUM_ALPHA_SIZE 17)
(define GLX_BAD_SCREEN 1)
(define GLX_BAD_ATTRIBUTE 2)
(define GLX_NO_EXTENSION 3)
(define GLX_BAD_VISUAL 4)
(define GLX_BAD_CONTEXT 5)
(define GLX_BAD_VALUE 6)
(define GLX_BAD_ENUM 7)
( define fft - int " XDefaultScreen " Display * ) )
( define XFree ( X11 fft - int " " fft - void * ) )
( define XCreateWindow ( X11 Window " XCreateWindow "
( define ExposureMask ( < < 1 15 ) )
( define KeyPressMask ( < < 1 0 ) )
( define XSelectInput ( X11 fft - int " XSelectInput " Display * Window fft - long ) )
( define XMapWindow ( X11 fft - int " XMapWindow " Display * Window ) )
( define XStoreName ( X11 fft - int " XStoreName " Display * Window type - string ) )
( setq KeyPress 2 )
( setq KeyRelease 3 )
( define int )
( define glXChooseVisual ( GLX fft - void * " glXChooseVisual " fft - void * int int * ) )
( define GLX_RGBA 4 )
( define GLX_GREEN_SIZE 9 )
( define GLX_BLUE_SIZE 10 )
( define GLX_DEPTH_SIZE 12 )
( define ( GLX fft - void * " glXCreateContext " fft - void * fft - void * fft - void * bool ) )
( define glXMakeCurrent ( GLX bool " glXMakeCurrent " fft - void * fft - void * fft - void * ) )
( define glXSwapBuffers ( GLX fft - int " glXSwapBuffers " fft - void * fft - void * ) )
( define glXGetVisualFromFBConfig ( GLX fft - void * " glXGetVisualFromFBConfig " fft - void * fft - void * ) )
))
|
5c3ee76775c189bee80e6d45e1ec1204b7c1d31e14c333f5a8a4e3f82b03d8b1 | ghcjs/ghcjs-boot | IORef.hs | # LANGUAGE Unsafe #
# LANGUAGE NoImplicitPrelude , MagicHash , AutoDeriveTypeable #
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IORef
Copyright : ( c ) The University of Glasgow 2008
-- License : see libraries/base/LICENSE
--
-- Maintainer :
-- Stability : internal
Portability : non - portable ( GHC Extensions )
--
-- The IORef type
--
-----------------------------------------------------------------------------
module GHC.IORef (
IORef(..),
newIORef, readIORef, writeIORef, atomicModifyIORef
) where
import GHC.Base
import GHC.STRef
import GHC.IO
import Data.Typeable.Internal( Typeable )
-- ---------------------------------------------------------------------------
IORefs
-- |A mutable variable in the 'IO' monad
newtype IORef a = IORef (STRef RealWorld a) deriving( Typeable )
explicit instance because ca n't figure out a derived one
instance Eq (IORef a) where
IORef x == IORef y = x == y
-- |Build a new 'IORef'
newIORef :: a -> IO (IORef a)
newIORef v = stToIO (newSTRef v) >>= \ var -> return (IORef var)
-- |Read the value of an 'IORef'
readIORef :: IORef a -> IO a
readIORef (IORef var) = stToIO (readSTRef var)
-- |Write a new value into an 'IORef'
writeIORef :: IORef a -> a -> IO ()
writeIORef (IORef var) v = stToIO (writeSTRef var v)
atomicModifyIORef :: IORef a -> (a -> (a,b)) -> IO b
atomicModifyIORef (IORef (STRef r#)) f = IO $ \s -> atomicModifyMutVar# r# f s
| null | https://raw.githubusercontent.com/ghcjs/ghcjs-boot/8c549931da27ba9e607f77195208ec156c840c8a/boot/base/GHC/IORef.hs | haskell | # OPTIONS_GHC -funbox-strict-fields #
# OPTIONS_HADDOCK hide #
---------------------------------------------------------------------------
|
Module : GHC.IORef
License : see libraries/base/LICENSE
Maintainer :
Stability : internal
The IORef type
---------------------------------------------------------------------------
---------------------------------------------------------------------------
|A mutable variable in the 'IO' monad
|Build a new 'IORef'
|Read the value of an 'IORef'
|Write a new value into an 'IORef' | # LANGUAGE Unsafe #
# LANGUAGE NoImplicitPrelude , MagicHash , AutoDeriveTypeable #
Copyright : ( c ) The University of Glasgow 2008
Portability : non - portable ( GHC Extensions )
module GHC.IORef (
IORef(..),
newIORef, readIORef, writeIORef, atomicModifyIORef
) where
import GHC.Base
import GHC.STRef
import GHC.IO
import Data.Typeable.Internal( Typeable )
IORefs
newtype IORef a = IORef (STRef RealWorld a) deriving( Typeable )
explicit instance because ca n't figure out a derived one
instance Eq (IORef a) where
IORef x == IORef y = x == y
newIORef :: a -> IO (IORef a)
newIORef v = stToIO (newSTRef v) >>= \ var -> return (IORef var)
readIORef :: IORef a -> IO a
readIORef (IORef var) = stToIO (readSTRef var)
writeIORef :: IORef a -> a -> IO ()
writeIORef (IORef var) v = stToIO (writeSTRef var v)
atomicModifyIORef :: IORef a -> (a -> (a,b)) -> IO b
atomicModifyIORef (IORef (STRef r#)) f = IO $ \s -> atomicModifyMutVar# r# f s
|
1d2b1e6b71dd7d1c41e63472770ff5f183036cc284213ca0d1e55ede8521cb57 | fpco/ide-backend | TempFile.hs | # LANGUAGE CPP #
{-# OPTIONS_HADDOCK hide #-}
module Distribution.Compat.TempFile (
openTempFile,
openBinaryTempFile,
openNewBinaryFile,
createTempDirectory,
) where
import System.FilePath ((</>))
import Foreign.C (eEXIST)
import System.IO (Handle, openTempFile, openBinaryTempFile)
import Data.Bits ((.|.))
import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR,
o_BINARY, o_NONBLOCK, o_NOCTTY)
import System.IO.Error (isAlreadyExistsError)
import System.Posix.Internals (withFilePath)
import Foreign.C (CInt)
import GHC.IO.Handle.FD (fdToHandle)
import Distribution.Compat.Exception (tryIO)
import Control.Exception (onException)
import Foreign.C (getErrno, errnoToIOError)
import System.Posix.Internals (c_getpid)
#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS)
import System.Directory ( createDirectory )
#else
import qualified System.Posix
#endif
-- ------------------------------------------------------------
-- * temporary files
-- ------------------------------------------------------------
This is here for implementations that do not come with
System . IO.openTempFile . This includes nhc-1.20 , .
TODO : Not sure about JHC
-- TODO: This file should probably be removed.
-- This is a copy/paste of the openBinaryTempFile definition, but
if uses 666 rather than 600 for the permissions . The base library
-- needs to be changed to make this better.
openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle)
openNewBinaryFile dir template = do
pid <- c_getpid
findTempName pid
where
-- We split off the last extension, so we can use .foo.ext files
-- for temporary files (hidden on Unix OSes). Unfortunately we're
-- below file path in the hierarchy here.
(prefix,suffix) =
case break (== '.') $ reverse template of
First case : template contains no ' . 's . Just re - reverse it .
(rev_suffix, "") -> (reverse rev_suffix, "")
Second case : template contains at least one ' . ' . Strip the
-- dot from the prefix and prepend it to the suffix (if we don't
-- do this, the unique number will get added after the '.' and
-- thus be part of the extension, which is wrong.)
(rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix)
-- Otherwise, something is wrong, because (break (== '.')) should
-- always return a pair with either the empty string or a string
beginning with ' . ' as the second component .
_ -> error "bug in System.IO.openTempFile"
oflags = rw_flags .|. o_EXCL .|. o_BINARY
findTempName x = do
fd <- withFilePath filepath $ \ f ->
c_open f oflags 0o666
if fd < 0
then do
errno <- getErrno
if errno == eEXIST
then findTempName (x+1)
else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir))
else do
-- TODO: We want to tell fdToHandle what the file path is,
-- as any exceptions etc will only be able to report the
FD currently
h <- fdToHandle fd `onException` c_close fd
return (filepath, h)
where
filename = prefix ++ show x ++ suffix
filepath = dir `combine` filename
FIXME : bits copied from System . FilePath
combine a b
| null b = a
| null a = b
| last a == pathSeparator = a ++ b
| otherwise = a ++ [pathSeparator] ++ b
FIXME : Should use System . FilePath library
pathSeparator :: Char
#ifdef mingw32_HOST_OS
pathSeparator = '\\'
#else
pathSeparator = '/'
#endif
FIXME : Copied from
std_flags, output_flags, rw_flags :: CInt
std_flags = o_NONBLOCK .|. o_NOCTTY
output_flags = std_flags .|. o_CREAT
rw_flags = output_flags .|. o_RDWR
createTempDirectory :: FilePath -> String -> IO FilePath
createTempDirectory dir template = do
pid <- c_getpid
findTempName pid
where
findTempName x = do
let dirpath = dir </> template ++ "-" ++ show x
r <- tryIO $ mkPrivateDir dirpath
case r of
Right _ -> return dirpath
Left e | isAlreadyExistsError e -> findTempName (x+1)
| otherwise -> ioError e
mkPrivateDir :: String -> IO ()
#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS)
mkPrivateDir s = createDirectory s
#else
mkPrivateDir s = System.Posix.createDirectory s 0o700
#endif
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/TestSuite/inputs/Cabal-1.22.0.0/Distribution/Compat/TempFile.hs | haskell | # OPTIONS_HADDOCK hide #
------------------------------------------------------------
* temporary files
------------------------------------------------------------
TODO: This file should probably be removed.
This is a copy/paste of the openBinaryTempFile definition, but
needs to be changed to make this better.
We split off the last extension, so we can use .foo.ext files
for temporary files (hidden on Unix OSes). Unfortunately we're
below file path in the hierarchy here.
dot from the prefix and prepend it to the suffix (if we don't
do this, the unique number will get added after the '.' and
thus be part of the extension, which is wrong.)
Otherwise, something is wrong, because (break (== '.')) should
always return a pair with either the empty string or a string
TODO: We want to tell fdToHandle what the file path is,
as any exceptions etc will only be able to report the | # LANGUAGE CPP #
module Distribution.Compat.TempFile (
openTempFile,
openBinaryTempFile,
openNewBinaryFile,
createTempDirectory,
) where
import System.FilePath ((</>))
import Foreign.C (eEXIST)
import System.IO (Handle, openTempFile, openBinaryTempFile)
import Data.Bits ((.|.))
import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR,
o_BINARY, o_NONBLOCK, o_NOCTTY)
import System.IO.Error (isAlreadyExistsError)
import System.Posix.Internals (withFilePath)
import Foreign.C (CInt)
import GHC.IO.Handle.FD (fdToHandle)
import Distribution.Compat.Exception (tryIO)
import Control.Exception (onException)
import Foreign.C (getErrno, errnoToIOError)
import System.Posix.Internals (c_getpid)
#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS)
import System.Directory ( createDirectory )
#else
import qualified System.Posix
#endif
This is here for implementations that do not come with
System . IO.openTempFile . This includes nhc-1.20 , .
TODO : Not sure about JHC
if uses 666 rather than 600 for the permissions . The base library
openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle)
openNewBinaryFile dir template = do
pid <- c_getpid
findTempName pid
where
(prefix,suffix) =
case break (== '.') $ reverse template of
First case : template contains no ' . 's . Just re - reverse it .
(rev_suffix, "") -> (reverse rev_suffix, "")
Second case : template contains at least one ' . ' . Strip the
(rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix)
beginning with ' . ' as the second component .
_ -> error "bug in System.IO.openTempFile"
oflags = rw_flags .|. o_EXCL .|. o_BINARY
findTempName x = do
fd <- withFilePath filepath $ \ f ->
c_open f oflags 0o666
if fd < 0
then do
errno <- getErrno
if errno == eEXIST
then findTempName (x+1)
else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir))
else do
FD currently
h <- fdToHandle fd `onException` c_close fd
return (filepath, h)
where
filename = prefix ++ show x ++ suffix
filepath = dir `combine` filename
FIXME : bits copied from System . FilePath
combine a b
| null b = a
| null a = b
| last a == pathSeparator = a ++ b
| otherwise = a ++ [pathSeparator] ++ b
FIXME : Should use System . FilePath library
pathSeparator :: Char
#ifdef mingw32_HOST_OS
pathSeparator = '\\'
#else
pathSeparator = '/'
#endif
FIXME : Copied from
std_flags, output_flags, rw_flags :: CInt
std_flags = o_NONBLOCK .|. o_NOCTTY
output_flags = std_flags .|. o_CREAT
rw_flags = output_flags .|. o_RDWR
createTempDirectory :: FilePath -> String -> IO FilePath
createTempDirectory dir template = do
pid <- c_getpid
findTempName pid
where
findTempName x = do
let dirpath = dir </> template ++ "-" ++ show x
r <- tryIO $ mkPrivateDir dirpath
case r of
Right _ -> return dirpath
Left e | isAlreadyExistsError e -> findTempName (x+1)
| otherwise -> ioError e
mkPrivateDir :: String -> IO ()
#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS)
mkPrivateDir s = createDirectory s
#else
mkPrivateDir s = System.Posix.createDirectory s 0o700
#endif
|
035a43749f00880cef8793cd290d753134a2ca59635eb0377f77ffc15c00cfa4 | bradrn/brassica | Parse.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE KindSignatures #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Brassica.SoundChange.Parse
( parseRule
, parseRuleWithCategories
, parseSoundChanges
-- ** Re-export
, errorBundlePretty
) where
import Data.Char (isSpace)
import Data.Foldable (asum)
import Data.List (transpose)
import Data.Maybe (isNothing, isJust, fromJust)
import Data.Void (Void)
import Control.Applicative.Permutations
import Control.Monad.State
import qualified Data.Map.Strict as M
import Text.Megaparsec hiding (State)
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Brassica.SoundChange.Types
import qualified Brassica.SoundChange.Category as C
newtype Config = Config
{ categories :: C.Categories Grapheme
}
type Parser = ParsecT Void String (State Config)
class ParseLexeme (a :: LexemeType) where
parseLexeme :: Parser (Lexeme a)
parseCategoryElement :: Parser (CategoryElement a)
-- space consumer which does not match newlines
sc :: Parser ()
sc = L.space space1' (L.skipLineComment ";") empty
where
adapted from megaparsec source : like ' ' , but does not
-- consume newlines (which are important for rule separation)
space1' = void $ takeWhile1P (Just "white space") ((&&) <$> isSpace <*> (/='\n'))
-- space consumer which matches newlines
scn :: Parser ()
scn = L.space space1 (L.skipLineComment ";") empty
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc
symbol :: String -> Parser String
symbol = L.symbol sc
keyChars :: [Char]
keyChars = "#[](){}>\\→/_^%~*"
parseGrapheme :: Parser (Grapheme, Bool)
parseGrapheme = lexeme $ (,) <$> takeWhile1P Nothing (not . ((||) <$> isSpace <*> (`elem` keyChars))) <*> (isJust <$> optional (char '~'))
parseGrapheme' :: Parser Grapheme
parseGrapheme' = lexeme $ takeWhile1P Nothing (not . ((||) <$> isSpace <*> (=='=')))
data CategoryModification a
= Union (CategoryElement a)
| Intersect (CategoryElement a)
| Subtract (CategoryElement a)
parseGraphemeOrCategory :: ParseLexeme a => Parser (Lexeme a)
parseGraphemeOrCategory = do
(g, isntCat) <- parseGrapheme
if isntCat
then return $ Grapheme g
else do
cats <- gets categories
return $ case C.lookup g cats of
Nothing -> Grapheme g
Just c -> Category $ C.bake $ GraphemeEl <$> c
parseCategory :: ParseLexeme a => Parser (Lexeme a)
parseCategory = do
mods <- symbol "[" *> someTill parseCategoryModification (symbol "]")
cats <- gets categories
return $ Category $ C.bake $
C.expand (C.mapCategories GraphemeEl cats) (toCategory mods)
parseCategoryStandalone :: Parser (Grapheme, C.Category 'C.Expanded Grapheme)
parseCategoryStandalone = do
g <- parseGrapheme'
_ <- symbol "="
-- Use Target here because it only allows graphemes, not boundaries
mods <- some (parseCategoryModification @'Target)
cats <- gets categories
return (g, C.expand cats $ toGrapheme <$> toCategory mods)
toGrapheme :: CategoryElement 'Target -> Grapheme
toGrapheme (GraphemeEl g) = g
categoriesDeclParse :: Parser CategoriesDecl
categoriesDeclParse = do
overwrite <- isJust <$> optional (symbol "new")
when overwrite $ put $ Config M.empty
_ <- symbol "categories" <* scn
-- parse category declarations, adding to the set of known
-- categories as each is parsed
_ <- some $ parseFeature <|> parseCategoryDecl
_ <- symbol "end" <* scn
Config catsNew <- get
return $ CategoriesDecl (C.values catsNew)
where
parseFeature = do
_ <- symbol "feature"
namePlain <- optional $ try $ parseGrapheme' <* symbol "="
modsPlain <- some (parseCategoryModification @'Target)
cats <- gets categories
let plainCat = C.expand cats $ toGrapheme <$> toCategory modsPlain
plain = C.bake plainCat
modifiedCats <- some (symbol "/" *> parseCategoryStandalone) <* scn
let modified = C.bake . snd <$> modifiedCats
syns = zipWith (\a b -> (a, C.UnionOf [C.Node a, C.categorise b])) plain $ transpose modified
modify $ \(Config cs) -> Config $ M.unions
[ M.fromList syns
, M.fromList modifiedCats
, case namePlain of
Nothing -> M.empty
Just n -> M.singleton n plainCat
, cs
]
parseCategoryDecl = do
(k, c) <- try parseCategoryStandalone <* scn
modify $ \(Config cs) -> Config (M.insert k c cs)
parseCategoryModification :: ParseLexeme a => Parser (CategoryModification a)
parseCategoryModification = parsePrefix <*> parseCategoryElement
where
parsePrefix =
(Intersect <$ char '+')
<|> (Subtract <$ char '-')
<|> pure Union
toCategory :: [CategoryModification a] -> C.Category 'C.Unexpanded (CategoryElement a)
toCategory = go C.Empty
where
go c [] = c
go c (Union e :es) = go (C.UnionOf [c, C.Node e]) es
go c (Intersect e:es) = go (C.Intersect c (C.Node e)) es
go c (Subtract e :es) = go (C.Subtract c (C.Node e)) es
parseOptional :: ParseLexeme a => Parser (Lexeme a)
parseOptional = Optional <$> between (symbol "(") (symbol ")") (some parseLexeme)
parseGeminate :: Parser (Lexeme a)
parseGeminate = Geminate <$ symbol ">"
parseMetathesis :: Parser (Lexeme 'Replacement)
parseMetathesis = Metathesis <$ symbol "\\"
parseWildcard :: (ParseLexeme a, OneOf a 'Target 'Env) => Parser (Lexeme a)
parseWildcard = Wildcard <$> (symbol "^" *> parseLexeme)
parseBoundary :: Parser ()
parseBoundary = () <$ symbol "#"
parseDiscard :: Parser (Lexeme 'Replacement)
parseDiscard = Discard <$ symbol "~"
parseKleene :: OneOf a 'Target 'Env => Lexeme a -> Parser (Lexeme a)
parseKleene l = (Kleene l <$ symbol "*") <|> pure l
instance ParseLexeme 'Target where
parseLexeme = asum
[ parseCategory
, parseOptional
, parseGeminate
, parseWildcard
, parseGraphemeOrCategory
] >>= parseKleene
parseCategoryElement = GraphemeEl . fst <$> parseGrapheme
instance ParseLexeme 'Replacement where
parseLexeme = asum
[ parseCategory
, parseOptional
, parseMetathesis
, parseDiscard
, parseGeminate
, parseGraphemeOrCategory
]
parseCategoryElement = GraphemeEl . fst <$> parseGrapheme
instance ParseLexeme 'Env where
parseLexeme = asum
[ parseCategory
, Boundary <$ parseBoundary
, parseOptional
, parseGeminate
, parseWildcard
, parseGraphemeOrCategory
] >>= parseKleene
parseCategoryElement = asum
[ BoundaryEl <$ parseBoundary
, GraphemeEl . fst <$> parseGrapheme
]
parseLexemes :: ParseLexeme a => Parser [Lexeme a]
parseLexemes = many parseLexeme
parseFlags :: Parser Flags
parseFlags = runPermutation $ Flags
<$> toPermutation (isNothing <$> optional (symbol "-x"))
<*> toPermutationWithDefault LTR ((LTR <$ symbol "-ltr") <|> (RTL <$ symbol "-rtl"))
<*> toPermutation (isJust <$> optional (symbol "-1"))
<*> toPermutation (isJust <$> optional (symbol "-?"))
-- | Parse one sound-change rule: flags, target, a @/@ or @→@
-- separator, replacement, then an optional environment
-- (@/ before _ after@) and an optional exception in the same shape.
ruleParser :: Parser Rule
ruleParser = do
    -- This is an inlined version of 'match' from @megaparsec@;
    -- 'match' itself would be tricky to use here, since it would need
    -- to wrap multiple parsers rather than just one
    o <- getOffset
    s <- getInput
    flags <- parseFlags
    target <- parseLexemes
    _ <- lexeme $ oneOf "/→"
    replacement <- parseLexemes
    let parseEnvironment = do
            _ <- symbol "/"
            env1 <- parseLexemes
            _ <- symbol "_"
            env2 <- parseLexemes
            exception <- optional $ (,) <$> (symbol "/" *> parseLexemes) <* symbol "_" <*> parseLexemes
            return (env1, env2, exception)
    -- A missing environment defaults to empty contexts and no exception.
    (env1, env2, exception) <- parseEnvironment <|> pure ([], [], Nothing)
    _ <- optional scn -- consume newline after rule if present
    o' <- getOffset
    -- Recover the rule's plaintext by slicing the saved input between
    -- the two offsets, truncated at the first newline.
    let plaintext = takeWhile notNewline $ (fst . fromJust) (takeN_ (o' - o) s)
    return Rule{environment=(env1,env2), ..}
  where
    notNewline c = (c /= '\n') && (c /= '\r')
-- | Parse a 'String' in Brassica sound change syntax into a
-- 'Rule'. Returns 'Left' if the input string is malformed.
--
-- For details on the syntax, refer to the Brassica documentation.
-- (NOTE(review): the original haddock linked an anchor
-- @#basic-rule-syntax@; the full URL was lost in extraction.)
parseRule :: String -> Either (ParseErrorBundle String Void) Rule
parseRule = parseRuleWithCategories M.empty
-- | Same as 'parseRule', but also allows passing in some predefined
-- categories to substitute.
parseRuleWithCategories :: C.Categories Grapheme -> String -> Either (ParseErrorBundle String Void) Rule
parseRuleWithCategories cs s =
    evalState (runParserT (scn *> ruleParser <* eof) "" s) (Config cs)
-- | Parse a list of 'SoundChanges': a sequence of category
-- declarations and rules.
parseSoundChanges :: String -> Either (ParseErrorBundle String Void) SoundChanges
parseSoundChanges s = flip evalState (Config M.empty) $ runParserT (scn *> parser <* eof) "" s
  where
    -- '<$>' binds tighter than '<|>', so each iteration tries a
    -- category block first, then falls back to a rule.
    parser = many $
        CategoriesDeclS <$> categoriesDeclParse
        <|> RuleS <$> ruleParser
| null | https://raw.githubusercontent.com/bradrn/brassica/2184253a8f60d544973cd953ad3c7efc7b40e940/src/Brassica/SoundChange/Parse.hs | haskell | # LANGUAGE KindSignatures #
** Re-export
space consumer which does not match newlines
consume newlines (which are important for rule separation)
space consumer which matches newlines
Use Target here because it only allows graphemes, not boundaries
parse category declarations, adding to the set of known
categories as each is parsed
This is an inlined version of 'match' from @megaparsec@;
'match' itself would be tricky to use here, since it would need
to wrap multiple parsers rather than just one
consume newline after rule if present
'Rule'. Returns 'Left' if the input string is malformed.
For details on the syntax, refer to <#basic-rule-syntax>.
| Same as 'parseRule', but also allows passing in some predefined
categories to substitute. | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Brassica.SoundChange.Parse
( parseRule
, parseRuleWithCategories
, parseSoundChanges
, errorBundlePretty
) where
import Data.Char (isSpace)
import Data.Foldable (asum)
import Data.List (transpose)
import Data.Maybe (isNothing, isJust, fromJust)
import Data.Void (Void)
import Control.Applicative.Permutations
import Control.Monad.State
import qualified Data.Map.Strict as M
import Text.Megaparsec hiding (State)
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Brassica.SoundChange.Types
import qualified Brassica.SoundChange.Category as C
-- | Parser state: the categories which have been declared so far.
newtype Config = Config
    { categories :: C.Categories Grapheme
    }

-- | Megaparsec parser over 'String', threading 'Config' as state.
type Parser = ParsecT Void String (State Config)

-- | Lexemes and category elements are parsed differently depending on
-- the rule part ('Target', 'Replacement' or 'Env') they occur in.
class ParseLexeme (a :: LexemeType) where
    parseLexeme :: Parser (Lexeme a)
    parseCategoryElement :: Parser (CategoryElement a)
-- | Space consumer which does not match newlines.
sc :: Parser ()
sc = L.space space1' (L.skipLineComment ";") empty
  where
    -- adapted from megaparsec source: like 'space1', but does not
    -- consume newlines (which are important for rule separation)
    space1' = void $ takeWhile1P (Just "white space") ((&&) <$> isSpace <*> (/='\n'))

-- | Space consumer which matches newlines.
scn :: Parser ()
scn = L.space space1 (L.skipLineComment ";") empty
-- | Run a parser, then consume trailing (non-newline) whitespace.
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc

-- | Parse a fixed string, then consume trailing whitespace.
symbol :: String -> Parser String
symbol = L.symbol sc

-- | Characters with special meaning, excluded from graphemes.
keyChars :: [Char]
keyChars = "#[](){}>\\→/_^%~*"
-- | Parse a grapheme: a maximal run of characters which are neither
-- whitespace nor one of 'keyChars'.  Also reports whether the
-- grapheme was immediately followed by @~@ (used by
-- 'parseGraphemeOrCategory' to suppress category lookup).
parseGrapheme :: Parser (Grapheme, Bool)
parseGrapheme = lexeme $ (,) <$> takeWhile1P Nothing (not . ((||) <$> isSpace <*> (`elem` keyChars))) <*> (isJust <$> optional (char '~'))

-- | Parse a grapheme in category-declaration position: any run of
-- non-space characters other than @=@.
parseGrapheme' :: Parser Grapheme
parseGrapheme' = lexeme $ takeWhile1P Nothing (not . ((||) <$> isSpace <*> (=='=')))

-- | One step in building a category literal: add (union), intersect
-- with, or subtract an element.
data CategoryModification a
    = Union (CategoryElement a)
    | Intersect (CategoryElement a)
    | Subtract (CategoryElement a)
-- | Parse a token which may be either a plain grapheme or the name of
-- a declared category.  A trailing @~@ forces the grapheme reading;
-- otherwise the name is looked up among the declared categories and,
-- if found, replaced with that (baked) category.
parseGraphemeOrCategory :: ParseLexeme a => Parser (Lexeme a)
parseGraphemeOrCategory = do
    (g, isntCat) <- parseGrapheme
    if isntCat
        then return $ Grapheme g
        else do
            cats <- gets categories
            return $ case C.lookup g cats of
                Nothing -> Grapheme g
                Just c -> Category $ C.bake $ GraphemeEl <$> c
-- | Parse an inline category literal @[...]@ containing one or more
-- modifications, expanded against the categories declared so far.
parseCategory :: ParseLexeme a => Parser (Lexeme a)
parseCategory = do
    mods <- symbol "[" *> someTill parseCategoryModification (symbol "]")
    cats <- gets categories
    return $ Category $ C.bake $
        C.expand (C.mapCategories GraphemeEl cats) (toCategory mods)
-- | Parse a standalone category declaration of the form
-- @name = modifications@, returning the name together with the
-- category expanded against the declarations made so far.
parseCategoryStandalone :: Parser (Grapheme, C.Category 'C.Expanded Grapheme)
parseCategoryStandalone = do
    g <- parseGrapheme'
    _ <- symbol "="
    -- Use Target here because it only allows graphemes, not boundaries
    mods <- some (parseCategoryModification @'Target)
    cats <- gets categories
    return (g, C.expand cats $ toGrapheme <$> toCategory mods)

-- | Project the grapheme out of a target-side category element
-- (graphemes are the only element form on the target side).
toGrapheme :: CategoryElement 'Target -> Grapheme
toGrapheme (GraphemeEl g) = g
-- | Parse a @categories ... end@ block, adding each declaration to
-- the set of known categories (in the parser state) as it is parsed.
-- A leading @new@ keyword clears all previously declared categories.
categoriesDeclParse :: Parser CategoriesDecl
categoriesDeclParse = do
    overwrite <- isJust <$> optional (symbol "new")
    when overwrite $ put $ Config M.empty
    _ <- symbol "categories" <* scn
    _ <- some $ parseFeature <|> parseCategoryDecl
    _ <- symbol "end" <* scn
    Config catsNew <- get
    return $ CategoriesDecl (C.values catsNew)
  where
    -- A feature declares a plain category (optionally named) plus one
    -- or more modified variants; each plain grapheme also gains a
    -- synonym category unioning it with its variants column-wise.
    parseFeature = do
        _ <- symbol "feature"
        namePlain <- optional $ try $ parseGrapheme' <* symbol "="
        modsPlain <- some (parseCategoryModification @'Target)
        cats <- gets categories
        let plainCat = C.expand cats $ toGrapheme <$> toCategory modsPlain
            plain = C.bake plainCat
        modifiedCats <- some (symbol "/" *> parseCategoryStandalone) <* scn
        let modified = C.bake . snd <$> modifiedCats
            -- transpose aligns the i-th element of every variant with
            -- the i-th plain grapheme
            syns = zipWith (\a b -> (a, C.UnionOf [C.Node a, C.categorise b])) plain $ transpose modified
        -- Earlier entries in M.unions win on key collision.
        modify $ \(Config cs) -> Config $ M.unions
            [ M.fromList syns
            , M.fromList modifiedCats
            , case namePlain of
                Nothing -> M.empty
                Just n -> M.singleton n plainCat
            , cs
            ]
    -- A plain @name = ...@ declaration: record it in the state.
    parseCategoryDecl = do
        (k, c) <- try parseCategoryStandalone <* scn
        modify $ \(Config cs) -> Config (M.insert k c cs)
-- | Parse one category modification: an optional prefix (@+@ for
-- intersection, @-@ for subtraction, none for union) followed by a
-- category element.
parseCategoryModification :: ParseLexeme a => Parser (CategoryModification a)
parseCategoryModification = parsePrefix <*> parseCategoryElement
  where
    parsePrefix = asum
        [ Intersect <$ char '+'
        , Subtract <$ char '-'
        , pure Union
        ]

-- | Fold a list of modifications into an unexpanded category,
-- starting from the empty category.
toCategory :: [CategoryModification a] -> C.Category 'C.Unexpanded (CategoryElement a)
toCategory = foldl step C.Empty
  where
    step c (Union e) = C.UnionOf [c, C.Node e]
    step c (Intersect e) = C.Intersect c (C.Node e)
    step c (Subtract e) = C.Subtract c (C.Node e)

-- | Parse an optional group: one or more lexemes in parentheses.
parseOptional :: ParseLexeme a => Parser (Lexeme a)
parseOptional = Optional <$> (symbol "(" *> some parseLexeme <* symbol ")")
parseGeminate :: Parser (Lexeme a)
parseGeminate = Geminate <$ symbol ">"
parseMetathesis :: Parser (Lexeme 'Replacement)
parseMetathesis = Metathesis <$ symbol "\\"
parseWildcard :: (ParseLexeme a, OneOf a 'Target 'Env) => Parser (Lexeme a)
parseWildcard = Wildcard <$> (symbol "^" *> parseLexeme)
parseBoundary :: Parser ()
parseBoundary = () <$ symbol "#"
parseDiscard :: Parser (Lexeme 'Replacement)
parseDiscard = Discard <$ symbol "~"
parseKleene :: OneOf a 'Target 'Env => Lexeme a -> Parser (Lexeme a)
parseKleene l = (Kleene l <$ symbol "*") <|> pure l
instance ParseLexeme 'Target where
parseLexeme = asum
[ parseCategory
, parseOptional
, parseGeminate
, parseWildcard
, parseGraphemeOrCategory
] >>= parseKleene
parseCategoryElement = GraphemeEl . fst <$> parseGrapheme
instance ParseLexeme 'Replacement where
parseLexeme = asum
[ parseCategory
, parseOptional
, parseMetathesis
, parseDiscard
, parseGeminate
, parseGraphemeOrCategory
]
parseCategoryElement = GraphemeEl . fst <$> parseGrapheme
instance ParseLexeme 'Env where
parseLexeme = asum
[ parseCategory
, Boundary <$ parseBoundary
, parseOptional
, parseGeminate
, parseWildcard
, parseGraphemeOrCategory
] >>= parseKleene
parseCategoryElement = asum
[ BoundaryEl <$ parseBoundary
, GraphemeEl . fst <$> parseGrapheme
]
parseLexemes :: ParseLexeme a => Parser [Lexeme a]
parseLexemes = many parseLexeme
parseFlags :: Parser Flags
parseFlags = runPermutation $ Flags
<$> toPermutation (isNothing <$> optional (symbol "-x"))
<*> toPermutationWithDefault LTR ((LTR <$ symbol "-ltr") <|> (RTL <$ symbol "-rtl"))
<*> toPermutation (isJust <$> optional (symbol "-1"))
<*> toPermutation (isJust <$> optional (symbol "-?"))
ruleParser :: Parser Rule
ruleParser = do
o <- getOffset
s <- getInput
flags <- parseFlags
target <- parseLexemes
_ <- lexeme $ oneOf "/→"
replacement <- parseLexemes
let parseEnvironment = do
_ <- symbol "/"
env1 <- parseLexemes
_ <- symbol "_"
env2 <- parseLexemes
exception <- optional $ (,) <$> (symbol "/" *> parseLexemes) <* symbol "_" <*> parseLexemes
return (env1, env2, exception)
(env1, env2, exception) <- parseEnvironment <|> pure ([], [], Nothing)
o' <- getOffset
let plaintext = takeWhile notNewline $ (fst . fromJust) (takeN_ (o' - o) s)
return Rule{environment=(env1,env2), ..}
where
notNewline c = (c /= '\n') && (c /= '\r')
| Parse a ' String ' in Brassica sound change syntax into a
parseRule :: String -> Either (ParseErrorBundle String Void) Rule
parseRule = parseRuleWithCategories M.empty
parseRuleWithCategories :: C.Categories Grapheme -> String -> Either (ParseErrorBundle String Void) Rule
parseRuleWithCategories cs s = flip evalState (Config cs) $ runParserT (scn *> ruleParser <* eof) "" s
| Parse a list of ' SoundChanges ' .
parseSoundChanges :: String -> Either (ParseErrorBundle String Void) SoundChanges
parseSoundChanges s = flip evalState (Config M.empty) $ runParserT (scn *> parser <* eof) "" s
where
parser = many $
CategoriesDeclS <$> categoriesDeclParse
<|> RuleS <$> ruleParser
|
25b99c174be54f9d33dd716b9be6f78f47ccbe72ca93de67de0f5588cbddcafa | np/ling | Ling.hs | {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE QuasiQuotes #-}
# LANGUAGE TemplateHaskell #
module Ling where
import Control.Monad.Writer (tell, execWriter)
import Data.Char (isDigit)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import qualified MiniC.Print as C
import Ling.Abs
import Ling.Check.Base ( TCEnv, TCOpts, debugChecker, defaultTCOpts, runTCOpts
, runTCEnv, strictPar, edefs, errorScope, tcOpts)
import Ling.Check.Program (checkProgram)
import qualified Ling.Compile.C as Compile
import Ling.Defs (reduceL)
import Ling.ErrM
import Ling.Layout (resolveLayout)
import Ling.Lex (Token)
import qualified Ling.Norm as N
import Ling.Norm (transProgramDecs)
import Ling.Par
import Ling.Prelude
import Ling.Print
import Ling.Fuse (fuseProgram)
import Ling.Scoped (Scoped(Scoped))
import Ling.Subst (substDefs)
import Ling.SubTerms (transProgramTerms)
import Ling.Reify
import Ling.Rename (hDec)
import qualified Ling.Sequential as Sequential
type ParseFun a = [Token] -> Err a
-- | Command-line options: fifteen boolean switches, the options
-- forwarded to the type checker, and the step budget for --seq.
data Opts =
  Opts
  { _noCheck, _showExpand, _doExpand, _doReduce, _doSeq
  , _noSequential, _showTokens, _showAST, _showPretty, _noNorm
  , _doRefresh, _doFuse, _compile, _compilePrims, _noPrims :: Bool
  , _checkOpts :: TCOpts  -- ^ options forwarded to the type checker
  , _seqGas :: Int        -- ^ max steps for --seq ('maxBound' by default)
  }

-- Generate lenses (noCheck, doExpand, ...) for every field above.
$(makeLenses ''Opts)
-- | Positive view of '_noCheck': True when type checking is enabled.
-- ('iso not not' flips the Bool in both directions.)
check :: Lens' Opts Bool
check = noCheck . iso not not

-- | Every switch off, default checker options, unbounded seq gas.
-- NOTE(review): the 15 positional 'False's must match the field order
-- of 'Opts' exactly -- fragile; confirm when fields change.
defaultOpts :: Opts
defaultOpts = Opts False False False False False False False False False False
    False False False False False defaultTCOpts maxBound

-- | Setting this turns on both checking and checker debug output.
debugCheck :: Setter' Opts Bool
debugCheck = mergeSetters check (checkOpts.debugChecker)
-- | Lex a source string and resolve the layout (offside) rule.
layoutLexer :: String -> [Token]
layoutLexer s = resolveLayout True (myLexer s)
prims :: String
prims = [q|
id : (A : Type)(x : A)-> A
= \(A : Type)(x : A)-> x
_:_ : (A : Type)(x : A)-> A
= \(A : Type)(x : A)-> x
data Empty =
data Unit = `unit
data Bool = `false | `true
not : (x : Bool)-> Bool = \(x : Bool)-> case x of { `false -> `true, `true -> `false }
_&&_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> `false, `true -> y }
_||_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> y, `true -> `true }
_==B_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> not y, `true -> y }
_/=B_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> y, `true -> not y }
data LR = `left | `right
Int : Type
_+_ : (m : Int)(n : Int) -> Int
_-_ : (m : Int)(n : Int) -> Int
_*_ : (m : Int)(n : Int) -> Int
_/_ : (m : Int)(n : Int) -> Int
_%_ : (m : Int)(n : Int) -> Int
pow : (m : Int)(n : Int) -> Int
_==I_ : (m : Int)(n : Int) -> Bool
_<=I_ : (m : Int)(n : Int) -> Bool
_>=I_ : (m : Int)(n : Int) -> Bool
_<I_ : (m : Int)(n : Int) -> Bool
_>I_ : (m : Int)(n : Int) -> Bool
Vec : (A : Type)(n : Int) -> Type
take : (A : Type)(m : Int)(n : Int)(v : Vec A (m + n)) -> Vec A m
drop : (A : Type)(m : Int)(n : Int)(v : Vec A (m + n)) -> Vec A n
merge : (m : Int)(n : Int)(v0 : Vec Int m)(v1 : Vec Int n) -> Vec Int (m + n)
sort : (n : Int)(v : Vec Int n) -> Vec Int n
Session : Type
Log : (S : Session)-> Session
Seq : (S : Session)-> Session
Send : (S : Session)-> Session
Recv : (S : Session)-> Session
IO = \(I : Type)(O : (i : I) -> Type)-> ?(x : I). !O x
IO' = \(I : Type)(O : Type)-> ?I. !O
Par2 = \(S0 : Session)(S1 : Session)-> {S0, S1}
Ten2 = \(S0 : Session)(S1 : Session)-> [S0, S1]
Seq2 = \(S0 : Session)(S1 : Session)-> [:S0, S1:]
ParIO = \(I : Type)(O : Type)-> {?I, !O}
TenIO = \(I : Type)(O : Type)-> [?I, !O]
TenOI = \(O : Type)(I : Type)-> [!O, ?I]
SeqIO = \(I : Type)(O : Type)-> [: ?I, !O :]
SeqOI = \(O : Type)(I : Type)-> [: !O, ?I :]
EndoIO = \(T : Type)-> IO' T T
EndoLoli = \(S : Session)-> S -o S
EndoParIO = \(T : Type)-> ParIO T T
EndoTenIO = \(T : Type)-> TenIO T T
EndoTenOI = \(T : Type)-> TenOI T T
EndoSeqIO = \(T : Type)-> SeqIO T T
DotSort = \(A : Type)(n : Int)-> EndoIO (Vec A n)
ParSort = \(A : Type)(n : Int)-> EndoLoli (!Vec A n)
SeqSort = \(A : Type)(n : Int)-> [: ?Vec A n, !Vec A n :]
With = \(SL SR : Session)-> ?(b : LR). (case b of { `left -> SL, `right -> SR })
Oplus = \(SL SR : Session)-> !(b : LR). (case b of { `left -> SL, `right -> SR })
with_ =
\(SL SR : Session)
(pL : < SL >)(pR : < SR >)->
proc(c : With SL SR)
let x : LR <- c.
@(case x of { `left -> pL, `right -> pR })(c)
oplus =
\(SL SR : Session)
(b : LR)
(p : < case b of { `left -> SL, `right -> SR } >)->
proc(c)
c : Oplus SL SR <- b.
@p(c)
receiver =
\(A : Type)
(S : A -> Session)
(p : (x : A)-> < S x >)->
proc(c)
let x : A <- c.
@(p x)(c)
sender =
\(A : Type)
(S : A -> Session)
(t : A)
(p : < S t >)->
proc(c)
c : !(x : A). S x <- t.
@p(c)
Allocation : Type
auto : Allocation
fused : Allocation
fuse : (depth : Int)-> Allocation
alloc : Allocation = fuse 0
fuse1 : Allocation = fuse 1
fuse2 : Allocation = fuse 2
fuse3 : Allocation = fuse 3
Double : Type
_+D_ : (m : Double)(n : Double) -> Double
_-D_ : (m : Double)(n : Double) -> Double
_*D_ : (m : Double)(n : Double) -> Double
_/D_ : (m : Double)(n : Double) -> Double
powD : (m : Double)(n : Double) -> Double
_==D_ : (m : Double)(n : Double) -> Bool
_<=D_ : (m : Double)(n : Double) -> Bool
_>=D_ : (m : Double)(n : Double) -> Bool
_<D_ : (m : Double)(n : Double) -> Bool
_>D_ : (m : Double)(n : Double) -> Bool
Char : Type
_==C_ : (c0 c1 : Char)-> Bool
_>=C_ : (c0 c1 : Char)-> Bool
_<=C_ : (c0 c1 : Char)-> Bool
_>C_ : (c0 c1 : Char)-> Bool
_<C_ : (c0 c1 : Char)-> Bool
String : Type
_==S_ : (s0 s1 : String)-> Bool
showInt : (n : Int) -> String
showDouble : (n : Double) -> String
showChar : (c : Char) -> String
showString : (s : String) -> String
_++S_ : (s0 : String)(s1 : String) -> String
ccall : (A : Type)-> String -> A
Int2Double = ccall (Int -> Double) "(double)"
Int2Char = ccall (Int -> Char) "(char)"
sqrtD = ccall (Double -> Double) "sqrt"
ctype : String -> Type
cconst : (A : Type)-> String -> A
-- math.h
PI = cconst Double "M_PI"
-- complex.h
ComplexDouble : Type = ctype "complex double"
ComplexI = cconst ComplexDouble "_Complex_I"
Int2ComplexDouble = ccall (Int -> ComplexDouble) "(double complex)"
Double2Complex = ccall (Double -> ComplexDouble) "(double complex)"
_+CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_-CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_*CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_/CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
powCD : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_==CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_<=CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_>=CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_<CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_>CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
cabs = ccall (ComplexDouble -> Double) "cabs"
cacos = ccall (ComplexDouble -> ComplexDouble) "cacos"
cacosh = ccall (ComplexDouble -> ComplexDouble) "cacosh"
carg = ccall (ComplexDouble -> ComplexDouble) "carg"
casin = ccall (ComplexDouble -> ComplexDouble) "casin"
casinh = ccall (ComplexDouble -> ComplexDouble) "casinh"
catan = ccall (ComplexDouble -> ComplexDouble) "catan"
catanh = ccall (ComplexDouble -> ComplexDouble) "catanh"
ccos = ccall (ComplexDouble -> ComplexDouble) "ccos"
ccosh = ccall (ComplexDouble -> ComplexDouble) "ccosh"
cexp = ccall (ComplexDouble -> ComplexDouble) "cexp"
cimag = ccall (ComplexDouble -> Double) "cimag"
clog = ccall (ComplexDouble -> ComplexDouble) "clog"
conj = ccall (ComplexDouble -> ComplexDouble) "conj"
cpow = ccall (ComplexDouble -> ComplexDouble) "cpow"
cproj = ccall (ComplexDouble -> ComplexDouble) "cproj"
creal = ccall (ComplexDouble -> Double) "creal"
csin = ccall (ComplexDouble -> ComplexDouble) "csin"
csinh = ccall (ComplexDouble -> ComplexDouble) "csinh"
csqrt = ccall (ComplexDouble -> ComplexDouble) "csqrt"
ctan = ccall (ComplexDouble -> ComplexDouble) "ctan"
ctanh = ccall (ComplexDouble -> ComplexDouble) "ctanh"
|]
-- | The normalized form of the built-in primitives.  Parsing 'prims'
-- is expected always to succeed; failure is a programmer error and
-- aborts with the parse message.
primsN :: N.Program
primsN =
  case pProgram (layoutLexer prims) of
    Bad e -> error $ "Bad prims\n" ++ e
    Ok p -> norm p
-- | Read a file and parse its contents with the given parse function.
runFile :: (Print a, Show a) => Opts -> ParseFun a -> FilePath -> IO a
runFile v p f = run v p =<< readFile f

-- | Parse a program file, then apply the selected passes to it.
runProgram :: Opts -> FilePath -> IO ()
runProgram opts f = transP opts =<< runFile opts pProgram f
-- | Lex and parse a source string, optionally dumping the token
-- stream first (--show-tokens); a parse failure aborts the process.
run :: (Print a, Show a) => Opts -> ParseFun a -> String -> IO a
run opts p s = do
  when (opts ^. showTokens) $ do
    putStrLn "Tokens:"
    for_ ts $ putStrLn . ppShow
  case p ts of
    Bad e -> failIO $ "Parse Failed: " ++ e
    Ok tree -> return tree
  where
    ts = layoutLexer s
-- | Prepend the primitive definitions when the flag is set,
-- otherwise leave the program untouched.
addPrims :: Bool -> Endom N.Program
addPrims doAddPrims prg
  | doAddPrims = primsN <> prg
  | otherwise = prg

-- | Write a message to stderr and abort the process.
failIO :: String -> IO a
failIO s = do
  hPutStrLn stderr s
  exitFailure

-- | Run an 'Err' result in IO, aborting on failure.
runErr :: Err a -> IO a
runErr = \case
  Ok a -> return a
  Bad s -> failIO s
-- | Human-readable names of the enabled transformation passes, in
-- the order they are applied; used to label printed output.
transOpts :: Opts -> [String]
transOpts opts = execWriter $ do
  when (opts ^. doRefresh) $ tell ["Fresh"]
  when (opts ^. doSeq) $ tell ["Sequential"]
  when (opts ^. doFuse) $ tell ["Fused"]
  when (opts ^. doReduce) $ tell ["Reduced"]
  when (opts ^. doExpand) $ tell ["Expanded"]
-- | Type check the (possibly empty) primitive environment once to
-- build the initial checking environment, then hand off to 'transP''
-- with the user's checker options installed.
transP :: Opts -> Program -> IO ()
transP opts prg = do
  tcenv <- runErr . runTCOpts defaultTCOpts . errorScope "prims" . checkProgram $ addPrims (not (opts ^. noPrims)) ø
  transP' opts (tcenv & tcOpts .~ opts ^. checkOpts) prg
-- | Apply the selected passes to a parsed program and perform the
-- requested outputs: AST dump, type check, pretty-print, C output.
transP' :: Opts -> TCEnv -> Program -> IO ()
transP' opts tcenv prg = do
  when (tops /= [] && opts ^. noNorm) $
    usage "--no-norm cannot be combined with --fresh, --expand, --reduce, --seq, or --fuse"
  when (opts ^. showAST) $
    case tops of
      [] | opts ^. noNorm -> putStrLn $ "\n{- Abstract Syntax -}\n\n" ++ ppShow prg
         | otherwise -> putStrLn $ "\n{- Abstract Syntax of Normalized program -}\n\n" ++ ppShow nprg
      _ -> putStrLn $ "\n{- Abstract Syntax of " ++ unwords tops ++ " program -}\n\n" ++ ppShow eprg
  when (opts ^. check) $ do
    void . runErr . runTCEnv tcenv $ checkProgram nprg
    putStrLn "Checking successful!"
  when (opts ^. showPretty) $
    case tops of
      [] | opts ^. noNorm -> putStrLn $ "\n{- Pretty-printed program -}\n\n" ++ pretty prg
         | otherwise -> putStrLn $ "\n{- Normalized program -}\n\n" ++ pretty nprg
      _ -> putStrLn $ "\n{- " ++ unwords tops ++ " program -}\n\n" ++ pretty eprg
  when (opts ^. compile) $
    putStrLn $ "\n/* C program */\n\n" ++ C.printTree cprg
  unless (opts ^. showPretty || opts ^. showAST || opts ^. compile) $
    -- if we don't print any term we should at least force the result
    length (show eprg) `seq` return ()
  where
    pdefs = tcenv ^. edefs
    tops = transOpts opts
    -- Pipeline: norm -> refresh -> sequentialize -> fuse -> reduce
    -- -> expand; each stage is the identity unless its flag is set.
    nprg = norm prg
    rprg | opts ^. doRefresh = transProgramDecs (const hDec) nprg
         | otherwise = nprg
    sprg | opts ^. doSeq = Sequential.transProgram (opts ^. seqGas) pdefs rprg
         | otherwise = rprg
    fprg | opts ^. doFuse = fuseProgram pdefs sprg
         | otherwise = sprg
    wprg | opts ^. doReduce = transProgramTerms (\defs -> reduceL . Scoped (pdefs <> defs) ø) fprg
         | otherwise = fprg
    eprg | opts ^. doExpand = transProgramTerms (substDefs . (pdefs <>)) wprg
         | otherwise = wprg
    -- C translation of the final program (primitives optionally included).
    cprg = Compile.transProgram $ addPrims (opts ^. compilePrims) eprg
-- | All no-argument command-line flags: (name, (option update, help)).
-- The "seq-gas" transformer is 'error' on purpose: --seq-gas takes an
-- argument and is intercepted in 'mainArgs' before this entry could
-- ever run; it is listed here only so it appears in the usage text.
flagSpec :: [(String, (Endom Opts, String))]
flagSpec =
  (\(x,y,z) -> (x,(y,z))) <$>
  [ ("check"        , add check                  , "Type check the program (default on)")
  , ("pretty"       , add showPretty             , "Display the program (can be combined with transformations)")
  , ("compile"      , add compile                , "Display the compiled program (C language)")
  , ("expand"       , add doExpand               , "Rewrite the program with the definitions expanded")
  , ("reduce"       , add doReduce               , "Reduce the program (weak form)")
  , ("fuse"         , add doFuse                 , "Display the fused program")
  , ("seq"          , add doSeq                  , "Display the sequential program")
  , ("refresh"      , add doRefresh              , "Enable the internal renaming using fresh names")
  , ("show-ast"     , add showAST                , "Display the program as an Abstract Syntax Tree")
  , ("show-tokens"  , add showTokens             , "Display the program as a list of tokens from the lexer")
  , ("debug-check"  , add debugCheck             , "Display debugging information while checking")
  , ("compile-prims", add compilePrims           , "Also compile the primitive definitions")
  , ("strict-par"   , add (checkOpts . strictPar), "Make the checker stricter about pars (no mix rule)")
  , ("no-prims"     , add noPrims                , "Do not include the primitive definitions")
  , ("no-norm"      , add noNorm                 , "Disable the normalizer")
  , ("no-check"     , add noCheck                , "Disable type checking")
  , ("seq-gas"      , error "seq-gas"            , "Set the maximum number of steps for --seq")
  ] where add opt opts = opts & opt .~ True
-- | Print the given message followed by the generated usage text,
-- then exit with failure.
usage :: String -> IO a
usage msg = failIO $ unlines (msg : "" : "Usage: ling [option...] [file...]" : "" : "option ::=" : (fmtFlag <$> flagSpec))
  where
    fmtFlag (flag, (_, desc)) = " | --" ++ pad flag ++ " # " ++ desc
    -- Partial pattern binding: safe only because 'flagSpec' is
    -- non-empty, so 'maximumOf' returns a Just.  NOTE(review): would
    -- crash at runtime if flagSpec ever became empty.
    Just maxlen = maximumOf (each . _1 . to length) flagSpec
    -- Right-pad a flag name to the width of the longest flag.
    pad s = take maxlen (s ++ repeat ' ')
-- | Interpret the command line: flags update 'Opts' left to right;
-- the remaining arguments are files to process (stdin when none).
mainArgs :: Opts -> [String] -> IO ()
mainArgs opts = \case
  [] -> getContents >>= run opts pProgram >>= transP opts
  ("--help":_) -> usage ""
  -- --seq-gas takes a numeric argument, so it cannot go through
  -- 'flagSpec' and is handled here.
  ("--seq-gas":args) ->
    case args of
      [] -> usage "Missing argument for --seq-gas"
      s:args'
        | all isDigit s -> mainArgs (opts & seqGas .~ read s) args'
        | otherwise -> usage "Unexpected value for --seq-gas"
  ('-':'-':arg@(_:_)):args ->
    case lookup arg flagSpec of
      Just (opt, _) -> mainArgs (opt opts) args
      _ -> usage $ "Unexpected flag --" ++ arg
  [f] -> runProgram opts f
  -- Several files: print each file name before processing it.
  fs -> for_ fs $ \f -> putStrLn f >> runProgram opts f
-- | Entry point: interpret the command-line arguments starting from
-- the default options.
main :: IO ()
main = getArgs >>= mainArgs defaultOpts
| null | https://raw.githubusercontent.com/np/ling/ca942db83ac927420d1ae5e24b4da164394ddbbe/Ling.hs | haskell | # LANGUAGE LambdaCase #
# LANGUAGE QuasiQuotes #
math.h
complex.h
if we don't print any term we should at least force the result | # LANGUAGE TemplateHaskell #
module Ling where
import Control.Monad.Writer (tell, execWriter)
import Data.Char (isDigit)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import qualified MiniC.Print as C
import Ling.Abs
import Ling.Check.Base ( TCEnv, TCOpts, debugChecker, defaultTCOpts, runTCOpts
, runTCEnv, strictPar, edefs, errorScope, tcOpts)
import Ling.Check.Program (checkProgram)
import qualified Ling.Compile.C as Compile
import Ling.Defs (reduceL)
import Ling.ErrM
import Ling.Layout (resolveLayout)
import Ling.Lex (Token)
import qualified Ling.Norm as N
import Ling.Norm (transProgramDecs)
import Ling.Par
import Ling.Prelude
import Ling.Print
import Ling.Fuse (fuseProgram)
import Ling.Scoped (Scoped(Scoped))
import Ling.Subst (substDefs)
import Ling.SubTerms (transProgramTerms)
import Ling.Reify
import Ling.Rename (hDec)
import qualified Ling.Sequential as Sequential
type ParseFun a = [Token] -> Err a
data Opts =
Opts
{ _noCheck, _showExpand, _doExpand, _doReduce, _doSeq
, _noSequential, _showTokens, _showAST, _showPretty, _noNorm
, _doRefresh, _doFuse, _compile, _compilePrims, _noPrims :: Bool
, _checkOpts :: TCOpts
, _seqGas :: Int
}
$(makeLenses ''Opts)
check :: Lens' Opts Bool
check = noCheck . iso not not
defaultOpts :: Opts
defaultOpts = Opts False False False False False False False False False False
False False False False False defaultTCOpts maxBound
debugCheck :: Setter' Opts Bool
debugCheck = mergeSetters check (checkOpts.debugChecker)
layoutLexer :: String -> [Token]
layoutLexer = resolveLayout True . myLexer
prims :: String
prims = [q|
id : (A : Type)(x : A)-> A
= \(A : Type)(x : A)-> x
_:_ : (A : Type)(x : A)-> A
= \(A : Type)(x : A)-> x
data Empty =
data Unit = `unit
data Bool = `false | `true
not : (x : Bool)-> Bool = \(x : Bool)-> case x of { `false -> `true, `true -> `false }
_&&_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> `false, `true -> y }
_||_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> y, `true -> `true }
_==B_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> not y, `true -> y }
_/=B_ : (x y : Bool)-> Bool = \(x y : Bool)-> case x of { `false -> y, `true -> not y }
data LR = `left | `right
Int : Type
_+_ : (m : Int)(n : Int) -> Int
_-_ : (m : Int)(n : Int) -> Int
_*_ : (m : Int)(n : Int) -> Int
_/_ : (m : Int)(n : Int) -> Int
_%_ : (m : Int)(n : Int) -> Int
pow : (m : Int)(n : Int) -> Int
_==I_ : (m : Int)(n : Int) -> Bool
_<=I_ : (m : Int)(n : Int) -> Bool
_>=I_ : (m : Int)(n : Int) -> Bool
_<I_ : (m : Int)(n : Int) -> Bool
_>I_ : (m : Int)(n : Int) -> Bool
Vec : (A : Type)(n : Int) -> Type
take : (A : Type)(m : Int)(n : Int)(v : Vec A (m + n)) -> Vec A m
drop : (A : Type)(m : Int)(n : Int)(v : Vec A (m + n)) -> Vec A n
merge : (m : Int)(n : Int)(v0 : Vec Int m)(v1 : Vec Int n) -> Vec Int (m + n)
sort : (n : Int)(v : Vec Int n) -> Vec Int n
Session : Type
Log : (S : Session)-> Session
Seq : (S : Session)-> Session
Send : (S : Session)-> Session
Recv : (S : Session)-> Session
IO = \(I : Type)(O : (i : I) -> Type)-> ?(x : I). !O x
IO' = \(I : Type)(O : Type)-> ?I. !O
Par2 = \(S0 : Session)(S1 : Session)-> {S0, S1}
Ten2 = \(S0 : Session)(S1 : Session)-> [S0, S1]
Seq2 = \(S0 : Session)(S1 : Session)-> [:S0, S1:]
ParIO = \(I : Type)(O : Type)-> {?I, !O}
TenIO = \(I : Type)(O : Type)-> [?I, !O]
TenOI = \(O : Type)(I : Type)-> [!O, ?I]
SeqIO = \(I : Type)(O : Type)-> [: ?I, !O :]
SeqOI = \(O : Type)(I : Type)-> [: !O, ?I :]
EndoIO = \(T : Type)-> IO' T T
EndoLoli = \(S : Session)-> S -o S
EndoParIO = \(T : Type)-> ParIO T T
EndoTenIO = \(T : Type)-> TenIO T T
EndoTenOI = \(T : Type)-> TenOI T T
EndoSeqIO = \(T : Type)-> SeqIO T T
DotSort = \(A : Type)(n : Int)-> EndoIO (Vec A n)
ParSort = \(A : Type)(n : Int)-> EndoLoli (!Vec A n)
SeqSort = \(A : Type)(n : Int)-> [: ?Vec A n, !Vec A n :]
With = \(SL SR : Session)-> ?(b : LR). (case b of { `left -> SL, `right -> SR })
Oplus = \(SL SR : Session)-> !(b : LR). (case b of { `left -> SL, `right -> SR })
with_ =
\(SL SR : Session)
(pL : < SL >)(pR : < SR >)->
proc(c : With SL SR)
let x : LR <- c.
@(case x of { `left -> pL, `right -> pR })(c)
oplus =
\(SL SR : Session)
(b : LR)
(p : < case b of { `left -> SL, `right -> SR } >)->
proc(c)
c : Oplus SL SR <- b.
@p(c)
receiver =
\(A : Type)
(S : A -> Session)
(p : (x : A)-> < S x >)->
proc(c)
let x : A <- c.
@(p x)(c)
sender =
\(A : Type)
(S : A -> Session)
(t : A)
(p : < S t >)->
proc(c)
c : !(x : A). S x <- t.
@p(c)
Allocation : Type
auto : Allocation
fused : Allocation
fuse : (depth : Int)-> Allocation
alloc : Allocation = fuse 0
fuse1 : Allocation = fuse 1
fuse2 : Allocation = fuse 2
fuse3 : Allocation = fuse 3
Double : Type
_+D_ : (m : Double)(n : Double) -> Double
_-D_ : (m : Double)(n : Double) -> Double
_*D_ : (m : Double)(n : Double) -> Double
_/D_ : (m : Double)(n : Double) -> Double
powD : (m : Double)(n : Double) -> Double
_==D_ : (m : Double)(n : Double) -> Bool
_<=D_ : (m : Double)(n : Double) -> Bool
_>=D_ : (m : Double)(n : Double) -> Bool
_<D_ : (m : Double)(n : Double) -> Bool
_>D_ : (m : Double)(n : Double) -> Bool
Char : Type
_==C_ : (c0 c1 : Char)-> Bool
_>=C_ : (c0 c1 : Char)-> Bool
_<=C_ : (c0 c1 : Char)-> Bool
_>C_ : (c0 c1 : Char)-> Bool
_<C_ : (c0 c1 : Char)-> Bool
String : Type
_==S_ : (s0 s1 : String)-> Bool
showInt : (n : Int) -> String
showDouble : (n : Double) -> String
showChar : (c : Char) -> String
showString : (s : String) -> String
_++S_ : (s0 : String)(s1 : String) -> String
ccall : (A : Type)-> String -> A
Int2Double = ccall (Int -> Double) "(double)"
Int2Char = ccall (Int -> Char) "(char)"
sqrtD = ccall (Double -> Double) "sqrt"
ctype : String -> Type
cconst : (A : Type)-> String -> A
PI = cconst Double "M_PI"
ComplexDouble : Type = ctype "complex double"
ComplexI = cconst ComplexDouble "_Complex_I"
Int2ComplexDouble = ccall (Int -> ComplexDouble) "(double complex)"
Double2Complex = ccall (Double -> ComplexDouble) "(double complex)"
_+CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_-CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_*CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_/CD_ : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
powCD : (m : ComplexDouble)(n : ComplexDouble) -> ComplexDouble
_==CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_<=CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_>=CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_<CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
_>CD_ : (m : ComplexDouble)(n : ComplexDouble) -> Bool
cabs = ccall (ComplexDouble -> Double) "cabs"
cacos = ccall (ComplexDouble -> ComplexDouble) "cacos"
cacosh = ccall (ComplexDouble -> ComplexDouble) "cacosh"
carg = ccall (ComplexDouble -> ComplexDouble) "carg"
casin = ccall (ComplexDouble -> ComplexDouble) "casin"
casinh = ccall (ComplexDouble -> ComplexDouble) "casinh"
catan = ccall (ComplexDouble -> ComplexDouble) "catan"
catanh = ccall (ComplexDouble -> ComplexDouble) "catanh"
ccos = ccall (ComplexDouble -> ComplexDouble) "ccos"
ccosh = ccall (ComplexDouble -> ComplexDouble) "ccosh"
cexp = ccall (ComplexDouble -> ComplexDouble) "cexp"
cimag = ccall (ComplexDouble -> Double) "cimag"
clog = ccall (ComplexDouble -> ComplexDouble) "clog"
conj = ccall (ComplexDouble -> ComplexDouble) "conj"
cpow = ccall (ComplexDouble -> ComplexDouble) "cpow"
cproj = ccall (ComplexDouble -> ComplexDouble) "cproj"
creal = ccall (ComplexDouble -> Double) "creal"
csin = ccall (ComplexDouble -> ComplexDouble) "csin"
csinh = ccall (ComplexDouble -> ComplexDouble) "csinh"
csqrt = ccall (ComplexDouble -> ComplexDouble) "csqrt"
ctan = ccall (ComplexDouble -> ComplexDouble) "ctan"
ctanh = ccall (ComplexDouble -> ComplexDouble) "ctanh"
|]
primsN :: N.Program
primsN =
case pProgram (layoutLexer prims) of
Bad e -> error $ "Bad prims\n" ++ e
Ok p -> norm p
runFile :: (Print a, Show a) => Opts -> ParseFun a -> FilePath -> IO a
runFile v p f = readFile f >>= run v p
runProgram :: Opts -> FilePath -> IO ()
runProgram opts f = runFile opts pProgram f >>= transP opts
run :: (Print a, Show a) => Opts -> ParseFun a -> String -> IO a
run opts p s = do
when (opts ^. showTokens) $ do
putStrLn "Tokens:"
for_ ts $ putStrLn . ppShow
case p ts of
Bad e -> failIO $ "Parse Failed: " ++ e
Ok tree -> return tree
where
ts = layoutLexer s
addPrims :: Bool -> Endom N.Program
addPrims doAddPrims
| doAddPrims = (primsN <>)
| otherwise = id
failIO :: String -> IO a
failIO s = hPutStrLn stderr s >> exitFailure
runErr :: Err a -> IO a
runErr (Ok a) = return a
runErr (Bad s) = failIO s
transOpts :: Opts -> [String]
transOpts opts = execWriter $ do
when (opts ^. doRefresh) $ tell ["Fresh"]
when (opts ^. doSeq) $ tell ["Sequential"]
when (opts ^. doFuse) $ tell ["Fused"]
when (opts ^. doReduce) $ tell ["Reduced"]
when (opts ^. doExpand) $ tell ["Expanded"]
transP :: Opts -> Program -> IO ()
transP opts prg = do
tcenv <- runErr . runTCOpts defaultTCOpts . errorScope "prims" . checkProgram $ addPrims (not (opts ^. noPrims)) ø
transP' opts (tcenv & tcOpts .~ opts ^. checkOpts) prg
-- | Run the requested transformation pipeline on a program and display,
-- check, and/or compile the result according to the options. The actual
-- pipeline lives in the where clause: each stage applies only when its
-- flag is set, otherwise it passes the previous stage through.
transP' :: Opts -> TCEnv -> Program -> IO ()
transP' opts tcenv prg = do
  when (tops /= [] && opts ^. noNorm) $
    usage "--no-norm cannot be combined with --fresh, --expand, --reduce, --seq, or --fuse"
  when (opts ^. showAST) $
    case tops of
      [] | opts ^. noNorm -> putStrLn $ "\n{- Abstract Syntax -}\n\n" ++ ppShow prg
         | otherwise -> putStrLn $ "\n{- Abstract Syntax of Normalized program -}\n\n" ++ ppShow nprg
      _ -> putStrLn $ "\n{- Abstract Syntax of " ++ unwords tops ++ " program -}\n\n" ++ ppShow eprg
  when (opts ^. check) $ do
    void . runErr . runTCEnv tcenv $ checkProgram nprg
    putStrLn "Checking successful!"
  when (opts ^. showPretty) $
    case tops of
      [] | opts ^. noNorm -> putStrLn $ "\n{- Pretty-printed program -}\n\n" ++ pretty prg
         | otherwise -> putStrLn $ "\n{- Normalized program -}\n\n" ++ pretty nprg
      _ -> putStrLn $ "\n{- " ++ unwords tops ++ " program -}\n\n" ++ pretty eprg
  when (opts ^. compile) $
    putStrLn $ "\n/* C program */\n\n" ++ C.printTree cprg
  -- Force the transformed program even when nothing is displayed, so that
  -- transformation errors are not silently skipped by laziness.
  unless (opts ^. showPretty || opts ^. showAST || opts ^. compile) $
    length (show eprg) `seq` return ()
  where
    pdefs = tcenv ^. edefs
    tops = transOpts opts
    nprg = norm prg                                            -- normalized
    rprg | opts ^. doRefresh = transProgramDecs (const hDec) nprg
         | otherwise = nprg
    sprg | opts ^. doSeq = Sequential.transProgram (opts ^. seqGas) pdefs rprg
         | otherwise = rprg
    fprg | opts ^. doFuse = fuseProgram pdefs sprg
         | otherwise = sprg
    wprg | opts ^. doReduce = transProgramTerms (\defs -> reduceL . Scoped (pdefs <> defs) ø) fprg
         | otherwise = fprg
    eprg | opts ^. doExpand = transProgramTerms (substDefs . (pdefs <>)) wprg
         | otherwise = wprg
    cprg = Compile.transProgram $ addPrims (opts ^. compilePrims) eprg
-- | All boolean command line flags: (name, (option transformer, help text)).
-- NOTE: the "seq-gas" entry takes a value and is handled specially in
-- 'mainArgs'; its transformer is an 'error' thunk that must never be forced.
flagSpec :: [(String, (Endom Opts, String))]
flagSpec =
  (\(x,y,z) -> (x,(y,z))) <$>
  [ ("check" , add check , "Type check the program (default on)")
  , ("pretty" , add showPretty , "Display the program (can be combined with transformations)")
  , ("compile" , add compile , "Display the compiled program (C language)")
  , ("expand" , add doExpand , "Rewrite the program with the definitions expanded")
  , ("reduce" , add doReduce , "Reduce the program (weak form)")
  , ("fuse" , add doFuse , "Display the fused program")
  , ("seq" , add doSeq , "Display the sequential program")
  , ("refresh" , add doRefresh , "Enable the internal renaming using fresh names")
  , ("show-ast" , add showAST , "Display the program as an Abstract Syntax Tree")
  , ("show-tokens" , add showTokens , "Display the program as a list of tokens from the lexer")
  , ("debug-check" , add debugCheck , "Display debugging information while checking")
  , ("compile-prims", add compilePrims , "Also compile the primitive definitions")
  , ("strict-par" , add (checkOpts . strictPar) , "Make the checker stricter about pars (no mix rule)")
  , ("no-prims" , add noPrims , "Do not include the primitive definitions")
  , ("no-norm" , add noNorm , "Disable the normalizer")
  , ("no-check" , add noCheck , "Disable type checking")
  , ("seq-gas" , error "seq-gas" , "Set the maximum number of steps for --seq")
  ] where add opt opts = opts & opt .~ True
-- | Abort with an error message followed by the usage text generated from
-- 'flagSpec'.
usage :: String -> IO a
usage msg = failIO $ unlines (msg : "" : "Usage: ling [option...] [file...]" : "" : "option ::=" : (fmtFlag <$> flagSpec))
  where
    fmtFlag (flag, (_, desc)) = " | --" ++ pad flag ++ " # " ++ desc
    -- Total replacement for the previous partial irrefutable pattern
    -- (Just maxlen = maximumOf ...): fold to the longest flag name,
    -- defaulting to 0 if flagSpec were ever empty.
    maxlen = foldr (max . length . fst) 0 flagSpec
    -- Right-pad a flag name with spaces to align the help column.
    pad s = take maxlen (s ++ repeat ' ')
-- | Interpret the command line: flags update the options left-to-right,
-- remaining file arguments are processed with the final options; no
-- arguments at all means: read a program from stdin.
mainArgs :: Opts -> [String] -> IO ()
mainArgs opts = \case
  [] -> getContents >>= run opts pProgram >>= transP opts
  ("--help":_) -> usage ""
  -- --seq-gas is the only flag taking a value, so it is special-cased here.
  ("--seq-gas":args) ->
    case args of
      [] -> usage "Missing argument for --seq-gas"
      s:args'
        -- 'read' is safe here: the digit check guarantees it parses.
        | all isDigit s -> mainArgs (opts & seqGas .~ read s) args'
        | otherwise -> usage "Unexpected value for --seq-gas"
  ('-':'-':arg@(_:_)):args ->
    case lookup arg flagSpec of
      Just (opt, _) -> mainArgs (opt opts) args
      _ -> usage $ "Unexpected flag --" ++ arg
  [f] -> runProgram opts f
  -- Several files: print each file name before processing it.
  fs -> for_ fs $ \f -> putStrLn f >> runProgram opts f
-- | Entry point: parse the command line starting from the default options.
main :: IO ()
main = getArgs >>= mainArgs defaultOpts
|
1570c7a38e3a713918a7141a3894ad099c7deb3eb8599946aa6e900b8fece574 | roelvandijk/numerals | TestData.hs | |
[ @ISO639 - 1@ ] ln
[ @ISO639 - 2@ ]
[ @ISO639 - 3@ ]
[ @Native name@ ]
[ @English name@ ]
[@ISO639-1@] ln
[@ISO639-2@] lin
[@ISO639-3@] lin
[@Native name@] Ngala
[@English name@] Lingala
-}
module Text.Numeral.Language.LIN.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
-to-count-in-lingala/en/lin/
-}
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (0, "libungutulu")
, (1, "mókó")
, (2, "míbalé")
, (3, "mísáto")
, (4, "mínei")
, (5, "mítáno")
, (6, "motóba")
, (7, "sámbó")
, (8, "mwámbe")
, (9, "libwá")
, (10, "zómi")
, (11, "zómi na mókó")
, (12, "zómi na míbalé")
, (13, "zómi na mísáto")
, (14, "zómi na mínei")
, (15, "zómi na mítáno")
, (16, "zómi na motóba")
, (17, "zómi na sámbó")
, (18, "zómi na mwámbe")
, (19, "zómi na libwá")
, (20, "ntúkú míbalé")
, (21, "ntúkú míbalé na mókó")
, (22, "ntúkú míbalé na míbalé")
, (23, "ntúkú míbalé na mísáto")
, (24, "ntúkú míbalé na mínei")
, (25, "ntúkú míbalé na mítáno")
, (26, "ntúkú míbalé na motóba")
, (27, "ntúkú míbalé na sámbó")
, (28, "ntúkú míbalé na mwámbe")
, (29, "ntúkú míbalé na libwá")
, (30, "ntúkú mísáto")
, (31, "ntúkú mísáto na mókó")
, (32, "ntúkú mísáto na míbalé")
, (33, "ntúkú mísáto na mísáto")
, (34, "ntúkú mísáto na mínei")
, (35, "ntúkú mísáto na mítáno")
, (36, "ntúkú mísáto na motóba")
, (37, "ntúkú mísáto na sámbó")
, (38, "ntúkú mísáto na mwámbe")
, (39, "ntúkú mísáto na libwá")
, (40, "ntúkú mínei")
, (41, "ntúkú mínei na mókó")
, (42, "ntúkú mínei na míbalé")
, (43, "ntúkú mínei na mísáto")
, (44, "ntúkú mínei na mínei")
, (45, "ntúkú mínei na mítáno")
, (46, "ntúkú mínei na motóba")
, (47, "ntúkú mínei na sámbó")
, (48, "ntúkú mínei na mwámbe")
, (49, "ntúkú mínei na libwá")
, (50, "ntúkú mítáno")
, (51, "ntúkú mítáno na mókó")
, (52, "ntúkú mítáno na míbalé")
, (53, "ntúkú mítáno na mísáto")
, (54, "ntúkú mítáno na mínei")
, (55, "ntúkú mítáno na mítáno")
, (56, "ntúkú mítáno na motóba")
, (57, "ntúkú mítáno na sámbó")
, (58, "ntúkú mítáno na mwámbe")
, (59, "ntúkú mítáno na libwá")
, (60, "ntúkú motóba")
, (61, "ntúkú motóba na mókó")
, (62, "ntúkú motóba na míbalé")
, (63, "ntúkú motóba na mísáto")
, (64, "ntúkú motóba na mínei")
, (65, "ntúkú motóba na mítáno")
, (66, "ntúkú motóba na motóba")
, (67, "ntúkú motóba na sámbó")
, (68, "ntúkú motóba na mwámbe")
, (69, "ntúkú motóba na libwá")
, (70, "ntúkú sámbó")
, (71, "ntúkú sámbó na mókó")
, (72, "ntúkú sámbó na míbalé")
, (73, "ntúkú sámbó na mísáto")
, (74, "ntúkú sámbó na mínei")
, (75, "ntúkú sámbó na mítáno")
, (76, "ntúkú sámbó na motóba")
, (77, "ntúkú sámbó na sámbó")
, (78, "ntúkú sámbó na mwámbe")
, (79, "ntúkú sámbó na libwá")
, (80, "ntúkú mwámbe")
, (81, "ntúkú mwámbe na mókó")
, (82, "ntúkú mwámbe na míbalé")
, (83, "ntúkú mwámbe na mísáto")
, (84, "ntúkú mwámbe na mínei")
, (85, "ntúkú mwámbe na mítáno")
, (86, "ntúkú mwámbe na motóba")
, (87, "ntúkú mwámbe na sámbó")
, (88, "ntúkú mwámbe na mwámbe")
, (89, "ntúkú mwámbe na libwá")
, (90, "ntúkú libwá")
, (91, "ntúkú libwá na mókó")
, (92, "ntúkú libwá na míbalé")
, (93, "ntúkú libwá na mísáto")
, (94, "ntúkú libwá na mínei")
, (95, "ntúkú libwá na mítáno")
, (96, "ntúkú libwá na motóba")
, (97, "ntúkú libwá na sámbó")
, (98, "ntúkú libwá na mwámbe")
, (99, "ntúkú libwá na libwá")
, (100, "nkámá")
, (101, "nkámá mókó na mókó")
, (102, "nkámá mókó na míbalé")
, (103, "nkámá mókó na mísáto")
, (104, "nkámá mókó na mínei")
, (105, "nkámá mókó na mítáno")
, (106, "nkámá mókó na motóba")
, (107, "nkámá mókó na sámbó")
, (108, "nkámá mókó na mwámbe")
, (109, "nkámá mókó na libwá")
, (110, "nkámá mókó na zómi")
, (123, "nkámá mókó na ntúkú míbalé na mísáto")
, (200, "nkámá míbalé")
, (300, "nkámá mísáto")
, (321, "nkámá mísáto na ntúkú míbalé na mókó")
, (400, "nkámá mínei")
, (500, "nkámá mítáno")
, (600, "nkámá motóba")
, (700, "nkámá sámbó")
, (800, "nkámá mwámbe")
, (900, "nkámá libwá")
, (909, "nkámá libwá na libwá")
, (990, "nkámá libwá na ntúkú libwá")
, (999, "nkámá libwá na ntúkú libwá na libwá")
, (1000, "nkóto")
, (1001, "nkóto mókó na mókó")
, (1008, "nkóto mókó na mwámbe")
, (1234, "nkóto mókó na nkámá míbalé na ntúkú mísáto na mínei")
, (2000, "nkóto míbalé")
, (3000, "nkóto mísáto")
, (4000, "nkóto mínei")
, (4321, "nkóto mínei na nkámá mísáto na ntúkú míbalé na mókó")
, (5000, "nkóto mítáno")
, (6000, "nkóto motóba")
, (7000, "nkóto sámbó")
, (8000, "nkóto mwámbe")
, (9000, "nkóto libwá")
]
)
]
| null | https://raw.githubusercontent.com/roelvandijk/numerals/b1e4121e0824ac0646a3230bd311818e159ec127/src-test/Text/Numeral/Language/LIN/TestData.hs | haskell | ------------------------------------------------------------------------------
Imports
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Test data
------------------------------------------------------------------------------
Sources:
-to-count-in-lingala/en/lin/
| |
[ @ISO639 - 1@ ] ln
[ @ISO639 - 2@ ]
[ @ISO639 - 3@ ]
[ @Native name@ ]
[ @English name@ ]
[@ISO639-1@] ln
[@ISO639-2@] lin
[@ISO639-3@] lin
[@Native name@] Ngala
[@English name@] Lingala
-}
module Text.Numeral.Language.LIN.TestData (cardinals) where
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (0, "libungutulu")
, (1, "mókó")
, (2, "míbalé")
, (3, "mísáto")
, (4, "mínei")
, (5, "mítáno")
, (6, "motóba")
, (7, "sámbó")
, (8, "mwámbe")
, (9, "libwá")
, (10, "zómi")
, (11, "zómi na mókó")
, (12, "zómi na míbalé")
, (13, "zómi na mísáto")
, (14, "zómi na mínei")
, (15, "zómi na mítáno")
, (16, "zómi na motóba")
, (17, "zómi na sámbó")
, (18, "zómi na mwámbe")
, (19, "zómi na libwá")
, (20, "ntúkú míbalé")
, (21, "ntúkú míbalé na mókó")
, (22, "ntúkú míbalé na míbalé")
, (23, "ntúkú míbalé na mísáto")
, (24, "ntúkú míbalé na mínei")
, (25, "ntúkú míbalé na mítáno")
, (26, "ntúkú míbalé na motóba")
, (27, "ntúkú míbalé na sámbó")
, (28, "ntúkú míbalé na mwámbe")
, (29, "ntúkú míbalé na libwá")
, (30, "ntúkú mísáto")
, (31, "ntúkú mísáto na mókó")
, (32, "ntúkú mísáto na míbalé")
, (33, "ntúkú mísáto na mísáto")
, (34, "ntúkú mísáto na mínei")
, (35, "ntúkú mísáto na mítáno")
, (36, "ntúkú mísáto na motóba")
, (37, "ntúkú mísáto na sámbó")
, (38, "ntúkú mísáto na mwámbe")
, (39, "ntúkú mísáto na libwá")
, (40, "ntúkú mínei")
, (41, "ntúkú mínei na mókó")
, (42, "ntúkú mínei na míbalé")
, (43, "ntúkú mínei na mísáto")
, (44, "ntúkú mínei na mínei")
, (45, "ntúkú mínei na mítáno")
, (46, "ntúkú mínei na motóba")
, (47, "ntúkú mínei na sámbó")
, (48, "ntúkú mínei na mwámbe")
, (49, "ntúkú mínei na libwá")
, (50, "ntúkú mítáno")
, (51, "ntúkú mítáno na mókó")
, (52, "ntúkú mítáno na míbalé")
, (53, "ntúkú mítáno na mísáto")
, (54, "ntúkú mítáno na mínei")
, (55, "ntúkú mítáno na mítáno")
, (56, "ntúkú mítáno na motóba")
, (57, "ntúkú mítáno na sámbó")
, (58, "ntúkú mítáno na mwámbe")
, (59, "ntúkú mítáno na libwá")
, (60, "ntúkú motóba")
, (61, "ntúkú motóba na mókó")
, (62, "ntúkú motóba na míbalé")
, (63, "ntúkú motóba na mísáto")
, (64, "ntúkú motóba na mínei")
, (65, "ntúkú motóba na mítáno")
, (66, "ntúkú motóba na motóba")
, (67, "ntúkú motóba na sámbó")
, (68, "ntúkú motóba na mwámbe")
, (69, "ntúkú motóba na libwá")
, (70, "ntúkú sámbó")
, (71, "ntúkú sámbó na mókó")
, (72, "ntúkú sámbó na míbalé")
, (73, "ntúkú sámbó na mísáto")
, (74, "ntúkú sámbó na mínei")
, (75, "ntúkú sámbó na mítáno")
, (76, "ntúkú sámbó na motóba")
, (77, "ntúkú sámbó na sámbó")
, (78, "ntúkú sámbó na mwámbe")
, (79, "ntúkú sámbó na libwá")
, (80, "ntúkú mwámbe")
, (81, "ntúkú mwámbe na mókó")
, (82, "ntúkú mwámbe na míbalé")
, (83, "ntúkú mwámbe na mísáto")
, (84, "ntúkú mwámbe na mínei")
, (85, "ntúkú mwámbe na mítáno")
, (86, "ntúkú mwámbe na motóba")
, (87, "ntúkú mwámbe na sámbó")
, (88, "ntúkú mwámbe na mwámbe")
, (89, "ntúkú mwámbe na libwá")
, (90, "ntúkú libwá")
, (91, "ntúkú libwá na mókó")
, (92, "ntúkú libwá na míbalé")
, (93, "ntúkú libwá na mísáto")
, (94, "ntúkú libwá na mínei")
, (95, "ntúkú libwá na mítáno")
, (96, "ntúkú libwá na motóba")
, (97, "ntúkú libwá na sámbó")
, (98, "ntúkú libwá na mwámbe")
, (99, "ntúkú libwá na libwá")
, (100, "nkámá")
, (101, "nkámá mókó na mókó")
, (102, "nkámá mókó na míbalé")
, (103, "nkámá mókó na mísáto")
, (104, "nkámá mókó na mínei")
, (105, "nkámá mókó na mítáno")
, (106, "nkámá mókó na motóba")
, (107, "nkámá mókó na sámbó")
, (108, "nkámá mókó na mwámbe")
, (109, "nkámá mókó na libwá")
, (110, "nkámá mókó na zómi")
, (123, "nkámá mókó na ntúkú míbalé na mísáto")
, (200, "nkámá míbalé")
, (300, "nkámá mísáto")
, (321, "nkámá mísáto na ntúkú míbalé na mókó")
, (400, "nkámá mínei")
, (500, "nkámá mítáno")
, (600, "nkámá motóba")
, (700, "nkámá sámbó")
, (800, "nkámá mwámbe")
, (900, "nkámá libwá")
, (909, "nkámá libwá na libwá")
, (990, "nkámá libwá na ntúkú libwá")
, (999, "nkámá libwá na ntúkú libwá na libwá")
, (1000, "nkóto")
, (1001, "nkóto mókó na mókó")
, (1008, "nkóto mókó na mwámbe")
, (1234, "nkóto mókó na nkámá míbalé na ntúkú mísáto na mínei")
, (2000, "nkóto míbalé")
, (3000, "nkóto mísáto")
, (4000, "nkóto mínei")
, (4321, "nkóto mínei na nkámá mísáto na ntúkú míbalé na mókó")
, (5000, "nkóto mítáno")
, (6000, "nkóto motóba")
, (7000, "nkóto sámbó")
, (8000, "nkóto mwámbe")
, (9000, "nkóto libwá")
]
)
]
|
0d9b0da23cc2262c25c603073816ea719d873647fadf30814a02b9d3dc91656d | hbr/albatross | argument_parser.mli | open Module_types
module type S =
  sig
    type a (* The result of the argument parser *)
    type key = string (* option key, must start with '-' *)
    type doc = string (* option description *)
    type spec (* specification of an option *)
      =
      | Unit of (a -> a) (* option with no argument *)
      | String of (string -> a -> a) (* option with string argument *)
      | Int of (int -> a -> a) (* option with integer argument *)
    type anon = string -> a -> a (* function taking an anonymous argument into
                                    the result *)
    type error =
      | Unknown_option of string
      | Missing_argument of key*spec*doc
      | Invalid_argument of key*spec*doc*string
    (* [parse args init specs anon] folds options and anonymous arguments of
       [args] into [init], returning [Error _] on the first bad argument. *)
    val parse: string array ->
               a ->
               (key*spec*doc) list ->
               anon ->
               (a,error) result
    (* Human readable rendering of a parse error. *)
    val string_of_error: error -> string
    (* Name of the argument a spec expects (for usage messages). *)
    val argument_type: spec -> string
  end
(* Functor building an argument parser whose result type is [A.t]. *)
module Make (A:ANY): S with type a = A.t
| null | https://raw.githubusercontent.com/hbr/albatross/8f28ef97951f92f30dc69cf94c0bbe20d64fba21/ocaml/fmlib/basic/argument_parser.mli | ocaml | The result of the argument parser
option key, must start with '-'
option description
specification of an option
option with no argument
option with string argument
option with integer argument
function taking an anonymus argument into
the result | open Module_types
module type S =
sig
=
type error =
| Unknown_option of string
| Missing_argument of key*spec*doc
| Invalid_argument of key*spec*doc*string
val parse: string array ->
a ->
(key*spec*doc) list ->
anon ->
(a,error) result
val string_of_error: error -> string
val argument_type: spec -> string
end
module Make (A:ANY): S with type a = A.t
|
ece17dd87d6a79d702b4496c1de782d01d6febf361a749f20d5853cb4b5d8a90 | aeternity/aeternity | aefa_stores.erl | %%%-------------------------------------------------------------------
( C ) 2019 , Aeternity Anstalt
%%% @doc
ADT for contract stores in FATE .
%%%
%%% The code assumes that if a get/put is issued, the contract is
%%% already in the store. Care must be taken to explicitly check if
%%% a contract is present, and put it (using put_contract/2) if it is not.
%%%
%%% Entries are cached in a read/write manner to avoid multiple
%%% reading/converting and to avoid pushing terms that have not been
%%% altered. Both things are expensive as it involves going out to the
%%% underlying merkle trees.
%%%
%%% Use finalize/3 to push the stores back to the chain when the
%%% fate execution is done.
%%%
%%% @end
%%% -------------------------------------------------------------------
-module(aefa_stores).
-include_lib("aebytecode/include/aeb_fate_data.hrl").
-export([ finalize/3
, find_value/3
, has_contract/2
, initial_contract_store/0
, new/0
, put_contract_store/3
, put_value/4
, terms_to_finalize/1
%% Map functions
, cache_map_metadata/2
, store_map_lookup/4
, store_map_member/4
, store_map_to_list/3
, store_map_size/3
]).
-record(store, { cache = #{} :: contract_cache()
}).
-record(cache_entry, { store :: aect_contracts_store:store()
, dirty :: boolean()
, terms :: fate_terms()
}).
-type fate_val() :: aeb_fate_data:fate_type().
-type pubkey() :: <<_:256>>.
-type dirty() :: boolean().
-type fate_terms() :: #{ integer() => {fate_val(), dirty()} }.
-type contract_cache() :: #{pubkey() => #cache_entry{}}.
-type fate_map() :: aeb_fate_data:fate_map() | aeb_fate_data:fate_store_map().
-opaque store() :: #store{}.
-export_type([ store/0
]).
-define(MAX_STORE_POS, 16#ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff).
-define(META_STORE_POS, 0).
-define(VALID_STORE_POS(Pos), Pos > ?META_STORE_POS andalso Pos < ?MAX_STORE_POS).
-define(STORE_KEY_PREFIX, 0).
-define(STORE_MAP_PREFIX, 1).
-ifdef(TEST).
-define(ASSERT(Check, Err),
case Check of
true -> ok;
false -> error({assertion_failed, ?FILE, ?LINE, Err})
end).
-else.
-define(ASSERT(Check, Err), ok).
-endif.
-ifdef(DEBUG).
-define(DEBUG_STORE(S), debug_stores(S)).
-define(DEBUG_PRINT(Fmt, Args), io:format(Fmt, Args)).
-else.
-define(DEBUG_STORE(S), ok).
-define(DEBUG_PRINT(Fmt, Args), ok).
-endif.
%%%===================================================================
%%% API
%%%===================================================================
-spec new() -> store().
%% @doc An empty store with no cached contracts.
new() ->
    #store{cache = maps:new()}.
-spec initial_contract_store() -> aect_contracts_store:store().
%% @doc A fresh contract store containing only the (empty) map metadata
%% serialized into the dedicated metadata register.
initial_contract_store() ->
    Key = store_meta_key(),
    Val = aeb_fate_encoding:serialize(empty_store_meta_data()),
    aect_contracts_store:put(Key, Val, aect_contracts_store:new()).
-spec put_contract_store(pubkey(), aect_contracts_store:store(), store()) -> store().
%% @doc Register (or replace) the store of a contract in the cache.
put_contract_store(Pubkey, Store, #store{cache = Cache} = S) ->
    Entry = new_contract_cache_entry(Store),
    S#store{cache = maps:put(Pubkey, Entry, Cache)}.
-spec has_contract(pubkey(), store()) -> boolean().
%% @doc Is the contract's store already cached?
has_contract(Pubkey, #store{cache = Cache}) ->
    case maps:find(Pubkey, Cache) of
        {ok, _} -> true;
        error   -> false
    end.
-spec find_value(pubkey(), non_neg_integer(), store()) ->
                    {'ok', fate_val(), store()}
                  | {'ok', fate_val()}
                  | 'error'.
%% @doc Look up a store register; only positions strictly between the
%% metadata register and the maximum position are accepted.
find_value(Pubkey, StorePos, S) when StorePos > ?META_STORE_POS,
                                     StorePos < ?MAX_STORE_POS ->
    find_value_(Pubkey, StorePos, S).
-spec put_value(pubkey(), non_neg_integer(), fate_val(), store()) -> store().
%% @doc Write a term to a store register in the cache, marking both the
%% term and the contract entry as dirty.
put_value(Pubkey, StorePos, FateVal, #store{cache = Cache} = S) ->
    Entry0 = maps:get(Pubkey, Cache),
    NewTerms = (Entry0#cache_entry.terms)#{StorePos => {FateVal, true}},
    Entry = Entry0#cache_entry{terms = NewTerms, dirty = true},
    S#store{cache = maps:put(Pubkey, Entry, Cache)}.
%% -- Local functions --------------------------------------------------------
%% Replace the underlying contract store of a cached entry (used to keep
%% the store's internal read cache warm after lookups).
update_ct_store(Pubkey, NewStore, #store{cache = Cache} = S) ->
    Entry0 = maps:get(Pubkey, Cache),
    Entry = Entry0#cache_entry{store = NewStore},
    S#store{cache = maps:put(Pubkey, Entry, Cache)}.
%% Internal lookup without position validation. A three-tuple result means
%% the cache entry changed (the loaded term was memoized) and the updated
%% store is returned as well.
find_value_(Pubkey, StorePos, #store{cache = Cache} = S) ->
    Entry = maps:get(Pubkey, Cache),
    case find_term(StorePos, Entry) of
        {ok, Term} ->
            {ok, Term};
        {ok, Term, Entry1} ->
            {ok, Term, S#store{cache = maps:put(Pubkey, Entry1, Cache)}};
        error ->
            error
    end.
%%%===================================================================
%%% Store maps
%%%
%%% Maps are saved in the store as follows
%%%
%%%   /store_meta_key() (store register 0)   Map meta data : #{ MapId => ?METADATA(RawId, RefCount, Size) }
%%% /?STORE_MAP_PREFIX/RawId:32 <<0>> - subtree node
%%%   /?STORE_MAP_PREFIX/RawId:32/Key        Value for Key in map RawId
%%%
%%% Storing the metadata in register 0 means we get caching for it, so each
%%% call will only have to deserialize the metadata at most once.
%%%
%%% Distinguishing the MapId from the RawId allows maps to be updated inplace,
%%% when the old copy of the map is not saved.
-define(METADATA(RawId, RefCount, Size), ?FATE_TUPLE({RawId, RefCount, Size})).
-define(RAWID_BITS, 32).
-type map_id() :: non_neg_integer().
-type raw_id() :: non_neg_integer().
-type ref_count() :: non_neg_integer().
-type map_meta() :: ?METADATA(raw_id(), ref_count(), non_neg_integer()).
-type store_meta() :: #{ map_id() => map_meta() }.
%% -- Store map API ----------------------------------------------------------
-spec cache_map_metadata(pubkey(), store()) -> store().
%% @doc Warm the cache with the contract's map metadata; a missing
%% metadata register leaves the store untouched.
cache_map_metadata(Pubkey, S) ->
    case find_meta_data(Pubkey, S) of
        {ok, _Meta, S1} -> S1;
        error           -> S
    end.
-spec store_map_lookup(pubkey(), non_neg_integer(), fate_val(), store()) -> {{ok, fate_val()} | error, store()}.
%% @doc Look up a key in a store map, translating the map id to its raw id
%% via the metadata. The (possibly warmed) store is returned alongside.
store_map_lookup(Pubkey, MapId, Key, #store{cache = Cache} = S) ->
    #cache_entry{store = CtStore} = maps:get(Pubkey, Cache),
    {ok, Meta, S1} = find_meta_data(Pubkey, S),
    ?METADATA(RawId, _RefCount, _Size) = get_map_meta(MapId, Meta),
    Result = find_in_store(map_data_key(RawId, Key), CtStore),
    case Result of
        {ok, Val, CtStore1} ->
            {{ok, Val}, update_ct_store(Pubkey, CtStore1, S1)};
        error ->
            {error, S1}
    end.
-spec store_map_member(pubkey(), non_neg_integer(), fate_val(), store()) -> {boolean(), store()}.
%% @doc Membership test for a key in a store map. An empty binary value
%% means the key is absent (the store has no tombstones at this level).
store_map_member(Pubkey, MapId, Key, #store{cache = Cache} = S) ->
    #cache_entry{store = CtStore} = maps:get(Pubkey, Cache),
    {ok, Meta, S1} = find_meta_data(Pubkey, S),
    ?METADATA(RawId, _RefCount, _Size) = get_map_meta(MapId, Meta),
    DataKey = map_data_key(RawId, Key),
    case aect_contracts_store:get_w_cache(DataKey, CtStore) of
        {<<>>, _Ignored} -> {false, S1};
        {_Val, CtStore1} -> {true, update_ct_store(Pubkey, CtStore1, S1)}
    end.
-spec store_map_to_list(pubkey(), non_neg_integer(), store()) -> {[{fate_val(), fate_val()}], store()}.
%% @doc All key/value pairs of a store map, deserialized and sorted by the
%% serialized key representation.
store_map_to_list(Pubkey, MapId, #store{cache = Cache} = S) ->
    #cache_entry{store = CtStore} = maps:get(Pubkey, Cache),
    {ok, Meta, S1} = find_meta_data(Pubkey, S),
    ?METADATA(RawId, _, _) = get_map_meta(MapId, Meta),
    {Subtree, CtStore1} = aect_contracts_store:subtree_w_cache(map_data_key(RawId), CtStore),
    Decode = fun({K, V}) ->
                     {aeb_fate_encoding:deserialize(K), aeb_fate_encoding:deserialize(V)}
             end,
    Sorted = lists:keysort(1, maps:to_list(Subtree)),
    {lists:map(Decode, Sorted), update_ct_store(Pubkey, CtStore1, S1)}.
-spec store_map_size(pubkey(), non_neg_integer(), store()) -> {non_neg_integer(), store()}.
%% @doc The size of a store map, read directly from its metadata.
store_map_size(Pubkey, MapId, S) ->
    {ok, Meta, S1} = find_meta_data(Pubkey, S),
    ?METADATA(_RawId, _RefCount, Size) = get_map_meta(MapId, Meta),
    {Size, S1}.
%% -- Map metadata -----------------------------------------------------------
-spec find_meta_data(pubkey(), store()) -> {ok, store_meta(), store()} | error.
%% @doc The map metadata for a contract, always returned together with a
%% store (updated if the metadata had to be loaded and cached).
find_meta_data(Pubkey, S) ->
    case find_value_(Pubkey, ?META_STORE_POS, S) of
        {ok, Meta, S1} -> {ok, Meta, S1};
        {ok, Meta}     -> {ok, Meta, S};
        error          -> error
    end.
%% The metadata starts out empty: no store maps allocated yet.
empty_store_meta_data() -> maps:new().
%% We need to know which map ids are in use when allocating fresh ones. We
%% include RawIds to ensure that we can set MapId == RawId for newly allocated
%% maps.
-spec used_map_ids(store_meta()) -> [map_id()].
%% Sorted, duplicate-free list of every MapId and RawId in the metadata.
used_map_ids(Metadata) ->
    Collect = fun(Id, ?METADATA(RawId, _, _), Acc) -> [Id, RawId | Acc] end,
    lists:usort(maps:fold(Collect, [], Metadata)).
-spec put_map_meta(map_id(), map_meta(), store_meta()) -> store_meta().
%% Insert or overwrite the metadata entry for a map.
put_map_meta(MapId, MapMeta, Metadata) ->
    maps:put(MapId, MapMeta, Metadata).
-spec remove_map_meta(map_id(), store_meta()) -> store_meta().
%% Drop the metadata entry for a map (a no-op if absent).
remove_map_meta(MapId, Metadata) ->
    maps:without([MapId], Metadata).
-spec get_map_meta(map_id(), store_meta()) -> map_meta().
%% Metadata entry for a map; crashes (badkey) if the id is unknown.
get_map_meta(MapId, Meta) ->
    maps:get(MapId, Meta).
-spec find_meta_data_no_cache(#cache_entry{}) -> {ok, store_meta()} | error.
%% Read the metadata without keeping the updated cache entry (used when
%% the entry will not be consulted again, e.g. during finalize).
find_meta_data_no_cache(CacheEntry) ->
    case find_term(?META_STORE_POS, CacheEntry) of
        error         -> error;
        {ok, Meta}    -> {ok, Meta};
        {ok, Meta, _} -> {ok, Meta}
    end.
%%%===================================================================
%%% Entry for one contract
%% A clean cache entry wrapping the given contract store.
new_contract_cache_entry(Store) ->
    #cache_entry{ dirty = false
                , terms = #{}
                , store = Store
                }.
%% Find a register term in the entry's term cache, falling back to the
%% underlying store. A store hit is memoized (clean) in the returned entry.
find_term(StorePos, #cache_entry{terms = Terms} = E) ->
    case maps:find(StorePos, Terms) of
        {ok, {Cached, _Dirty}} ->
            {ok, Cached};
        error ->
            case find_in_store(store_key(StorePos), E#cache_entry.store) of
                error ->
                    error;
                {ok, Loaded, NewStore} ->
                    Terms1 = Terms#{StorePos => {Loaded, false}},
                    {ok, Loaded, E#cache_entry{terms = Terms1, store = NewStore}}
            end
    end.
%% Read and deserialize a raw key from the contract store; the empty
%% binary is how the store signals "not present".
find_in_store(Key, Store) ->
    {Value, Store1} = aect_contracts_store:get_w_cache(Key, Store),
    case Value of
        <<>> -> error;
        _    -> {ok, aeb_fate_encoding:deserialize(Value), Store1}
    end.
%% Key of a store register: prefix byte followed by the big-endian position.
store_key(Int) ->
    Suffix = binary:encode_unsigned(Int),
    <<?STORE_KEY_PREFIX, Suffix/binary>>.
%% Key of register 0, where the map metadata lives.
store_meta_key() ->
    store_key(?META_STORE_POS).
%% Common key prefix of all entries belonging to the store map RawId.
map_data_key(RawId) ->
    <<?STORE_MAP_PREFIX, RawId:?RAWID_BITS>>.
%% Store key for one entry of map RawId, keyed by the serialized fate key.
map_data_key(RawId, Key) ->
    KeyBin = aeb_fate_encoding:serialize(Key),
    map_raw_key(RawId, KeyBin).
%% Append an already-serialized key to the map's key prefix.
map_raw_key(RawId, KeyBin) ->
    Prefix = map_data_key(RawId),
    <<Prefix/binary, KeyBin/binary>>.
%%%===================================================================
%%% Write through cache to stores
-spec finalize(aefa_chain_api:state(), non_neg_integer(), store()) ->
           {ok, aefa_chain_api:state(), non_neg_integer()}
         | {error, out_of_gas}.
%% @doc Push every dirty cached contract store back to the chain state,
%% charging the remaining gas for the bytes written. Returns
%% {error, out_of_gas} when the store byte gas exceeds GasLeft.
finalize(API, GasLeft, #store{cache = Cache} = _S) ->
    ?DEBUG_STORE(_S),
    %% finalize_entry/3 throws 'out_of_gas' when spend_size_gas/2 runs dry.
    try maps:fold(fun finalize_entry/3, {[], GasLeft}, Cache) of
        {Stores, GasLeft1} ->
            API1 = finalize_stores(Stores, API),
            {ok, API1, GasLeft1}
    catch
        throw:out_of_gas ->
            {error, out_of_gas}
    end.
%% Hand each finalized contract store over to the chain API.
finalize_stores(Stores, API) ->
    lists:foldl(fun({Pubkey, Store}, Acc) ->
                        aefa_chain_api:set_contract_store(Pubkey, Store, Acc)
                end, API, Stores).
%% Fold function over the contract cache: clean entries are skipped; for a
%% dirty entry the store updates are computed and performed, accumulating
%% the stores to write and the remaining gas.
finalize_entry(_Pubkey, #cache_entry{dirty = false}, Acc) ->
    Acc;
finalize_entry(Pubkey, Cache = #cache_entry{store = Store}, {Writes, GasLeft}) ->
    {ok, Metadata} = find_meta_data_no_cache(Cache), %% Last access so no need to cache here
    %% Compute which updates need to be performed (see store_update() type
    %% below). This also takes care of updating the metadata with new reference
    %% counts and removing entries for maps to be garbage collected. New maps
    %% get a dummy entry with only the reference count set.
    {Metadata1, Updates} = compute_store_updates(Metadata, Cache),
    ?DEBUG_PRINT("Updates\n ~p\n", [Updates]),
    ?ASSERT(check_store_updates(Updates), {bad_store_updates, Updates}),
    %% Performing the updates writes the necessary changes to the MP trees.
    {Store1, GasLeft1} = perform_store_updates(Metadata, Updates, Metadata1, GasLeft, Store),
    {[{Pubkey, Store1} | Writes], GasLeft1}.
%% The dirty cached terms: these are what we must pay traversal gas for
%% before finalizing.
-spec terms_to_finalize(store()) -> [fate_val()].
terms_to_finalize(#store{cache = Cache}) ->
    lists:append(
      [ [ Term || {Term, true} <- maps:values(Terms) ]
        || #cache_entry{dirty = true, terms = Terms} <- maps:values(Cache) ]).
%%%===================================================================
%%% Store updates
-type store_update() :: {push_term, pos_integer(), fate_val()} %% Write to store register
| {copy_map, map_id(), fate_map()} %% Create a new map (no inplace update)
| {update_map, map_id(), aeb_fate_data:fate_store_map()} %% Update an existing map inplace
| {gc_map, map_id()}. %% Garbage collect a map removing all entries
-ifdef(TEST).
%% Check that if a map is gc'd it's not also updated or copied.
-spec check_store_updates([store_update()]) -> boolean().
check_store_updates(Updates) ->
    GCd     = [ Id || {gc_map, Id} <- Updates ],
    Copied  = [ Id || {copy_map, _, ?FATE_STORE_MAP(_, Id)} <- Updates ],
    Updated = [ Id || {update_map, _, ?FATE_STORE_MAP(_, Id)} <- Updates ],
    %% No gc'd id may be the source of a copy or an inplace update.
    GCd -- (Copied ++ Updated) == GCd.
-endif.
-spec compute_store_updates(store_meta(), #cache_entry{}) -> {store_meta(), [store_update()]}.
%% Analyse the dirty store registers of one contract and compute the list
%% of store updates to perform, together with the updated map metadata
%% (refcounts adjusted, gc'd maps removed, fresh maps with dummy entries).
%% Fix: the comment before the Order fun had lost its %% prefix in this
%% copy, which made the module fail to compile; restored below.
compute_store_updates(Metadata, #cache_entry{terms = TermCache, store = Store}) ->
    UsedIds = used_map_ids(Metadata),
    {Regs, Terms} = lists:unzip([{Reg, Term} || {Reg, {Term, Dirty}} <- lists:keysort(1,maps:to_list(TermCache)),
                                                Reg > ?META_STORE_POS, Dirty]),
    %% Go through the store register and find all maps that we want to put in
    %% the store. Terms1 is the updated store registry values (containing only
    %% store maps with empty caches), and Maps contains the new store maps that
    %% we should create.
    {Terms1, Maps} = aeb_fate_maps:allocate_store_maps(UsedIds, Terms),
    NewRegs = lists:zip(Regs, Terms1),
    %% Reference counting and garbage collection. Compute which maps can be
    %% updated inplace (Reuse) and which maps can be garbage collected (Garbage).
    RefCounts = compute_refcounts(NewRegs, Maps, Metadata, Store),
    Metadata1 = update_refcounts(RefCounts, Metadata),
    {Unused, Reuse, Metadata1b} = compute_reuse_fixpoint(Maps, Metadata1, Store),
    {Garbage, Metadata2} = compute_garbage(Unused, Reuse, Metadata1b, Store),
    CopyOrInplace = fun(MapId, ?FATE_STORE_MAP(_, Id) = Map) ->
                            case maps:get(Id, Reuse, no_reuse) of
                                MapId -> {update_map, MapId, Map};
                                _     -> {copy_map, MapId, Map}
                            end;
                       (MapId, Map) -> {copy_map, MapId, Map} end,
    Updates = [ {push_term, Reg, Term} || {Reg, Term} <- NewRegs ] ++
              [ CopyOrInplace(MapId, Map) || {MapId, Map} <- lists:keysort(1, maps:to_list(Maps)) ] ++
              [ {gc_map, RawId} || RawId <- Garbage ],
    %% It's important (very!) that copy_map runs before update_map, since
    %% update map does a destructive update of the store. To make sure this is
    %% the case we sort the updates.
    Order = fun(push_term)  -> 0;
               (copy_map)   -> 1;
               (update_map) -> 2;
               (gc_map)     -> 3 end,
    Compare = fun(A, B) -> Order(element(1, A)) =< Order(element(1, B)) end,
    {Metadata2, lists:sort(Compare, Updates)}.
%% Entry point for the reuse analysis, bounded by 100 iterations of fuel.
compute_reuse_fixpoint(Maps, Metadata, Store) ->
    Unused = unused_maps(Metadata),
    compute_reuse_fixpoint(Unused, Maps, Metadata, Store, 100).
%% Iterate the refcount/inplace-update analysis to a fixpoint (bounded by
%% Fuel): each round recomputes which unused maps can be reused for inplace
%% updates and how that choice changes the refcounts of copied maps.
compute_reuse_fixpoint(Unused, Maps, Metadata, Store, Fuel) ->
    Reuse = compute_inplace_updates(Unused, Maps),
    RefCounts1 = compute_copy_refcounts(Metadata, Reuse, Maps, Store),
    Metadata1 = update_refcounts(RefCounts1, Metadata),
    Unused1 = unused_maps(Metadata1),
    case Unused1 == Unused of
        _ when Fuel =< 0 ->
            %% Safety valve: should be unreachable, but never loop forever.
            ?ASSERT(false, {reuse_fixpoint_out_of_fuel, Metadata, Unused, Maps}),
            {Unused, Reuse, Metadata1};
        true -> {Unused, Reuse, Metadata1};
        false ->
            %% NOTE: Metadata and not Metadata1. Reason: compute_copy_refcounts
            %% will update refcounts assuming no inplace updates have been
            %% taken into account. So, each iteration of the loop updates the
            %% original metadata.
            compute_reuse_fixpoint(Unused1, Maps, Metadata, Store, Fuel - 1)
    end.
%% Run the computed updates in order, charging gas per byte written, then
%% persist the final metadata into the metadata store register.
perform_store_updates(OldMeta, [Update|Left], Meta, GasLeft, Store) ->
    ?DEBUG_PRINT("Update: ~p\n", [Update]),
    {Meta1, Bytes, Store1} = perform_store_update(OldMeta, Update, {Meta, Store}),
    GasLeft1 = spend_size_gas(GasLeft, Bytes),
    perform_store_updates(OldMeta, Left, Meta1, GasLeft1, Store1);
perform_store_updates(_OldMeta, [], Meta, GasLeft, Store) ->
    %% Save the updated metadata at the end
    {Store1, Bytes} = push_term(?META_STORE_POS, Meta, Store),
    GasLeft1 = spend_size_gas(GasLeft, Bytes),
    {Store1, GasLeft1}.
%% Charge store byte gas for Bytes written; throws 'out_of_gas' (caught in
%% finalize/3) when the remaining gas would go negative.
spend_size_gas(GasLeft, Bytes) ->
    ?DEBUG_PRINT("GasLeft: ~w Bytes: ~w\n", [GasLeft, Bytes]),
    Remaining = GasLeft - Bytes * aec_governance:store_byte_gas(),
    if Remaining < 0 -> throw(out_of_gas);
       true          -> Remaining
    end.
-spec perform_store_update(store_meta(), store_update(), {store_meta(), aect_contracts_store:store()}) ->
        {store_meta(), non_neg_integer(), aect_contracts_store:store()}.
%% Dispatch a single store update; returns the new metadata, the number of
%% bytes written (for gas accounting) and the updated contract store.
perform_store_update(_OldMeta, {push_term, StorePos, FateVal}, {Meta, Store}) ->
    {Store1, Bytes} = push_term(StorePos, FateVal, Store),
    {Meta, Bytes, Store1};
perform_store_update(OldMeta, {copy_map, MapId, Map}, S) ->
    copy_map(OldMeta, MapId, Map, S);
perform_store_update(_OldMeta, {update_map, MapId, Map}, S) ->
    update_map(MapId, Map, S);
perform_store_update(_OldMeta, {gc_map, MapId}, S) ->
    gc_map(MapId, S).
%% Write a term to a store register; returns the updated store and the
%% number of key + value bytes written (for gas accounting).
push_term(Pos, FateVal, Store) ->
    Key = store_key(Pos),
    Val = aeb_fate_encoding:serialize(FateVal),
    Bytes = byte_size(Key) + byte_size(Val),
    {aect_contracts_store:put(Key, Val, Store), Bytes}.
%% Allocate a new map.
%% Fix: in this copy, three comment lines had lost their %% prefix and the
%% 'RawId = MapId' binding in the first clause had been merged into its
%% trailing comment, breaking compilation; both are restored here.
copy_map(_OldMeta, MapId, Map, {Meta, Store}) when ?IS_FATE_MAP(Map) ->
    %% The RefCount was set in compute_store_updates
    ?METADATA(_, RefCount, _) = get_map_meta(MapId, Meta),
    RawId = MapId, %% RawId == MapId for fresh maps
    Size = maps:size(Map),
    %% Update the metadata with RawId and Size
    Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size), Meta),
    %% Write the data
    BinData = cache_to_bin_data(?FATE_MAP_VALUE(Map)), %% A map value is a special case of a cache (no tombstones)
    {Store1, Bytes} = write_bin_data(RawId, BinData, Store),
    %% and the subtree node that allows us to call aect_contracts_store:subtree
    Store2 = aect_contracts_store:put(map_data_key(RawId), <<0>>, Store1),
    {Meta1, Bytes, Store2};
copy_map(OldMeta, MapId, ?FATE_STORE_MAP(Cache, OldId), {Meta, Store}) ->
    %% In case of a modified store map we need to copy all the entries for the
    %% old map and then update with the new data (Cache).
    ?METADATA(_, RefCount, _) = get_map_meta(MapId, Meta),
    %% We shouldn't get here if the old map is being garbage collected (should
    %% be update_map instead), but the garbage collection analysis is limited
    %% for nested maps currently, so we keep the old metadata around and look
    %% it up there.
    ?METADATA(OldRawId, _RefCount, OldSize) = get_map_meta(OldId, OldMeta),
    RawId = MapId,
    OldMap = aect_contracts_store:subtree(map_data_key(OldRawId), Store),
    NewData = cache_to_bin_data(Cache),
    Size = OldSize + size_delta(OldMap, NewData),
    Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size), Meta),
    %% First copy the old data, then update with the new
    {Store1, Bytes} = write_bin_data(RawId, lists:keysort(1, maps:to_list(OldMap)) ++ NewData, Store),
    Store2 = aect_contracts_store:put(map_data_key(RawId), <<0>>, Store1),
    {Meta1, Bytes, Store2}.
%% In-place update of an existing store map. This happens, for instance, when
%% you update a map in the state throwing away the old copy of it.  The map
%% keeps the old map's RawId; only the cached changes are written.
%%
%% Fixes two compile errors in the received block: the bare token
%% `Precomputed` (a stripped comment) and matching `_RefCount` while using
%% `RefCount` below (unbound variable).
update_map(MapId, ?FATE_STORE_MAP(Cache, OldId), {Meta, Store}) ->
    %% RawId/RefCount were precomputed (see compute_store_updates).
    ?METADATA(RawId, RefCount, OldSize) = get_map_meta(OldId, Meta),
    NewData = cache_to_bin_data(Cache),
    Size = OldSize + size_delta(RawId, Store, NewData),
    {Store1, Bytes} = write_bin_data(RawId, NewData, Store),
    %% The new MapId replaces the old entry in the metadata.
    Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size),
                         remove_map_meta(OldId, Meta)),
    %% We also need to update the refcounts for nested maps. We already added
    %% refcounts for maps in the Cache, now we have to subtract refcounts for
    %% entries overwritten by the cache.
    RefCounts = lists:foldl(fun({Key, _}, Count) ->
                                aeb_fate_maps:refcount_union(refcount_delta(Key, false, Store), %% old store
                                                             Count)
                            end, aeb_fate_maps:refcount_zero(), NewData),
    Meta2 = update_refcounts(RefCounts, Meta1),
    {Meta2, Bytes, Store1}.
%% Remove every entry of a garbage-collected map from the store, plus its
%% subtree marker node.  (The received block had the first comment line's %%
%% marker stripped, making it a syntax error; restored.)
gc_map(RawId, {Meta, Store}) ->
    %% Only the RawId here, we already removed the MapId from the metadata.
    Data = aect_contracts_store:subtree(map_data_key(RawId), Store),
    Store1 = maps:fold(fun(Key, _, S) -> aect_contracts_store:remove(map_raw_key(RawId, Key), S) end,
                       aect_contracts_store:remove(map_data_key(RawId), Store), Data),
    %% Removals write no bytes, so there is no size gas to charge.
    {Meta, _Bytes = 0, Store1}.
-type bin_data() :: [{binary(), binary() | ?FATE_MAP_TOMBSTONE}].
-type map_cache() :: #{fate_val() => fate_val() | ?FATE_MAP_TOMBSTONE}.
%% Serialize a map cache into a key-sorted association list of binaries.
%% Tombstones pass through unserialized so write_bin_data can turn them into
%% deletions.
-spec cache_to_bin_data(map_cache()) -> bin_data().
cache_to_bin_data(Cache) ->
    Serialize =
        fun({K, ?FATE_MAP_TOMBSTONE}) ->
                {aeb_fate_encoding:serialize(K), ?FATE_MAP_TOMBSTONE};
           ({K, V}) ->
                {aeb_fate_encoding:serialize(K), aeb_fate_encoding:serialize(V)}
        end,
    lists:map(Serialize, lists:keysort(1, maps:to_list(Cache))).
%% Write serialized key/value pairs under map RawId.  A tombstone deletes the
%% key.  Returns the updated store and the total byte count actually written
%% (deletions are free).
-spec write_bin_data(raw_id(), bin_data(), aect_contracts_store:store()) ->
        {aect_contracts_store:store(), non_neg_integer()}.
write_bin_data(RawId, BinData, Store) ->
    Write =
        fun({Key, ?FATE_MAP_TOMBSTONE}, {St, Bytes}) ->
                {aect_contracts_store:remove(map_raw_key(RawId, Key), St), Bytes};
           ({Key, Val}, {St, Bytes}) ->
                St1 = aect_contracts_store:put(map_raw_key(RawId, Key), Val, St),
                {St1, Bytes + byte_size(Key) + byte_size(Val)}
        end,
    lists:foldl(Write, {Store, 0}, BinData).
%% Compute the change in size updating an old map with new entries.
%% OldMap is the current contents of the map as stored (serialized keys to
%% serialized values, as returned by aect_contracts_store:subtree/2).
-spec size_delta(#{binary() => binary()}, bin_data()) -> integer().
size_delta(OldMap, NewData) ->
    size_delta_(fun(K) -> maps:is_key(K, OldMap) end, NewData).
%% Same as size_delta/2 but instead of a binary map we get a raw_id() and the
%% store.  A key is considered present when its store entry is non-empty
%% (aect_contracts_store:get returns <<>> for absent keys).
-spec size_delta(raw_id(), aect_contracts_store:store(), bin_data()) -> integer().
size_delta(RawId, Store, NewData) ->
    size_delta_(fun(K) -> <<>> /= aect_contracts_store:get(map_raw_key(RawId, K), Store) end,
                NewData).
%% Shared worker for size_delta/2,3: given a key-membership predicate and the
%% new entries, sum the per-entry size changes.  Deleting an existing key is
%% -1, inserting a fresh key is +1, everything else leaves the size unchanged.
size_delta_(IsKey, NewData) ->
    EntryDelta =
        fun({Key, ?FATE_MAP_TOMBSTONE}) ->
                case IsKey(Key) of
                    true  -> -1;  %% existing entry deleted
                    false -> 0    %% deleting an absent key is a no-op
                end;
           ({Key, _Val}) ->
                case IsKey(Key) of
                    true  -> 0;   %% overwrite, size unchanged
                    false -> 1    %% brand new entry
                end
        end,
    lists:sum([ EntryDelta(Entry) || Entry <- NewData ]).
%% -- Reference counting -----------------------------------------------------

%% Compute refcount deltas from updated store registers and updates to store
%% maps, merged into a single refcount delta map.
compute_refcounts(Regs, Maps, Metadata, Store) ->
    aeb_fate_maps:refcount_union(register_refcounts(Regs, Store),
                                 maps_refcounts(Metadata, Maps, Store)).
%% Refcount delta from updating the store registers.  (The comment line had
%% lost its %% marker in the received block — a syntax error at top level.)
register_refcounts(Regs, Store) ->
    aeb_fate_maps:refcount_union(
        [ refcount_delta(store_key(Reg), NewVal, Store)
          || {Reg, NewVal} <- Regs ]).
%% Refcount delta from store map updates.  (The comment line had lost its %%
%% marker in the received block — a syntax error at top level.)
maps_refcounts(Metadata, Maps, Store) ->
    aeb_fate_maps:refcount_union(
        [ map_refcounts(Metadata, Map, Store)
          || {_, Map} <- lists:keysort(1, maps:to_list(Maps)) ]).
%% Refcount contribution of a single new or modified store map.
map_refcounts(_Meta, Map, _Store) when ?IS_FATE_MAP(Map) ->
    %% Fresh map, only adds new references.
    aeb_fate_maps:refcount(Map);
map_refcounts(_Meta, ?FATE_STORE_MAP(Cache, _Id), _Store) ->
    %% Note that this does not count as a reference to Id
    maps:fold(fun(_Key, Val, Count) ->
                  %% We don't know if this map will be copied or updated in place,
                  %% so we shouldn't compute a refcount delta. Instead we conservatively
                  %% only look at the new value. Once the copy or update happens we'll take
                  %% the delta into account.
                  aeb_fate_maps:refcount_union(aeb_fate_maps:refcount(Val), Count)
              end, #{}, Cache).
%% We need to increase the refcounts of maps contained in maps that are being
%% copied.  (Two inner comment lines had lost their %% markers in the received
%% block — syntax errors inside the fun; restored.)
compute_copy_refcounts(Meta, Reuse, Maps, Store) ->
    maps:fold(fun(MapId, ?FATE_STORE_MAP(Cache, Id), Count) ->
                    case maps:get(Id, Reuse, no_reuse) of
                        MapId ->
                            %% Inplace update: subtract refcounts for entries
                            %% overwritten by the Cache.
                            ?METADATA(RawId, _RefCount, _Size) = get_map_meta(Id, Meta),
                            RemovedValues = [ Val || Key <- maps:keys(Cache),
                                                     {ok, Val, _} <- [find_in_store(map_data_key(RawId, Key), Store)] ],
                            Removed = aeb_fate_maps:refcount(RemovedValues),
                            aeb_fate_maps:refcount_diff(Count, Removed);
                        _ ->
                            %% Copy: note that we already added refcounts for
                            %% the Cache, so only count entries NOT shadowed by it.
                            ?METADATA(RawId, _RefCount, _Size) = get_map_meta(Id, Meta),
                            NewKeys = [ aeb_fate_encoding:serialize(Key) || Key <- maps:keys(Cache) ],
                            OldBin = maps:without(NewKeys, aect_contracts_store:subtree(map_data_key(RawId), Store)),
                            Count1 = aeb_fate_maps:refcount([ aeb_fate_encoding:deserialize(Val) || Val <- maps:values(OldBin) ]),
                            aeb_fate_maps:refcount_union(Count1, Count)
                    end;
                 (_, _, Count) -> Count
              end, #{}, Maps).
%% Difference in reference counts caused by a store write:
%% refcount(Val) - refcount(old value at StoreKey).
-spec refcount_delta(binary(), fate_val() | ?FATE_MAP_TOMBSTONE, aect_contracts_store:store()) ->
        aeb_fate_maps:refcount().
refcount_delta(StoreKey, Val, Store) ->
    OldCount =
        case aect_contracts_store:get(StoreKey, Store) of
            <<>>   -> #{};  %% nothing stored under this key yet
            Binary -> aeb_fate_maps:refcount(aeb_fate_encoding:deserialize(Binary))
        end,
    aeb_fate_maps:refcount_diff(aeb_fate_maps:refcount(Val), OldCount).
%% Apply refcount deltas to the map metadata.
-spec update_refcounts(aeb_fate_maps:refcount(), store_meta()) -> store_meta().
update_refcounts(Deltas, Meta) ->
    maps:fold(fun(Id, Delta, M) ->
                  %% Maps not yet in the metadata get a placeholder entry with
                  %% only the refcount set; RawId and Size are filled in later
                  %% (see copy_map).
                  maps:update_with(Id,
                      fun(?METADATA(RawId, RefCount, Size)) ->
                          ?METADATA(RawId, RefCount + Delta, Size)
                      end, ?METADATA(undefined, Delta, undefined), M)
              end, Meta, Deltas).
%% Maps with refcount 0 are no longer needed and can be garbage collected or
%% updated in place.  Returns the set of unused ids as a map Id => true.
unused_maps(Metadata) ->
    Unused = [ {Id, true} || {Id, ?METADATA(_, 0, _)} <- maps:to_list(Metadata) ],
    maps:from_list(Unused).
%% Each map can only be reused once (obviously) so we return a map with the old
%% maps (to be reused) as keys: OldId => MapId, meaning the unused old map
%% OldId may be updated in place to become MapId.
compute_inplace_updates(Unused, Maps) ->
    maps:fold(fun(MapId, ?FATE_STORE_MAP(_, OldId), Acc) ->
                  %% Only store maps building on an unused old map qualify.
                  case maps:is_key(OldId, Unused) of
                      true  -> Acc#{ OldId => MapId };
                      false -> Acc
                  end;
                 %% Fresh (plain) maps never reuse an old map.
                 (_, _, Acc) -> Acc end, #{}, Maps).
%% Maps to be garbage collected are the unused maps except those that are being
%% updated in place. Marking a map for garbage collection requires updating the
%% reference counts for maps referenced by it. This may trigger more garbage
%% collection.  Returns {RawIds to gc, updated metadata}.
compute_garbage(Unused, Reuse, Metadata, Store) ->
    Garbage = maps:keys(Unused) -- maps:keys(Reuse),
    case Garbage of
        [] -> {[], Metadata};
        _ ->
            %% Dropping the garbage maps decrements refcounts of maps they
            %% reference, which may make further maps unused.
            Refcounts = gc_refcounts(Garbage, Metadata, Store),
            Metadata1 = update_refcounts(Refcounts, Metadata),
            Metadata2 = maps:without(Garbage, Metadata1),
            Unused1 = unused_maps(Metadata2),
            %% Recurse to pick up maps that became unused in this round.
            {Garbage1, Metadata3} = compute_garbage(Unused1, Reuse, Metadata2, Store),
            %% gc_map operates on raw ids, so translate before returning.
            GetRawId = fun(Id) -> ?METADATA(RawId, _, _) = get_map_meta(Id, Metadata), RawId end,
            {lists:map(GetRawId, Garbage) ++ Garbage1, Metadata3}
    end.
%% Refcount delta arising from garbage collecting a map.  (The comment line
%% had lost its %% marker in the received block — a syntax error at top level.)
gc_refcounts(Ids, Metadata, Store) ->
    Count = fun(Id) ->
                ?METADATA(RawId, _, _) = get_map_meta(Id, Metadata),
                %% Here we need to go over the entire map to update the
                %% reference counts, so grab the subtree.
                Data = aect_contracts_store:subtree(map_data_key(RawId), Store),
                %% -sum([ refcount(Val) || Val <- Data ])
                aeb_fate_maps:refcount_diff(aeb_fate_maps:refcount_zero(),
                    aeb_fate_maps:refcount_union(
                        [ aeb_fate_maps:refcount(aeb_fate_encoding:deserialize(Val))
                          || Val <- maps:values(Data) ]))
            end,
    aeb_fate_maps:refcount_union(lists:map(Count, Ids)).
%% -- Debug ------------------------------------------------------------------
-ifdef(DEBUG).
%% Pretty-print every cached contract store (compiled in debug builds only).
debug_stores(#store{cache = Cache}) ->
    [ begin
          io:format("Contract: ~p\n- Store\n~s", [Pubkey, debug_store(Store)])
      end || {Pubkey, #cache_entry{ store = Store }} <- maps:to_list(Cache) ],
    ok.
%% Render one contract store as readable text: decode the store registers
%% (?STORE_KEY_PREFIX entries) and the map entries (?STORE_MAP_PREFIX).
debug_store(Store) ->
    Map = aect_contracts_store:subtree(<<>>, Store),
    %% Store registers: register number => deserialized FATE value.
    Regs = maps:from_list(
             [ {binary:decode_unsigned(Reg), aeb_fate_encoding:deserialize(Val)}
               || {<<?STORE_KEY_PREFIX, Reg/binary>>, Val} <- maps:to_list(Map) ]),
    %% Map entries; the empty key is the subtree marker node for that RawId.
    Maps = maps:from_list(
             [ case Key of
                   <<>> -> {binary:decode_unsigned(RawId), Val};
                   _ -> {{binary:decode_unsigned(RawId), aeb_fate_encoding:deserialize(Key)},
                         aeb_fate_encoding:deserialize(Val)}
               end || {<<?STORE_MAP_PREFIX, RawId:4/binary, Key/binary>>, Val} <- maps:to_list(Map) ]),
    io_lib:format(" Regs: ~p\n Maps: ~p\n", [Regs, Maps]).
-endif.
| null | https://raw.githubusercontent.com/aeternity/aeternity/d4863b7233f406c753fd42da10b7ee19a14aa399/apps/aefate/src/aefa_stores.erl | erlang | -------------------------------------------------------------------
@doc
The code assumes that if a get/put is issued, the contract is
already in the store. Care must be taken to explicitly check if
a contract is present, and put it (using put_contract/2) if it is not.
reading/converting and to avoid pushing terms that have not been
altered. Both things are expensive as it involves going out to the
underlying merkle trees.
Use finalize/3 to push the stores back to the chain when the
fate execution is done.
@end
-------------------------------------------------------------------
Map functions
===================================================================
API
===================================================================
-- Local functions --------------------------------------------------------
===================================================================
Store maps
Maps are saved in the store as follows
/?STORE_MAP_PREFIX/RawId:32 <<0>> - subtree node
call will only have to deserialize the metadata at most once.
when the old copy of the map is not saved.
-- Store map API ----------------------------------------------------------
-- Map metadata -----------------------------------------------------------
We need to know which map ids are in use when allocating fresh ones. We
maps.
===================================================================
===================================================================
Write through cache to stores
Last access so no need to cache here
Compute which updates need to be performed (see store_update() type
below). This also takes care of updating the metadata with new reference
counts and removing entries for maps to be garbage collected. New maps
get a dummy entry with only the reference count set.
Performing the updates writes the necessary changes to the MP trees.
These are the terms we need to pay traversal gas for before finalizing.
===================================================================
Store updates
Write to store register
Create a new map (no inplace update)
Update an existing map inplace
Garbage collect a map removing all entries
Check that if a map is gc'd it's not also updated or copied.
Go through the store register and find all maps that we want to put in
the store. Terms1 is the updated store registry values (containing only
store maps with empty caches), and Maps contains the new store maps that
we should create.
Reference counting and garbage collection. Compute which maps can be
updated inplace (Reuse) and which maps can be garbage collected (Garbage).
update map does a destructive update of the store. To make sure this is
the case we sort the updates.
NOTE: Metadata and not Metadata1. Reason: compute_copy_refcounts
will update refcounts assuming no inplace updates have been
taken into account. So, each iteration of the loop updates the
original metadata.
Save the updated metadata at the end
Write to a store register
Allocate a new map.
Write the data
A map value is a special case of a cache (no tombstones)
and the subtree node that allows us to call aect_contracts_store:subtree
In case of a modified store map we need to copy all the entries for the
old map and then update with the new data (Cache).
We shouldn't get here if the old map is being garbage collected (should
be update_map instead), but the garbage collection analysis is limited
for nested maps currently, so we keep the old metadata around and look
it up there.
In-place update of an existing store map. This happens, for instance, when
you update a map in the state throwing away the old copy of the it.
We also need to update the refcounts for nested maps. We already added
entries overwritten by the cache.
old store
Compute the change in size updating an old map with new entries.
Same as size_delta/2 but instead of a binary map we get a raw_id() and the
store.
-- Reference counting -----------------------------------------------------
Compute refcount deltas from updated store registers and updates to store
maps.
Fresh map, only adds new references.
Note that this does not count as a reference to Id
We don't know if this map will be copied or updated in place,
so we shouldn't compute a refcount delta. Instead we conservatively
only look at the new value. Once the copy or update happens we'll take
the delta into account.
We need to increase the refcounts of maps contained in maps that are being
copied.
Compute the difference in reference counts caused by performing a store
update: refcount(Val) - refcount(OldVal).
Write new refcounts to the metadata
Maps with refcount 0 are no longer needed and can be garbage collected or
updated in place.
Each map can only be reused once (obviously) so we return a map with the old
maps (to be reused) as keys.
Maps to be garbage collected are the unused maps except those that are being
updated in place. Marking a map for garbage collection requires updating the
reference counts for maps referenced by it. This may trigger more garbage
collection.
Here we need to go over the entire map to update the
reference counts, so grab the subtree.
-sum([ refcount(Val) || Val <- Data ])
-- Debug ------------------------------------------------------------------ | ( C ) 2019 , Aeternity Anstalt
ADT for contract stores in FATE .
Entries are cached in a read / write manner to avoid multiple
-module(aefa_stores).
-include_lib("aebytecode/include/aeb_fate_data.hrl").
-export([ finalize/3
, find_value/3
, has_contract/2
, initial_contract_store/0
, new/0
, put_contract_store/3
, put_value/4
, terms_to_finalize/1
, cache_map_metadata/2
, store_map_lookup/4
, store_map_member/4
, store_map_to_list/3
, store_map_size/3
]).
-record(store, { cache = #{} :: contract_cache()
}).
-record(cache_entry, { store :: aect_contracts_store:store()
, dirty :: boolean()
, terms :: fate_terms()
}).
-type fate_val() :: aeb_fate_data:fate_type().
-type pubkey() :: <<_:256>>.
-type dirty() :: boolean().
-type fate_terms() :: #{ integer() => {fate_val(), dirty()} }.
-type contract_cache() :: #{pubkey() => #cache_entry{}}.
-type fate_map() :: aeb_fate_data:fate_map() | aeb_fate_data:fate_store_map().
-opaque store() :: #store{}.
-export_type([ store/0
]).
-define(MAX_STORE_POS, 16#ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff).
-define(META_STORE_POS, 0).
-define(VALID_STORE_POS(Pos), Pos > ?META_STORE_POS andalso Pos < ?MAX_STORE_POS).
-define(STORE_KEY_PREFIX, 0).
-define(STORE_MAP_PREFIX, 1).
-ifdef(TEST).
-define(ASSERT(Check, Err),
case Check of
true -> ok;
false -> error({assertion_failed, ?FILE, ?LINE, Err})
end).
-else.
-define(ASSERT(Check, Err), ok).
-endif.
-ifdef(DEBUG).
-define(DEBUG_STORE(S), debug_stores(S)).
-define(DEBUG_PRINT(Fmt, Args), io:format(Fmt, Args)).
-else.
-define(DEBUG_STORE(S), ok).
-define(DEBUG_PRINT(Fmt, Args), ok).
-endif.
-spec new() -> store().
new() ->
#store{}.
-spec initial_contract_store() -> aect_contracts_store:store().
initial_contract_store() ->
aect_contracts_store:put(store_meta_key(),
aeb_fate_encoding:serialize(empty_store_meta_data()),
aect_contracts_store:new()).
-spec put_contract_store(pubkey(), aect_contracts_store:store(), store()) -> store().
put_contract_store(Pubkey, Store, #store{cache = Cache} = S) ->
S#store{cache = Cache#{Pubkey => new_contract_cache_entry(Store)}}.
-spec has_contract(pubkey(), store()) -> boolean().
has_contract(Pubkey, #store{cache = Cache}) ->
maps:is_key(Pubkey, Cache).
-spec find_value(pubkey(), non_neg_integer(), store()) ->
{'ok', fate_val(), store()}
| {'ok', fate_val()}
| 'error'.
find_value(Pubkey, StorePos, S) when ?VALID_STORE_POS(StorePos) ->
find_value_(Pubkey, StorePos, S).
-spec put_value(pubkey(), non_neg_integer(), fate_val(), store()) -> store().
put_value(Pubkey, StorePos, FateVal, #store{cache = Cache} = S) ->
Entry = maps:get(Pubkey, Cache),
Terms = maps:put(StorePos, {FateVal, true}, Entry#cache_entry.terms),
Entry1 = Entry#cache_entry{terms = Terms, dirty = true},
S#store{cache = Cache#{Pubkey => Entry1}}.
update_ct_store(Pubkey, NewStore, #store{cache = Cache} = S) ->
E = maps:get(Pubkey, Cache),
S#store{cache = Cache#{Pubkey => E#cache_entry{store = NewStore}}}.
find_value_(Pubkey, StorePos, #store{cache = Cache} = S) ->
case find_term(StorePos, maps:get(Pubkey, Cache)) of
{ok, Term} ->
{ok, Term};
{ok, Term, Entry} ->
{ok, Term, S#store{cache = Cache#{Pubkey => Entry}}};
error ->
error
end.
/store_meta_key ( ) ( store register 0 ) Map meta data : # { MapId = > ? METADATA(RawId , RefCount , ) }
/?STORE_MAP_PREFIX / RawId:32 / Key Value for Key in map RawId
Storing the metadata in register 0 means we get caching for it , so each
Disinguishing the MapId from the RawId allows maps to be updated inplace ,
-define(METADATA(RawId, RefCount, Size), ?FATE_TUPLE({RawId, RefCount, Size})).
-define(RAWID_BITS, 32).
-type map_id() :: non_neg_integer().
-type raw_id() :: non_neg_integer().
-type ref_count() :: non_neg_integer().
-type map_meta() :: ?METADATA(raw_id(), ref_count(), non_neg_integer()).
-type store_meta() :: #{ map_id() => map_meta() }.
-spec cache_map_metadata(pubkey(), store()) -> store().
cache_map_metadata(Pubkey, S) ->
case find_meta_data(Pubkey, S) of
{ok, _, S1} -> S1;
error -> S
end.
-spec store_map_lookup(pubkey(), non_neg_integer(), fate_val(), store()) -> {{ok, fate_val()} | error, store()}.
store_map_lookup(Pubkey, MapId, Key, #store{cache = Cache} = S) ->
#cache_entry{ store = Store } = maps:get(Pubkey, Cache),
{ok, Meta, S1} = find_meta_data(Pubkey, S),
?METADATA(RawId, _RefCount, _Size) = get_map_meta(MapId, Meta),
case find_in_store(map_data_key(RawId, Key), Store) of
error -> {error, S1};
{ok, Val, Store1} -> {{ok, Val}, update_ct_store(Pubkey, Store1, S1)}
end.
-spec store_map_member(pubkey(), non_neg_integer(), fate_val(), store()) -> {boolean(), store()}.
store_map_member(Pubkey, MapId, Key, #store{cache = Cache} = S) ->
#cache_entry{ store = Store } = maps:get(Pubkey, Cache),
{ok, Meta, S1} = find_meta_data(Pubkey, S),
?METADATA(RawId, _RefCount, _Size) = get_map_meta(MapId, Meta),
case aect_contracts_store:get_w_cache(map_data_key(RawId, Key), Store) of
{<<>>, _} -> {false, S1};
{_Val, Store1} -> {true, update_ct_store(Pubkey, Store1, S1)}
end.
-spec store_map_to_list(pubkey(), non_neg_integer(), store()) -> {[{fate_val(), fate_val()}], store()}.
store_map_to_list(Pubkey, MapId, #store{cache = Cache} = S) ->
#cache_entry{ store = Store } = maps:get(Pubkey, Cache),
{ok, Meta, S1} = find_meta_data(Pubkey, S),
?METADATA(RawId, _, _) = get_map_meta(MapId, Meta),
{Subtree, Store1} = aect_contracts_store:subtree_w_cache(map_data_key(RawId), Store),
{[ {aeb_fate_encoding:deserialize(K), aeb_fate_encoding:deserialize(V)}
|| {K, V} <- lists:keysort(1,maps:to_list(Subtree)) ], update_ct_store(Pubkey, Store1, S1)}.
-spec store_map_size(pubkey(), non_neg_integer(), store()) -> {non_neg_integer(), store()}.
store_map_size(Pubkey, MapId, S) ->
{ok, Meta, S1} = find_meta_data(Pubkey, S),
?METADATA(_, _, Size) = get_map_meta(MapId, Meta),
{Size, S1}.
-spec find_meta_data(pubkey(), store()) -> {ok, store_meta(), store()} | error.
find_meta_data(Pubkey, S) ->
case find_value_(Pubkey, ?META_STORE_POS, S) of
{ok, Meta} -> {ok, Meta, S};
{ok, Meta, S1} -> {ok, Meta, S1};
error -> error
end.
empty_store_meta_data() -> #{}.
include RawIds to ensure that we can set MapId = = RawId for newly allocated
-spec used_map_ids(store_meta()) -> [map_id()].
used_map_ids(Metadata) ->
lists:usort(lists:append(
[ [Id, RawId] || {Id, ?METADATA(RawId, _, _)} <- maps:to_list(Metadata) ])).
-spec put_map_meta(map_id(), map_meta(), store_meta()) -> store_meta().
put_map_meta(MapId, MapMeta, Metadata) ->
Metadata#{ MapId => MapMeta }.
-spec remove_map_meta(map_id(), store_meta()) -> store_meta().
remove_map_meta(MapId, Metadata) ->
maps:remove(MapId, Metadata).
-spec get_map_meta(map_id(), store_meta()) -> map_meta().
get_map_meta(MapId, Meta) ->
maps:get(MapId, Meta).
-spec find_meta_data_no_cache(#cache_entry{}) -> {ok, store_meta()} | error.
find_meta_data_no_cache(CacheEntry) ->
case find_term(?META_STORE_POS, CacheEntry) of
{ok, Meta} -> {ok, Meta};
{ok, Meta, _} -> {ok, Meta};
error -> error
end.
Entry for one contract
new_contract_cache_entry(Store) ->
#cache_entry{ store = Store
, terms = #{}
, dirty = false
}.
find_term(StorePos, #cache_entry{terms = Terms} = E) ->
case maps:find(StorePos, Terms) of
{ok, {FateVal,_Dirty}} ->
{ok, FateVal};
error ->
case find_in_store(store_key(StorePos), E#cache_entry.store) of
error ->
error;
{ok, FateVal, Store1} ->
{ok, FateVal, E#cache_entry{terms = Terms#{StorePos => {FateVal, false}},
store = Store1}}
end
end.
find_in_store(Key, Store) ->
case aect_contracts_store:get_w_cache(Key, Store) of
{<<>>, _Store1} ->
error;
{Value, Store1} ->
FateVal = aeb_fate_encoding:deserialize(Value),
{ok, FateVal, Store1}
end.
store_key(Int) ->
<<?STORE_KEY_PREFIX, (binary:encode_unsigned(Int))/binary>>.
store_meta_key() ->
store_key(?META_STORE_POS).
map_data_key(RawId) ->
<<?STORE_MAP_PREFIX, RawId:?RAWID_BITS>>.
map_data_key(RawId, Key) ->
map_raw_key(RawId, aeb_fate_encoding:serialize(Key)).
map_raw_key(RawId, KeyBin) ->
<<(map_data_key(RawId))/binary, KeyBin/binary>>.
-spec finalize(aefa_chain_api:state(), non_neg_integer(), store()) ->
{ok, aefa_chain_api:state(), non_neg_integer()}
| {error, out_of_gas}.
finalize(API, GasLeft, #store{cache = Cache} = _S) ->
?DEBUG_STORE(_S),
try maps:fold(fun finalize_entry/3, {[], GasLeft}, Cache) of
{Stores, GasLeft1} ->
API1 = finalize_stores(Stores, API),
{ok, API1, GasLeft1}
catch
throw:out_of_gas ->
{error, out_of_gas}
end.
finalize_stores([{Pubkey, Store}|Left], API) ->
API1 = aefa_chain_api:set_contract_store(Pubkey, Store, API),
finalize_stores(Left, API1);
finalize_stores([], API) ->
API.
finalize_entry(_Pubkey, #cache_entry{dirty = false}, Acc) ->
Acc;
finalize_entry(Pubkey, Cache = #cache_entry{store = Store}, {Writes, GasLeft}) ->
{Metadata1, Updates} = compute_store_updates(Metadata, Cache),
?DEBUG_PRINT("Updates\n ~p\n", [Updates]),
?ASSERT(check_store_updates(Updates), {bad_store_updates, Updates}),
{Store1, GasLeft1} = perform_store_updates(Metadata, Updates, Metadata1, GasLeft, Store),
{[{Pubkey, Store1} | Writes], GasLeft1}.
-spec terms_to_finalize(store()) -> [fate_val()].
terms_to_finalize(#store{cache = Cache}) ->
[ Term || #cache_entry{dirty = true, terms = Terms} <- maps:values(Cache),
{Term, true} <- maps:values(Terms) ].
-ifdef(TEST).
-spec check_store_updates([store_update()]) -> boolean().
check_store_updates(Updates) ->
GCd = [ Id || {gc_map, Id} <- Updates ],
Copied = [ Id || {copy_map, _, ?FATE_STORE_MAP(_, Id)} <- Updates ],
Updated = [ Id || {update_map, _, ?FATE_STORE_MAP(_, Id)} <- Updates ],
GCd -- (Copied ++ Updated) == GCd.
-endif.
-spec compute_store_updates(store_meta(), #cache_entry{}) -> {store_meta(), [store_update()]}.
compute_store_updates(Metadata, #cache_entry{terms = TermCache, store = Store}) ->
UsedIds = used_map_ids(Metadata),
{Regs, Terms} = lists:unzip([{Reg, Term} || {Reg, {Term, Dirty}} <- lists:keysort(1,maps:to_list(TermCache)),
Reg > ?META_STORE_POS, Dirty]),
{Terms1, Maps} = aeb_fate_maps:allocate_store_maps(UsedIds, Terms),
NewRegs = lists:zip(Regs, Terms1),
RefCounts = compute_refcounts(NewRegs, Maps, Metadata, Store),
Metadata1 = update_refcounts(RefCounts, Metadata),
{Unused, Reuse, Metadata1b} = compute_reuse_fixpoint(Maps, Metadata1, Store),
{Garbage, Metadata2} = compute_garbage(Unused, Reuse, Metadata1b, Store),
CopyOrInplace = fun(MapId, ?FATE_STORE_MAP(_, Id) = Map) ->
case maps:get(Id, Reuse, no_reuse) of
MapId -> {update_map, MapId, Map};
_ -> {copy_map, MapId, Map}
end;
(MapId, Map) -> {copy_map, MapId, Map} end,
Updates = [ {push_term, Reg, Term} || {Reg, Term} <- NewRegs ] ++
[ CopyOrInplace(MapId, Map) || {MapId, Map} <- lists:keysort(1, maps:to_list(Maps)) ] ++
[ {gc_map, RawId} || RawId <- Garbage ],
It 's important ( very ! ) that copy_map runs before update_map , since
Order = fun(push_term) -> 0;
(copy_map) -> 1;
(update_map) -> 2;
(gc_map) -> 3 end,
Compare = fun(A, B) -> Order(element(1, A)) =< Order(element(1, B)) end,
{Metadata2, lists:sort(Compare, Updates)}.
compute_reuse_fixpoint(Maps, Metadata, Store) ->
compute_reuse_fixpoint(unused_maps(Metadata), Maps, Metadata, Store, 100).
compute_reuse_fixpoint(Unused, Maps, Metadata, Store, Fuel) ->
Reuse = compute_inplace_updates(Unused, Maps),
RefCounts1 = compute_copy_refcounts(Metadata, Reuse, Maps, Store),
Metadata1 = update_refcounts(RefCounts1, Metadata),
Unused1 = unused_maps(Metadata1),
case Unused1 == Unused of
_ when Fuel =< 0 ->
?ASSERT(false, {reuse_fixpoint_out_of_fuel, Metadata, Unused, Maps}),
{Unused, Reuse, Metadata1};
true -> {Unused, Reuse, Metadata1};
false ->
compute_reuse_fixpoint(Unused1, Maps, Metadata, Store, Fuel - 1)
end.
perform_store_updates(OldMeta, [Update|Left], Meta, GasLeft, Store) ->
?DEBUG_PRINT("Update: ~p\n", [Update]),
{Meta1, Bytes, Store1} = perform_store_update(OldMeta, Update, {Meta, Store}),
GasLeft1 = spend_size_gas(GasLeft, Bytes),
perform_store_updates(OldMeta, Left, Meta1, GasLeft1, Store1);
perform_store_updates(_OldMeta, [], Meta, GasLeft, Store) ->
{Store1, Bytes} = push_term(?META_STORE_POS, Meta, Store),
GasLeft1 = spend_size_gas(GasLeft, Bytes),
{Store1, GasLeft1}.
spend_size_gas(GasLeft, Bytes) ->
?DEBUG_PRINT("GasLeft: ~w Bytes: ~w\n", [GasLeft, Bytes]),
case GasLeft - Bytes * aec_governance:store_byte_gas() of
TooLittle when TooLittle < 0 ->
throw(out_of_gas);
Enough ->
Enough
end.
-spec perform_store_update(store_meta(), store_update(), {store_meta(), aect_contracts_store:store()}) ->
{store_meta(), non_neg_integer(), aect_contracts_store:store()}.
perform_store_update(_OldMeta, {push_term, StorePos, FateVal}, {Meta, Store}) ->
{Store1, Bytes} = push_term(StorePos, FateVal, Store),
{Meta, Bytes, Store1};
perform_store_update(OldMeta, {copy_map, MapId, Map}, S) ->
copy_map(OldMeta, MapId, Map, S);
perform_store_update(_OldMeta, {update_map, MapId, Map}, S) ->
update_map(MapId, Map, S);
perform_store_update(_OldMeta, {gc_map, MapId}, S) ->
gc_map(MapId, S).
push_term(Pos, FateVal, Store) ->
Val = aeb_fate_encoding:serialize(FateVal),
Key = store_key(Pos),
Bytes = byte_size(Key) + byte_size(Val),
{aect_contracts_store:put(Key, Val, Store), Bytes}.
copy_map(_OldMeta, MapId, Map, {Meta, Store}) when ?IS_FATE_MAP(Map) ->
The RefCount was set in compute_store_updates
?METADATA(_, RefCount, _) = get_map_meta(MapId, Meta),
RawId = = MapId for fresh maps
Size = maps:size(Map),
Update the metadata with RawId and Size
Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size), Meta),
{Store1, Bytes} = write_bin_data(RawId, BinData, Store),
Store2 = aect_contracts_store:put(map_data_key(RawId), <<0>>, Store1),
{Meta1, Bytes, Store2};
copy_map(OldMeta, MapId, ?FATE_STORE_MAP(Cache, OldId), {Meta, Store}) ->
?METADATA(_, RefCount, _) = get_map_meta(MapId, Meta),
?METADATA(OldRawId, _RefCount, OldSize) = get_map_meta(OldId, OldMeta),
RawId = MapId,
OldMap = aect_contracts_store:subtree(map_data_key(OldRawId), Store),
NewData = cache_to_bin_data(Cache),
Size = OldSize + size_delta(OldMap, NewData),
Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size), Meta),
First copy the old data , then update with the new
{Store1, Bytes} = write_bin_data(RawId, lists:keysort(1, maps:to_list(OldMap)) ++ NewData, Store),
Store2 = aect_contracts_store:put(map_data_key(RawId), <<0>>, Store1),
{Meta1, Bytes, Store2}.
update_map(MapId, ?FATE_STORE_MAP(Cache, OldId), {Meta, Store}) ->
Precomputed
?METADATA(RawId, _RefCount, OldSize) = get_map_meta(OldId, Meta),
NewData = cache_to_bin_data(Cache),
Size = OldSize + size_delta(RawId, Store, NewData),
{Store1, Bytes} = write_bin_data(RawId, NewData, Store),
Meta1 = put_map_meta(MapId, ?METADATA(RawId, RefCount, Size),
remove_map_meta(OldId, Meta)),
refcounts for maps in the Cache , now we have to subtract refcounts for
RefCounts = lists:foldl(fun({Key, _}, Count) ->
Count)
end, aeb_fate_maps:refcount_zero(), NewData),
Meta2 = update_refcounts(RefCounts, Meta1),
{Meta2, Bytes, Store1}.
gc_map(RawId, {Meta, Store}) ->
Only the RawId here , we already removed the MapId from the metadata .
Data = aect_contracts_store:subtree(map_data_key(RawId), Store),
Store1 = maps:fold(fun(Key, _, S) -> aect_contracts_store:remove(map_raw_key(RawId, Key), S) end,
aect_contracts_store:remove(map_data_key(RawId), Store), Data),
{Meta, _Bytes = 0, Store1}.
-type bin_data() :: [{binary(), binary() | ?FATE_MAP_TOMBSTONE}].
-type map_cache() :: #{fate_val() => fate_val() | ?FATE_MAP_TOMBSTONE}.
-spec cache_to_bin_data(map_cache()) -> bin_data().
cache_to_bin_data(Cache) ->
[ begin
KeyBin = aeb_fate_encoding:serialize(K),
ValBin = case V of ?FATE_MAP_TOMBSTONE -> ?FATE_MAP_TOMBSTONE;
_ -> aeb_fate_encoding:serialize(V)
end,
{KeyBin, ValBin}
end || {K, V} <- lists:keysort(1, maps:to_list(Cache)) ].
-spec write_bin_data(raw_id(), bin_data(), aect_contracts_store:store()) ->
{aect_contracts_store:store(), non_neg_integer()}.
write_bin_data(RawId, BinData, Store) ->
lists:foldl(
fun({K, ?FATE_MAP_TOMBSTONE}, {S, B}) ->
{aect_contracts_store:remove(map_raw_key(RawId, K), S),
B};
({K, V}, {S, B}) ->
{aect_contracts_store:put(map_raw_key(RawId, K), V, S),
B + byte_size(K) + byte_size(V)}
end, {Store, 0}, BinData).
-spec size_delta(#{binary() => binary()}, bin_data()) -> integer().
%% Net change in entry count when writing NewData over the given old map.
size_delta(OldMap, NewData) ->
    InOld = fun(Key) -> maps:is_key(Key, OldMap) end,
    size_delta_(InOld, NewData).
-spec size_delta(raw_id(), aect_contracts_store:store(), bin_data()) -> integer().
%% Same, but the old entries live in the contract store under RawId
%% (an empty binary from the store means "absent").
size_delta(RawId, Store, NewData) ->
    InStore = fun(Key) ->
                  aect_contracts_store:get(map_raw_key(RawId, Key), Store) /= <<>>
              end,
    size_delta_(InStore, NewData).
%% Count insertions minus deletions implied by NewData, where IsKey tells
%% whether a key already exists.
size_delta_(IsKey, NewData) ->
    Step = fun({Key, ?FATE_MAP_TOMBSTONE}, Acc) ->
                   case IsKey(Key) of
                       true  -> Acc - 1;  %% deleting an existing key
                       false -> Acc       %% deleting a missing key: no-op
                   end;
              ({Key, _Val}, Acc) ->
                   case IsKey(Key) of
                       true  -> Acc;      %% overwrite: entry count unchanged
                       false -> Acc + 1   %% fresh key
                   end
           end,
    lists:foldl(Step, 0, NewData).
%% Total refcount delta: the union of deltas caused by register writes and
%% by store-map updates.
compute_refcounts(Regs, Maps, Metadata, Store) ->
    TermRefCount = register_refcounts(Regs, Store),
    MapRefCount = maps_refcounts(Metadata, Maps, Store),
    aeb_fate_maps:refcount_union(TermRefCount, MapRefCount).
%% Refcount delta from updating the store registers.
%% Union of the per-register refcount deltas (new value vs. what the store
%% currently holds for that register).
register_refcounts(Regs, Store) ->
    aeb_fate_maps:refcount_union(
      [ refcount_delta(store_key(Reg), NewVal, Store)
        || {Reg, NewVal} <- Regs ]).
%% Refcount delta from store map updates.
%% Union of refcounts for all updated store maps; the keysort makes the
%% iteration order deterministic.
maps_refcounts(Metadata, Maps, Store) ->
    aeb_fate_maps:refcount_union(
      [ map_refcounts(Metadata, Map, Store)
        || {_, Map} <- lists:keysort(1, maps:to_list(Maps)) ]).
%% Refcounts contributed by one map value: a plain FATE map counts all of
%% its contents; a store map counts only the values in its dirty cache.
map_refcounts(_Meta, Map, _Store) when ?IS_FATE_MAP(Map) ->
    aeb_fate_maps:refcount(Map);
map_refcounts(_Meta, ?FATE_STORE_MAP(Cache, _Id), _Store) ->
    maps:fold(fun(_Key, Val, Count) ->
                  aeb_fate_maps:refcount_union(aeb_fate_maps:refcount(Val), Count)
              end, #{}, Cache).
%% Refcount adjustments for store maps that are copied, or updated in place
%% (the Reuse mapping tells which old id a MapId takes over).
%% Fix: two explanatory lines inside the fold had lost their `%%` markers
%% during extraction, making the function a syntax error; restored as
%% comments.  No code changed.
compute_copy_refcounts(Meta, Reuse, Maps, Store) ->
    maps:fold(fun(MapId, ?FATE_STORE_MAP(Cache, Id), Count) ->
                  case maps:get(Id, Reuse, no_reuse) of
                      MapId ->
                          %% Subtract refcounts for entries overwritten by the Cache.
                          ?METADATA(RawId, _RefCount, _Size) = get_map_meta(Id, Meta),
                          RemovedValues = [ Val || Key <- maps:keys(Cache),
                                                   {ok, Val, _} <- [find_in_store(map_data_key(RawId, Key), Store)] ],
                          Removed = aeb_fate_maps:refcount(RemovedValues),
                          aeb_fate_maps:refcount_diff(Count, Removed);
                      _ ->
                          %% Note that we already added refcounts for the Cache.
                          %% Count the old entries that are NOT shadowed by it.
                          ?METADATA(RawId, _RefCount, _Size) = get_map_meta(Id, Meta),
                          NewKeys = [ aeb_fate_encoding:serialize(Key) || Key <- maps:keys(Cache) ],
                          OldBin = maps:without(NewKeys, aect_contracts_store:subtree(map_data_key(RawId), Store)),
                          Count1 = aeb_fate_maps:refcount([ aeb_fate_encoding:deserialize(Val) || Val <- maps:values(OldBin) ]),
                          aeb_fate_maps:refcount_union(Count1, Count)
                  end;
                 (_, _, Count) -> Count
              end, #{}, Maps).
-spec refcount_delta(binary(), fate_val() | ?FATE_MAP_TOMBSTONE, aect_contracts_store:store()) ->
        aeb_fate_maps:refcount().
%% Refcount difference between a new value and whatever is currently stored
%% under StoreKey; an empty binary from the store counts as nothing stored.
refcount_delta(StoreKey, Val, Store) ->
    OldCount =
        case aect_contracts_store:get(StoreKey, Store) of
            <<>> -> #{};
            Bin  -> aeb_fate_maps:refcount(aeb_fate_encoding:deserialize(Bin))
        end,
    aeb_fate_maps:refcount_diff(aeb_fate_maps:refcount(Val), OldCount).
-spec update_refcounts(aeb_fate_maps:refcount(), store_meta()) -> store_meta().
%% Apply per-map refcount deltas to the metadata.  A map with no existing
%% metadata entry gets a fresh one whose RawId and Size are still undefined.
update_refcounts(Deltas, Meta) ->
    maps:fold(fun(Id, Delta, M) ->
                  maps:update_with(Id,
                      fun(?METADATA(RawId, RefCount, Size)) ->
                          ?METADATA(RawId, RefCount + Delta, Size)
                      end, ?METADATA(undefined, Delta, undefined), M)
              end, Meta, Deltas).
%% Set (as a map to 'true') of store-map ids whose refcount is zero.
unused_maps(Metadata) ->
    maps:from_list([ {Id, true} || {Id, ?METADATA(_, 0, _)} <- maps:to_list(Metadata) ]).
%% For each store map whose old id is unused, record that the old id's slot
%% can be reused: maps OldId => MapId.
compute_inplace_updates(Unused, Maps) ->
    maps:fold(fun(MapId, Val, Acc) ->
                  case Val of
                      ?FATE_STORE_MAP(_, OldId) ->
                          case maps:is_key(OldId, Unused) of
                              true  -> Acc#{ OldId => MapId };
                              false -> Acc
                          end;
                      _ -> Acc
                  end
              end, #{}, Maps).
%% Raw ids of maps that can be deleted, iterated to a fixed point: removing
%% one garbage map lowers the refcounts of maps it referenced, which may in
%% turn become garbage.  Maps scheduled for in-place reuse are never
%% collected.
compute_garbage(Unused, Reuse, Metadata, Store) ->
    Garbage = maps:keys(Unused) -- maps:keys(Reuse),
    case Garbage of
        [] -> {[], Metadata};
        _ ->
            Refcounts = gc_refcounts(Garbage, Metadata, Store),
            Metadata1 = update_refcounts(Refcounts, Metadata),
            Metadata2 = maps:without(Garbage, Metadata1),
            Unused1 = unused_maps(Metadata2),
            %% Recurse with the metadata minus this round's garbage.
            {Garbage1, Metadata3} = compute_garbage(Unused1, Reuse, Metadata2, Store),
            GetRawId = fun(Id) -> ?METADATA(RawId, _, _) = get_map_meta(Id, Metadata), RawId end,
            {lists:map(GetRawId, Garbage) ++ Garbage1, Metadata3}
    end.
%% Refcount delta arising from garbage collecting a map.
%% Negated refcounts of every value stored under the given map ids, i.e.
%% the delta to apply when those maps are deleted.
gc_refcounts(Ids, Metadata, Store) ->
    Count = fun(Id) ->
                ?METADATA(RawId, _, _) = get_map_meta(Id, Metadata),
                Data = aect_contracts_store:subtree(map_data_key(RawId), Store),
                %% zero - counts == negated counts
                aeb_fate_maps:refcount_diff(aeb_fate_maps:refcount_zero(),
                    aeb_fate_maps:refcount_union(
                      [ aeb_fate_maps:refcount(aeb_fate_encoding:deserialize(Val))
                        || Val <- maps:values(Data) ]))
            end,
    aeb_fate_maps:refcount_union(lists:map(Count, Ids)).
-ifdef(DEBUG).
%% Dump every cached contract store to stdout (debug builds only).
debug_stores(#store{cache = Cache}) ->
    [ begin
          io:format("Contract: ~p\n- Store\n~s", [Pubkey, debug_store(Store)])
      end || {Pubkey, #cache_entry{ store = Store }} <- maps:to_list(Cache) ],
    ok.
%% Render one contract store as "Regs"/"Maps" text: registers become
%% deserialized FATE values keyed by register number; map rows are keyed by
%% {RawId, Key} (the row whose Key is <<>> keeps its value undecoded).
debug_store(Store) ->
    Map = aect_contracts_store:subtree(<<>>, Store),
    Regs = maps:from_list(
             [ {binary:decode_unsigned(Reg), aeb_fate_encoding:deserialize(Val)}
               || {<<?STORE_KEY_PREFIX, Reg/binary>>, Val} <- maps:to_list(Map) ]),
    Maps = maps:from_list(
             [ case Key of
                   <<>> -> {binary:decode_unsigned(RawId), Val};
                   _ -> {{binary:decode_unsigned(RawId), aeb_fate_encoding:deserialize(Key)},
                         aeb_fate_encoding:deserialize(Val)}
               end || {<<?STORE_MAP_PREFIX, RawId:4/binary, Key/binary>>, Val} <- maps:to_list(Map) ]),
    io_lib:format(" Regs: ~p\n Maps: ~p\n", [Regs, Maps]).
-endif.
|
97dde83723539fdd6168249416bbb9ef514c14f0d1b7648c2ebdf0fefde41ac8 | melange-re/melange | arg.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Para , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Unit of (unit -> unit) (* Call the function with unit argument *)
| Bool of (bool -> unit) (* Call the function with a bool argument *)
| Set of bool ref (* Set the reference to true *)
| Clear of bool ref (* Set the reference to false *)
| String of (string -> unit) (* Call the function with a string argument *)
| Set_string of string ref (* Set the reference to the string argument *)
| Int of (int -> unit) (* Call the function with an int argument *)
| Set_int of int ref (* Set the reference to the int argument *)
| Float of (float -> unit) (* Call the function with a float argument *)
| Set_float of float ref (* Set the reference to the float argument *)
| Tuple of spec list (* Take several arguments according to the
spec list *)
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
| Rest of (string -> unit) (* Stop interpreting keywords and call the
function with each remaining argument *)
| Rest_all of (string list -> unit)
(* Stop interpreting keywords and call the
function with all remaining arguments. *)
| Expand of (string -> string array) (* If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo"
is registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic]. *)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Wrong of string * string * string (* option, actual, expected *)
| Missing of string
| Message of string
exception Stop of error (* used internally *)
open Printf
(* Look up [x] among triples by first component and return the second.
   Raises [Not_found] when no triple matches. *)
let rec assoc3 x l =
  match l with
  | (y1, y2, _) :: _ when y1 = x -> y2
  | _ :: rest -> assoc3 x rest
  | [] -> raise Not_found
(* Split ["key=value"] at the first ['=']; raises [Not_found] if there is
   none. *)
let split s =
  let i = String.index s '=' in
  (String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1))
(* Render a symbol list as [prefix ^ s1 ^ sep ^ ... ^ sn ^ suffix], or
   "<none>" for the empty list. *)
let make_symlist prefix sep suffix l =
  match l with
  | [] -> "<none>"
  | _ :: _ -> prefix ^ String.concat sep l ^ suffix
(* Print one spec entry into [buf].  Entries with an empty doc string are
   hidden; [Symbol] options show the accepted symbols inline. *)
let print_spec buf (key, spec, doc) =
  if String.length doc > 0 then
    match spec with
    | Symbol (l, _) ->
        bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
    | _ ->
        bprintf buf " %s %s\n" key doc
(* Raising [Stop (Unknown "-help")] routes through the error converter,
   which turns it into [Help] carrying the usage text. *)
let help_action () = raise (Stop (Unknown "-help"))

(* Append -help/--help entries unless the user already declared them. *)
let add_help speclist =
  let declared key =
    match assoc3 key speclist with
    | _ -> true
    | exception Not_found -> false
  in
  let entry key = (key, Unit help_action, " Display this list of options") in
  let add1 = if declared "-help" then [] else [entry "-help"]
  and add2 = if declared "--help" then [] else [entry "--help"] in
  speclist @ (add1 @ add2)
(* Write [errmsg] followed by the documented options (including the
   implicit -help/--help entries) into [buf]. *)
let usage_b buf speclist errmsg =
  bprintf buf "%s\n" errmsg;
  List.iter (print_spec buf) (add_help speclist)
(* Render the usage message to a string. *)
let usage_string speclist errmsg =
  let b = Buffer.create 200 in
  usage_b b speclist errmsg;
  Buffer.contents b
(* Print the usage message on stderr. *)
let usage speclist errmsg =
  eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
(* [Some (bool_of_string x)], or [None] if [x] is not "true"/"false". *)
let bool_of_string_opt x =
  match bool_of_string x with
  | b -> Some b
  | exception Invalid_argument _ -> None

(* [Some (int_of_string x)], or [None] on a malformed integer. *)
let int_of_string_opt x =
  match int_of_string x with
  | n -> Some n
  | exception Failure _ -> None

(* [Some (float_of_string x)], or [None] on a malformed float. *)
let float_of_string_opt x =
  match float_of_string x with
  | f -> Some f
  | exception Failure _ -> None
(* Core parsing loop shared by every [parse*] entry point.
   [allow_expand] permits [Expand] specs, which splice freshly produced
   arguments into [!argv]; [current] and [argv] are references so both the
   loop and callbacks observe splices and progress.  All failures are
   routed through [convert_error], which builds the [Bad]/[Help]
   exceptions documented in the interface.
   Fix: the [Invalid_argument] message for a misplaced [Expand] contained
   a doubled word ("is is"). *)
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun
  errmsg =
  let initpos = !current in
  let convert_error error =
    (* convert an internal error to a Bad/Help exception
       *or* add the program name as a prefix and the usage message as a suffix
       to an user-raised Bad exception.
    *)
    let b = Buffer.create 200 in
    let progname =
      if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
    begin match error with
      | Unknown "-help" -> ()
      | Unknown "--help" -> ()
      | Unknown s ->
          bprintf b "%s: unknown option '%s'.\n" progname s
      | Missing s ->
          bprintf b "%s: option '%s' needs an argument.\n" progname s
      | Wrong (opt, arg, expected) ->
          bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
                  progname arg opt expected
      | Message s -> (* user error message *)
          bprintf b "%s: %s.\n" progname s
    end;
    usage_b b !speclist errmsg;
    if error = Unknown "-help" || error = Unknown "--help"
    then Help (Buffer.contents b)
    else Bad (Buffer.contents b)
  in
  incr current;
  while !current < (Array.length !argv) do
    begin try
      let s = !argv.(!current) in
      if String.length s >= 1 && s.[0] = '-' then begin
        (* [follow] is [Some arg] when the option was written "-opt=arg". *)
        let action, follow =
          try assoc3 s !speclist, None
          with Not_found ->
            try
              let keyword, arg = split s in
              assoc3 keyword !speclist, Some arg
            with Not_found -> raise (Stop (Unknown s))
        in
        let no_arg () =
          match follow with
          | None -> ()
          | Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
        let get_arg () =
          match follow with
          | None ->
              if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
              else raise (Stop (Missing s))
          | Some arg -> arg
        in
        let consume_arg () =
          match follow with
          | None -> incr current
          | Some _ -> ()
        in
        (* [treat_action] is recursive only for [Tuple]. *)
        let rec treat_action = function
        | Unit f -> no_arg (); f ();
        | Bool f ->
            let arg = get_arg () in
            begin match bool_of_string_opt arg with
            | None -> raise (Stop (Wrong (s, arg, "a boolean")))
            | Some s -> f s
            end;
            consume_arg ();
        | Set r -> no_arg (); r := true;
        | Clear r -> no_arg (); r := false;
        | String f ->
            let arg = get_arg () in
            f arg;
            consume_arg ();
        | Symbol (symb, f) ->
            let arg = get_arg () in
            if List.mem arg symb then begin
              f arg;
              consume_arg ();
            end else begin
              raise (Stop (Wrong (s, arg, "one of: "
                                          ^ (make_symlist "" " " "" symb))))
            end
        | Set_string r ->
            r := get_arg ();
            consume_arg ();
        | Int f ->
            let arg = get_arg () in
            begin match int_of_string_opt arg with
            | None -> raise (Stop (Wrong (s, arg, "an integer")))
            | Some x -> f x
            end;
            consume_arg ();
        | Set_int r ->
            let arg = get_arg () in
            begin match int_of_string_opt arg with
            | None -> raise (Stop (Wrong (s, arg, "an integer")))
            | Some x -> r := x
            end;
            consume_arg ();
        | Float f ->
            let arg = get_arg () in
            begin match float_of_string_opt arg with
            | None -> raise (Stop (Wrong (s, arg, "a float")))
            | Some x -> f x
            end;
            consume_arg ();
        | Set_float r ->
            let arg = get_arg () in
            begin match float_of_string_opt arg with
            | None -> raise (Stop (Wrong (s, arg, "a float")))
            | Some x -> r := x
            end;
            consume_arg ();
        | Tuple specs ->
            no_arg ();
            List.iter treat_action specs;
        | Rest f ->
            no_arg ();
            while !current < (Array.length !argv) - 1 do
              f !argv.(!current + 1);
              consume_arg ();
            done;
        | Rest_all f ->
            no_arg ();
            let acc = ref [] in
            while !current < Array.length !argv - 1 do
              acc := !argv.(!current + 1) :: !acc;
              consume_arg ();
            done;
            f (List.rev !acc)
        | Expand f ->
            if not allow_expand then
              raise (Invalid_argument "Arg.Expand is only allowed with \
                                       Arg.parse_and_expand_argv_dynamic");
            let arg = get_arg () in
            let newarg = f arg in
            consume_arg ();
            (* Splice the expansion right after the current position so the
               produced arguments are processed next. *)
            let before = Array.sub !argv 0 (!current + 1)
            and after =
              Array.sub !argv (!current + 1)
                        ((Array.length !argv) - !current - 1) in
            argv:= Array.concat [before;newarg;after];
        in
        treat_action action end
      else anonfun s
    with | Bad m -> raise (convert_error (Message m));
         | Stop e -> raise (convert_error e);
    end;
    incr current
  done
(* [argv] is a reference here so that [Expand] specs may splice new
   arguments into it. *)
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
  parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
(* Fixed [argv] array; the spec list reference may still be mutated by
   callbacks.  [Expand] is rejected. *)
let parse_argv_dynamic ?(current=current) argv speclist anonfun errmsg =
  parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun
  errmsg
(* Entry point with an immutable spec list. *)
let parse_argv ?(current=current) argv speclist anonfun errmsg =
  parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
(* Parse [Sys.argv]; prints the message and exits with 2 on error, 0 on
   help. *)
let parse l f msg =
  try
    parse_argv Sys.argv l f msg
  with
  | Bad msg -> eprintf "%s" msg; exit 2
  | Help msg -> printf "%s" msg; exit 0
(* Same, but the spec list reference may be mutated by callbacks. *)
let parse_dynamic l f msg =
  try
    parse_argv_dynamic Sys.argv l f msg
  with
  | Bad msg -> eprintf "%s" msg; exit 2
  | Help msg -> printf "%s" msg; exit 0
(* Like [parse] but allows [Expand] specs; works on local references so
   splicing does not disturb [Sys.argv] or the global [current]. *)
let parse_expand l f msg =
  try
    let argv = ref Sys.argv in
    let spec = ref l in
    let current = ref (!current) in
    parse_and_expand_argv_dynamic current argv spec f msg
  with
  | Bad msg -> eprintf "%s" msg; exit 2
  | Help msg -> printf "%s" msg; exit 0
(* Index where the second word of [s] starts: the separator is the first
   tab if any, otherwise the first space; spaces after it are skipped.
   Returns [String.length s] when there is no second word. *)
let second_word s =
  let len = String.length s in
  let rec skip_spaces n =
    if n >= len then len
    else if s.[n] = ' ' then skip_spaces (n + 1)
    else n
  in
  let sep =
    match String.index_opt s '\t' with
    | Some _ as found -> found
    | None -> String.index_opt s ' '
  in
  match sep with
  | Some n -> skip_spaces (n + 1)
  | None -> len
(* Fold step computing the alignment column: for [Symbol] only the keyword
   width counts (the symbols go on their own line); otherwise the start of
   the doc's second word is added to the keyword width. *)
let max_arg_len cur (kwd, spec, doc) =
  match spec with
  | Symbol _ -> Int.max cur (String.length kwd)
  | _ -> Int.max cur (String.length kwd + second_word doc)
(* Replace only the first tab of [s] with a space; later tabs are kept. *)
let replace_leading_tab s =
  let replaced = ref false in
  String.map
    (fun c ->
      if c = '\t' && not !replaced then begin replaced := true; ' ' end else c)
    s
(* Pad one entry so its doc text starts at column [len].  [Symbol] entries
   put the message on its own line below the symbol list; undocumented
   entries are returned untouched so they stay hidden. *)
let add_padding len ksd =
  match ksd with
  | (_, _, "") ->
      (* Do not pad undocumented options, so that they still don't show up when
       * run through [usage] or [parse]. *)
      ksd
  | (kwd, (Symbol _ as spec), msg) ->
      let cutcol = second_word msg in
      let spaces = String.make ((Int.max 0 (len - cutcol)) + 3) ' ' in
      (kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
  | (kwd, spec, msg) ->
      let cutcol = second_word msg in
      let kwd_len = String.length kwd in
      let diff = len - kwd_len - cutcol in
      if diff <= 0 then
        (kwd, spec, replace_leading_tab msg)
      else
        (* Insert the padding between the first and second word. *)
        let spaces = String.make diff ' ' in
        let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
        let suffix = String.sub msg cutcol (String.length msg - cutcol) in
        (kwd, spec, prefix ^ spaces ^ suffix)
(* Align the doc strings of [speclist] (plus the implicit help entries) on
   a common column, capped at [limit]. *)
let align ?(limit=max_int) speclist =
  let completed = add_help speclist in
  let len = List.fold_left max_arg_len 0 completed in
  let len = Int.min len limit in
  List.map (add_padding len) completed
(* Drop a single trailing CR, for argument files with Windows newlines. *)
let trim_cr s =
  let len = String.length s in
  if len > 0 && s.[len - 1] = '\r' then String.sub s 0 (len - 1) else s
(* Read [file] (binary mode) and split its contents on [sep].  When [trim]
   is set a trailing CR is removed from each chunk.  A final unterminated
   chunk is kept. *)
let read_aux trim sep file =
  let ic = open_in_bin file in
  let buf = Buffer.create 200 in
  let words = ref [] in
  let stash () =
    let word = Buffer.contents buf in
    let word = if trim then trim_cr word else word in
    words := word :: !words;
    Buffer.clear buf
  in
  begin
    try while true do
        let c = input_char ic in
        if c = sep then stash () else Buffer.add_char buf c
      done
    with End_of_file -> ()
  end;
  if Buffer.length buf > 0 then stash ();
  close_in ic;
  Array.of_list (List.rev !words)
(* Newline-separated argument file, tolerant of CRLF endings. *)
let read_arg = read_aux true '\n'
(* NUL-separated argument file, bytes kept verbatim. *)
let read_arg0 = read_aux false '\x00'
(* Write each argument followed by [sep] to [file] (binary mode). *)
let write_aux sep file args =
  let oc = open_out_bin file in
  Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
  close_out oc
(* Counterparts of [read_arg] / [read_arg0]. *)
let write_arg = write_aux '\n'
let write_arg0 = write_aux '\x00'
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/stdlib-412/stdlib_modules/arg.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Call the function with unit argument
Call the function with a bool argument
Set the reference to true
Set the reference to false
Call the function with a string argument
Set the reference to the string argument
Call the function with an int argument
Set the reference to the int argument
Call the function with a float argument
Set the reference to the float argument
Take several arguments according to the
spec list
Stop interpreting keywords and call the
function with each remaining argument
Stop interpreting keywords and call the
function with all remaining arguments.
If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo"
is registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic].
option, actual, expected
used internally
convert an internal error to a Bad/Help exception
*or* add the program name as a prefix and the usage message as a suffix
to an user-raised Bad exception.
user error message
Do not pad undocumented options, so that they still don't show up when
* run through [usage] or [parse]. | , projet Para , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
| Rest_all of (string list -> unit)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Missing of string
| Message of string
open Printf
let rec assoc3 x l =
match l with
| [] -> raise Not_found
| (y1, y2, _) :: _ when y1 = x -> y2
| _ :: t -> assoc3 x t
let split s =
let i = String.index s '=' in
let len = String.length s in
String.sub s 0 i, String.sub s (i+1) (len-(i+1))
let make_symlist prefix sep suffix l =
match l with
| [] -> "<none>"
| h::t -> (List.fold_left (fun x y -> x ^ sep ^ y) (prefix ^ h) t) ^ suffix
let print_spec buf (key, spec, doc) =
if String.length doc > 0 then
match spec with
| Symbol (l, _) ->
bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
| _ ->
bprintf buf " %s %s\n" key doc
let help_action () = raise (Stop (Unknown "-help"))
let add_help speclist =
let add1 =
try ignore (assoc3 "-help" speclist); []
with Not_found ->
["-help", Unit help_action, " Display this list of options"]
and add2 =
try ignore (assoc3 "--help" speclist); []
with Not_found ->
["--help", Unit help_action, " Display this list of options"]
in
speclist @ (add1 @ add2)
let usage_b buf speclist errmsg =
bprintf buf "%s\n" errmsg;
List.iter (print_spec buf) (add_help speclist)
let usage_string speclist errmsg =
let b = Buffer.create 200 in
usage_b b speclist errmsg;
Buffer.contents b
let usage speclist errmsg =
eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
let bool_of_string_opt x =
try Some (bool_of_string x)
with Invalid_argument _ -> None
let int_of_string_opt x =
try Some (int_of_string x)
with Failure _ -> None
let float_of_string_opt x =
try Some (float_of_string x)
with Failure _ -> None
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun
errmsg =
let initpos = !current in
let convert_error error =
let b = Buffer.create 200 in
let progname =
if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
begin match error with
| Unknown "-help" -> ()
| Unknown "--help" -> ()
| Unknown s ->
bprintf b "%s: unknown option '%s'.\n" progname s
| Missing s ->
bprintf b "%s: option '%s' needs an argument.\n" progname s
| Wrong (opt, arg, expected) ->
bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
progname arg opt expected
bprintf b "%s: %s.\n" progname s
end;
usage_b b !speclist errmsg;
if error = Unknown "-help" || error = Unknown "--help"
then Help (Buffer.contents b)
else Bad (Buffer.contents b)
in
incr current;
while !current < (Array.length !argv) do
begin try
let s = !argv.(!current) in
if String.length s >= 1 && s.[0] = '-' then begin
let action, follow =
try assoc3 s !speclist, None
with Not_found ->
try
let keyword, arg = split s in
assoc3 keyword !speclist, Some arg
with Not_found -> raise (Stop (Unknown s))
in
let no_arg () =
match follow with
| None -> ()
| Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
let get_arg () =
match follow with
| None ->
if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
else raise (Stop (Missing s))
| Some arg -> arg
in
let consume_arg () =
match follow with
| None -> incr current
| Some _ -> ()
in
let rec treat_action = function
| Unit f -> no_arg (); f ();
| Bool f ->
let arg = get_arg () in
begin match bool_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a boolean")))
| Some s -> f s
end;
consume_arg ();
| Set r -> no_arg (); r := true;
| Clear r -> no_arg (); r := false;
| String f ->
let arg = get_arg () in
f arg;
consume_arg ();
| Symbol (symb, f) ->
let arg = get_arg () in
if List.mem arg symb then begin
f arg;
consume_arg ();
end else begin
raise (Stop (Wrong (s, arg, "one of: "
^ (make_symlist "" " " "" symb))))
end
| Set_string r ->
r := get_arg ();
consume_arg ();
| Int f ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> f x
end;
consume_arg ();
| Set_int r ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> r := x
end;
consume_arg ();
| Float f ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> f x
end;
consume_arg ();
| Set_float r ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> r := x
end;
consume_arg ();
| Tuple specs ->
no_arg ();
List.iter treat_action specs;
| Rest f ->
no_arg ();
while !current < (Array.length !argv) - 1 do
f !argv.(!current + 1);
consume_arg ();
done;
| Rest_all f ->
no_arg ();
let acc = ref [] in
while !current < Array.length !argv - 1 do
acc := !argv.(!current + 1) :: !acc;
consume_arg ();
done;
f (List.rev !acc)
| Expand f ->
if not allow_expand then
raise (Invalid_argument "Arg.Expand is is only allowed with \
Arg.parse_and_expand_argv_dynamic");
let arg = get_arg () in
let newarg = f arg in
consume_arg ();
let before = Array.sub !argv 0 (!current + 1)
and after =
Array.sub !argv (!current + 1)
((Array.length !argv) - !current - 1) in
argv:= Array.concat [before;newarg;after];
in
treat_action action end
else anonfun s
with | Bad m -> raise (convert_error (Message m));
| Stop e -> raise (convert_error e);
end;
incr current
done
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
let parse_argv_dynamic ?(current=current) argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun
errmsg
let parse_argv ?(current=current) argv speclist anonfun errmsg =
parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
let parse l f msg =
try
parse_argv Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_dynamic l f msg =
try
parse_argv_dynamic Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_expand l f msg =
try
let argv = ref Sys.argv in
let spec = ref l in
let current = ref (!current) in
parse_and_expand_argv_dynamic current argv spec f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let second_word s =
let len = String.length s in
let rec loop n =
if n >= len then len
else if s.[n] = ' ' then loop (n+1)
else n
in
match String.index s '\t' with
| n -> loop (n+1)
| exception Not_found ->
begin match String.index s ' ' with
| n -> loop (n+1)
| exception Not_found -> len
end
let max_arg_len cur (kwd, spec, doc) =
match spec with
| Symbol _ -> Int.max cur (String.length kwd)
| _ -> Int.max cur (String.length kwd + second_word doc)
let replace_leading_tab s =
let seen = ref false in
String.map (function '\t' when not !seen -> seen := true; ' ' | c -> c) s
let add_padding len ksd =
match ksd with
| (_, _, "") ->
ksd
| (kwd, (Symbol _ as spec), msg) ->
let cutcol = second_word msg in
let spaces = String.make ((Int.max 0 (len - cutcol)) + 3) ' ' in
(kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
| (kwd, spec, msg) ->
let cutcol = second_word msg in
let kwd_len = String.length kwd in
let diff = len - kwd_len - cutcol in
if diff <= 0 then
(kwd, spec, replace_leading_tab msg)
else
let spaces = String.make diff ' ' in
let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
let suffix = String.sub msg cutcol (String.length msg - cutcol) in
(kwd, spec, prefix ^ spaces ^ suffix)
let align ?(limit=max_int) speclist =
let completed = add_help speclist in
let len = List.fold_left max_arg_len 0 completed in
let len = Int.min len limit in
List.map (add_padding len) completed
let trim_cr s =
let len = String.length s in
if len > 0 && String.get s (len - 1) = '\r' then
String.sub s 0 (len - 1)
else
s
let read_aux trim sep file =
let ic = open_in_bin file in
let buf = Buffer.create 200 in
let words = ref [] in
let stash () =
let word = Buffer.contents buf in
let word = if trim then trim_cr word else word in
words := word :: !words;
Buffer.clear buf
in
begin
try while true do
let c = input_char ic in
if c = sep then stash () else Buffer.add_char buf c
done
with End_of_file -> ()
end;
if Buffer.length buf > 0 then stash ();
close_in ic;
Array.of_list (List.rev !words)
let read_arg = read_aux true '\n'
let read_arg0 = read_aux false '\x00'
let write_aux sep file args =
let oc = open_out_bin file in
Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
close_out oc
let write_arg = write_aux '\n'
let write_arg0 = write_aux '\x00'
|
755064befabf687f8f1114f9b6c13e9047d27dc602bb0717aaba85e2d8465281 | faylang/fay | JsFunctionPassing.hs | {-# LANGUAGE EmptyDataDecls #-}
-- | Round-trips Haskell-made JavaScript closures through the Fay FFI:
-- the @makeFunc*@ helpers capture an 'Int' inside a JS function, and the
-- @callFunc*@ helpers invoke such a function from JS.
module JsFunctionPassing where
import FFI
-- | Opaque handle to a raw JavaScript function (no constructors).
data Func
-- | Wrap a constant in a zero-argument JS function.
makeFunc0 :: Int -> Func
makeFunc0 = ffi "function() {return %1;}"
-- | Invoke a zero-argument JS function.
callFunc0 :: Func -> Fay Int
callFunc0 = ffi "%1()"
-- | Wrap a constant in a one-argument JS function; the argument is ignored.
makeFunc1 :: Int -> Func
makeFunc1 = ffi "function(x) {return %1;}"
-- | Invoke a one-argument JS function with the literal argument @1@.
callFunc1 :: Func -> Fay Int
callFunc1 = ffi "%1(1)"
-- Prints 1 then 2.
main = do
  callFunc0 (makeFunc0 1) >>= print
  callFunc1 (makeFunc1 2) >>= print
| null | https://raw.githubusercontent.com/faylang/fay/8455d975f9f0db2ecc922410e43e484fbd134699/tests/JsFunctionPassing.hs | haskell | # LANGUAGE EmptyDataDecls # |
module JsFunctionPassing where
import FFI
data Func
makeFunc0 :: Int -> Func
makeFunc0 = ffi "function() {return %1;}"
callFunc0 :: Func -> Fay Int
callFunc0 = ffi "%1()"
makeFunc1 :: Int -> Func
makeFunc1 = ffi "function(x) {return %1;}"
callFunc1 :: Func -> Fay Int
callFunc1 = ffi "%1(1)"
main = do
callFunc0 (makeFunc0 1) >>= print
callFunc1 (makeFunc1 2) >>= print
|
12ff79e72bd454eae5b7f5dd9e1980454af3cf3eec040baf2ff03997e56d0bbb | antoniogarrote/clj-ml | filters.clj | ;;
;; Data processing of data with different filtering algorithms
@author
;;
(ns #^{:author "Antonio Garrote <>"}
clj-ml.filters
"This namespace defines a set of functions that can be applied to data sets to modify the
dataset in some way: transforming nominal attributes into binary attributes, removing
attributes etc.
A sample use of the API is shown below:
* ds * is the dataset where the first attribute is to be removed
(def *filter* (make-filter :remove-attributes {:dataset-format *ds* :attributes [0]}))
We apply the filter to the original data set and obtain the new one
(def *filtered-ds* (filter-apply *filter* *ds*))
The previous sample of code could be rewritten with the make-apply-filter function:
;; There is no necessity of passing the :dataset-format option, *ds* format is used
;; automatically
(def *filtered-ds* (make-apply-filter :remove-attributes {:attributes [0]} *ds*))"
(:use [clj-ml data utils])
(:import (weka.filters Filter)))
;; Options for the filters
(defmulti #^{:skip-wiki true}
make-filter-options
"Creates the right parameters for a filter"
(fn [kind map] kind))
(defmethod make-filter-options :supervised-discretize
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"
:binary "-D"
:better-encoding "-E"
:kononenko "-K"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :unsupervised-discretize
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:unset-class "-unset-class-temporarily"
:binary "-D"
:better-encoding "-E"
:equal-frequency "-F"
:optimize "-O"}
map
cols-val-a)
cols-val-c (check-option-values {:number-bins "-B"
:weight-bins "-M"}
map
cols-val-b)]
(into-array cols-val-c))))
(defmethod make-filter-options :supervised-nominal-to-binary
([kind map]
(let [cols-val (check-options {:also-binary "-N"
:for-each-nominal "-A"}
map
[""])]
(into-array cols-val))))
(defmethod make-filter-options :unsupervised-nominal-to-binary
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"
:also-binary "-N"
:for-each-nominal "-A"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :remove-attributes
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :select-append-attributes
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :project-attributes
([kind options]
(let [opts (if (nil? (:invert options))
(conj options {:invert true})
(dissoc options :invert))]
(make-filter-options :remove-attributes opts))))
;; Creation of filters
(defmacro #^{:skip-wiki true}
make-filter-m [kind options filter-class]
`(let [filter# (new ~filter-class)
dataset-format# (get ~options :dataset-format)
opts# (make-filter-options ~kind ~options)]
(.setOptions filter# opts#)
(.setInputFormat filter# dataset-format#)
filter#))
(defmulti make-filter
"Creates a filter for the provided attributes format. The first argument must be a symbol
identifying the kind of filter to generate.
Currently the following filters are supported:
- :supervised-discretize
- :unsupervised-discretize
- :supervised-nominal-to-binary
- :unsupervised-nominal-to-binary
- :remove-attributes
- :select-append-attributes
- :project-attributes
The second parameter is a map of attributes
for the filter to be built.
An example of usage could be:
(make-filter :remove {:attributes [0 1] :dataset-format dataset})
Documentation for the different filters:
* :supervised-discretize
An instance filter that discretizes a range of numeric attributes
in the dataset into nominal attributes. Discretization is by Fayyad
& Irani's MDL method (the default).
Parameters:
- :attributes
Index of the attributes to be discretized, sample value: [0,4,6]
- :invert
Invert mathcing sense of the columns, sample value: true
- :kononenko
Use Kononenko's MDL criterion, sample value: true
* :unsupervised-discretize
Unsupervised version of the discretize filter. Discretization is by simple
pinning.
Parameters:
- :attributes
Index of the attributes to be discretized, sample value: [0,4,6]
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :unset-class
Does not take class attribute into account for the application
of the filter, sample-value: true
- :binary
- :equal-frequency
Use equal frequency instead of equal width discretization, sample
value: true
- :optimize
Optmize the number of bins using leave-one-out estimate of
estimated entropy. Ingores the :binary attribute. sample value: true
- :number-bins
Defines the number of bins to divide the numeric attributes into
sample value: 3
* :supervised-nominal-to-binary
Converts nominal attributes into binary numeric attributes. An attribute with k values
is transformed into k binary attributes if the class is nominal.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :also-binary
Sets if binary attributes are to be coded as nominal ones, sample value: true
- :for-each-nominal
For each nominal value one binary attribute is created, not only if the
values of the nominal attribute are greater than two.
* :unsupervised-nominal-to-binary
Unsupervised version of the :nominal-to-binary filter
Parameters:
- :attributes
Index of the attributes to be binarized. Sample value: [1 2 3]
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :also-binary
Sets if binary attributes are to be coded as nominal ones, sample value: true
- :for-each-nominal
For each nominal value one binary attribute is created, not only if the
values of the nominal attribute are greater than two., sample value: true
* :remove-attributes
Remove some columns from the data set after the provided attributes.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :attributes
Index of the attributes to remove. Sample value: [1 2 3]
* :select-append-attributes
Append a copy of the selected columns at the end of the dataset.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :attributes
Index of the attributes to remove. Sample value: [1 2 3]
- :invert
Invert the selection of the columns. Sample value: [0 1]
* :project-attributes
Project some columns from the provided dataset
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :invert
Invert the selection of columns. Sample value: [0 1]"
(fn [kind options] kind))
(defmethod make-filter :supervised-discretize
([kind options]
(make-filter-m kind options weka.filters.supervised.attribute.Discretize)))
(defmethod make-filter :unsupervised-discretize
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Discretize)))
(defmethod make-filter :supervised-nominal-to-binary
([kind options]
(make-filter-m kind options weka.filters.supervised.attribute.NominalToBinary)))
(defmethod make-filter :unsupervised-nominal-to-binary
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.NominalToBinary)))
(defmethod make-filter :remove-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Remove)))
(defmethod make-filter :select-append-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Copy)))
(defmethod make-filter :project-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Remove)))
;; Processing the filtering of data
(defn filter-apply
"Filters an input dataset using the provided filter and generates an output dataset. The
first argument is a filter and the second parameter the data set where the filter should
be applied."
[filter dataset]
(Filter/useFilter dataset filter))
(defn make-apply-filter
"Creates a new filter with the provided options and apply it to the provided dataset.
The :dataset-format attribute for the making of the filter will be setup to the
dataset passed as an argument if no other value is provided.
The application of this filter is equivalent a the consequetive application of
make-filter and apply-filter."
[kind options dataset]
(let [opts (if (nil? (:dataset-format options)) (conj options {:dataset-format dataset}))
filter (make-filter kind opts)]
(filter-apply filter dataset)))
| null | https://raw.githubusercontent.com/antoniogarrote/clj-ml/a6e3a41b7eed0c31ea344b286ebbeb0e81ce68bc/src/clj_ml/filters.clj | clojure |
Data processing of data with different filtering algorithms
There is no necessity of passing the :dataset-format option, *ds* format is used
automatically
Options for the filters
Creation of filters
Processing the filtering of data | @author
(ns #^{:author "Antonio Garrote <>"}
clj-ml.filters
"This namespace defines a set of functions that can be applied to data sets to modify the
dataset in some way: transforming nominal attributes into binary attributes, removing
attributes etc.
A sample use of the API is shown below:
* ds * is the dataset where the first attribute is to be removed
(def *filter* (make-filter :remove-attributes {:dataset-format *ds* :attributes [0]}))
We apply the filter to the original data set and obtain the new one
(def *filtered-ds* (filter-apply *filter* *ds*))
The previous sample of code could be rewritten with the make-apply-filter function:
(def *filtered-ds* (make-apply-filter :remove-attributes {:attributes [0]} *ds*))"
(:use [clj-ml data utils])
(:import (weka.filters Filter)))
(defmulti #^{:skip-wiki true}
make-filter-options
"Creates the right parameters for a filter"
(fn [kind map] kind))
(defmethod make-filter-options :supervised-discretize
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"
:binary "-D"
:better-encoding "-E"
:kononenko "-K"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :unsupervised-discretize
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:unset-class "-unset-class-temporarily"
:binary "-D"
:better-encoding "-E"
:equal-frequency "-F"
:optimize "-O"}
map
cols-val-a)
cols-val-c (check-option-values {:number-bins "-B"
:weight-bins "-M"}
map
cols-val-b)]
(into-array cols-val-c))))
(defmethod make-filter-options :supervised-nominal-to-binary
([kind map]
(let [cols-val (check-options {:also-binary "-N"
:for-each-nominal "-A"}
map
[""])]
(into-array cols-val))))
(defmethod make-filter-options :unsupervised-nominal-to-binary
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"
:also-binary "-N"
:for-each-nominal "-A"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :remove-attributes
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :select-append-attributes
([kind map]
(let [cols (get map :attributes)
pre-cols (reduce #(str %1 "," (+ %2 1)) "" cols)
cols-val-a ["-R" (.substring pre-cols 1 (.length pre-cols))]
cols-val-b (check-options {:invert "-V"}
map
cols-val-a)]
(into-array cols-val-b))))
(defmethod make-filter-options :project-attributes
([kind options]
(let [opts (if (nil? (:invert options))
(conj options {:invert true})
(dissoc options :invert))]
(make-filter-options :remove-attributes opts))))
(defmacro #^{:skip-wiki true}
make-filter-m [kind options filter-class]
`(let [filter# (new ~filter-class)
dataset-format# (get ~options :dataset-format)
opts# (make-filter-options ~kind ~options)]
(.setOptions filter# opts#)
(.setInputFormat filter# dataset-format#)
filter#))
(defmulti make-filter
"Creates a filter for the provided attributes format. The first argument must be a symbol
identifying the kind of filter to generate.
Currently the following filters are supported:
- :supervised-discretize
- :unsupervised-discretize
- :supervised-nominal-to-binary
- :unsupervised-nominal-to-binary
- :remove-attributes
- :select-append-attributes
- :project-attributes
The second parameter is a map of attributes
for the filter to be built.
An example of usage could be:
(make-filter :remove {:attributes [0 1] :dataset-format dataset})
Documentation for the different filters:
* :supervised-discretize
An instance filter that discretizes a range of numeric attributes
in the dataset into nominal attributes. Discretization is by Fayyad
& Irani's MDL method (the default).
Parameters:
- :attributes
Index of the attributes to be discretized, sample value: [0,4,6]
- :invert
Invert mathcing sense of the columns, sample value: true
- :kononenko
Use Kononenko's MDL criterion, sample value: true
* :unsupervised-discretize
Unsupervised version of the discretize filter. Discretization is by simple
pinning.
Parameters:
- :attributes
Index of the attributes to be discretized, sample value: [0,4,6]
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :unset-class
Does not take class attribute into account for the application
of the filter, sample-value: true
- :binary
- :equal-frequency
Use equal frequency instead of equal width discretization, sample
value: true
- :optimize
Optmize the number of bins using leave-one-out estimate of
estimated entropy. Ingores the :binary attribute. sample value: true
- :number-bins
Defines the number of bins to divide the numeric attributes into
sample value: 3
* :supervised-nominal-to-binary
Converts nominal attributes into binary numeric attributes. An attribute with k values
is transformed into k binary attributes if the class is nominal.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :also-binary
Sets if binary attributes are to be coded as nominal ones, sample value: true
- :for-each-nominal
For each nominal value one binary attribute is created, not only if the
values of the nominal attribute are greater than two.
* :unsupervised-nominal-to-binary
Unsupervised version of the :nominal-to-binary filter
Parameters:
- :attributes
Index of the attributes to be binarized. Sample value: [1 2 3]
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :also-binary
Sets if binary attributes are to be coded as nominal ones, sample value: true
- :for-each-nominal
For each nominal value one binary attribute is created, not only if the
values of the nominal attribute are greater than two., sample value: true
* :remove-attributes
Remove some columns from the data set after the provided attributes.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :attributes
Index of the attributes to remove. Sample value: [1 2 3]
* :select-append-attributes
Append a copy of the selected columns at the end of the dataset.
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :attributes
Index of the attributes to remove. Sample value: [1 2 3]
- :invert
Invert the selection of the columns. Sample value: [0 1]
* :project-attributes
Project some columns from the provided dataset
Parameters:
- :dataset-format
The dataset where the filter is going to be applied or a
description of the format of its attributes. Sample value:
dataset, (dataset-format dataset)
- :invert
Invert the selection of columns. Sample value: [0 1]"
(fn [kind options] kind))
(defmethod make-filter :supervised-discretize
([kind options]
(make-filter-m kind options weka.filters.supervised.attribute.Discretize)))
(defmethod make-filter :unsupervised-discretize
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Discretize)))
(defmethod make-filter :supervised-nominal-to-binary
([kind options]
(make-filter-m kind options weka.filters.supervised.attribute.NominalToBinary)))
(defmethod make-filter :unsupervised-nominal-to-binary
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.NominalToBinary)))
(defmethod make-filter :remove-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Remove)))
(defmethod make-filter :select-append-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Copy)))
(defmethod make-filter :project-attributes
([kind options]
(make-filter-m kind options weka.filters.unsupervised.attribute.Remove)))
(defn filter-apply
"Filters an input dataset using the provided filter and generates an output dataset. The
first argument is a filter and the second parameter the data set where the filter should
be applied."
[filter dataset]
(Filter/useFilter dataset filter))
(defn make-apply-filter
"Creates a new filter with the provided options and apply it to the provided dataset.
The :dataset-format attribute for the making of the filter will be setup to the
dataset passed as an argument if no other value is provided.
The application of this filter is equivalent a the consequetive application of
make-filter and apply-filter."
[kind options dataset]
(let [opts (if (nil? (:dataset-format options)) (conj options {:dataset-format dataset}))
filter (make-filter kind opts)]
(filter-apply filter dataset)))
|
3788727f02472580a20c602d840a827f4b9ad52e7a559d46a99dd0464f67628d | CardanoSolutions/ogmios | Mary.hs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
# LANGUAGE TypeApplications #
module Ogmios.Data.Json.Mary where
import Ogmios.Data.Json.Prelude
import Cardano.Binary
( serialize'
)
import Cardano.Ledger.Crypto
( Crypto
)
import Data.ByteString.Base16
( encodeBase16
)
import GHC.Records
( getField
)
import Ouroboros.Consensus.Cardano.Block
( MaryEra
)
import Ouroboros.Consensus.Protocol.TPraos
( TPraos
)
import Ouroboros.Consensus.Shelley.Ledger.Block
( ShelleyBlock (..)
)
import Ouroboros.Consensus.Shelley.Protocol.TPraos
()
import qualified Data.ByteString.Short as BS
import qualified Data.Map.Strict as Map
import qualified Ogmios.Data.Json.Allegra as Allegra
import qualified Ogmios.Data.Json.Shelley as Shelley
import qualified Cardano.Ledger.Block as Ledger
import qualified Cardano.Ledger.Core as Ledger
import qualified Cardano.Ledger.Era as Ledger
import qualified Cardano.Ledger.TxIn as Ledger
import qualified Cardano.Ledger.Shelley.BlockChain as Sh
import qualified Cardano.Ledger.Shelley.PParams as Sh
import qualified Cardano.Ledger.Shelley.Rules.Ledger as Sh
import qualified Cardano.Ledger.Shelley.Tx as Sh
import qualified Cardano.Ledger.Shelley.UTxO as Sh
import qualified Cardano.Ledger.AuxiliaryData as MA
import qualified Cardano.Ledger.Mary.Value as MA
import qualified Cardano.Ledger.ShelleyMA.AuxiliaryData as MA
import qualified Cardano.Ledger.ShelleyMA.Rules.Utxo as MA
import qualified Cardano.Ledger.ShelleyMA.TxBody as MA
--
-- Encoders
--
encodeAuxiliaryData
:: Crypto crypto
=> MA.AuxiliaryData (MaryEra crypto)
-> Json
encodeAuxiliaryData (MA.AuxiliaryData blob scripts) = encodeObject
[ ( "blob"
, Shelley.encodeMetadataBlob blob
)
, ( "scripts"
, encodeFoldable Allegra.encodeScript scripts
)
]
encodeBlock
:: Crypto crypto
=> SerializationMode
-> ShelleyBlock (TPraos crypto) (MaryEra crypto)
-> Json
encodeBlock mode (ShelleyBlock (Ledger.Block blkHeader txs) headerHash) =
encodeObject
[ ( "body"
, encodeFoldable (encodeTx mode) (Sh.txSeqTxns' txs)
)
, ( "header"
, Shelley.encodeBHeader mode blkHeader
)
, ( "headerHash"
, Shelley.encodeShelleyHash headerHash
)
]
encodeLedgerFailure
:: Crypto crypto
=> Sh.LedgerPredicateFailure (MaryEra crypto)
-> Json
encodeLedgerFailure = \case
Sh.UtxowFailure e ->
Shelley.encodeUtxowFailure encodeUtxoFailure e
Sh.DelegsFailure e ->
Shelley.encodeDelegsFailure e
encodePolicyId
:: Crypto crypto
=> MA.PolicyID crypto
-> Json
encodePolicyId (MA.PolicyID hash) =
Shelley.encodeScriptHash hash
encodePParams'
:: (forall a. (a -> Json) -> Sh.HKD f a -> Json)
-> Sh.PParams' f era
-> Json
encodePParams' =
Shelley.encodePParams'
encodeProposedPPUpdates
:: Ledger.PParamsDelta era ~ Sh.PParamsUpdate era
=> Crypto (Ledger.Crypto era)
=> Sh.ProposedPPUpdates era
-> Json
encodeProposedPPUpdates =
Shelley.encodeProposedPPUpdates
encodeTx
:: forall crypto. (Crypto crypto)
=> SerializationMode
-> Sh.Tx (MaryEra crypto)
-> Json
encodeTx mode x = encodeObjectWithMode mode
[ ( "id"
, Shelley.encodeTxId (Ledger.txid @(MaryEra crypto) (Sh.body x))
)
, ( "body"
, encodeTxBody (Sh.body x)
)
, ( "metadata"
, (,) <$> fmap (("hash",) . Shelley.encodeAuxiliaryDataHash) (adHash (Sh.body x))
<*> fmap (("body",) . encodeAuxiliaryData) (Sh.auxiliaryData x)
& encodeStrictMaybe (\(a, b) -> encodeObject [a,b])
)
]
[ ( "witness"
, encodeWitnessSet (Sh.wits x)
)
, ( "raw"
, encodeByteStringBase64 (serialize' x)
)
]
where
adHash :: MA.TxBody era -> StrictMaybe (MA.AuxiliaryDataHash (Ledger.Crypto era))
adHash = getField @"adHash"
encodeTxBody
:: Crypto crypto
=> MA.TxBody (MaryEra crypto)
-> Json
encodeTxBody (MA.TxBody inps outs certs wdrls fee validity updates _ mint) = encodeObject
[ ( "inputs"
, encodeFoldable Shelley.encodeTxIn inps
)
, ( "outputs"
, encodeFoldable encodeTxOut outs
)
, ( "certificates"
, encodeFoldable Shelley.encodeDCert certs
)
, ( "withdrawals"
, Shelley.encodeWdrl wdrls
)
, ( "fee"
, encodeCoin fee
)
, ( "validityInterval"
, Allegra.encodeValidityInterval validity
)
, ( "update"
, encodeStrictMaybe Shelley.encodeUpdate updates
)
, ( "mint"
, encodeValue mint
)
]
encodeTxOut
:: Crypto crypto
=> Sh.TxOut (MaryEra crypto)
-> Json
encodeTxOut (Sh.TxOut addr value) = encodeObject
[ ( "address"
, Shelley.encodeAddress addr
)
, ( "value"
, encodeValue value
)
]
encodeUtxo
:: Crypto crypto
=> Sh.UTxO (MaryEra crypto)
-> Json
encodeUtxo =
encodeList id . Map.foldrWithKey (\i o -> (:) (encodeIO i o)) [] . Sh.unUTxO
where
encodeIO = curry (encode2Tuple Shelley.encodeTxIn encodeTxOut)
encodeUtxoWithMode
:: Crypto crypto
=> SerializationMode
-> Sh.UTxO (MaryEra crypto)
-> Json
encodeUtxoWithMode mode =
encodeListWithMode mode id . Map.foldrWithKey (\i o -> (:) (encodeIO i o)) [] . Sh.unUTxO
where
encodeIO = curry (encode2Tuple Shelley.encodeTxIn encodeTxOut)
encodeUtxoFailure
:: Crypto crypto
=> MA.UtxoPredicateFailure (MaryEra crypto)
-> Json
encodeUtxoFailure = \case
MA.BadInputsUTxO inputs ->
encodeObject
[ ( "badInputs"
, encodeFoldable Shelley.encodeTxIn inputs
)
]
MA.OutsideValidityIntervalUTxO itv currentSlot ->
encodeObject
[ ( "outsideOfValidityInterval", encodeObject
[ ( "interval" , Allegra.encodeValidityInterval itv )
, ( "currentSlot" , encodeSlotNo currentSlot )
]
)
]
MA.OutputTooBigUTxO outs ->
encodeObject
[ ( "tooManyAssetsInOutput"
, encodeFoldable encodeTxOut outs
)
]
MA.MaxTxSizeUTxO actualSize maxSize ->
encodeObject
[ ( "txTooLarge", encodeObject
[ ( "maximumSize", encodeInteger maxSize )
, ( "actualSize", encodeInteger actualSize )
]
)
]
MA.InputSetEmptyUTxO ->
encodeObject
[ ( "missingAtLeastOneInputUtxo", encodeNull )
]
MA.FeeTooSmallUTxO required actual ->
encodeObject
[ ( "feeTooSmall", encodeObject
[ ( "requiredFee", encodeCoin required )
, ( "actualFee", encodeCoin actual )
]
)
]
MA.ValueNotConservedUTxO consumed produced ->
encodeObject
[ ( "valueNotConserved", encodeObject
[ ( "consumed", encodeValue consumed )
, ( "produced", encodeValue produced )
]
)
]
MA.WrongNetwork expected invalidAddrs ->
encodeObject
[ ( "networkMismatch", encodeObject
[ ( "expectedNetwork"
, Shelley.encodeNetwork expected
)
, ( "invalidEntities"
, Shelley.encodeEntities "address" Shelley.encodeAddress invalidAddrs
)
]
)
]
MA.WrongNetworkWithdrawal expected invalidAccts ->
encodeObject
[ ( "networkMismatch", encodeObject
[ ( "expectedNetwork"
, Shelley.encodeNetwork expected
)
, ( "invalidEntities"
, Shelley.encodeEntities "rewardAccount" Shelley.encodeRewardAcnt invalidAccts
)
]
)
]
MA.OutputTooSmallUTxO outs ->
encodeObject
[ ( "outputTooSmall"
, encodeFoldable encodeTxOut outs
)
]
MA.OutputBootAddrAttrsTooBig outs ->
encodeObject
[ ( "addressAttributesTooLarge"
, encodeFoldable Shelley.encodeAddress ((\(Sh.TxOut addr _) -> addr) <$> outs)
)
]
MA.TriesToForgeADA ->
encodeObject
[ ( "triesToForgeAda", encodeNull )
]
MA.UpdateFailure e ->
Shelley.encodeUpdateFailure e
encodeValue
:: Crypto crypto
=> MA.Value crypto
-> Json
encodeValue (MA.Value coins assets) = encodeObject
[ ( "coins"
, encodeInteger coins
)
, ( "assets"
, encodeMap stringifyAssetId encodeInteger (flatten assets)
)
]
where
flatten :: (Ord k1, Ord k2) => Map k1 (Map k2 a) -> Map (k1, k2) a
flatten = Map.foldrWithKey
(\k inner -> Map.union (Map.mapKeys (k,) inner))
mempty
encodeWitnessSet
:: Crypto crypto
=> Sh.WitnessSet (MaryEra crypto)
-> Json
encodeWitnessSet x = encodeObject
[ ( "signatures"
, Shelley.encodeWitVKeys (Sh.addrWits x)
)
, ( "scripts"
, encodeMap Shelley.stringifyScriptHash Allegra.encodeScript (Sh.scriptWits x)
)
, ( "bootstrap"
, encodeFoldable Shelley.encodeBootstrapWitness (Sh.bootWits x)
)
]
--
-- Conversion To Text
--
stringifyAssetId :: Crypto crypto => (MA.PolicyID crypto, MA.AssetName) -> Text
stringifyAssetId (MA.PolicyID pid, MA.AssetName bytes)
| BS.null bytes = Shelley.stringifyScriptHash pid
| otherwise = Shelley.stringifyScriptHash pid <> "." <> encodeBase16 (fromShort bytes)
| null | https://raw.githubusercontent.com/CardanoSolutions/ogmios/317c826d9d0388cb7efaf61a34085fc7c1b12b06/server/src/Ogmios/Data/Json/Mary.hs | haskell |
Encoders
Conversion To Text
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
# LANGUAGE TypeApplications #
module Ogmios.Data.Json.Mary where
import Ogmios.Data.Json.Prelude
import Cardano.Binary
( serialize'
)
import Cardano.Ledger.Crypto
( Crypto
)
import Data.ByteString.Base16
( encodeBase16
)
import GHC.Records
( getField
)
import Ouroboros.Consensus.Cardano.Block
( MaryEra
)
import Ouroboros.Consensus.Protocol.TPraos
( TPraos
)
import Ouroboros.Consensus.Shelley.Ledger.Block
( ShelleyBlock (..)
)
import Ouroboros.Consensus.Shelley.Protocol.TPraos
()
import qualified Data.ByteString.Short as BS
import qualified Data.Map.Strict as Map
import qualified Ogmios.Data.Json.Allegra as Allegra
import qualified Ogmios.Data.Json.Shelley as Shelley
import qualified Cardano.Ledger.Block as Ledger
import qualified Cardano.Ledger.Core as Ledger
import qualified Cardano.Ledger.Era as Ledger
import qualified Cardano.Ledger.TxIn as Ledger
import qualified Cardano.Ledger.Shelley.BlockChain as Sh
import qualified Cardano.Ledger.Shelley.PParams as Sh
import qualified Cardano.Ledger.Shelley.Rules.Ledger as Sh
import qualified Cardano.Ledger.Shelley.Tx as Sh
import qualified Cardano.Ledger.Shelley.UTxO as Sh
import qualified Cardano.Ledger.AuxiliaryData as MA
import qualified Cardano.Ledger.Mary.Value as MA
import qualified Cardano.Ledger.ShelleyMA.AuxiliaryData as MA
import qualified Cardano.Ledger.ShelleyMA.Rules.Utxo as MA
import qualified Cardano.Ledger.ShelleyMA.TxBody as MA
-- | Encode Mary-era auxiliary data: the metadata blob plus the scripts
-- carried alongside the transaction.
encodeAuxiliaryData
    :: Crypto crypto
    => MA.AuxiliaryData (MaryEra crypto)
    -> Json
encodeAuxiliaryData (MA.AuxiliaryData blob scripts) = encodeObject
    [ ( "blob"
      , Shelley.encodeMetadataBlob blob
      )
    , ( "scripts"
      , encodeFoldable Allegra.encodeScript scripts
      )
    ]
-- | Encode a Mary-era (TPraos) block: its transactions, its header, and the
-- header hash.  The 'SerializationMode' controls how much per-transaction
-- detail is emitted (see 'encodeTx').
encodeBlock
    :: Crypto crypto
    => SerializationMode
    -> ShelleyBlock (TPraos crypto) (MaryEra crypto)
    -> Json
encodeBlock mode (ShelleyBlock (Ledger.Block blkHeader txs) headerHash) =
    encodeObject
    [ ( "body"
      , encodeFoldable (encodeTx mode) (Sh.txSeqTxns' txs)
      )
    , ( "header"
      , Shelley.encodeBHeader mode blkHeader
      )
    , ( "headerHash"
      , Shelley.encodeShelleyHash headerHash
      )
    ]
-- | Encode a Mary LEDGER-rule failure.  Only the UTXOW branch differs from
-- Shelley: it threads the Mary-specific 'encodeUtxoFailure' through.
encodeLedgerFailure
    :: Crypto crypto
    => Sh.LedgerPredicateFailure (MaryEra crypto)
    -> Json
encodeLedgerFailure = \case
    Sh.UtxowFailure e ->
        Shelley.encodeUtxowFailure encodeUtxoFailure e
    Sh.DelegsFailure e ->
        Shelley.encodeDelegsFailure e
-- | A policy id wraps the hash of its monetary-policy script; encode it as
-- that script hash.
encodePolicyId
    :: Crypto crypto
    => MA.PolicyID crypto
    -> Json
encodePolicyId (MA.PolicyID hash) =
    Shelley.encodeScriptHash hash
-- | Mary re-uses Shelley's protocol parameters wholesale; delegate.
encodePParams'
    :: (forall a. (a -> Json) -> Sh.HKD f a -> Json)
    -> Sh.PParams' f era
    -> Json
encodePParams' =
    Shelley.encodePParams'
-- | Proposed protocol-parameter updates are identical to Shelley's; delegate.
encodeProposedPPUpdates
    :: Ledger.PParamsDelta era ~ Sh.PParamsUpdate era
    => Crypto (Ledger.Crypto era)
    => Sh.ProposedPPUpdates era
    -> Json
encodeProposedPPUpdates =
    Shelley.encodeProposedPPUpdates
-- | Encode a Mary-era transaction.  The first field group is always
-- included; the second ("witness" and the raw serialized bytes) only in
-- full serialization mode (see 'encodeObjectWithMode').
encodeTx
    :: forall crypto. (Crypto crypto)
    => SerializationMode
    -> Sh.Tx (MaryEra crypto)
    -> Json
encodeTx mode x = encodeObjectWithMode mode
    [ ( "id"
      , Shelley.encodeTxId (Ledger.txid @(MaryEra crypto) (Sh.body x))
      )
    , ( "body"
      , encodeTxBody (Sh.body x)
      )
    , ( "metadata"
      -- Pair the auxiliary-data hash (taken from the body) with the
      -- auxiliary data itself; emitted only when both are present.
      , (,) <$> fmap (("hash",) . Shelley.encodeAuxiliaryDataHash) (adHash (Sh.body x))
            <*> fmap (("body",) . encodeAuxiliaryData) (Sh.auxiliaryData x)
          & encodeStrictMaybe (\(a, b) -> encodeObject [a,b])
      )
    ]
    [ ( "witness"
      , encodeWitnessSet (Sh.wits x)
      )
    , ( "raw"
      , encodeByteStringBase64 (serialize' x)
      )
    ]
  where
    adHash :: MA.TxBody era -> StrictMaybe (MA.AuxiliaryDataHash (Ledger.Crypto era))
    adHash = getField @"adHash"
-- | Encode a Mary transaction body, including the multi-asset "mint" field.
-- The ignored pattern field is presumably the auxiliary-data hash, which
-- 'encodeTx' reads separately via @getField \@"adHash"@ -- confirm.
encodeTxBody
    :: Crypto crypto
    => MA.TxBody (MaryEra crypto)
    -> Json
encodeTxBody (MA.TxBody inps outs certs wdrls fee validity updates _ mint) = encodeObject
    [ ( "inputs"
      , encodeFoldable Shelley.encodeTxIn inps
      )
    , ( "outputs"
      , encodeFoldable encodeTxOut outs
      )
    , ( "certificates"
      , encodeFoldable Shelley.encodeDCert certs
      )
    , ( "withdrawals"
      , Shelley.encodeWdrl wdrls
      )
    , ( "fee"
      , encodeCoin fee
      )
    , ( "validityInterval"
      , Allegra.encodeValidityInterval validity
      )
    , ( "update"
      , encodeStrictMaybe Shelley.encodeUpdate updates
      )
    , ( "mint"
      , encodeValue mint
      )
    ]
-- | Encode a transaction output: an address paired with a multi-asset value.
encodeTxOut
    :: Crypto crypto
    => Sh.TxOut (MaryEra crypto)
    -> Json
encodeTxOut (Sh.TxOut addr value) = encodeObject
    [ ( "address"
      , Shelley.encodeAddress addr
      )
    , ( "value"
      , encodeValue value
      )
    ]
-- | Encode a Mary-era UTxO set as a JSON list of @[input, output]@ pairs,
-- in ascending order of transaction input.
encodeUtxo
    :: Crypto crypto
    => Sh.UTxO (MaryEra crypto)
    -> Json
encodeUtxo =
    encodeList id . fmap entry . Map.toList . Sh.unUTxO
  where
    entry = encode2Tuple Shelley.encodeTxIn encodeTxOut
-- | Like 'encodeUtxo', but lets the 'SerializationMode' decide how the
-- resulting list is rendered.
encodeUtxoWithMode
    :: Crypto crypto
    => SerializationMode
    -> Sh.UTxO (MaryEra crypto)
    -> Json
encodeUtxoWithMode mode =
    encodeListWithMode mode id . fmap entry . Map.toList . Sh.unUTxO
  where
    entry = encode2Tuple Shelley.encodeTxIn encodeTxOut
-- | Encode every Mary UTXO predicate failure as a one-key object describing
-- the violated rule.  Mary-specific cases (relative to Shelley) include
-- 'MA.OutputTooBigUTxO' and 'MA.TriesToForgeADA'.
encodeUtxoFailure
    :: Crypto crypto
    => MA.UtxoPredicateFailure (MaryEra crypto)
    -> Json
encodeUtxoFailure = \case
    MA.BadInputsUTxO inputs ->
        encodeObject
            [ ( "badInputs"
              , encodeFoldable Shelley.encodeTxIn inputs
              )
            ]
    MA.OutsideValidityIntervalUTxO itv currentSlot ->
        encodeObject
            [ ( "outsideOfValidityInterval", encodeObject
                [ ( "interval" , Allegra.encodeValidityInterval itv )
                , ( "currentSlot" , encodeSlotNo currentSlot )
                ]
              )
            ]
    MA.OutputTooBigUTxO outs ->
        encodeObject
            [ ( "tooManyAssetsInOutput"
              , encodeFoldable encodeTxOut outs
              )
            ]
    MA.MaxTxSizeUTxO actualSize maxSize ->
        encodeObject
            [ ( "txTooLarge", encodeObject
                [ ( "maximumSize", encodeInteger maxSize )
                , ( "actualSize", encodeInteger actualSize )
                ]
              )
            ]
    MA.InputSetEmptyUTxO ->
        encodeObject
            [ ( "missingAtLeastOneInputUtxo", encodeNull )
            ]
    MA.FeeTooSmallUTxO required actual ->
        encodeObject
            [ ( "feeTooSmall", encodeObject
                [ ( "requiredFee", encodeCoin required )
                , ( "actualFee", encodeCoin actual )
                ]
              )
            ]
    MA.ValueNotConservedUTxO consumed produced ->
        encodeObject
            [ ( "valueNotConserved", encodeObject
                [ ( "consumed", encodeValue consumed )
                , ( "produced", encodeValue produced )
                ]
              )
            ]
    MA.WrongNetwork expected invalidAddrs ->
        encodeObject
            [ ( "networkMismatch", encodeObject
                [ ( "expectedNetwork"
                  , Shelley.encodeNetwork expected
                  )
                , ( "invalidEntities"
                  , Shelley.encodeEntities "address" Shelley.encodeAddress invalidAddrs
                  )
                ]
              )
            ]
    MA.WrongNetworkWithdrawal expected invalidAccts ->
        encodeObject
            [ ( "networkMismatch", encodeObject
                [ ( "expectedNetwork"
                  , Shelley.encodeNetwork expected
                  )
                , ( "invalidEntities"
                  , Shelley.encodeEntities "rewardAccount" Shelley.encodeRewardAcnt invalidAccts
                  )
                ]
              )
            ]
    MA.OutputTooSmallUTxO outs ->
        encodeObject
            [ ( "outputTooSmall"
              , encodeFoldable encodeTxOut outs
              )
            ]
    MA.OutputBootAddrAttrsTooBig outs ->
        encodeObject
            [ ( "addressAttributesTooLarge"
              , encodeFoldable Shelley.encodeAddress ((\(Sh.TxOut addr _) -> addr) <$> outs)
              )
            ]
    MA.TriesToForgeADA ->
        encodeObject
            [ ( "triesToForgeAda", encodeNull )
            ]
    MA.UpdateFailure e ->
        Shelley.encodeUpdateFailure e
-- | Encode a Mary multi-asset 'MA.Value': the lovelace quantity under
-- "coins" plus a flat map from asset id (see 'stringifyAssetId') to quantity.
encodeValue
    :: Crypto crypto
    => MA.Value crypto
    -> Json
encodeValue (MA.Value coins assets) = encodeObject
    [ ( "coins"
      , encodeInteger coins
      )
    , ( "assets"
      , encodeMap stringifyAssetId encodeInteger (flatten assets)
      )
    ]
  where
    -- Collapse the nested policy -> asset-name -> quantity map into a
    -- single map keyed by (policy, asset-name) pairs.
    flatten :: (Ord k1, Ord k2) => Map k1 (Map k2 a) -> Map (k1, k2) a
    flatten = Map.foldrWithKey
        (\k inner -> Map.union (Map.mapKeys (k,) inner))
        mempty
-- | Encode a witness set: verification-key signatures, scripts indexed by
-- their hash, and Byron-era bootstrap witnesses.
encodeWitnessSet
    :: Crypto crypto
    => Sh.WitnessSet (MaryEra crypto)
    -> Json
encodeWitnessSet x = encodeObject
    [ ( "signatures"
      , Shelley.encodeWitVKeys (Sh.addrWits x)
      )
    , ( "scripts"
      , encodeMap Shelley.stringifyScriptHash Allegra.encodeScript (Sh.scriptWits x)
      )
    , ( "bootstrap"
      , encodeFoldable Shelley.encodeBootstrapWitness (Sh.bootWits x)
      )
    ]
-- | Render an asset id as @policy@ or @policy.assetName@ (hex-encoded);
-- the dot and asset name are omitted when the asset name is empty.
stringifyAssetId :: Crypto crypto => (MA.PolicyID crypto, MA.AssetName) -> Text
stringifyAssetId (MA.PolicyID pid, MA.AssetName bytes) =
    if BS.null bytes
    then policy
    else policy <> "." <> encodeBase16 (fromShort bytes)
  where
    policy = Shelley.stringifyScriptHash pid
|
85bb7ee24f705cf3f58a6af2cd551b06b0a46a9c0a301a5e48707cb1fa901d60 | tisnik/clojure-examples | core.clj | (ns carmine9.core
(:require [taoensso.carmine :as carmine :refer (wcar)]))
;; Carmine connection options shared by every command issued via `wcar*`:
;; default pooling, single local Redis instance.
;; NOTE(review): Carmine URIs are normally of the form "redis://host:port";
;; confirm that "redis@127.0.0.1:6379" parses as intended.
(def redis-connection {
                       :pool {}
                       :spec {
                              :uri "redis@127.0.0.1:6379"}})
(defmacro wcar*
  "Run the given Carmine command forms against `redis-connection`
  (thin wrapper over `taoensso.carmine/wcar`)."
  [& body]
  `(carmine/wcar redis-connection ~@body))
(defn -main
  "Demo of Redis set commands via Carmine: fills two sets, then prints the
  results of union, intersection, and (both directions of) difference."
  [& args]
  (println "Working with two sets")
  (println "Fill in sets s1 and s2")
  ;; Remove any leftover members from previous runs, then populate both sets.
  (println
    (wcar*
      (carmine/srem :s1 :a :b :c :d :e :f)
      (carmine/srem :s2 :a :b :c :d :e :f)
      (carmine/sadd :s1 :a :b :c :d)
      (carmine/sadd :s2 :c :d :e :f)
      (carmine/smembers :s1)
      (carmine/smembers :s2)))
  (println "Set operations")
  (println "union")
  ;; The *store commands write the result into a destination key (:s3..:s6),
  ;; which is then read back with SMEMBERS.
  (println
    (wcar*
      (carmine/sunionstore :s3 :s1 :s2)
      (carmine/smembers :s3)))
  (println "intersection")
  (println
    (wcar*
      (carmine/sinterstore :s4 :s1 :s2)
      (carmine/smembers :s4)))
  (println "diff")
  ;; Set difference is not commutative, so compute it in both directions.
  (println
    (wcar*
      (carmine/sdiffstore :s5 :s1 :s2)
      (carmine/smembers :s5)
      (carmine/sdiffstore :s6 :s2 :s1)
      (carmine/smembers :s6)))
  (println "Done"))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/303b7784f9238309af9b9853b426a88accf042e4/carmine9/src/carmine9/core.clj | clojure | (ns carmine9.core
(:require [taoensso.carmine :as carmine :refer (wcar)]))
(def redis-connection {
:pool {}
:spec {
:uri "redis@127.0.0.1:6379"}})
(defmacro wcar*
[& body]
`(carmine/wcar redis-connection ~@body))
(defn -main
[& args]
(println "Working with two sets")
(println "Fill in sets s1 and s2")
(println
(wcar*
(carmine/srem :s1 :a :b :c :d :e :f)
(carmine/srem :s2 :a :b :c :d :e :f)
(carmine/sadd :s1 :a :b :c :d)
(carmine/sadd :s2 :c :d :e :f)
(carmine/smembers :s1)
(carmine/smembers :s2)))
(println "Set operations")
(println "union")
(println
(wcar*
(carmine/sunionstore :s3 :s1 :s2)
(carmine/smembers :s3)))
(println "intersection")
(println
(wcar*
(carmine/sinterstore :s4 :s1 :s2)
(carmine/smembers :s4)))
(println "diff")
(println
(wcar*
(carmine/sdiffstore :s5 :s1 :s2)
(carmine/smembers :s5)
(carmine/sdiffstore :s6 :s2 :s1)
(carmine/smembers :s6)))
(println "Done"))
| |
a6816c934017842fa480a57f80cf064f5fb7da0dcf6b5163f21388dd3910a1c5 | sgbj/MaximaSharp | opers.lisp | -*- Mode : Lisp ; Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The data in this file contains enhancments. ;;;;;
;;; ;;;;;
Copyright ( c ) 1984,1987 by , University of Texas ; ; ; ; ;
;;; All rights reserved ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
( c ) Copyright 1980 Massachusetts Institute of Technology ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :maxima)
(macsyma-module opers)
;; This file is the run-time half of the OPERS package, an interface to the
;; general representation simplifier.  When new expressions are being
;; created, the functions in this file or the macros in MOPERS should be called
;; rather than the entrypoints in SIMP such as SIMPLIFYA or SIMPLUS.  Many of
;; the functions in this file will do a pre-simplification to prevent
;; unnecessary consing.  [Of course, this is really the "wrong" thing, since
;; knowledge about 0 being the additive identity of the reals is now
;; kept in two different places.]

;; The basic functions in the virtual interface are ADD, SUB, MUL, DIV, POWER,
;; NCMUL, NCPOWER, NEG, INV.  Each of these functions assume that their
;; arguments are simplified.  Some functions will have a "*" adjoined to the
;; end of the name (as in ADD*).  These do not assume that their arguments are
;; simplified.  In addition, there are a few entrypoints such as ADDN, MULN
;; which take a list of terms as a first argument, and a simplification flag as
;; the second argument.  The above functions are the only entrypoints to this
;; package.

;; The functions ADD2, ADD2*, MUL2, MUL2*, and MUL3 are for use internal to
;; this package and should not be called externally.  Note that MOPERS is
;; needed to compile this file.
;; Addition primitives.
;; ADD2 -- add two already-simplified expressions.  Two numbers are
;; combined directly; adding 0 returns the other operand unchanged;
;; anything else builds an MPLUS form for the simplifier.
(defmfun add2 (x y)
  (cond ((numberp x)
	 (cond ((numberp y) (+ x y))
	       ((=0 x) y)
	       (t (simplifya `((mplus) ,x ,y) t))))
	((=0 y) x)
	(t (simplifya `((mplus) ,x ,y) t))))
;; ADD2* -- like ADD2 but for arguments that may not yet be simplified
;; (hence the NIL simplification flag passed to SIMPLIFYA).
(defmfun add2* (x y)
  (cond
    ((and (numberp x) (numberp y)) (+ x y))
    ((=0 x) (simplifya y nil))
    ((=0 y) (simplifya x nil))
    (t (simplifya `((mplus) ,x ,y) nil))))
;; The first two cases in this cond shouldn't be needed, but exist
;; for compatibility with the old OPERS package.  The old ADDLIS
;; deleted zeros ahead of time.  Is this worth it?
;; ADDN -- sum a list of TERMS; SIMP-FLAG tells SIMPLIFYA whether the
;; terms are already simplified.
(defmfun addn (terms simp-flag)
  (cond ((null terms) 0)
	(t (simplifya `((mplus) . ,terms) simp-flag))))
(declare-top (special $negdistrib))
;; NEG -- negate a simplified expression.  $NEGDISTRIB is bound to T so
;; that the -1 distributes over a sum.
(defmfun neg (x)
  (cond ((numberp x) (- x))
	(t (let (($negdistrib t))
	     (simplifya `((mtimes) -1 ,x) t)))))
;; SUB -- subtract simplified Y from simplified X.
(defmfun sub (x y)
  (cond
    ((and (numberp x) (numberp y)) (- x y))
    ((=0 y) x)
    ((=0 x) (neg y))
    (t (add x (neg y)))))
;; SUB* -- like SUB but for possibly-unsimplified arguments.
(defmfun sub* (x y)
  (cond
    ((and (numberp x) (numberp y)) (- x y))
    ((=0 y) x)
    ((=0 x) (neg y))
    (t
     (add (simplifya x nil) (mul -1 (simplifya y nil))))))
;; Multiplication primitives -- is it worthwhile to handle the 3-arg
;; case specially? Don't simplify x*0 --> 0 since x could be non-scalar.
;; MUL2 -- multiply two already-simplified expressions.
(defmfun mul2 (x y)
  (cond
    ((and (numberp x) (numberp y)) (* x y))
    ((=1 x) y)
    ((=1 y) x)
    (t (simplifya `((mtimes) ,x ,y) t))))
;; MUL2* -- like MUL2 but for possibly-unsimplified arguments.
(defmfun mul2* (x y)
  (cond
    ((and (numberp x) (numberp y)) (* x y))
    ((=1 x) (simplifya y nil))
    ((=1 y) (simplifya x nil))
    (t (simplifya `((mtimes) ,x ,y) nil))))
;; MUL3 -- multiply three simplified expressions, short-circuiting
;; multiplicative identities.
(defmfun mul3 (x y z)
  (cond ((=1 x) (mul2 y z))
	((=1 y) (mul2 x z))
	((=1 z) (mul2 x y))
	(t (simplifya `((mtimes) ,x ,y ,z) t))))
;; The first two cases in this cond shouldn't be needed, but exist
;; for compatibility with the old OPERS package.  The old MULSLIS
;; deleted ones ahead of time.  Is this worth it?
(defmfun muln (factors simp-flag)
  (cond ((null factors) 1)
	((atom factors) factors)
	(t (simplifya `((mtimes) . ,factors) simp-flag))))
;; DIV -- divide simplified X by simplified Y (as X * Y^-1).
(defmfun div (x y)
  (if (=1 x)
      (inv y)
      (mul x (inv y))))
;; DIV* -- like DIV but for possibly-unsimplified arguments.
(defmfun div* (x y)
  (if (=1 x)
      (inv* y)
      (mul (simplifya x nil) (inv* y))))
;; NCMUL2 -- non-commutative product of two simplified expressions.
(defmfun ncmul2 (x y)
  (simplifya `((mnctimes) ,x ,y) t))
;; NCMULN -- non-commutative product of a list of FACTORS.
(defmfun ncmuln (factors flag)
  (simplifya `((mnctimes) . ,factors) flag))
;; Exponentiation
;; Don't use BASE as a parameter name since it is special in MacLisp.
;; POWER -- raise simplified *BASE to simplified POWER.
(defmfun power (*base power)
  (cond ((=1 power) *base)
	(t (simplifya `((mexpt) ,*base ,power) t))))
;; POWER* -- like POWER but for possibly-unsimplified arguments.
(defmfun power* (*base power)
  (cond ((=1 power) (simplifya *base nil))
	(t (simplifya `((mexpt) ,*base ,power) nil))))
;; NCPOWER -- non-commutative exponentiation with trivial powers folded.
(defmfun ncpower (x y)
  (cond ((=0 y) 1)
	((=1 y) x)
	(t (simplifya `((mncexpt) ,x ,y) t))))
;; [Add something for constructing equations here at some point.]
;; (ROOT X N) takes the Nth root of X.
;; Warning! may give a complex expression back, starting from a
;; positive (evidently) real expression, viz. sqrt[(sinh-sin) / (sin-sinh)] or
;; something.
(defmfun root (x n)
  (cond ((=0 x) 0)
	((=1 x) 1)
	(t (simplifya `((mexpt) ,x ((rat simp) 1 ,n)) t))))
;; (Porm flag expr) is +expr if flag is true, and -expr
;; otherwise.  Morp is the opposite.  Names stand for "plus or minus"
;; and vice versa.
(defmfun porm (s x) (if s x (neg x)))
(defmfun morp (s x) (if s (neg x) x))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/src/opers.lisp | lisp | Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
The data in this file contains enhancments. ;;;;;
;;;;;
; ; ; ;
All rights reserved ;;;;;
; ;
created, the functions in this file or the macros in MOPERS should be called
the functions in this file will do a pre-simplification to prevent
unnecessary consing. [Of course, this is really the "wrong" thing, since
knowledge about 0 being the additive identity of the reals is now
arguments are simplified. Some functions will have a "*" adjoined to the
end of the name (as in ADD*). These do not assume that their arguments are
package.
this package and should not be called externally. Note that MOPERS is
needed to compile this file.
Addition primitives.
case specially? Don't simplify x*0 --> 0 since x could be non-scalar.
deleted ones ahead of time. Is this worth it?
Exponentiation
[Add something for constructing equations here at some point.]
positive (evidently) real expression, viz. sqrt[(sinh-sin) / (sin-sinh)] or
something.
(Porm flag expr) is +expr if flag is true, and -expr
and vice versa. |
(in-package :maxima)
(macsyma-module opers)
This file is the run - time half of the OPERS package , an interface to the
general representation simplifier . When new expressions are being
rather than the entrypoints in SIMP such as SIMPLIFYA or SIMPLUS . Many of
kept in two different places . ]
The basic functions in the virtual interface are ADD , SUB , MUL , DIV , POWER ,
NCMUL , NCPOWER , , INV . Each of these functions assume that their
simplified . In addition , there are a few entrypoints such as ADDN , MULN
which take a list of terms as a first argument , and a simplification flag as
the second argument . The above functions are the only entrypoints to this
The functions ADD2 , ADD2 * , , * , and MUL3 are for use internal to
(defmfun add2 (x y)
(cond ((numberp x)
(cond ((numberp y) (+ x y))
((=0 x) y)
(t (simplifya `((mplus) ,x ,y) t))))
((=0 y) x)
(t (simplifya `((mplus) ,x ,y) t))))
(defmfun add2* (x y)
(cond
((and (numberp x) (numberp y)) (+ x y))
((=0 x) (simplifya y nil))
((=0 y) (simplifya x nil))
(t (simplifya `((mplus) ,x ,y) nil))))
The first two cases in this cond should n't be needed , but exist
for compatibility with the old OPERS package . The old ADDLIS
deleted zeros ahead of time . Is this worth it ?
(defmfun addn (terms simp-flag)
(cond ((null terms) 0)
(t (simplifya `((mplus) . ,terms) simp-flag))))
(declare-top (special $negdistrib))
(defmfun neg (x)
(cond ((numberp x) (- x))
(t (let (($negdistrib t))
(simplifya `((mtimes) -1 ,x) t)))))
(defmfun sub (x y)
(cond
((and (numberp x) (numberp y)) (- x y))
((=0 y) x)
((=0 x) (neg y))
(t (add x (neg y)))))
(defmfun sub* (x y)
(cond
((and (numberp x) (numberp y)) (- x y))
((=0 y) x)
((=0 x) (neg y))
(t
(add (simplifya x nil) (mul -1 (simplifya y nil))))))
Multiplication primitives -- is it worthwhile to handle the 3 - arg
(defmfun mul2 (x y)
(cond
((and (numberp x) (numberp y)) (* x y))
((=1 x) y)
((=1 y) x)
(t (simplifya `((mtimes) ,x ,y) t))))
(defmfun mul2* (x y)
(cond
((and (numberp x) (numberp y)) (* x y))
((=1 x) (simplifya y nil))
((=1 y) (simplifya x nil))
(t (simplifya `((mtimes) ,x ,y) nil))))
(defmfun mul3 (x y z)
(cond ((=1 x) (mul2 y z))
((=1 y) (mul2 x z))
((=1 z) (mul2 x y))
(t (simplifya `((mtimes) ,x ,y ,z) t))))
The first two cases in this cond should n't be needed , but exist
for compatibility with the old OPERS package . The old MULSLIS
(defmfun muln (factors simp-flag)
(cond ((null factors) 1)
((atom factors) factors)
(t (simplifya `((mtimes) . ,factors) simp-flag))))
(defmfun div (x y)
(if (=1 x)
(inv y)
(mul x (inv y))))
(defmfun div* (x y)
(if (=1 x)
(inv* y)
(mul (simplifya x nil) (inv* y))))
(defmfun ncmul2 (x y)
(simplifya `((mnctimes) ,x ,y) t))
(defmfun ncmuln (factors flag)
(simplifya `((mnctimes) . ,factors) flag))
Do n't use BASE as a parameter name since it is special in MacLisp .
(defmfun power (*base power)
(cond ((=1 power) *base)
(t (simplifya `((mexpt) ,*base ,power) t))))
(defmfun power* (*base power)
(cond ((=1 power) (simplifya *base nil))
(t (simplifya `((mexpt) ,*base ,power) nil))))
(defmfun ncpower (x y)
(cond ((=0 y) 1)
((=1 y) x)
(t (simplifya `((mncexpt) ,x ,y) t))))
( ROOT X N ) takes the Nth root of X.
Warning ! may give a complex expression back , starting from a
(defmfun root (x n)
(cond ((=0 x) 0)
((=1 x) 1)
(t (simplifya `((mexpt) ,x ((rat simp) 1 ,n)) t))))
otherwise . Morp is the opposite . Names stand for " plus or minus "
(defmfun porm (s x) (if s x (neg x)))
(defmfun morp (s x) (if s (neg x) x))
|
4bec47368567568e37a24df81753be81b076eb1b2ab0c1b588cad3096af9f759 | glguy/advent | 16.hs | {-# Language QuasiQuotes, ImportQualifiedPost #-}
|
Module : Main
Description : Day 16 solution
Copyright : ( c ) , 2018
License : ISC
Maintainer :
< >
Module : Main
Description : Day 16 solution
Copyright : (c) Eric Mertens, 2018
License : ISC
Maintainer :
<>
-}
module Main (main) where
import Advent (format, countBy)
import Control.Monad (foldM)
import Control.Monad.Trans.State (StateT(..))
import Data.Bits ((.&.), (.|.))
import Data.IntMap (IntMap)
import Data.IntMap qualified as IntMap
import Data.List (intersect, foldl')
import Data.Map (Map)
import Data.Map qualified as Map
import Data.Maybe (fromJust)
import Data.Set (Set)
import Data.Set qualified as Set
-- | Instructions are an opcode and 3 operands A B and C.
data Instruction = I !Int !Int !Int !Int deriving Show
-- | Examples are registers before, an instruction, and registers after.
data Example = E Registers Instruction Registers deriving Show
-- | Registers are represented using an IntMap. Assume no default values.
type Registers = IntMap Int
-- | Print the answers to day 16
--
-- >>> :main
-- 592
-- 557
main :: IO ()
main =
  do (inp1, inp2) <- [format|2018 16
        (Before: [%u&(, )]%n
        %u %u %u %u%n
        After: [%u&(, )]%n)&%n
        %n%n%n
        (%u %u %u %u%n)*|]
     -- Build typed examples/instructions from the raw parsed tuples.
     let examples = [E (toRegisters x) (I o a1 a2 a3) (toRegisters y) | (x,o,a1,a2,a3,y) <- inp1]
         instructions = [I o a1 a2 a3 | (o,a1,a2,a3) <- inp2]
     print (part1 examples)
     print (part2 examples instructions)
-- registers -----------------------------------------------------------
-- | Build a 'Registers' map whose keys are the 0-based positions of the
-- input list.
--
-- >>> toRegisters [1,2,4,8]
-- fromList [(0,1),(1,2),(2,4),(3,8)]
toRegisters :: [Int] -> Registers
toRegisters regs = IntMap.fromList (zip [0 ..] regs)
-- semantics -----------------------------------------------------------
-- | Pairs of opcode names and semantics function. Each function
-- expects the three operands A, B, C and the current registers
-- and produces the new registers.
opcodes :: Map String (Int -> Int -> Int -> Registers -> Registers)
opcodes =
  -- 'sem' stores @f reg a b@ strictly into register C; 'val' treats an
  -- operand as an immediate value (while @reg@ dereferences a register).
  let sem f a b c regs = (IntMap.insert c $! f (regs IntMap.!) a b) regs
      val o = o in
  Map.fromList
  [ ("addr", sem $ \reg a b -> reg a + reg b)
  , ("addi", sem $ \reg a b -> reg a + val b)
  , ("mulr", sem $ \reg a b -> reg a * reg b)
  , ("muli", sem $ \reg a b -> reg a * val b)
  , ("banr", sem $ \reg a b -> reg a .&. reg b)
  , ("bani", sem $ \reg a b -> reg a .&. val b)
  , ("borr", sem $ \reg a b -> reg a .|. reg b)
  , ("bori", sem $ \reg a b -> reg a .|. val b)
  , ("setr", sem $ \reg a _ -> reg a)
  , ("seti", sem $ \reg a _ -> val a)
  , ("gtir", sem $ \reg a b -> if val a > reg b then 1 else 0)
  , ("gtri", sem $ \reg a b -> if reg a > val b then 1 else 0)
  , ("gtrr", sem $ \reg a b -> if reg a > reg b then 1 else 0)
  , ("eqir", sem $ \reg a b -> if val a == reg b then 1 else 0)
  , ("eqri", sem $ \reg a b -> if reg a == val b then 1 else 0)
  , ("eqrr", sem $ \reg a b -> if reg a == reg b then 1 else 0)
  ]
-- tasks ---------------------------------------------------------------
-- | How many samples in your puzzle input behave like three or more opcodes?
part1 :: [Example] -> Int
part1 = countBy $ \example -> 3 <= length (snd (getMatches example))
-- | What value is contained in register 0 after executing the test program?
part2 :: [Example] -> [Instruction] -> Int
part2 examples program = finalRegs IntMap.! 0 -- read final register 0
  where
    -- apply all of the instructions in order to a zero-initialized registers
    finalRegs = foldl' eval (toRegisters [0,0,0,0]) program

    -- Get constraints, satisfy them, lookup solution in opcode map
    semantics = (opcodes Map.!) <$> satConstraints (getConstraints examples)

    -- lookup the semantics for an instruction and apply it to the registers
    eval regs (I o a b c) = (semantics IntMap.! o) a b c regs
-- constraints and assignments -----------------------------------------
-- | Given an example extract the opcode and possible name for it.
--
-- >>> getMatches (E (toRegisters [3,2,1,1]) (I 9 2 1 2) (toRegisters [3,2,2,1]))
-- (9,["addi","mulr","seti"])
getMatches :: Example -> (Int, [String])
getMatches (E before (I o a b c) after) =
  (o, [ name | (name,f) <- Map.toList opcodes, after == f a b c before ])
-- | Get the constraints generated by a list of examples. Each opcode key is
-- associated with a list of possible opcode names.
-- An opcode's candidate set must be consistent with *every* example,
-- hence duplicate keys are combined with 'intersect'.
getConstraints :: [Example] -> IntMap [String]
getConstraints = IntMap.fromListWith intersect . map getMatches
-- | Given some constraints, pick an element from each constraint so that
-- every chosen element is unique. This function assumes a unique solution
-- exists (it calls 'error' via 'head' when the constraints are
-- unsatisfiable).
--
-- >>> satConstraints ["ab", "bc", "a"]
-- "bca"
satConstraints :: (Traversable t, Ord a) => t [a] -> t a
satConstraints constraints =
    fst (head (runStateT (traverse (StateT . choose) constraints) Set.empty))
  where
    -- Nondeterministically pick an element not yet used and mark it used.
    choose :: Ord b => [b] -> Set b -> [(b, Set b)]
    choose options used =
      [ (choice, Set.insert choice used)
      | choice <- options
      , choice `Set.notMember` used ]
-- | Version of 'Data.Traversable.mapAccumL' that uses a 'Monad' instance
-- to combine the results of the function.
mapAccumLM ::
  (Traversable t, Monad m) =>
  (a -> acc -> m (b, acc)) -> t a -> acc -> m (t b, acc)
mapAccumLM step xs acc0 = runStateT (traverse (StateT . step) xs) acc0
| null | https://raw.githubusercontent.com/glguy/advent/7ab9f9e47208fd5720e36bac33fee2b78d4ec50b/solutions/src/2018/16.hs | haskell | # Language QuasiQuotes, ImportQualifiedPost #
| Examples are registers before, an instruction, and registers after.
| Print the answers to day 16
>>> :main
registers -----------------------------------------------------------
fromList [(0,1),(1,2),(2,4),(3,8)]
semantics -----------------------------------------------------------
| Pairs of opcode names and semantics function. Each function
and produces the new registers.
tasks ---------------------------------------------------------------
| What value is contained in register 0 after executing the test program?
Get constraints, satisfy them, lookup solution in opcode map
lookup the semantics for an instruction and apply it to the registers
constraints and assignments -----------------------------------------
| Given an example extract the opcode and possible name for it.
| Get the constraints generated by a list of examples. Each opcode key is
associated with a list of possible opcode names.
| Given some constraints, pick and element from each constraint so that each
element is unique. This function assumes a unique solution.
>>> satConstraints ["ab", "bc", "a"]
"bca"
to combine the results of the function. | |
Module : Main
Description : Day 16 solution
Copyright : ( c ) , 2018
License : ISC
Maintainer :
< >
Module : Main
Description : Day 16 solution
Copyright : (c) Eric Mertens, 2018
License : ISC
Maintainer :
<>
-}
module Main (main) where
import Advent (format, countBy)
import Control.Monad (foldM)
import Control.Monad.Trans.State (StateT(..))
import Data.Bits ((.&.), (.|.))
import Data.IntMap (IntMap)
import Data.IntMap qualified as IntMap
import Data.List (intersect, foldl')
import Data.Map (Map)
import Data.Map qualified as Map
import Data.Maybe (fromJust)
import Data.Set (Set)
import Data.Set qualified as Set
| Instructions are an opcode and 3 operands A B and C.
data Instruction = I !Int !Int !Int !Int deriving Show
data Example = E Registers Instruction Registers deriving Show
| Registers are represented using an IntMap . Assume no default values
type Registers = IntMap Int
592
557
main :: IO ()
main =
do (inp1, inp2) <- [format|2018 16
(Before: [%u&(, )]%n
%u %u %u %u%n
After: [%u&(, )]%n)&%n
%n%n%n
(%u %u %u %u%n)*|]
let examples = [E (toRegisters x) (I o a1 a2 a3) (toRegisters y) | (x,o,a1,a2,a3,y) <- inp1]
instructions = [I o a1 a2 a3 | (o,a1,a2,a3) <- inp2]
print (part1 examples)
print (part2 examples instructions)
| Convert a 0 - indexed list into Registers .
> > > toRegisters [ 1,2,4,8 ]
toRegisters :: [Int] -> Registers
toRegisters = IntMap.fromList . zip [0..]
expects the three operands A , B , C and the current registers
opcodes :: Map String (Int -> Int -> Int -> Registers -> Registers)
opcodes =
let sem f a b c regs = (IntMap.insert c $! f (regs IntMap.!) a b) regs
val o = o in
Map.fromList
[ ("addr", sem $ \reg a b -> reg a + reg b)
, ("addi", sem $ \reg a b -> reg a + val b)
, ("mulr", sem $ \reg a b -> reg a * reg b)
, ("muli", sem $ \reg a b -> reg a * val b)
, ("banr", sem $ \reg a b -> reg a .&. reg b)
, ("bani", sem $ \reg a b -> reg a .&. val b)
, ("borr", sem $ \reg a b -> reg a .|. reg b)
, ("bori", sem $ \reg a b -> reg a .|. val b)
, ("setr", sem $ \reg a _ -> reg a)
, ("seti", sem $ \reg a _ -> val a)
, ("gtir", sem $ \reg a b -> if val a > reg b then 1 else 0)
, ("gtri", sem $ \reg a b -> if reg a > val b then 1 else 0)
, ("gtrr", sem $ \reg a b -> if reg a > reg b then 1 else 0)
, ("eqir", sem $ \reg a b -> if val a == reg b then 1 else 0)
, ("eqri", sem $ \reg a b -> if reg a == val b then 1 else 0)
, ("eqrr", sem $ \reg a b -> if reg a == reg b then 1 else 0)
]
| How many samples in your puzzle input behave like three or more opcodes ?
part1 :: [Example] -> Int
part1 = countBy $ \example -> 3 <= length (snd (getMatches example))
part2 :: [Example] -> [Instruction] -> Int
read final register 0
where
apply all of the instructions in order to a zero - initialized registers
finalRegs = foldl' eval (toRegisters [0,0,0,0]) program
semantics = (opcodes Map.!) <$> satConstraints (getConstraints examples)
eval regs (I o a b c) = (semantics IntMap.! o) a b c regs
> > ( E ( toRegisters [ 3,2,1,1 ] ) ( I 9 2 1 2 ) ( toRegisters [ 3,2,2,1 ] ) )
( 9,["addi","mulr","seti " ] )
getMatches :: Example -> (Int, [String])
getMatches (E before (I o a b c) after) =
(o, [ name | (name,f) <- Map.toList opcodes, after == f a b c before ])
getConstraints :: [Example] -> IntMap [String]
getConstraints = IntMap.fromListWith intersect . map getMatches
satConstraints :: (Traversable t, Ord a) => t [a] -> t a
satConstraints constraints = fst (head (mapAccumLM pick constraints Set.empty))
where
pick :: Ord a => [a] -> Set a -> [(a, Set a)]
pick possible soFar =
[ (picked, Set.insert picked soFar)
| picked <- possible
, picked `Set.notMember` soFar ]
| Version of ' Data . ' that uses a ' Monad ' instance
mapAccumLM ::
(Traversable t, Monad m) =>
(a -> acc -> m (b, acc)) -> t a -> acc -> m (t b, acc)
mapAccumLM f = runStateT . traverse (StateT . f)
|
f73a105b33e7f1683bd1c69563a43e5abb0dcf934002c54be3414de9bcda82bb | MrEbbinghaus/Todoish | root.cljs | (ns todoish.ui.root
(:require
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[material-ui.utils :as mutils :refer [css-baseline]]
[material-ui.styles :as styles :refer [prefers-dark?]]
[todoish.ui.themes :as themes]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr :refer [defrouter]]
[todoish.ui.todo-app :as todo-app]
[todoish.ui.login :as login]
[com.fulcrologic.fulcro-css.css-injection :as inj]))
;; MediaQueryList for the OS-level dark-mode preference, or a falsey value
;; when `window.matchMedia` is unavailable.
(def dark-mode-matcher
  (and
    (-> js/window .-matchMedia)
    (-> js/window (.matchMedia "(prefers-color-scheme: dark)"))))
(defn onDarkModeChange
  "Register `f` to run whenever the OS dark-mode preference changes.
  No-op when media queries are unsupported.
  NOTE(review): `.addListener` is deprecated in favour of
  `.addEventListener` with \"change\"; confirm target browsers before
  switching."
  [f]
  (when dark-mode-matcher
    (.addListener dark-mode-matcher f)))
(defn dark-mode?
  "Checks for prefers-color-scheme: dark; falsey when media queries are
  unavailable. (clj always returns false)"
  []
  (and dark-mode-matcher (.-matches dark-mode-matcher)))
;; Top-level router switching between login, sign-up, and the main todo app.
;; The reader-discarded form below previously rendered loading/error
;; placeholders based on the router's `:current-state`.
(defrouter RootRouter [this {:keys [current-state]}]
  {:router-targets [login/LoginPage todo-app/TodoApp login/SignUpPage]}
  #_(case current-state
      :pending (dom/div "Loading...")
      :failed (dom/div "Failed!")
      ;; default will be used when the current state isn't yet set
      (dom/div "No route selected.")))
;; Element factory for RootRouter.
(def ui-root-router (comp/factory RootRouter))
;; Application root: applies the Material-UI theme (seeded from the OS
;; dark-mode preference), injects CSS, and renders the root router.
(defsc Root [this {:keys [ui/theme ui/root-router]}]
  {:query [:ui/theme
           {:ui/root-router (comp/get-query RootRouter)}]
   :initial-state
   (fn [_] {:ui/root-router (comp/get-initial-state RootRouter)
            ;; Pick the initial theme from the OS preference at startup.
            :ui/theme (if (dark-mode?) :dark :light)})}
  (styles/theme-provider {:theme (themes/get-mui-theme theme)}
    (mutils/css-baseline {})
    (inj/style-element {:component Root})
    (ui-root-router root-router)))
| null | https://raw.githubusercontent.com/MrEbbinghaus/Todoish/42c5a8e575c19937e4ed3332b738e5f35e847f07/src/main/todoish/ui/root.cljs | clojure | default will be used when the current state isn't yet set | (ns todoish.ui.root
(:require
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[material-ui.utils :as mutils :refer [css-baseline]]
[material-ui.styles :as styles :refer [prefers-dark?]]
[todoish.ui.themes :as themes]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr :refer [defrouter]]
[todoish.ui.todo-app :as todo-app]
[todoish.ui.login :as login]
[com.fulcrologic.fulcro-css.css-injection :as inj]))
(def dark-mode-matcher
(and
(-> js/window .-matchMedia)
(-> js/window (.matchMedia "(prefers-color-scheme: dark)"))))
(defn onDarkModeChange [f]
(when dark-mode-matcher
(.addListener dark-mode-matcher f)))
(defn dark-mode?
"Checks for prefers-color-scheme: dark. (clj always returns false)"
[]
(and dark-mode-matcher (.-matches dark-mode-matcher)))
(defrouter RootRouter [this {:keys [current-state]}]
{:router-targets [login/LoginPage todo-app/TodoApp login/SignUpPage]}
#_(case current-state
:pending (dom/div "Loading...")
:failed (dom/div "Failed!")
(dom/div "No route selected.")))
(def ui-root-router (comp/factory RootRouter))
(defsc Root [this {:keys [ui/theme ui/root-router]}]
{:query [:ui/theme
{:ui/root-router (comp/get-query RootRouter)}]
:initial-state
(fn [_] {:ui/root-router (comp/get-initial-state RootRouter)
:ui/theme (if (dark-mode?) :dark :light)})}
(styles/theme-provider {:theme (themes/get-mui-theme theme)}
(mutils/css-baseline {})
(inj/style-element {:component Root})
(ui-root-router root-router)))
|
37cda6197028b30e22bb430c744d2463441d2b71985f8733dd5ab4a102b655db | landakram/kiwi-desktop | core.cljs | (ns kiwi.page.core
(:require [clojure.string :as string]
[kiwi.utils :as utils]
[kiwi.markdown-processors :as markdown-processors]))
(defn capitalize-words
  "Capitalizes every word-boundary-delimited token of s and rejoins the
  tokens without a separator (delimiters survive as their own tokens)."
  [s]
  (let [tokens (string/split (str s) #"\b")]
    (string/join (map string/capitalize tokens))))
(defn get-permalink-from-title
  "Derives a permalink from a page title: lower-case, spaces -> underscores."
  [title]
  (-> title
      string/lower-case
      (string/replace " " "_")))
(defn get-title-from-permalink
  "Derives a display title from a permalink: underscores -> spaces, then
  each word capitalized."
  [permalink]
  (capitalize-words (string/replace permalink "_" " ")))
;; Accessor: the :permalink key of a page map.
(defn get-permalink [page]
(:permalink page))
;; Human-readable title derived from the page's permalink
;; (underscores -> spaces, words capitalized).
(defn title [page]
(-> (get-permalink page)
(get-title-from-permalink)))
(defn- parse-alias-link
  "Splits an alias link of the form \"name:alias\" on the first ':' and
  returns [alias name] (display text first, target page second)."
  [page-title]
  (let [[target display] (string/split page-title #":")]
    [display target]))
;; True when page-title uses the \"name:alias\" form.
;; NOTE(review): assumes utils/contains is a substring test — confirm.
(defn- alias? [page-title]
(utils/contains page-title ":"))
(defn parse-page-title
  "Parses a wiki-link title into a [display-text page-name] pair. Plain
  titles map to themselves; \"name:alias\" titles are split apart."
  [page-title]
  (if-not (alias? page-title)
    [page-title page-title]
    (parse-alias-link page-title)))
(defn- construct-classes
  "CSS classes for an internal wiki link: existing pages (name present in
  permalinks) get [\"internal\"]; unknown pages additionally get \"new\"."
  [name permalinks]
  (cond-> ["internal"]
    (not (utils/in? permalinks name)) (conj "new")))
(defn extract-yaml-node
  "Returns the first top-level child of the markdown AST whose :type is
  \"yaml\" (the front-matter node), or nil when there is none."
  [ast]
  (some (fn [node] (when (= "yaml" (:type node)) node))
        (:children ast)))
(defn extract-metadata
  "Parsed YAML front-matter map of the AST, or nil when there is no
  front matter."
  [ast]
  (-> ast
      extract-yaml-node
      (get-in [:data :parsedValue])))
(defn extract-tags
  "The :tags entry of the YAML front matter, or nil."
  [ast]
  (get-in (extract-yaml-node ast) [:data :parsedValue :tags]))
(defn extract-scheduled
  "The :scheduled entry of the front matter as a js/Date, or nil when
  absent (or falsey)."
  [ast]
  (when-let [date (get-in (extract-yaml-node ast) [:data :parsedValue :scheduled])]
    (js/Date. date)))
(defn extract-scheduled-id
  "The :scheduled-id (calendar event id) entry of the front matter, or nil."
  [ast]
  (get-in (extract-yaml-node ast) [:data :parsedValue :scheduled-id]))
(defn make-page
  "Builds the page map for `permalink` from raw markdown `contents`:
  title (derived from the permalink), contents, modification timestamp, and
  the YAML front-matter fields (:metadata, :scheduled, :scheduled-id, :tags)
  extracted from the parsed markdown AST."
  [permalink contents modified-at]
  ;; NOTE(review): dropped the previous unused `processor` binding
  ;; ((markdown-processors/ast-processor [])) — it was built and discarded.
  (let [ast (markdown-processors/get-ast contents)]
    {:title (get-title-from-permalink permalink)
     :permalink permalink
     :contents contents
     :timestamp modified-at
     :metadata (extract-metadata ast)
     :scheduled (extract-scheduled ast)
     :scheduled-id (extract-scheduled-id ast)
     :tags (extract-tags ast)}))
;; Creates an empty page for permalink, timestamped with the current time.
(defn new-page [permalink]
(make-page permalink "" (js/Date.)))
| null | https://raw.githubusercontent.com/landakram/kiwi-desktop/cc7d0a5f28430f39d43dffb26850183601fd28f9/src/cljs/kiwi/page/core.cljs | clojure | (ns kiwi.page.core
(:require [clojure.string :as string]
[kiwi.utils :as utils]
[kiwi.markdown-processors :as markdown-processors]))
(defn capitalize-words [s]
(->> (string/split (str s) #"\b")
(map string/capitalize)
(string/join)))
(defn get-permalink-from-title [title]
(string/replace (string/lower-case title) " " "_"))
(defn get-title-from-permalink [permalink]
(-> permalink
(string/replace "_" " ")
(capitalize-words)))
(defn get-permalink [page]
(:permalink page))
(defn title [page]
(-> (get-permalink page)
(get-title-from-permalink)))
(defn- parse-alias-link [page-title]
(let [[name alias] (string/split page-title #":")]
[alias name]))
(defn- alias? [page-title]
(utils/contains page-title ":"))
(defn parse-page-title [page-title]
(if (alias? page-title)
(parse-alias-link page-title)
[page-title page-title]))
(defn- construct-classes [name permalinks]
(if (utils/in? permalinks name)
["internal"]
["internal" "new"]))
(defn extract-yaml-node [ast]
(first
(filter #(= "yaml" (:type %))
(:children ast))))
(defn extract-metadata [ast]
(let [yaml-node (extract-yaml-node ast)]
(get-in yaml-node [:data :parsedValue])))
(defn extract-tags [ast]
(let [yaml-node (extract-yaml-node ast)]
(get-in yaml-node [:data :parsedValue :tags])))
(defn extract-scheduled [ast]
(let [yaml-node (extract-yaml-node ast)
date (get-in yaml-node [:data :parsedValue :scheduled])]
(when date
(js/Date. date))))
(defn extract-scheduled-id [ast]
(let [yaml-node (extract-yaml-node ast)
event-id (get-in yaml-node [:data :parsedValue :scheduled-id])]
event-id))
(defn make-page
  "Builds the page map for `permalink` from raw markdown `contents`:
  title (derived from the permalink), contents, modification timestamp, and
  the YAML front-matter fields (:metadata, :scheduled, :scheduled-id, :tags)
  extracted from the parsed markdown AST."
  [permalink contents modified-at]
  ;; NOTE(review): dropped the previous unused `processor` binding
  ;; ((markdown-processors/ast-processor [])) — it was built and discarded.
  (let [ast (markdown-processors/get-ast contents)]
    {:title (get-title-from-permalink permalink)
     :permalink permalink
     :contents contents
     :timestamp modified-at
     :metadata (extract-metadata ast)
     :scheduled (extract-scheduled ast)
     :scheduled-id (extract-scheduled-id ast)
     :tags (extract-tags ast)}))
(defn new-page [permalink]
(make-page permalink "" (js/Date.)))
| |
cfdfcbd8837aea8773f728c4e37c9560e788a5e1a09c2cb70ce3c9d188e47944 | SquidDev/illuaminate | lua_reference.mli | type t =
| Unknown
| InManual of string
| Undocumented

(** Get a link to a section of the manual *)
val manual_section : string -> string

(** Convert a Lua reference to a url. *)
val to_url : t -> string option

(** Resolve a Lua (5.1) name and convert it into a section of the manual *)
val lookup_name : string -> t

(** Resolve a Lua (5.1) type and convert it into a section of the manual *)
val lookup_type : string -> t
| null | https://raw.githubusercontent.com/SquidDev/illuaminate/da18b101b4710881b71c42554d70a3a7d17c3cd6/src/semantics/lua_reference.mli | ocaml | * Get a link to a section of the manual
* Convert a Lua reference to a url. | type t =
| Unknown
| InManual of string
| Undocumented
val manual_section : string -> string
val to_url : t -> string option
* Resolve a Lua ( 5.1 ) name and convert it into a section of the manual
val lookup_name : string -> t
* Result a Lua ( 5.1 ) type and convert it into a section of the manual
val lookup_type : string -> t
|
7de45915b358a7efdc7f286422ef8418ee2046ffef18241d5c4d7a7d89efc9e6 | mfikes/fifth-postulate | ns371.cljs | (ns fifth-postulate.ns371)
(defn solve-for01
  "Returns every quadruple (as a list, in index order) of elements of the
  indexed collection xs whose sum equals v. Indices are strictly
  increasing, so each combination is produced exactly once."
  [xs v]
  (let [n (count xs)]
    (for [i0 (range 0 (- n 3))
          i1 (range (inc i0) (- n 2))
          i2 (range (inc i1) (- n 1))
          i3 (range (inc i2) n)
          :let [quad (list (xs i0) (xs i1) (xs i2) (xs i3))]
          :when (= v (apply + quad))]
      quad)))
;; NOTE(review): solve-for02 through solve-for19 in this namespace are
;; byte-identical clones of solve-for01 (presumably compiler/benchmark
;; fixtures). Each enumerates all index-ordered quadruples of the indexed
;; collection xs whose elements sum to v.
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| null | https://raw.githubusercontent.com/mfikes/fifth-postulate/22cfd5f8c2b4a2dead1c15a96295bfeb4dba235e/src/fifth_postulate/ns371.cljs | clojure | (ns fifth-postulate.ns371)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| |
13181f060709ecc6e494bb6fd1331b56382be4a0827c81ceb107f8433163c239 | uw-unsat/serval | target.rkt | #lang racket/base
(require ffi/unsafe
ffi/unsafe/alloc
"core.rkt")
(provide (all-defined-out))
;; C enum LLVMByteOrdering: big = 0, little = 1.
(define _LLVMByteOrdering
(_enum
'(big = 0
little)))
;; Wrapper pairing the raw LLVMTargetDataRef pointer with the module it was
;; obtained from, so the module is kept reachable while the layout is used.
(struct _data-layout (pointer module) #:transparent)
;; FFI ctype that unwraps a _data-layout to its raw pointer when passed to C.
;; (No C-to-Racket conversion: #f — values are only produced via the
;; wrapped constructors below.)
(define _LLVMTargetDataRef
(make-ctype _pointer
_data-layout-pointer
#f))
;; LLVMGetModuleDataLayout: returns the module's data layout, wrapped
;; together with the module in a _data-layout struct.
(define-llvm LLVMGetModuleDataLayout
(_fun (m : _LLVMModuleRef)
-> (cptr : _pointer)
-> (_data-layout cptr m)))
;; String representation of the target data layout.
(define-llvm LLVMCopyStringRepOfTargetData
(_fun _LLVMTargetDataRef
-> _LLVMMessageRef))
;; Endianness of the target.
(define-llvm LLVMByteOrder
(_fun _LLVMTargetDataRef
-> _LLVMByteOrdering))
;; Pointer size in bytes for the target.
(define-llvm LLVMPointerSize
(_fun _LLVMTargetDataRef
-> _uint))
;; ABI size in bytes of a type under this data layout.
(define-llvm LLVMABISizeOfType
(_fun _LLVMTargetDataRef
_LLVMTypeRef
-> _ullong))
;; Byte offset of a struct element under this data layout.
(define-llvm LLVMOffsetOfElement
(_fun _LLVMTargetDataRef
_LLVMTypeRef
_uint
-> _ullong))
| null | https://raw.githubusercontent.com/uw-unsat/serval/be11ecccf03f81b8bd0557acf8385a6a5d4f51ed/serval/llvm/capi/target.rkt | racket | #lang racket/base
(require ffi/unsafe
ffi/unsafe/alloc
"core.rkt")
(provide (all-defined-out))
(define _LLVMByteOrdering
(_enum
'(big = 0
little)))
(struct _data-layout (pointer module) #:transparent)
(define _LLVMTargetDataRef
(make-ctype _pointer
_data-layout-pointer
#f))
(define-llvm LLVMGetModuleDataLayout
(_fun (m : _LLVMModuleRef)
-> (cptr : _pointer)
-> (_data-layout cptr m)))
(define-llvm LLVMCopyStringRepOfTargetData
(_fun _LLVMTargetDataRef
-> _LLVMMessageRef))
(define-llvm LLVMByteOrder
(_fun _LLVMTargetDataRef
-> _LLVMByteOrdering))
(define-llvm LLVMPointerSize
(_fun _LLVMTargetDataRef
-> _uint))
(define-llvm LLVMABISizeOfType
(_fun _LLVMTargetDataRef
_LLVMTypeRef
-> _ullong))
(define-llvm LLVMOffsetOfElement
(_fun _LLVMTargetDataRef
_LLVMTypeRef
_uint
-> _ullong))
| |
f118404b2922df1fd672a2428641cf46f2bae4d5a3cc6f680c0453a141bd9392 | jashmenn/gen_server_mock | gen_server_mock.erl | %%%-------------------------------------------------------------------
%%% File : gen_server_mock.erl
%%% Author :
%%% Description : Mocking for gen_server. Expectations are ordered, every
%%% message required and no messages more than are expected are allowed.
%%%
Expectations get the same input as the handle_(whatever ) gen_server methods . They should return - > ok | { ok , NewState }
Created : 2009 - 08 - 05
%%% Inspired by: -questions/2008-April/034140.html
%%%-------------------------------------------------------------------
-module(gen_server_mock).
-behaviour(gen_server).
% API
-export([new/0, new/1, stop/1, crash/1,
expect/3, expect_call/2, expect_info/2, expect_cast/2,
assert_expectations/1]).
% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
Macros
-define(SERVER, ?MODULE).
-define(DEFAULT_CONFIG, {}).
-record(state, {
expectations
}).
-record(expectation, {
type,
lambda
}).
% steal assert from eunit
-define(assert(BoolExpr),
((fun () ->
case (BoolExpr) of
true -> ok;
__V -> .erlang:error({assertion_failed,
[{module, ?MODULE},
{line, ?LINE},
{expression, (??BoolExpr)},
{expected, true},
{value, case __V of false -> __V;
_ -> {not_a_boolean,__V}
end}]})
end
end)())).
-define(raise(ErrorName),
erlang:error({ErrorName,
[{module, ?MODULE},
{line, ?LINE}]})).
-define(raise_info(ErrorName, Info),
erlang:error({ErrorName,
[{module, ?MODULE},
{line, ?LINE},
{info, Info}
]})).
-define (DEBUG, true).
-define (TRACE(X, M), case ?DEBUG of
true -> io:format(user, "TRACE ~p:~p ~p ~p~n", [?MODULE, ?LINE, X, M]);
false -> ok
end).
%%====================================================================
%% API
%%====================================================================
%%--------------------------------------------------------------------
Function : start ( ) - > { ok , Pid } | ignore | { error , Error }
Description : for start_link
%%--------------------------------------------------------------------
start() ->
start_link([]).
%%--------------------------------------------------------------------
Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error }
%% Description: Starts the server
%%--------------------------------------------------------------------
start_link(Config) ->
gen_server:start_link(?MODULE, [Config], []). % start a nameless server
%%--------------------------------------------------------------------
%% Function: new() -> {ok, Mock} | {error, Error}
%% Description:
%%--------------------------------------------------------------------
%% Create a fresh, nameless mock gen_server process.
%% Returns {ok, Pid}; any non-{ok, _}/{error, _} result from start/0 is
%% normalised into an {error, Reason} tuple.
new() ->
    case start() of
        {ok, _Pid} = Ok -> Ok;
        {error, _Reason} = Err -> Err;
        Unexpected -> {error, Unexpected}
    end.
%%--------------------------------------------------------------------
%% Function: new(N) when is_integer(N) -> [Pids]
%% Description: Return multiple Mock gen_servers
%%--------------------------------------------------------------------
%% Create N independent mock gen_servers; returns the list of pids.
%% Crashes (badmatch) if any individual mock fails to start.
new(N) when is_integer(N) -> % list() of Pids
    [begin {ok, Mock} = new(), Mock end || _ <- lists:seq(1, N)].
%%--------------------------------------------------------------------
%% Function: expect(Mock, Type, Callback) -> ok
%% Types: Mock = pid()
%% Type = atom() = call | cast | info
Callback = fun(Args ) - > ok | { ok , NewState } | { ok , ResponseValue , NewState }
matches signature of handle _ * in gen_server . e.g. handle_call
%%
%% Description: Set an expectation of Type
%%--------------------------------------------------------------------
expect(Mock, Type, Callback) ->
Exp = #expectation{type=Type, lambda=Callback},
added = gen_server:call(Mock, {expect, Exp}),
ok.
%%--------------------------------------------------------------------
%% Function: expect_call(Mock, Callback) -> ok
%% Types: Mock = pid()
Callback = fun(Args ) - > ok | { ok , NewState } | { ok , ResponseValue , NewState }
matches signature of handle_call in gen_server .
%%
%% Description: Set a call expectation
%%--------------------------------------------------------------------
expect_call(Mock, Callback) ->
expect(Mock, call, Callback).
%%--------------------------------------------------------------------
%% Function: expect_info(Mock, Callback) -> ok
%% Types: Mock = pid()
Callback = fun(Args ) - > ok | { ok , NewState } | { ok , ResponseValue , NewState }
matches signature of handle_info in gen_server .
%%
%% Description: Set a info expectation
%%--------------------------------------------------------------------
expect_info(Mock, Callback) ->
expect(Mock, info, Callback).
%%--------------------------------------------------------------------
%% Function: expect_cast(Mock, Callback) -> ok
%% Types: Mock = pid()
Callback = fun(Args ) - > ok | { ok , NewState } | { ok , ResponseValue , NewState }
matches signature of handle_cast in gen_server .
%%
%% Description: Set a cast expectation
%%--------------------------------------------------------------------
expect_cast(Mock, Callback) ->
expect(Mock, cast, Callback).
%%--------------------------------------------------------------------
%% Function: assert_expectations(Mock)-> ok
%% Types: Mock = pid() | [Mocks]
%% Description: Ensure expectations were fully met
%%--------------------------------------------------------------------
assert_expectations(Mock) when is_pid(Mock) ->
assert_expectations([Mock]);
assert_expectations([H|T]) ->
gen_server:call(H, assert_expectations),
ok = assert_expectations(T);
assert_expectations([]) ->
ok.
%%--------------------------------------------------------------------
%% Function: stop(Mock)-> ok
%% Types: Mock = pid() | [Mocks]
%% Description: Stop the Mock gen_server normally
%%--------------------------------------------------------------------
%% Ask one mock (or a list of mocks) to terminate normally. Always ok.
stop(Mock) when is_pid(Mock) ->
    stop([Mock]);
stop(Mocks) when is_list(Mocks) ->
    lists:foreach(
      fun(M) -> gen_server:cast(M, {'$gen_server_mock', stop}) end,
      Mocks).
%% Ask one mock (or a list of mocks) to terminate abnormally (reason crash).
%% Always ok.
crash(Mock) when is_pid(Mock) ->
    crash([Mock]);
crash(Mocks) when is_list(Mocks) ->
    lists:foreach(
      fun(M) -> gen_server:cast(M, {'$gen_server_mock', crash}) end,
      Mocks).
%%====================================================================
%% gen_server callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init(Args) -> {ok, State} |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
init(_Args) ->
InitialState = #state{expectations=[]},
{ok, InitialState}.
%%--------------------------------------------------------------------
Function : % % handle_call(Request , From , State ) - > { reply , Reply , State } |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% Description: Handling call messages
%%--------------------------------------------------------------------
% return the state
handle_call(state, _From, State) ->
{reply, {ok, State}, State};
handle_call({expect, Expectation}, _From, State) ->
{ok, NewState} = store_expectation(Expectation, State),
{reply, added, NewState};
handle_call(assert_expectations, _From, State) ->
{ok, NewState} = handle_assert_expectations(State),
{reply, ok, NewState};
handle_call(Request, From, State) ->
{ok, Reply, NewState} = reply_with_next_expectation(call, Request, From, undef, undef, State),
{reply, Reply, NewState}.
%%--------------------------------------------------------------------
Function : handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling cast messages
%%--------------------------------------------------------------------
handle_cast({'$gen_server_mock', stop}, State) ->
{stop, normal, State};
handle_cast({'$gen_server_mock', crash}, State) ->
{stop, crash, State};
handle_cast(Msg, State) ->
{ok, _Reply, NewState} = reply_with_next_expectation(cast, undef, undef, Msg, undef, State),
{noreply, NewState}.
%%--------------------------------------------------------------------
Function : handle_info(Info , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
handle_info(Info, State) ->
{ok, _Reply, NewState} = reply_with_next_expectation(info, undef, undef, undef, Info, State),
{noreply, NewState}.
%%--------------------------------------------------------------------
%% Function: terminate(Reason, State) -> void()
%% Description: This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%
%% private functions
%%
%% Prepend Expectation onto the mock's expectation stack
%% (expectations are later consumed from the tail, i.e. FIFO order).
store_expectation(Expectation, State) -> % {ok, NewState}
    {ok, State#state{expectations = [Expectation | State#state.expectations]}}.
%% Remove and return the LAST element of a list.
%% Returns {Last, Rest} for a non-empty list, {undef, []} for [].
pop([]) ->
    {undef, []};
pop(List) ->
    {lists:last(List), lists:sublist(List, 1, length(List) - 1)}.
%% Remove the oldest (tail) expectation from the state.
%% Raises no_gen_server_mock_expectation (via ?raise) when none remain.
pop_expectation(State) -> % {ok, Expectation, NewState}
{Expectation, RestExpectations} = case pop(State#state.expectations) of
{undef, []} -> ?raise(no_gen_server_mock_expectation);
{Head, Rest} -> {Head, Rest}
end,
NewState = State#state{expectations = RestExpectations},
{ok, Expectation, NewState}.
%% Raise unmet_gen_server_expectation (carrying the leftover expectations
%% as info) when any expectation was never consumed; otherwise a no-op.
%% Pattern-matches on [] instead of the previous O(n) length(L) > 0 check.
handle_assert_expectations(State) -> % {ok, State}
    case State#state.expectations of
        [] -> ok;
        ExpLeft -> ?raise_info(unmet_gen_server_expectation, ExpLeft)
    end,
    {ok, State}.
%% Consume the next queued expectation, assert its type matches the kind of
%% message received (call | cast | info), and run its callback. A
%% function_clause from the callback means the message didn't match the
%% expectation's pattern; it is re-raised as unexpected_request_made with
%% full context attached.
reply_with_next_expectation(Type, Request, From, Msg, Info, State) -> % -> {ok, Reply, NewState}
{ok, Expectation, NewState} = pop_expectation(State),
?assert(Type =:= Expectation#expectation.type), % todo, have a useful error message, "expected this got that"
{ok, Reply, NewState2} = try call_expectation_lambda(Expectation, Type, Request, From, Msg, Info, NewState) of
{ok, R, State2} -> {ok, R, State2}
catch
error:function_clause ->
?raise_info(unexpected_request_made, {Expectation, Type, Request, From, Msg, Info, NewState})
end,
{ok, Reply, NewState2}.
% hmm what if we want better response.
%% Invoke the expectation's callback with the handle_* signature matching
%% Type (call/2+state, cast/1+state, info/1+state), then normalise the
%% callback's return value: ok and {ok, NewState} reply ok; {ok, Value,
%% NewState} replies Value; anything else passes through unchanged.
call_expectation_lambda(Expectation, Type, Request, From, Msg, Info, State) -> % {ok, NewState}
L = Expectation#expectation.lambda,
Response = case Type of
call -> L(Request, From, State);
cast -> L(Msg, State);
info -> L(Info, State);
_ -> L(Request, From, Msg, Info, State)
end,
case Response of % hmmm
ok -> {ok, ok, State};
{ok, NewState} -> {ok, ok, NewState};
{ok, ResponseValue, NewState} -> {ok, ResponseValue, NewState};
Other -> Other
end.
| null | https://raw.githubusercontent.com/jashmenn/gen_server_mock/1a1ea3d858afd5cefe2056e5ab0e625a129c7e7c/src/gen_server_mock.erl | erlang | -------------------------------------------------------------------
File : gen_server_mock.erl
Author :
Description : Mocking for gen_server. Expectations are ordered, every
message required and no messages more than are expected are allowed.
Inspired by: -questions/2008-April/034140.html
-------------------------------------------------------------------
API
gen_server callbacks
steal assert from eunit
====================================================================
API
====================================================================
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Starts the server
--------------------------------------------------------------------
start a nameless server
--------------------------------------------------------------------
Function: new() -> {ok, Mock} | {error, Error}
Description:
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: new(N) when is_integer(N) -> [Pids]
Description: Return multiple Mock gen_servers
--------------------------------------------------------------------
list() of Pids
--------------------------------------------------------------------
Function: expect(Mock, Type, Callback) -> ok
Types: Mock = pid()
Type = atom() = call | cast | info
Description: Set an expectation of Type
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: expect_call(Mock, Callback) -> ok
Types: Mock = pid()
Description: Set a call expectation
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: expect_info(Mock, Callback) -> ok
Types: Mock = pid()
Description: Set a info expectation
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: expect_cast(Mock, Callback) -> ok
Types: Mock = pid()
Description: Set a cast expectation
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: assert_expectations(Mock)-> ok
Types: Mock = pid() | [Mocks]
Description: Ensure expectations were fully met
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Function: stop(Mock) -> ok
%% Types: Mock = pid() | [Mocks]
%% Description: Stop the Mock gen_server normally
%%--------------------------------------------------------------------
%%====================================================================
%% gen_server callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init(Args) -> {ok, State} |
%%                         ignore |
%%                         {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Function: handle_call(Request, From, State) -> {reply, Reply, State} |
%%                                                {stop, Reason, Reply, State} |
%%                                                {stop, Reason, State}
%% Description: Handling call messages
%%--------------------------------------------------------------------
%% return the state
%%--------------------------------------------------------------------
%%                                  {stop, Reason, State}
%% Description: Handling cast messages
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%%                                  {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Function: terminate(Reason, State) -> void()
%% Description: This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
%% private functions
%% {ok, NewState}
%% {Result, NewList} | {undef, []}
%% {ok, Expectation, NewState}
%% {ok, State}
%% -> {ok, Reply, NewState}
%% todo, have a useful error message, "expected this got that"
%% hmm what if we want better response.
%% {ok, NewState}
%% hmmm | Expectations get the same input as the handle_(whatever)
%% gen_server methods. They should return -> ok | {ok, NewState}
%% Created : 2009-08-05
%% @doc A mock gen_server for unit tests: queue expectations for
%% call/cast/info messages, then assert that all of them were met.
-module(gen_server_mock).
-behaviour(gen_server).

%% Public test API.
-export([new/0, new/1, stop/1, crash/1,
         expect/3, expect_call/2, expect_info/2, expect_cast/2,
         assert_expectations/1]).

%% gen_server callbacks.
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
         code_change/3]).
%% Macros
-define(SERVER, ?MODULE).
-define(DEFAULT_CONFIG, {}).

%% Server state: list of pending expectations. New expectations are
%% prepended (see store_expectation) and consumed from the tail (see
%% pop), i.e. they are met in the order they were registered.
-record(state, {
    expectations
}).

%% One expectation: the message kind it applies to (call | cast | info)
%% and the callback to run when it is consumed.
-record(expectation, {
    type,
    lambda
}).
%% eunit-style assertion: raises assertion_failed with module/line/
%% expression details unless BoolExpr evaluates to exactly 'true'.
%% (The '.erlang:error' spelling mirrors old eunit.hrl and forces the
%% fully qualified BIF call.)
-define(assert(BoolExpr),
    ((fun () ->
        case (BoolExpr) of
            true -> ok;
            __V -> .erlang:error({assertion_failed,
                      [{module, ?MODULE},
                       {line, ?LINE},
                       {expression, (??BoolExpr)},
                       {expected, true},
                       {value, case __V of false -> __V;
                                   _ -> {not_a_boolean,__V}
                               end}]})
        end
    end)())).

%% Raise ErrorName tagged with the raising module and line.
-define(raise(ErrorName),
    erlang:error({ErrorName,
                  [{module, ?MODULE},
                   {line, ?LINE}]})).

%% Raise ErrorName with an extra info term attached.
-define(raise_info(ErrorName, Info),
    erlang:error({ErrorName,
                  [{module, ?MODULE},
                   {line, ?LINE},
                   {info, Info}
                  ]})).

%% Compile-time switch for the TRACE macro below.
-define (DEBUG, true).
-define (TRACE(X, M), case ?DEBUG of
    true -> io:format(user, "TRACE ~p:~p ~p ~p~n", [?MODULE, ?LINE, X, M]);
    false -> ok
end).
%% Function: start() -> {ok, Pid} | ignore | {error, Error}
%% Description: Convenience wrapper around start_link/1 with empty args.
start() ->
    start_link([]).
%% Function: start_link(Config) -> {ok, Pid} | ignore | {error, Error}
%% NOTE(review): the body line was lost during comment extraction;
%% reconstructed as the conventional gen_server call (init/1 ignores
%% its argument, so passing Config through is safe) -- TODO confirm
%% against the upstream source.
start_link(Config) ->
    gen_server:start_link(?MODULE, Config, []).
%% @doc Create one mock process. Returns {ok, Pid}; any start failure
%% is normalised to {error, Reason}.
new() ->
    case start() of
        {ok, Pid} ->
            {ok, Pid};
        {error, Error} ->
            {error, Error};
        Other ->
            {error, Other}
    end.
lists:map(fun(_) -> {ok, Mock} = new(), Mock end, lists:seq(1, N)).
%% Callback = fun(Args) -> ok | {ok, NewState} | {ok, ResponseValue, NewState}
%% matches the signature of the handle_* callbacks in gen_server, e.g. handle_call
%%
%% Queue an expectation of the given Type (call | cast | info) on Mock.
expect(Mock, Type, Callback) ->
    Exp = #expectation{type=Type, lambda=Callback},
    added = gen_server:call(Mock, {expect, Exp}),
    ok.
%% Callback = fun(Request, From, State) -> ok | {ok, NewState} | {ok, ResponseValue, NewState}
%% matches the signature of handle_call in gen_server.
expect_call(Mock, Callback) ->
    expect(Mock, call, Callback).
%% Callback = fun(Info, State) -> ok | {ok, NewState} | {ok, ResponseValue, NewState}
%% matches the signature of handle_info in gen_server.
expect_info(Mock, Callback) ->
    expect(Mock, info, Callback).
%% Callback = fun(Msg, State) -> ok | {ok, NewState} | {ok, ResponseValue, NewState}
%% matches the signature of handle_cast in gen_server.
expect_cast(Mock, Callback) ->
    expect(Mock, cast, Callback).
%% @doc Assert that every queued expectation on each mock has been
%% consumed. Accepts a single pid or a list of pids; the server raises
%% unmet_gen_server_expectation when expectations remain.
assert_expectations(Mock) when is_pid(Mock) ->
    assert_expectations([Mock]);
assert_expectations([H|T]) ->
    gen_server:call(H, assert_expectations),
    ok = assert_expectations(T);
assert_expectations([]) ->
    ok.
%% @doc Stop one mock, or a list of mocks, with reason 'normal'.
stop(H) when is_pid(H) ->
    stop([H]);
stop([H|T]) ->
    gen_server:cast(H, {'$gen_server_mock', stop}),
    stop(T);
stop([]) ->
    ok.
%% @doc Stop one mock, or a list of mocks, with the non-normal reason
%% 'crash' (simulates an abnormal exit).
crash(H) when is_pid(H) ->
    crash([H]);
crash([H|T]) ->
    gen_server:cast(H, {'$gen_server_mock', crash}),
    crash(T);
crash([]) ->
    ok.
%% Function: init(Args) -> {ok, State} |
%%                         {ok, State, Timeout} |
%%                         ignore |
%%                         {stop, Reason}
%% Start with an empty expectation list; Args are ignored.
init(_Args) ->
    InitialState = #state{expectations=[]},
    {ok, InitialState}.
%% Function: handle_call(Request, From, State) ->
%%                                  {reply, Reply, State} |
%%                                  {reply, Reply, State, Timeout} |
%%                                  {noreply, State} |
%%                                  {noreply, State, Timeout} |
%%                                  {stop, Reason, Reply, State} |
%%                                  {stop, Reason, State}
%% 'state'              -> reply with the current #state{} (debugging aid)
%% {expect, Exp}        -> push an expectation, reply 'added'
%% assert_expectations  -> raise if any expectation remains unmet
%% anything else        -> consume the next expectation as a 'call'
handle_call(state, _From, State) ->
    {reply, {ok, State}, State};
handle_call({expect, Expectation}, _From, State) ->
    {ok, NewState} = store_expectation(Expectation, State),
    {reply, added, NewState};
handle_call(assert_expectations, _From, State) ->
    {ok, NewState} = handle_assert_expectations(State),
    {reply, ok, NewState};
handle_call(Request, From, State) ->
    {ok, Reply, NewState} = reply_with_next_expectation(call, Request, From, undef, undef, State),
    {reply, Reply, NewState}.
%% Function: handle_cast(Msg, State) -> {noreply, State} |
%%                                      {noreply, State, Timeout} |
%%                                      {stop, Reason, State}
%% Internal '$gen_server_mock' messages stop the server (normally, or
%% with reason 'crash'); any other cast consumes the next 'cast'
%% expectation.
handle_cast({'$gen_server_mock', stop}, State) ->
    {stop, normal, State};
handle_cast({'$gen_server_mock', crash}, State) ->
    {stop, crash, State};
handle_cast(Msg, State) ->
    {ok, _Reply, NewState} = reply_with_next_expectation(cast, undef, undef, Msg, undef, State),
    {noreply, NewState}.
%% Function: handle_info(Info, State) -> {noreply, State} |
%%                                       {noreply, State, Timeout} |
%%                                       {stop, Reason, State}
%% Every out-of-band message consumes the next 'info' expectation.
handle_info(Info, State) ->
    {ok, _Reply, NewState} = reply_with_next_expectation(info, undef, undef, undef, Info, State),
    {noreply, NewState}.
%% Cleanup hook; the mock holds no external resources, so nothing to do.
terminate(_Reason, _State) ->
    ok.
%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
%% Description: Convert process state when code is changed (no-op).
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Prepend a new expectation to the stored list. Returns {ok, NewState}.
%% NOTE(review): the head was lost during comment extraction;
%% reconstructed from the call site in handle_call({expect, ...}).
store_expectation(Expectation, State) ->
    NewExpectations = [Expectation|State#state.expectations],
    NewState = State#state{expectations = NewExpectations},
    {ok, NewState}.
%% Remove and return the last element of a list.
%% Returns {Last, Rest} or {undef, []} for the empty list.
%% NOTE(review): the head was lost during comment extraction; the body
%% scrutinises a variable L, so the argument must be named L.
pop(L) ->
    case L of
        [] -> {undef, []};
        List -> {lists:last(List), lists:sublist(List, 1, length(List) - 1)}
    end.
%% Take the next pending expectation (the oldest one, at the list tail)
%% off the state. Returns {ok, Expectation, NewState}; raises
%% no_gen_server_mock_expectation when none are queued.
%% NOTE(review): head reconstructed; lost during comment extraction.
pop_expectation(State) ->
    {Expectation, RestExpectations} = case pop(State#state.expectations) of
        {undef, []} -> ?raise(no_gen_server_mock_expectation);
        {Head, Rest} -> {Head, Rest}
    end,
    NewState = State#state{expectations = RestExpectations},
    {ok, Expectation, NewState}.
%% Raise unmet_gen_server_expectation (with the leftover expectations as
%% info) when any expectation is still queued; otherwise {ok, State}.
%% NOTE(review): head reconstructed; lost during comment extraction.
handle_assert_expectations(State) ->
    ExpLeft = State#state.expectations,
    case length(ExpLeft) > 0 of
        true -> ?raise_info(unmet_gen_server_expectation, ExpLeft);
        false -> ok
    end,
    {ok, State}.
%% Pop the next expectation and run its callback against the incoming
%% message. A function_clause error from the callback means the message
%% did not match what was expected -> raise unexpected_request_made.
%% Returns {ok, Reply, NewState}.
%% NOTE(review): head reconstructed from the handle_call/cast/info call
%% sites, which pass (Type, Request, From, Msg, Info, State).
reply_with_next_expectation(Type, Request, From, Msg, Info, State) ->
    {ok, Expectation, NewState} = pop_expectation(State),
    {ok, Reply, NewState2} = try call_expectation_lambda(Expectation, Type, Request, From, Msg, Info, NewState) of
        {ok, R, State2} -> {ok, R, State2}
    catch
        error:function_clause ->
            ?raise_info(unexpected_request_made, {Expectation, Type, Request, From, Msg, Info, NewState})
    end,
    {ok, Reply, NewState2}.
%% Invoke the expectation's callback with the arguments appropriate for
%% the message Type, then normalise its result to {ok, Reply, NewState}:
%%   ok                        -> reply ok, state unchanged
%%   {ok, NewState}            -> reply ok, new state
%%   {ok, Value, NewState}     -> reply Value, new state
%%   anything else             -> passed through unchanged
%% NOTE(review): the head and the 'case Response of' line were lost
%% during comment extraction; both reconstructed from the caller and
%% the surviving clauses.
call_expectation_lambda(Expectation, Type, Request, From, Msg, Info, State) ->
    L = Expectation#expectation.lambda,
    Response = case Type of
        call -> L(Request, From, State);
        cast -> L(Msg, State);
        info -> L(Info, State);
        _ -> L(Request, From, Msg, Info, State)
    end,
    case Response of
        ok -> {ok, ok, State};
        {ok, NewState} -> {ok, ok, NewState};
        {ok, ResponseValue, NewState} -> {ok, ResponseValue, NewState};
        Other -> Other
    end.
|
5990edc427efff4aec2e7f17d15e89c14257b46680440aa728c641aaf4942bc3 | johnswanson/tictag | utils.clj | (ns tictag.utils
(:require [com.stuartsierra.component :as component]
[clj-time.local]
[clj-time.format :as f]
[clj-time.core :as t]
[clj-time.coerce :as tc]
[clojure.string :as str]
[taoensso.timbre :as timbre]))
;; "yyyy-MM-dd HH:mm:ss" formatter (unused by the functions below).
(def wtf (f/formatter "yyyy-MM-dd HH:mm:ss"))

(defn local-time
  "Format a clj-time instant as an ISO :date-time string in the JVM's
  default time zone."
  [time]
  (clj-time.local/format-local-time
   (t/to-time-zone time (t/default-time-zone))
   :date-time))

(defn local-time-from-long
  "Like local-time, but takes epoch milliseconds."
  [long-time]
  (local-time (tc/from-long long-time)))

(defn system-map
  "Build a com.stuartsierra.component system map from an ordinary map
  of component-key -> component."
  [m]
  (apply component/system-map
         (flatten (into [] m))))
(defn str-number?
  "Parse s as a Long; nil when s is not a numeric string (or is nil)."
  [s]
  (try
    (Long/valueOf s)
    (catch Exception _ nil)))
(defn success?
  "Truthy when the (ring-style) response map carries a 2xx :status."
  [?http-resp]
  (let [code (:status ?http-resp)]
    (and code (<= 200 code 299))))
(defn kebab-str
  "Name of keyword kw with every underscore replaced by a hyphen."
  [kw]
  (-> kw name (str/replace "_" "-")))
(defn with-ns
  "Re-key map m into namespace ns, kebab-casing each key.
  Returns nil when m is nil."
  [m ns]
  (when m
    (into {}
          (for [[k v] m]
            [(keyword ns (kebab-str k)) v]))))
(defn without-ns
  "Strip the namespace from every key of m."
  [m]
  (into {} (for [[k v] m] [(keyword (name k)) v])))
(defn to-entities
  "Flatten a nested db map of {selector {id entity}} into a flat seq of
  descriptor maps carrying :path, :selector, :namespace, :type, :id and
  :entity for each stored entity."
  [db]
  (->> db
       (map
        (fn [[k m]]
          (map (fn [[id entity]]
                 {:path [k id]
                  :selector k
                  :namespace (namespace k)
                  ;; :type is the selector's namespace as a keyword
                  :type (keyword (namespace k))
                  :id id
                  :entity entity})
               m)))
       (flatten)))
(defn deep-merge
  "Merge maps recursively: nested maps are combined key-by-key instead
  of being replaced wholesale. When any argument (at any depth) is not
  a map, the last value wins -- contrast with clojure.core/merge, where
  the last value for a key always replaces the previous one."
  [& vs]
  (if-not (every? map? vs)
    (last vs)
    (apply merge-with deep-merge vs)))
(defn deep-merge*
  "deep-merge v2 into v1, treating a nil v2 as \"no change\"."
  [v1 v2]
  (if (some? v2)
    (deep-merge v1 v2)
    v1))
;; Comparators for use with compare-by below.
(def ascending compare)
(def descending #(compare %2 %1))
(defn compare-by
  "Build a comparator from alternating keyword/comparator pairs, e.g.
  (compare-by :last-name ascending :age descending). Earlier keys take
  precedence; later pairs only break ties."
  [& key-cmp-pairs]
  ;; The original wrapped its checks in a {:pre ...} map literal inside
  ;; `loop`, where it is just an ordinary expression that is evaluated
  ;; and discarded -- the intended precondition never ran. Use explicit
  ;; asserts so malformed argument lists fail loudly.
  (assert (even? (count key-cmp-pairs))
          "compare-by expects an even number of key/comparator args")
  (fn [x y]
    (loop [[k cmp & more] key-cmp-pairs]
      (assert (keyword? k))
      (assert (fn? cmp))
      (let [result (cmp (k x) (k y))]
        (if (and (zero? result) more)
          (recur more)
          result)))))
(defn ignore-trailing-slash
  "Ring middleware: strip a single trailing \"/\" from :uri (except for
  the root path \"/\") before invoking handler. Useful for treating
  \"/foo\" and \"/foo/\" as the same route.
  Adapted from -regex-for-matching-a-trailing-slash"
  [handler]
  (fn [request]
    (let [uri (:uri request)
          trimmed (if (and (not= "/" uri)
                           (.endsWith uri "/"))
                    (subs uri 0 (dec (count uri)))
                    uri)]
      (handler (assoc request :uri trimmed)))))
| null | https://raw.githubusercontent.com/johnswanson/tictag/89140b5084817690ec417b07b7d095ba7677f4e0/src/clj/tictag/utils.clj | clojure | => {:foo {:bar :baz, :fuzz :buzz}}
contrast with clojure.core/merge
=> {:foo {:fuzz :quzz}} ; note how last value for :foo wins" | (ns tictag.utils
(:require [com.stuartsierra.component :as component]
[clj-time.local]
[clj-time.format :as f]
[clj-time.core :as t]
[clj-time.coerce :as tc]
[clojure.string :as str]
[taoensso.timbre :as timbre]))
(def wtf (f/formatter "yyyy-MM-dd HH:mm:ss"))
(defn local-time [time]
(clj-time.local/format-local-time
(t/to-time-zone time (t/default-time-zone))
:date-time))
(defn local-time-from-long [long-time]
(local-time (tc/from-long long-time)))
(defn system-map [m]
(apply component/system-map
(flatten (into [] m))))
(defn str-number? [s]
(try (Long. s) (catch Exception e nil)))
(defn success? [?http-resp]
(let [status (:status ?http-resp)]
(and status (<= 200 status 299))))
(defn kebab-str [kw]
(str/replace (name kw) #"_" "-"))
(defn with-ns [m ns]
(when m
(into {}
(map
(fn [[k v]]
[(keyword ns (kebab-str k)) v])
m))))
(defn without-ns [m]
(into {} (map (fn [[k v]] [(keyword (name k)) v]) m)))
(defn to-entities [db]
(->> db
(map
(fn [[k m]]
(map (fn [[id entity]]
{:path [k id]
:selector k
:namespace (namespace k)
:type (keyword (namespace k))
:id id
:entity entity})
m)))
(flatten)))
(defn deep-merge
"Deeply merges maps so that nested maps are combined rather than replaced.
For example:
(deep-merge {:foo {:bar :baz}} {:foo {:fuzz :buzz}})
(merge {:foo {:bar :baz}} {:foo {:fuzz :buzz}})
[& vs]
(if (every? map? vs)
(apply merge-with deep-merge vs)
(last vs)))
(defn deep-merge*
[v1 v2]
(if (nil? v2) v1 (deep-merge v1 v2)))
(def ascending compare)
(def descending #(compare %2 %1))
(defn compare-by [& key-cmp-pairs]
(fn [x y]
(loop [[k cmp & more] key-cmp-pairs]
{:pre [(keyword? k), (fn? cmp), (even? (count more))]}
(let [result (cmp (k x) (k y))]
(if (and (zero? result) more)
(recur more)
result)))))
(defn ignore-trailing-slash
"Modifies the request uri before calling the handler.
Removes a single trailing slash from the end of the uri if present.
Useful for handling optional trailing slashes until Compojure's route matching syntax supports regex.
Adapted from -regex-for-matching-a-trailing-slash"
[handler]
(fn [request]
(let [uri (:uri request)]
(handler (assoc request :uri (if (and (not (= "/" uri))
(.endsWith uri "/"))
(subs uri 0 (dec (count uri)))
uri))))))
|
cc7ffcde2ab6a96e3260bc3e1e62e89a604a83a77e7f872ce45ad0604cc56d5d | haskell-jp/slack-log | Types.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE StrictData #
module SlackLog.Types
( module SlackLog.Duration
, Config (..)
, TemplatePaths (..)
, TargetChannels
, UserName
, UserId
, ChannelName
, ChannelId
) where
import qualified Data.Aeson as Json
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import GHC.Generics (Generic)
import SlackLog.Duration
-- | Configuration type used mainly when converting JSON files into HTML.
-- (Several Haddock comment markers below were lost in extraction and
-- have been restored; wording follows the surviving text.)
data Config = Config
  { workspaceName :: T.Text
  , timeZone :: String
  -- ^ Show the times on this timezone.
  , saveRepliesBefore :: Duration
  -- ^ How long slack-logger saves replies before.
  --   slack-logger checks if a thread has more replies for every saved
  --   parent message if the 'Web.Slack.Common.messageTs' of the last
  --   reply of the thread is after 'saveRepliesBefore'.
  --   Here are valid values for example:
  --
  --   * @1m@ for 1 minute.
  --   * @2h@ for 2 hours.
  --   * @3d@ for 3 days.
  --   * @4w@ for 4 weeks.
  , targetChannels :: TargetChannels
  -- ^ Target channels whose messages are collected by slack-logger.
  --   This is the only configuration item used when collecting JSON
  --   files from Slack: not only when converting JSON files into HTML.
  , templatePaths :: TemplatePaths
  -- ^ Path to the mustache template file used to convert the JSON files
  --   into HTML. The template engine is <https://mustache.github.io/ mustache>.
  } deriving (Eq, Show, Generic, Json.FromJSON)
-- | File paths of the mustache templates, one per page kind
-- (index page, per-channel messages page, thread page).
data TemplatePaths = TemplatePaths
  { indexPage :: FilePath
  , messagesPage :: FilePath
  , threadPage :: FilePath
  } deriving (Eq, Show, Generic, Json.FromJSON)
-- | Channel-ID-to-name map of the channels whose logs are collected.
type TargetChannels = HM.HashMap ChannelId ChannelName

-- Readability aliases over 'T.Text'; they add no extra type safety.
type UserName = T.Text
type UserId = T.Text
type ChannelId = T.Text
type ChannelName = T.Text
| null | https://raw.githubusercontent.com/haskell-jp/slack-log/ccf7a64643b62cefa329600120ec5590a527c9c1/src/SlackLog/Types.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric #
| Configuration type used mainly when converting JSON files into HTML.
^ Show the times on this timezone.
^ How long slack-logger saves replies before.
slack-logger checks if a thread has more replies for every saved parent
thread is after 'saveRepliesBefore'.
Here are valid values for example:
This is the only configuration item used when collecting JSON files
^ Path to the mustache template file used to convert the JSON files
into HTML. The template engine is < mustache>. | # LANGUAGE StrictData #
module SlackLog.Types
( module SlackLog.Duration
, Config (..)
, TemplatePaths (..)
, TargetChannels
, UserName
, UserId
, ChannelName
, ChannelId
) where
import qualified Data.Aeson as Json
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import GHC.Generics (Generic)
import SlackLog.Duration
data Config = Config
{ workspaceName :: T.Text
, timeZone :: String
, saveRepliesBefore :: Duration
message if the ' Web . Slack . Common.messageTs ' of the last reply of the
* @1m@ for 1 minute .
* @2h@ for 2 hours .
* @3d@ for 3 days .
* @4w@ for 4 weeks .
, targetChannels :: TargetChannels
^ Target channels whose messages are collected by slack - logger .
from Slack : not only when converting JSON files into HTML .
, templatePaths :: TemplatePaths
} deriving (Eq, Show, Generic, Json.FromJSON)
data TemplatePaths = TemplatePaths
{ indexPage :: FilePath
, messagesPage :: FilePath
, threadPage :: FilePath
} deriving (Eq, Show, Generic, Json.FromJSON)
type TargetChannels = HM.HashMap ChannelId ChannelName
type UserName = T.Text
type UserId = T.Text
type ChannelId = T.Text
type ChannelName = T.Text
|
a86ae3021aeaa13a4bbb2d94fde181811b9940e189274249d10c4a875b1efb56 | kawasima/darzana | mapper.clj | (ns darzana.command.mapper
(:require [clojure.java.data :refer [to-java from-java]]
[clojure.spec.alpha :as s]
[darzana.validator :as v]))
;; Teach clojure.java.data how to coerce request-parameter strings into
;; boxed Java numbers (used by map-to-type below).
(defmethod clojure.java.data/to-java [Long String] [clazz value]
  (Long/parseLong value))

(defmethod clojure.java.data/to-java [Integer String] [clazz value]
  (Integer/parseInt value))
(defn- error?
  "True when v's metadata marks it as an error value (:scope :error)."
  [v]
  (-> v meta :scope (= :error)))
(defn- map-to-type
  "Coerce from-value to the Java type to-type and validate the result.
  Returns the coerced object on success, or the validation-error map
  tagged with {:scope :error} metadata on failure."
  [to-type from-value validator]
  (let [java-obj (to-java to-type from-value)]
    (if-let [err (v/validate validator java-obj)]
      (with-meta err {:scope :error})
      java-obj)))
(defn read-value
  "Copy a value from one scope of the context to another.
  `from` is {:scope s :var v}: scope defaults to :params; :var may be a
  single key or a path vector; when :var is absent the whole scope map
  is taken. `to` is {:scope s :var v :type t}: scope defaults to :page;
  when :type is present the value is coerced/validated via map-to-type.
  On validation failure the error map is merged into [:scope :error];
  otherwise the value is stored at [:scope to-scope to-var]."
  [{{validator :validator} :runtime :as context} from to]
  (let [{from-scope :scope from-var :var :or {from-scope :params}} from
        {to-scope :scope to-var :var to-type :type :or {to-scope :page}} to
        from-value (if from-var
                     (get-in context (into [:scope from-scope]
                                           (if (coll? from-var) from-var [from-var])))
                     (get-in context [:scope from-scope]))
        to-value (if to-type
                   (map-to-type to-type from-value validator)
                   from-value)]
    (if (error? to-value)
      (update-in context [:scope :error] merge to-value)
      (assoc-in context [:scope to-scope to-var] to-value))))
| null | https://raw.githubusercontent.com/kawasima/darzana/4b37c8556f74219b707d23cb2d6dce70509a0c1b/src/darzana/command/mapper.clj | clojure | (ns darzana.command.mapper
(:require [clojure.java.data :refer [to-java from-java]]
[clojure.spec.alpha :as s]
[darzana.validator :as v]))
(defmethod clojure.java.data/to-java [Long String] [clazz value]
(Long/parseLong value))
(defmethod clojure.java.data/to-java [Integer String] [clazz value]
(Integer/parseInt value))
(defn- error? [v]
(= (:scope (meta v)) :error))
(defn- map-to-type [to-type from-value validator]
(let [java-obj (to-java to-type from-value)]
(if-let [err (v/validate validator java-obj)]
(with-meta err {:scope :error})
java-obj)))
(defn read-value
[{{validator :validator} :runtime :as context} from to]
(let [{from-scope :scope from-var :var :or {from-scope :params}} from
{to-scope :scope to-var :var to-type :type :or {to-scope :page}} to
from-value (if from-var
(get-in context (into [:scope from-scope]
(if (coll? from-var) from-var [from-var])))
(get-in context [:scope from-scope]))
to-value (if to-type
(map-to-type to-type from-value validator)
from-value)]
(if (error? to-value)
(update-in context [:scope :error] merge to-value)
(assoc-in context [:scope to-scope to-var] to-value))))
| |
d9a3b89059b796bc2efa906900ee822a0a6297c7b12ddd35ee67c5d790dada54 | synsem/texhs | Reference.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
----------------------------------------------------------------------
-- |
-- Module      :  Text.Bib.Reader.BibTeX.Reference
-- Copyright   :  2015-2017 Mathias Schenner,
--                2015-2016 Language Science Press.
-- License     :  GPL-3
--
-- Maintainer  :
-- Stability   :  experimental
-- Portability :  GHC
--
-- BibTeX parser, layer 2:
-- interpret field values according to BibLaTeX's data model
-- (also see "Text.Bib.Types"), e.g. extract person names.
-- Cross-references between BibTeX entries are not resolved;
-- see "Text.Bib.Reader.BibTeX.Inheritance" instead.
----------------------------------------------------------------------
-- The "( -- * Parsers" opener of the export list was lost during
-- comment extraction; restored to the conventional layout.
module Text.Bib.Reader.BibTeX.Reference
  ( -- * Parsers
    parseBib
  , parseAgents
  , parseList
  ) where
#if MIN_VERSION_base(4,8,0)
-- Prelude exports all required operators from Control.Applicative
#else
import Control.Applicative ((<$>))
#endif
import Data.Char (isLower)
import qualified Data.Map.Strict as M
import Data.Maybe (mapMaybe)
import Data.List (partition, intercalate)
import qualified Data.Text as T
import Text.Bib.Types
import Text.Bib.Reader.BibTeX.Structure
import Text.Doc.Types
import Text.Doc.Reader.TeX (tex2inlines)
import Text.TeX (readTeX)
import Text.TeX.Parser.Types
-------------------- Config
-- Default delimiter between items in name lists and literal lists
-- (matched as a standalone word at brace level 0).
nameSep :: String
nameSep = "and"
-- BibTeX fields that contain name lists
-- (their values are parsed into 'Agent's via 'parseAgents').
agentFields :: [BibFieldName]
agentFields =
  [ "author"
  , "bookauthor"
  , "bookeditor"
  , "editor"
  ]
-- BibTeX fields that contain literal lists.
-- NOTE(review): two list entries were reduced to the bare word
-- "legacy" by comment extraction (they carried "-- legacy" trailing
-- comments). Restored as "address" and "school", the two legacy
-- literal-list fields in the biblatex data model -- confirm against
-- the upstream source.
listFields :: [BibFieldName]
listFields =
  [ "address" -- legacy
  , "institution"
  , "location"
  , "organization"
  , "origlocation"
  , "origpublisher"
  , "publisher"
  , "school" -- legacy
  ]
-- BibTeX fields that contain raw text.
--
-- These internal fields should not be parsed to Inlines.
-- They typically contain citekeys or other internal data.
rawFields :: [BibFieldName]
rawFields =
  [ "crossref"
  , "xdata"
  ]
-------------------- Main conversion
-- | Convert 'BibTeXDB' to 'BibDB'.
--
-- Citekey conflicts: Later duplicates are ignored, i.e. old citekeys
-- shadow new ones. That is, if there are multiple entries with the
-- same citekey, only the first entry is retained (in line with
-- biber-2.2). Citekeys are case-sensitive (unlike field names).
--
-- Fieldname conflicts: Earlier duplicates are ignored, i.e. new
-- fields overwrite old ones. That is, if there are multiple fields
-- within an entry that have the same name, only the last field is
-- retained (in line with biber-2.2). Field names are case-insensitive
-- (unlike citekeys).
parseBib :: BibTeXDB -> BibDB
parseBib db = M.fromListWith (flip const) $
  mapMaybe (parseBibEntry (getPreambles db)) db
-- Convert a single 'Reference' type BibTeX entry to a 'BibEntry'.
--
-- Fields are interpreted in the context of \@preamble.
-- Non-reference entry types are ignored.
parseBibEntry :: String -> BibTeXEntry -> Maybe (CiteKey, BibEntry)
parseBibEntry preamble (Reference rt key rf) =
  -- extract raw fields before TeX-ing the others
  let (raws, contentFields) = partitionBy rawFields rf
      -- prefix preamble to every content field before TeX-ing it
      toTeX = parseTeXField preamble
      texFields = map (fmap toTeX) contentFields
      -- classify BibTeX field types, using predefined key lists like 'agentFields'
      (agents, (lists, others)) =
        partitionBy listFields <$>
          partitionBy agentFields texFields
      entryAgents = map (fmap (AgentList . parseAgents)) agents
      entryLists = map (fmap (LiteralList . parseList)) lists
      entryFields = map (fmap (LiteralField . stripInlines . tex2inlines)) others
      entryRaw = map (fmap (RawField . unwrapFieldValue)) raws
      -- resolve fieldname conflicts (using M.fromList): retain the last field
      fields = M.unions (map M.fromList [entryRaw, entryAgents, entryLists, entryFields])
  in Just (key, BibEntry rt fields)
parseBibEntry _ _ = Nothing
-------------------- Evaluate field content
-- Note: We cannot parse directly to 'Inline' elements, because some
-- fields (e.g. 'agentFields') need to be processed at \"brace level 0\",
-- i.e. before groups are flattened to inline elements.

-- Parse a BibTeX field value as TeX (prefixing the \@preamble text).
parseTeXField :: String -> FieldValue -> TeX
parseTeXField prefix = readTeX "bibfield"
  . (prefix++) . T.unpack . unwrapFieldValue
-- Collect BibTeX preambles: concatenate the contents of all
-- \@preamble entries in the database into one String.
getPreambles :: BibTeXDB -> String
getPreambles = T.unpack . T.concat . mapMaybe unwrapPreamble
------------------ literal lists
-- | Parse a literal list (e.g. a @publisher@ field).
--
-- Items are separated by \"and\" (at brace level 0);
-- empty items are dropped.
parseList :: TeX -> [[Inline]]
parseList = filter (not . null)
  . map (stripInlines . tex2inlines)
  . splitTeXAtWord nameSep
------------------ Parse name lists
-- | Parse a list of agents (e.g. person or company names),
-- typically from a BibTeX field value of type @name list@.
--
-- Field values are split into several components in three steps:
-- (1) into individual agent names separated by \"and\" (at brace level 0),
-- (2) into proto name parts separated by comma (\",\") (at brace level 0),
-- (3) into words separated by whitespace (at brace level 0).
parseAgents :: TeX -> [Agent]
parseAgents = mapMaybe parseAgent . splitTeXAtWord nameSep
-- | Parse a single agent name.
--
-- One comma-separated component: "First [von] Last" format.
-- Two components: "[von] Last, First". Three or more:
-- "[von] Last, Suffix, First" (extra components join the first names).
parseAgent :: TeX -> Maybe Agent
parseAgent xs =
  case map wordsTeX (splitTeXAtChar ',' xs) of
    [] -> Nothing
    [c1] -> return $ from1 c1
    [c1,c2] -> return $ from2 c1 c2
    (c1:c2:c3:cs) -> return $ (from2 c1 (c3 ++ concat cs))
      { agentSuffix = detex c2 }
  where
    mkAgent :: [TeX] -> [TeX] -> [TeX] -> [TeX] -> Agent
    mkAgent n1 n2 n3 n4 = Agent (detex n1) (detex n2) (detex n3) (detex n4)
    detex :: [TeX] -> [Inline]
    detex = stripInlines . tex2inlines . intercalate [White]
    -- If there are prefix names but no last name, use the final prefix name
    -- as the last name (even though it is in lower case).
    fillLast :: ([a], [a]) -> ([a], [a])
    fillLast (prefixnames, lastnames) =
      if null lastnames
        then (init prefixnames, [last prefixnames])
        else (prefixnames, lastnames)
    -- Extract agent name from a single field (no comma).
    from1 :: [TeX] -> Agent
    from1 ws =
      if any isLowerCase ws
        then let (firstnames, (prefixnames, lastnames))
                   = (fillLast . span isLowerCase) <$> break isLowerCase ws
             in mkAgent firstnames prefixnames lastnames []
        else let (firstnames, lastnames) = (init ws, [last ws])
             in mkAgent firstnames [] lastnames []
    -- Extract agent name from two fields (one comma).
    from2 :: [TeX] -> [TeX] -> Agent
    from2 ws firstnames =
      let (prefixnames, lastnames) = fillLast (span isLowerCase ws)
      in mkAgent firstnames prefixnames lastnames []
-------------------- TeX manipulation helpers (operating at brace level 0)
-- | Split TeX at a separating character.
--
-- The character must appear on brace level 0.
splitTeXAtChar :: Char -> TeX -> [TeX]
splitTeXAtChar _ [] = []
splitTeXAtChar c (Plain xs@(_:_) : ts) =
  case break (==c) xs of
    -- case 1: no match
    (ys, []) ->
      case splitTeXAtChar c ts of
        [] -> [[Plain ys]]
        (u:us) -> (Plain ys : u) : us
    -- case 2: match
    (ys, zs) ->
      let ys' = if null ys then [] else [Plain ys]
          zs' = case dropWhile (==c) zs of
                  [] -> ts
                  cs@(_:_) -> Plain cs : ts
      in case splitTeXAtChar c zs' of
           [] -> [ys']
           us@(_:_) -> ys':us
splitTeXAtChar c (t:ts) =
  case splitTeXAtChar c ts of
    [] -> [[t]]
    (ys:zs) -> (t:ys):zs
-- | Split TeX at a separating word.
--
-- The word must be surrounded by white space on brace level 0.
splitTeXAtWord :: String -> TeX -> [TeX]
splitTeXAtWord sep = map unwordsTeX . splitAnyAt [Plain sep] . wordsTeX
-- Split a list into chunks at every occurrence of a separator item.
-- Separators are dropped and no empty chunks are produced.
splitAnyAt :: Eq a => a -> [a] -> [[a]]
splitAnyAt sep = go
  where
    go [] = []
    go xs =
      let (chunk, rest) = break (== sep) xs
          more = go (dropWhile (== sep) rest)
      in if null chunk then more else chunk : more
-- | Break TeX into words.
--
-- A TeX word is a list of TeXAtoms that is
-- surrounded by whitespace on brace level 0.
wordsTeX :: TeX -> [TeX]
wordsTeX [] = []
wordsTeX xs@(_:_) =
  let (t, ts) = break isWhite (dropWhile isWhite xs)
  in t : wordsTeX (dropWhile isWhite ts)
-- | Combine TeX words by injecting top-level whitespace
-- (inverse-ish of 'wordsTeX').
unwordsTeX :: [TeX] -> TeX
unwordsTeX = intercalate [White]
-- | Test whether a word starts with a lower-case letter.
-- (An empty word counts as lower-case.)
--
-- This is used by BibTeX to detect prefix name parts.
isLowerCase :: TeX -> Bool
isLowerCase [] = True
isLowerCase (Plain (x:_) : _) = isLower x
isLowerCase (Group _ _ xs : _) = isLowerCase (dropWhile isWhite xs)
isLowerCase _ = False
-------------------- Helpers for BibTeX fields
-- Helper for classifying BibTeX fields.
--
-- Partition a list of key-value pairs according to whether
-- the key is contained in a provided list of keys (e.g. 'agentFields').
partitionBy :: Eq a => [a] -> [(a, b)] -> ([(a, b)], [(a, b)])
partitionBy keys = partition (\(k, _) -> k `elem` keys)
| null | https://raw.githubusercontent.com/synsem/texhs/9e2dce4ec8ae0b2c024e1883d9a93bab15f9a86f/src/Text/Bib/Reader/BibTeX/Reference.hs | haskell | # LANGUAGE OverloadedStrings #
--------------------------------------------------------------------
|
License : GPL-3
Maintainer :
Stability : experimental
interpret field values according to BibLaTeX's data model
(also see "Text.Bib.Types"), e.g. extract person names.
Cross-references between BibTeX entries are not resolved;
see "Text.Bib.Reader.BibTeX.Inheritance" instead.
--------------------------------------------------------------------
------------------ Config
Default delimiter in name lists and literal lists.
BibTeX fields that contain name lists.
BibTeX fields that contain literal lists.
BibTeX fields that contain raw text.
They typically contain citekeys or other internal data.
------------------ Main conversion
shadow new ones. That is, if there are multiple entries with the
biber-2.2). Citekeys are case-sensitive (unlike field names).
Fieldname conflicts: Earlier duplicates are ignored, i.e. new
fields overwrite old ones. That is, if there are multiple fields
within an entry that have the same name, only the last field is
retained (in line with biber-2.2). Field names are case-insensitive
(unlike citekeys).
Convert a single 'Reference' type BibTeX entry to a 'BibEntry'.
Non-reference entry types are ignored.
extract raw fields before TeX-ing the others
prefix preamble to every content field before TeX-ing it
resolve fieldname conflicts (using M.fromList): retain the last field
------------------ Evaluate field content
Note: We cannot parse directly to 'Inline' elements, because some
i.e. before groups are flattened to inline elements.
Collect BibTeX preambles.
---------------- literal lists
| Parse a literal lists.
Items are separated by \"and\" (at brace level 0).
---------------- Parse name lists
| Parse a list of agents (e.g. person or company names),
typically from a BibTeX field value of type @name list@.
| Parse a single agent name.
If there are prefix names but no last name, use the final prefix name
as the last name (even though it is in lower case).
Extract agent name from a single field (no comma).
------------------ TeX manipulation helpers (operating at brace level 0)
The character must appear on brace level 0.
The word must be surrounded by white space on brace level 0.
Split list at a certain item.
| Break TeX into words.
surrounded by whitespace on brace level 0.
| Combine TeX words by injecting top-level whitespace.
This is used by BibTeX to detect prefix name parts.
------------------ Helpers for BibTeX fields
Helper for classifying BibTeX fields.
Partition a list of key-value pairs according to whether
the key is contained in a provided list of keys (e.g. 'agentFields'). | # LANGUAGE CPP #
Module : Text . Bib . Reader .
Copyright : 2015 - 2017 ,
2015 - 2016 Language Science Press .
Portability : GHC
BibTeX parser , layer 2 :
module Text.Bib.Reader.BibTeX.Reference
* Parsers
parseBib
, parseAgents
, parseList
) where
#if MIN_VERSION_base(4,8,0)
Prelude exports all required operators from Control . Applicative
#else
import Control.Applicative ((<$>))
#endif
import Data.Char (isLower)
import qualified Data.Map.Strict as M
import Data.Maybe (mapMaybe)
import Data.List (partition, intercalate)
import qualified Data.Text as T
import Text.Bib.Types
import Text.Bib.Reader.BibTeX.Structure
import Text.Doc.Types
import Text.Doc.Reader.TeX (tex2inlines)
import Text.TeX (readTeX)
import Text.TeX.Parser.Types
nameSep :: String
nameSep = "and"
agentFields :: [BibFieldName]
agentFields =
[ "author"
, "bookauthor"
, "bookeditor"
, "editor"
]
listFields :: [BibFieldName]
listFields =
legacy
, "institution"
, "location"
, "organization"
, "origlocation"
, "origpublisher"
, "publisher"
legacy
]
These internal fields should not be parsed to Inlines .
rawFields :: [BibFieldName]
rawFields =
[ "crossref"
, "xdata"
]
| Convert ' ' to ' BibDB ' .
Citekey conflicts : Later duplicates are ignored , i.e. old citekeys
same citekey , only the first entry is retained ( in line with
parseBib :: BibTeXDB -> BibDB
parseBib db = M.fromListWith (flip const) $
mapMaybe (parseBibEntry (getPreambles db)) db
Fields are interpreted in the context of \@preamble .
parseBibEntry :: String -> BibTeXEntry -> Maybe (CiteKey, BibEntry)
parseBibEntry preamble (Reference rt key rf) =
let (raws, contentFields) = partitionBy rawFields rf
toTeX = parseTeXField preamble
texFields = map (fmap toTeX) contentFields
classify BibTeX field types , using predefined key lists like ' agentFields '
(agents, (lists, others)) =
partitionBy listFields <$>
partitionBy agentFields texFields
entryAgents = map (fmap (AgentList . parseAgents)) agents
entryLists = map (fmap (LiteralList . parseList)) lists
entryFields = map (fmap (LiteralField . stripInlines . tex2inlines)) others
entryRaw = map (fmap (RawField . unwrapFieldValue)) raws
fields = M.unions (map M.fromList [entryRaw, entryAgents, entryLists, entryFields])
in Just (key, BibEntry rt fields)
parseBibEntry _ _ = Nothing
fields ( e.g. ' agentFields ' ) need to be processed at \"brace level 0\ " ,
Parse a BibTeX field value as TeX.
parseTeXField :: String -> FieldValue -> TeX
parseTeXField prefix = readTeX "bibfield"
. (prefix++) . T.unpack . unwrapFieldValue
getPreambles :: BibTeXDB -> String
getPreambles = T.unpack . T.concat . mapMaybe unwrapPreamble
parseList :: TeX -> [[Inline]]
parseList = filter (not . null)
. map (stripInlines . tex2inlines)
. splitTeXAtWord nameSep
Field values are split into several components in three steps :
( 1 ) into individual agent names separated by \"and\ " ( at brace level 0 ) ,
( 2 ) into proto name parts separated by comma ( \",\ " ) ( at brace level 0 ) ,
( 3 ) into words separated by whitespace ( at brace level 0 ) .
parseAgents :: TeX -> [Agent]
parseAgents = mapMaybe parseAgent . splitTeXAtWord nameSep
-- | Parse a single agent name from a TeX fragment.
--
-- Returns 'Nothing' on empty input.  The number of commas selects the
-- BibTeX name format:
--
--   * no comma:   @First von Last@
--   * one comma:  @von Last, First@
--   * two commas: @von Last, Suffix, First@ (any further comma groups
--     are appended to the first-name part).
parseAgent :: TeX -> Maybe Agent
parseAgent xs =
  case map wordsTeX (splitTeXAtChar ',' xs) of
    [] -> Nothing
    [c1] -> return $ from1 c1
    [c1,c2] -> return $ from2 c1 c2
    (c1:c2:c3:cs) -> return $ (from2 c1 (c3 ++ concat cs))
                     { agentSuffix = detex c2 }
  where
    -- Assemble an 'Agent' from first / prefix (von) / last / suffix parts.
    mkAgent :: [TeX] -> [TeX] -> [TeX] -> [TeX] -> Agent
    mkAgent n1 n2 n3 n4 = Agent (detex n1) (detex n2) (detex n3) (detex n4)
    -- Render a list of TeX words as stripped inlines.
    detex :: [TeX] -> [Inline]
    detex = stripInlines . tex2inlines . intercalate [White]
    -- Guarantee a non-empty last-name part by stealing the final prefix
    -- word if necessary.
    -- NOTE(review): 'init'/'last' are partial; this assumes at least one
    -- prefix word when lastnames is empty — TODO confirm callers ensure it.
    fillLast :: ([a], [a]) -> ([a], [a])
    fillLast (prefixnames, lastnames) =
      if null lastnames
      then (init prefixnames, [last prefixnames])
      else (prefixnames, lastnames)
    -- Extract agent name from a single field (no comma): words starting
    -- lower-case are routed to the prefix (von) part.
    from1 :: [TeX] -> Agent
    from1 ws =
      if any isLowerCase ws
      then let (firstnames, (prefixnames, lastnames))
                 = (fillLast . span isLowerCase) <$> break isLowerCase ws
           in mkAgent firstnames prefixnames lastnames []
      else let (firstnames, lastnames) = (init ws, [last ws])
           in mkAgent firstnames [] lastnames []
    -- Extract agent name from two fields (one comma).
    from2 :: [TeX] -> [TeX] -> Agent
    from2 ws firstnames =
      let (prefixnames, lastnames) = fillLast (span isLowerCase ws)
      in mkAgent firstnames prefixnames lastnames []
-- | Split a TeX atom list at a separating character.
-- | Split a TeX atom list at every occurrence of a separating character
-- inside 'Plain' text.  Runs of the separator are collapsed; a chunk
-- that ends up empty before a match is kept as an empty list.  Atoms
-- other than 'Plain' are never split: they attach to the first chunk of
-- the remainder.
splitTeXAtChar :: Char -> TeX -> [TeX]
splitTeXAtChar _ [] = []
splitTeXAtChar c (Plain xs@(_:_) : ts) =
  case break (==c) xs of
    -- case 1: no match in this Plain run
    (ys, []) ->
      case splitTeXAtChar c ts of
        [] -> [[Plain ys]]
        (u:us) -> (Plain ys : u) : us
    -- case 2: match; split here and keep scanning the rest
    (ys, zs) ->
      let ys' = if null ys then [] else [Plain ys]
          zs' = case dropWhile (==c) zs of
                  [] -> ts
                  cs@(_:_) -> Plain cs : ts
      in case splitTeXAtChar c zs' of
           [] -> [ys']
           us@(_:_) -> ys':us
splitTeXAtChar c (t:ts) =
  case splitTeXAtChar c ts of
    [] -> [[t]]
    (ys:zs) -> (t:ys):zs
-- | Split a TeX atom list at a separating word.
-- | Split TeX into chunks delimited by the given separator word,
-- comparing at word granularity.
splitTeXAtWord :: String -> TeX -> [TeX]
splitTeXAtWord sep ts =
  map unwordsTeX (splitAnyAt [Plain sep] (wordsTeX ts))
-- | Split a list at every occurrence of a separator element,
-- collapsing runs of separators and dropping empty chunks.
splitAnyAt :: Eq a => a -> [a] -> [[a]]
splitAnyAt _ [] = []
splitAnyAt sep xs =
  case break (== sep) xs of
    ([], rest)    -> splitAnyAt sep (dropWhile (== sep) rest)
    (chunk, rest) -> chunk : splitAnyAt sep (dropWhile (== sep) rest)
-- | Break TeX into words.  A TeX word is a list of TeXAtoms that is
-- not interrupted by whitespace.
-- | Break a TeX atom list into whitespace-separated words.
wordsTeX :: TeX -> [TeX]
wordsTeX [] = []
wordsTeX atoms@(_:_) =
  case break isWhite (dropWhile isWhite atoms) of
    (word, rest) -> word : wordsTeX (dropWhile isWhite rest)
-- | Rejoin TeX words with a single 'White' atom between them.
unwordsTeX :: [TeX] -> TeX
unwordsTeX ws = intercalate [White] ws
-- | Test whether a word starts with a lower-case letter.
-- | Test whether a TeX word starts with a lower-case letter.
--
-- The empty word counts as lower-case; a 'Group' is inspected
-- recursively after skipping leading whitespace; anything else
-- (commands, etc.) does not.
isLowerCase :: TeX -> Bool
isLowerCase [] = True
isLowerCase (Plain (x:_) : _) = isLower x
isLowerCase (Group _ _ xs : _) = isLowerCase (dropWhile isWhite xs)
isLowerCase _ = False
-- | Partition key-value pairs into those whose key occurs in the given
-- key list and the rest, preserving relative order within each part.
partitionBy :: Eq a => [a] -> [(a, b)] -> ([(a, b)], [(a, b)])
partitionBy keys = partition (\(k, _) -> k `elem` keys)
|
77d82c0c49f3755ee1d35ef1d1e7ef83cb5986f86e61427526781f55a6d2289f | ekmett/categories | Functor.hs | # LANGUAGE KindSignatures #
# LANGUAGE PolyKinds #
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE NoImplicitPrelude #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeOperators #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UndecidableInstances #
# LANGUAGE DefaultSignatures #
module Math.Functor
( Functor(..)
, FunctorOf
, ob
, Nat(..)
, Bifunctor, Dom2, Cod2
, bimap, first, second
, dimap, lmap, rmap
, contramap
) where
import Data.Constraint as Constraint
import Data.Type.Equality as Equality
import Data.Type.Coercion as Coercion
import Math.Category
import qualified Prelude
import Prelude (($), Either(..), Maybe(..))
--------------------------------------------------------------------------------
-- * Functors
--------------------------------------------------------------------------------
-- | A functor between two (possibly distinct) categories.
--
-- 'Dom' names the source category of @f@ and 'Cod' its target;
-- 'fmap' carries arrows of the former to arrows of the latter.
class (Category (Cod f), Category (Dom f)) => Functor (f :: i -> j) where
  type Dom f :: i -> i -> *
  type Cod f :: j -> j -> *
  fmap :: Dom f a b -> Cod f (f a) (f b)
-- | Contravariant mapping: unwrap the 'Op' arrow and 'fmap' it.
-- Usable for any 'Functor' whose domain is an opposite category.
contramap :: Functor f => Op (Dom f) b a -> Cod f (f a) (f b)
contramap f = fmap (unop f)
-- | Covariant hom functor of functions: 'fmap' is postcomposition.
instance Functor ((->) e) where
  type Dom ((->) e) = (->)
  type Cod ((->) e) = (->)
  fmap = (.)

-- | Covariant hom functor of constraint entailments.
instance Functor ((:-) e) where
  type Dom ((:-) e) = (:-)
  type Cod ((:-) e) = (->)
  fmap = (.)

-- | 'Dict' carries an entailment to a function between reified
-- dictionaries.
instance Functor Dict where
  type Dom Dict = (:-)
  type Cod Dict = (->)
  fmap p Dict = case p of
    Sub q -> q

-- | Covariant hom functor of type equalities.
instance Functor ((:~:) e) where
  type Dom ((:~:) e) = (:~:)
  type Cod ((:~:) e) = (->)
  fmap = (.)

-- | Covariant hom functor of representational coercions.
instance Functor (Coercion e) where
  type Dom (Coercion e) = Coercion
  type Cod (Coercion e) = (->)
  fmap = (.)
-- | Pairs, functorial in the second component; the irrefutable pattern
-- keeps the match lazy.
instance Functor ((,) e) where
  type Dom ((,) e) = (->)
  type Cod ((,) e) = (->)
  fmap f ~(a,b) = (a, f b)

-- | 'Either', functorial in the 'Right' component.
instance Functor (Either a) where
  type Dom (Either a) = (->)
  type Cod (Either a) = (->)
  fmap _ (Left a) = Left a
  fmap f (Right b) = Right (f b)

-- | Lists, delegating to the Prelude instance.
instance Functor [] where
  type Dom [] = (->)
  type Cod [] = (->)
  fmap = Prelude.fmap

-- | 'Maybe', delegating to the Prelude instance.
instance Functor Maybe where
  type Dom Maybe = (->)
  type Cod Maybe = (->)
  fmap = Prelude.fmap
-- | Partially applied Yoneda embedding: covariant in its second
-- argument, with arrows taken in the 'Yoneda' (opposite) category.
instance (Category p, Op p ~ Yoneda p) => Functor (Yoneda p a) where
  type Dom (Yoneda p a) = Yoneda p
  type Cod (Yoneda p a) = (->)
  fmap = (.)
-- | @FunctorOf c d f@ packages \"@f@ is a functor from @c@ to @d@\" as
-- a single constraint, used below as the object constraint of 'Nat'.
class (Dom f ~ c, Cod f ~ d, Functor f) => FunctorOf (c :: i -> i -> *) (d :: j -> j -> *) (f :: i -> j)
instance (Dom f ~ c, Cod f ~ d, Functor f) => FunctorOf c d f
-- | Functors carry objects of their domain to objects of their
-- codomain.  The entailment is extracted by taking the 'source' of the
-- identity component @fmap id@.
ob :: forall f a. Functor f => Ob (Dom f) a :- Ob (Cod f) (f a)
ob = Sub $ case source (fmap (id :: Dom f a a) :: Cod f (f a) (f a)) of
  Dict -> Dict
--------------------------------------------------------------------------------
-- * Natural Transformations
--------------------------------------------------------------------------------
-- | Natural transformations between functors from @c@ to @d@: a
-- component @d (f a) (g a)@ for every object @a@ of @c@, packaged with
-- evidence that both endpoints are functors.
data Nat (c :: i -> i -> *) (d :: j -> j -> *) (f :: i -> j) (g :: i -> j) where
  Nat :: (FunctorOf c d f, FunctorOf c d g) => { runNat :: forall a. Ob c a => d (f a) (g a) } -> Nat c d f g
-- | The functor category @[c, d]@: objects are functors from @c@ to
-- @d@, arrows are natural transformations, composed componentwise.
instance (Category c, Category d) => Category (Nat c d) where
  type Ob (Nat c d) = FunctorOf c d
  id = Nat id1 where
    -- identity component at each object; 'ob' supplies the codomain
    -- object constraint needed for 'id' in d
    id1 :: forall f x. (Functor f, Dom f ~ c, Cod f ~ d, Ob c x) => d (f x) (f x)
    id1 = id \\ (ob :: Ob c x :- Ob d (f x))
  Nat f . Nat g = Nat (f . g)
  source Nat{} = Dict
  target Nat{} = Dict
-- | @Nat c d@ is contravariant in its first functor argument:
-- precompose the given transformation.
instance (Category c, Category d) => Functor (Nat c d) where
  type Dom (Nat c d) = Op (Nat c d)
  type Cod (Nat c d) = Nat (Nat c d) (->)
  fmap (Op f) = Nat (. f)

-- | @Nat c d f@ is covariant in its second functor argument:
-- postcompose.
instance (Category c, Category d) => Functor (Nat c d f) where
  type Dom (Nat c d f) = Nat c d
  type Cod (Nat c d f) = (->)
  fmap = (.)

-- | Any 'Nat' arrow witnesses that both endpoints satisfy
-- @FunctorOf c d@, so the constraint itself is functorial.
instance (Category c, Category d) => Functor (FunctorOf c d) where
  type Dom (FunctorOf c d) = Nat c d
  type Cod (FunctorOf c d) = (:-)
  fmap Nat{} = Sub Dict
-- | @(->)@ as a two-argument functor: contravariant in its first
-- argument (precomposition under 'Op').
instance Functor (->) where
  type Dom (->) = Op (->)
  type Cod (->) = Nat (->) (->)
  fmap (Op f) = Nat (. f)

-- | @(:-)@ as a two-argument functor on entailments.
instance Functor (:-) where
  type Dom (:-) = Op (:-)
  type Cod (:-) = Nat (:-) (->)
  fmap (Op f) = Nat (. f)

-- | @(:~:)@ as a two-argument functor on type equalities.
instance Functor (:~:) where
  type Dom (:~:) = Op (:~:)
  type Cod (:~:) = Nat (:~:) (->)
  fmap (Op f) = Nat (. f)

-- | 'Coercion' as a two-argument functor on coercions.
instance Functor Coercion where
  type Dom Coercion = Op Coercion
  type Cod Coercion = Nat Coercion (->)
  fmap (Op f) = Nat (. f)

-- | Unapplied Yoneda embedding: covariant over @p@ itself, wrapping
-- the mapped arrow in 'Op' before precomposing.
instance (Category p, Op p ~ Yoneda p) => Functor (Yoneda p) where
  type Dom (Yoneda p) = p
  type Cod (Yoneda p) = Nat (Yoneda p) (->)
  fmap f = Nat (. Op f)
-- | @(,)@ as a two-argument functor: map the first component.
instance Functor (,) where
  type Dom (,) = (->)
  type Cod (,) = Nat (->) (->)
  fmap f = Nat $ \(a,b) -> (f a, b)

-- | 'Either' as a two-argument functor: map the 'Left' component.
instance Functor Either where
  type Dom Either = (->)
  type Cod Either = Nat (->) (->)
  fmap f0 = Nat (go f0) where
    go :: (a -> b) -> Either a c -> Either b c
    go f (Left a) = Left (f a)
    go _ (Right b) = Right b
--------------------------------------------------------------------------------
-- * Bifunctors
--------------------------------------------------------------------------------
-- | Project the domain category out of a 'Nat' arrow kind.
type family NatDom (f :: (i -> j) -> (i -> j) -> *) :: (i -> i -> *) where NatDom (Nat p q) = p
-- | Project the codomain category out of a 'Nat' arrow kind.
type family NatCod (f :: (i -> j) -> (i -> j) -> *) :: (j -> j -> *) where NatCod (Nat p q) = q

-- | Domain category of the second argument of a curried two-argument
-- functor @p@.
type Dom2 p = NatDom (Cod p)
-- | Codomain category of the second argument of a curried two-argument
-- functor @p@.
type Cod2 p = NatCod (Cod p)
-- | A bifunctor is a functor whose codomain is itself a functor
-- category — i.e. a curried functor of two arguments.
class (Functor p, Cod p ~ Nat (Dom2 p) (Cod2 p), Category (Dom2 p), Category (Cod2 p)) => Bifunctor (p :: i -> j -> k)
instance (Functor p, Cod p ~ Nat (Dom2 p) (Cod2 p), Category (Dom2 p), Category (Cod2 p)) => Bifunctor (p :: i -> j -> k)
-- | Map over the first argument of a bifunctor, holding the second
-- fixed at an object @c@: take the component of the induced natural
-- transformation.
first :: (Functor f, Cod f ~ Nat d e, Ob d c) => Dom f a b -> e (f a c) (f b c)
first f = runNat (fmap f)
-- | Map over the second argument of a bifunctor.  The 'ob' entailment
-- recovers the @Functor (p c)@ dictionary from the object constraint
-- @Ob (Dom p) c@ before calling 'fmap'.
second :: forall p a b c. (Bifunctor p, Ob (Dom p) c) => Dom2 p a b -> Cod2 p (p c a) (p c b)
second f = case ob :: Ob (Dom p) c :- FunctorOf (Dom2 p) (Cod2 p) (p c) of
  Sub Dict -> fmap f
-- | Map over both arguments of a bifunctor at once, as first-component
-- mapping followed by 'second'.  The 'source'/'target' matches bring
-- the object constraints of @f@ and @g@ into scope.
bimap :: Bifunctor p => Dom p a b -> Dom2 p c d -> Cod2 p (p a c) (p b d)
bimap f g = case source f of
  Dict -> case target g of
    Dict -> runNat (fmap f) . second g
--------------------------------------------------------------------------------
-- * Profunctors
--------------------------------------------------------------------------------
-- | Map contravariantly over the first and covariantly over the second
-- argument of a profunctor.
dimap :: Bifunctor p => Op (Dom p) b a -> Dom2 p c d -> Cod2 p (p a c) (p b d)
dimap f g = bimap (unop f) g
-- | Contravariant mapping over the first argument of a profunctor.
lmap :: (Functor f, Cod f ~ Nat d e, Ob d c) => Op (Dom f) b a -> e (f a c) (f b c)
lmap f = runNat (fmap (unop f))
-- | Covariant mapping over the second argument of a profunctor;
-- a synonym for 'second'.
rmap :: forall p a b c. (Bifunctor p, Ob (Dom p) c) => Dom2 p a b -> Cod2 p (p c a) (p c b)
rmap f = second f
| null | https://raw.githubusercontent.com/ekmett/categories/4a02808d28b275f59d9d6c08f0c2d329ee567a97/src/Math/Functor.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE GADTs #
# LANGUAGE ConstraintKinds #
------------------------------------------------------------------------------
* Functors
------------------------------------------------------------------------------
------------------------------------------------------------------------------
* Natural Transformations
------------------------------------------------------------------------------
------------------------------------------------------------------------------
* Bifunctors
------------------------------------------------------------------------------
------------------------------------------------------------------------------
* Profunctors
------------------------------------------------------------------------------ | # LANGUAGE KindSignatures #
# LANGUAGE PolyKinds #
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeOperators #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UndecidableInstances #
# LANGUAGE DefaultSignatures #
module Math.Functor
( Functor(..)
, FunctorOf
, ob
, Nat(..)
, Bifunctor, Dom2, Cod2
, bimap, first, second
, dimap, lmap, rmap
, contramap
) where
import Data.Constraint as Constraint
import Data.Type.Equality as Equality
import Data.Type.Coercion as Coercion
import Math.Category
import qualified Prelude
import Prelude (($), Either(..), Maybe(..))
class (Category (Cod f), Category (Dom f)) => Functor (f :: i -> j) where
type Dom f :: i -> i -> *
type Cod f :: j -> j -> *
fmap :: Dom f a b -> Cod f (f a) (f b)
contramap :: Functor f => Op (Dom f) b a -> Cod f (f a) (f b)
contramap = fmap . unop
instance Functor ((->) e) where
type Dom ((->) e) = (->)
type Cod ((->) e) = (->)
fmap = (.)
instance Functor ((:-) e) where
type Dom ((:-) e) = (:-)
type Cod ((:-) e) = (->)
fmap = (.)
instance Functor Dict where
type Dom Dict = (:-)
type Cod Dict = (->)
fmap p Dict = case p of
Sub q -> q
instance Functor ((:~:) e) where
type Dom ((:~:) e) = (:~:)
type Cod ((:~:) e) = (->)
fmap = (.)
instance Functor (Coercion e) where
type Dom (Coercion e) = Coercion
type Cod (Coercion e) = (->)
fmap = (.)
instance Functor ((,) e) where
type Dom ((,) e) = (->)
type Cod ((,) e) = (->)
fmap f ~(a,b) = (a, f b)
instance Functor (Either a) where
type Dom (Either a) = (->)
type Cod (Either a) = (->)
fmap _ (Left a) = Left a
fmap f (Right b) = Right (f b)
instance Functor [] where
type Dom [] = (->)
type Cod [] = (->)
fmap = Prelude.fmap
instance Functor Maybe where
type Dom Maybe = (->)
type Cod Maybe = (->)
fmap = Prelude.fmap
instance (Category p, Op p ~ Yoneda p) => Functor (Yoneda p a) where
type Dom (Yoneda p a) = Yoneda p
type Cod (Yoneda p a) = (->)
fmap = (.)
class (Dom f ~ c, Cod f ~ d, Functor f) => FunctorOf (c :: i -> i -> *) (d :: j -> j -> *) (f :: i -> j)
instance (Dom f ~ c, Cod f ~ d, Functor f) => FunctorOf c d f
ob :: forall f a. Functor f => Ob (Dom f) a :- Ob (Cod f) (f a)
ob = Sub $ case source (fmap (id :: Dom f a a) :: Cod f (f a) (f a)) of
Dict -> Dict
data Nat (c :: i -> i -> *) (d :: j -> j -> *) (f :: i -> j) (g :: i -> j) where
Nat :: (FunctorOf c d f, FunctorOf c d g) => { runNat :: forall a. Ob c a => d (f a) (g a) } -> Nat c d f g
instance (Category c, Category d) => Category (Nat c d) where
type Ob (Nat c d) = FunctorOf c d
id = Nat id1 where
id1 :: forall f x. (Functor f, Dom f ~ c, Cod f ~ d, Ob c x) => d (f x) (f x)
id1 = id \\ (ob :: Ob c x :- Ob d (f x))
Nat f . Nat g = Nat (f . g)
source Nat{} = Dict
target Nat{} = Dict
instance (Category c, Category d) => Functor (Nat c d) where
type Dom (Nat c d) = Op (Nat c d)
type Cod (Nat c d) = Nat (Nat c d) (->)
fmap (Op f) = Nat (. f)
instance (Category c, Category d) => Functor (Nat c d f) where
type Dom (Nat c d f) = Nat c d
type Cod (Nat c d f) = (->)
fmap = (.)
instance (Category c, Category d) => Functor (FunctorOf c d) where
type Dom (FunctorOf c d) = Nat c d
type Cod (FunctorOf c d) = (:-)
fmap Nat{} = Sub Dict
instance Functor (->) where
type Dom (->) = Op (->)
type Cod (->) = Nat (->) (->)
fmap (Op f) = Nat (. f)
instance Functor (:-) where
type Dom (:-) = Op (:-)
type Cod (:-) = Nat (:-) (->)
fmap (Op f) = Nat (. f)
instance Functor (:~:) where
type Dom (:~:) = Op (:~:)
type Cod (:~:) = Nat (:~:) (->)
fmap (Op f) = Nat (. f)
instance Functor Coercion where
type Dom Coercion = Op Coercion
type Cod Coercion = Nat Coercion (->)
fmap (Op f) = Nat (. f)
instance (Category p, Op p ~ Yoneda p) => Functor (Yoneda p) where
type Dom (Yoneda p) = p
type Cod (Yoneda p) = Nat (Yoneda p) (->)
fmap f = Nat (. Op f)
instance Functor (,) where
type Dom (,) = (->)
type Cod (,) = Nat (->) (->)
fmap f = Nat $ \(a,b) -> (f a, b)
instance Functor Either where
type Dom Either = (->)
type Cod Either = Nat (->) (->)
fmap f0 = Nat (go f0) where
go :: (a -> b) -> Either a c -> Either b c
go f (Left a) = Left (f a)
go _ (Right b) = Right b
type family NatDom (f :: (i -> j) -> (i -> j) -> *) :: (i -> i -> *) where NatDom (Nat p q) = p
type family NatCod (f :: (i -> j) -> (i -> j) -> *) :: (j -> j -> *) where NatCod (Nat p q) = q
type Dom2 p = NatDom (Cod p)
type Cod2 p = NatCod (Cod p)
class (Functor p, Cod p ~ Nat (Dom2 p) (Cod2 p), Category (Dom2 p), Category (Cod2 p)) => Bifunctor (p :: i -> j -> k)
instance (Functor p, Cod p ~ Nat (Dom2 p) (Cod2 p), Category (Dom2 p), Category (Cod2 p)) => Bifunctor (p :: i -> j -> k)
first :: (Functor f, Cod f ~ Nat d e, Ob d c) => Dom f a b -> e (f a c) (f b c)
first = runNat . fmap
second :: forall p a b c. (Bifunctor p, Ob (Dom p) c) => Dom2 p a b -> Cod2 p (p c a) (p c b)
second f = case ob :: Ob (Dom p) c :- FunctorOf (Dom2 p) (Cod2 p) (p c) of
Sub Dict -> fmap f
bimap :: Bifunctor p => Dom p a b -> Dom2 p c d -> Cod2 p (p a c) (p b d)
bimap f g = case source f of
Dict -> case target g of
Dict -> runNat (fmap f) . second g
dimap :: Bifunctor p => Op (Dom p) b a -> Dom2 p c d -> Cod2 p (p a c) (p b d)
dimap = bimap . unop
lmap :: (Functor f, Cod f ~ Nat d e, Ob d c) => Op (Dom f) b a -> e (f a c) (f b c)
lmap = runNat . fmap . unop
rmap :: forall p a b c. (Bifunctor p, Ob (Dom p) c) => Dom2 p a b -> Cod2 p (p c a) (p c b)
rmap = second
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.