_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(job_enable_common).
% Public API for use by other tests
-export([
bin_bucket/1,
bin_key/1,
bin_val/1,
close_client/1,
enabled_string/1,
get_enabled/2,
index_2i/0,
index_name/1,
index_yz/0,
load_data/1,
num_buckets/0, num_buckets/1,
num_keys/0, num_keys/1,
open_client/2,
populated_bucket/0,
set_enabled/3,
setup_cluster/1,
setup_yokozuna/1,
test_buckets/0,
test_keys/0,
test_label/3,
test_nums/0,
test_operation/4,
test_vals/0,
undefined_bucket/0
]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("riakc/include/riakc.hrl").
-include_lib("riakhttpc/include/rhc.hrl").
-include("job_enable_common.hrl").
-define(DEFAULT_NUM_BUCKETS, 7).
-define(DEFAULT_NUM_KEYS, 9).
%% ===================================================================
%% Test API
%% ===================================================================
enabled_string(true) ->
"enabled";
enabled_string(false) ->
"disabled".
test_label(Class, Enabled, ClientType) ->
io_lib:format("~s ~p ~s", [ClientType, Class, enabled_string(Enabled)]).
bin_bucket(Num) ->
erlang:list_to_binary(["Bucket_", erlang:integer_to_list(Num)]).
bin_key(Num) ->
erlang:list_to_binary(["Key_", erlang:integer_to_list(Num)]).
bin_val(Num) ->
erlang:list_to_binary(["Val_", erlang:integer_to_list(Num)]).
index_2i() ->
{integer_index, "valnum_index_2i"}.
%% @doc The Yokozuna (search) index name used by these tests.
%% setup_yokozuna/1 must have created this index before YZ tests run.
index_yz() ->
    <<"valnum_index_yz">>.
%% @doc Render an index identifier as the flat string used in HTTP URLs.
%% Accepts {binary_index, Name} / {integer_index, Name} tuples (appending
%% the "_bin" / "_int" suffix), plus bare atoms, binaries, and strings.
%% Any other term raises badarg.
index_name({binary_index, Name}) ->
    index_name(Name) ++ "_bin";
index_name({integer_index, Name}) ->
    index_name(Name) ++ "_int";
index_name(Name) when erlang:is_atom(Name) ->
    erlang:atom_to_list(Name);
index_name(Name) when erlang:is_binary(Name) ->
    erlang:binary_to_list(Name);
index_name(Name) when erlang:is_list(Name) ->
    Name;
index_name(Bad) ->
    erlang:error(badarg, [Bad]).
%% @doc Number of test buckets, lazily initialized to the default and
%% cached in the process dictionary.
num_buckets() ->
    Key = {?MODULE, num_buckets},
    case erlang:get(Key) of
        undefined ->
            Default = ?DEFAULT_NUM_BUCKETS,
            erlang:put(Key, Default),
            Default;
        Cached ->
            Cached
    end.
%% @doc Set the number of test buckets. When the count actually changes,
%% the caches derived from it (test_buckets, populated_bucket) are
%% invalidated so they get rebuilt on next access. Returns Num.
num_buckets(Num) when erlang:is_integer(Num) andalso Num > 0 ->
    Key = {?MODULE, num_buckets},
    case erlang:get(Key) of
        Num ->
            %% unchanged - nothing to invalidate
            ok;
        undefined ->
            erlang:put(Key, Num);
        _Changed ->
            erlang:erase({?MODULE, test_buckets}),
            erlang:erase({?MODULE, populated_bucket}),
            erlang:put(Key, Num)
    end,
    Num.
%% @doc Number of test keys per populated bucket, lazily initialized to the
%% default and cached in the process dictionary.
num_keys() ->
    Key = {?MODULE, num_keys},
    case erlang:get(Key) of
        undefined ->
            Default = ?DEFAULT_NUM_KEYS,
            erlang:put(Key, Default),
            Default;
        Cached ->
            Cached
    end.
%% @doc Set the number of test keys. When the count actually changes, the
%% caches derived from it (test_keys, test_nums, test_vals) are invalidated
%% so they get rebuilt on next access. Returns Num.
num_keys(Num) when erlang:is_integer(Num) andalso Num > 0 ->
    Key = {?MODULE, num_keys},
    case erlang:get(Key) of
        Num ->
            %% unchanged - nothing to invalidate
            ok;
        undefined ->
            erlang:put(Key, Num);
        _Changed ->
            erlang:erase({?MODULE, test_keys}),
            erlang:erase({?MODULE, test_nums}),
            erlang:erase({?MODULE, test_vals}),
            erlang:put(Key, Num)
    end,
    Num.
%% @doc The one test bucket that load_data/3 fills with the full key/value
%% set (a roughly middle element of test_buckets/0), cached in the process
%% dictionary after first computation.
populated_bucket() ->
    Key = {?MODULE, populated_bucket},
    case erlang:get(Key) of
        undefined ->
            Buckets = test_buckets(),
            Pick = lists:nth(erlang:length(Buckets) div 2, Buckets),
            erlang:put(Key, Pick),
            Pick;
        Cached ->
            Cached
    end.
%% @doc A bucket name that load_data/3 never writes to, for negative tests.
undefined_bucket() ->
    <<"Undefined_Bucket">>.
%% @doc Sorted list of the num_buckets/0 test bucket names, built once and
%% cached in the process dictionary.
test_buckets() ->
    Key = {?MODULE, test_buckets},
    case erlang:get(Key) of
        undefined ->
            Buckets = bin_buckets(num_buckets(), []),
            erlang:put(Key, Buckets),
            Buckets;
        Cached ->
            Cached
    end.
%% @doc Sorted list of the num_keys/0 test key names, built once and cached
%% in the process dictionary.
test_keys() ->
    Key = {?MODULE, test_keys},
    case erlang:get(Key) of
        undefined ->
            Keys = bin_keys(num_keys(), []),
            erlang:put(Key, Keys),
            Keys;
        Cached ->
            Cached
    end.
%% @doc The integer sequence 1..num_keys/0, built once and cached in the
%% process dictionary. Parallels test_keys/0 and test_vals/0.
test_nums() ->
    Key = {?MODULE, test_nums},
    case erlang:get(Key) of
        undefined ->
            Nums = lists:seq(1, num_keys()),
            erlang:put(Key, Nums),
            Nums;
        Cached ->
            Cached
    end.
%% @doc Sorted list of the num_keys/0 test values, built once and cached in
%% the process dictionary.
test_vals() ->
    Key = {?MODULE, test_vals},
    case erlang:get(Key) of
        undefined ->
            Vals = bin_vals(num_keys(), []),
            erlang:put(Key, Vals),
            Vals;
        Cached ->
            Cached
    end.
%% @doc Query whether job Class ({App, Op}) is enabled on a node, via RPC to
%% riak_core_util:job_class_enabled/2. Given a list of nodes, returns the
%% per-node results in the same order.
get_enabled(Nodes, Class) when erlang:is_list(Nodes) ->
    lists:map(fun(Node) -> get_enabled(Node, Class) end, Nodes);
get_enabled(Node, {App, Op}) ->
    rpc:call(Node, riak_core_util, job_class_enabled, [App, Op]).
%% @doc Enable or disable job Class ({App, Op}) on one node or each node in
%% a list, via RPC to riak_core_util. For a list, each per-node call is
%% asserted to return ok; the overall result is ok.
set_enabled(Nodes, Class, Enabled) when erlang:is_list(Nodes) ->
    lists:foreach(
        fun(Node) -> ?assertEqual(ok, set_enabled(Node, Class, Enabled)) end,
        Nodes);
set_enabled(Node, {App, Op}, true) ->
    rpc:call(Node, riak_core_util, enable_job_class, [App, Op]);
set_enabled(Node, {App, Op}, false) ->
    rpc:call(Node, riak_core_util, disable_job_class, [App, Op]).
%% @doc Open a client of the requested type to Node, returning a
%% {Type, Module, Connection} triple. HTTP connections are constant records,
%% so they are cached in the process dictionary and re-used; PB connections
%% are opened fresh each time and must be released with close_client/1.
open_client(http = Type, Node) ->
    Key = {?MODULE, httpc, Node},
    case erlang:get(Key) of
        undefined ->
            Client = {Type, rhc, rt:httpc(Node)},
            erlang:put(Key, Client),
            Client;
        Cached ->
            Cached
    end;
open_client(pbc = Type, Node) ->
    {Type, riakc_pb_socket, rt:pbc(Node)}.
%% @doc Release a client triple returned by open_client/2. PB sockets are
%% stopped; cached HTTP clients are plain records with nothing to tear down.
close_client({pbc, Mod, Conn}) ->
    Mod:stop(Conn);
close_client({http, _Mod, _Conn}) ->
    ok.
%% @doc Join Nodes into a single cluster, load the standard test data via
%% the first node, and wait for ownership transfers to complete.
setup_cluster([Node | _] = Nodes) ->
    lager:info("Creating a cluster of ~b nodes ...", [erlang:length(Nodes)]),
    ?assertEqual(ok, rt:join_cluster(Nodes)),
    load_data(Node),
    ?assertEqual(ok, rt:wait_until_transfers_complete(Nodes)).
%% @doc Create the Yokozuna search index (index_yz/0) through a PB client.
%% Accepts a node list (the first node is used) or a single node.
setup_yokozuna([Node | _]) ->
    setup_yokozuna(Node);
setup_yokozuna(Node) ->
    % create the YZ search index
    {_, Mod, Conn} = Client = open_client(pbc, Node),
    ?assertEqual(ok, Mod:create_search_index(Conn, index_yz())),
    close_client(Client).
%% @doc Write the known test data set to the cluster through Node.
%% Accepts a node list (the first node is used) or a single node; opens and
%% closes its own PB connection. See load_data/3 for what gets written.
load_data([Node | _]) ->
    load_data(Node);
load_data(Node) ->
    lager:info("Writing known data to node ~p ...", [Node]),
    PBConn = rt:pbc(Node),
    load_data(PBConn, populated_bucket(), test_buckets()),
    riakc_pb_socket:stop(PBConn).
%% @doc Entry point for running one job-class operation test: logs a label
%% for the case and dispatches to the per-class test_request/4 clause.
test_operation(Node, Class, Enabled, ClientType) ->
    lager:info("Testing ~s on ~p",
        [test_label(Class, Enabled, ClientType), Node]),
    test_request(Node, Class, Enabled, ClientType).
%% ===================================================================
%% Internal Operation Tests
%% ===================================================================
%%
%% Notes on test_request/4 implementation:
%%
%% The 'rhc' and 'riakc_pb_socket' hide a lot of implementation details,
%% including the command they actually issue, so we rely on the error message
%% in the response for disabled switches to confirm that the request got routed
%% to where we wanted it to on the receiving end.
%%
%% This results in some odd head clause ordering below, as the approach differs
%% for each operation. All operations for a given ?TOKEN_XXX are clustered
%% together, but the order within the cluster varies as we match patterns as
%% dictated by the behavior of the client modules for each.
%%
%% We currently use 'riakc_pb_socket' for protobufs, but that doesn't give us
%% access to all available operations, so some are stubbed out unless/until we
%% dig deeper and implement them ourselves.
%%
%% The 'rhc' module has the same problem, but compounds it by not returning the
%% response body on errors, so for tests where it doesn't give us what we want
%% we skip it and use 'ibrowse' directly, building the URL from scratch.
%% For some reason using rt:httpc(Node) and getting the host/port out of the
%% returned #rhc{} is more reliable than calling rt:http_url(Node)
%% directly.
%%
% riakc_pb_socket always lists buckets with streams, so skip the non-stream
% test unless/until we want to implement it directly.
%% test_request(Node, Class, Enabled, ClientType)
%% One clause per {job class, client type} combination; see the notes above.
%% Defect fixed: several in-function comment lines had lost their leading
%% '%%' markers (before the map-reduce clauses, inside the streaming-2i
%% clause, and before the YZ clauses), which broke compilation; the markers
%% are restored below. The executable code is unchanged.
test_request(Node, ?TOKEN_LIST_BUCKETS = Class, Enabled, pbc = ClientType) ->
    {_, Mod, _} = Client = open_client(ClientType, Node),
    lager:warning(
        "non-streaming list-buckets is not implemented in the ~p client,"
        " skipping the ~s test.",
        [Mod, test_label(Class, Enabled, ClientType)]),
    close_client(Client),
    ok;
test_request(Node, ?TOKEN_LIST_BUCKETS = Class, Enabled, http = Scheme) ->
    URL = make_url(Node, Scheme, "/buckets?buckets=true"),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            {struct, PList} = mochijson2:decode(
                unicode:characters_to_list(Body, utf8)),
            Buckets = proplists:get_value(<<"buckets">>, PList, []),
            ?assertEqual({"200", test_buckets()}, {Code, lists:sort(Buckets)});
        false ->
            ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
    end;
test_request(Node, ?TOKEN_LIST_BUCKETS_S = Class, false, http = Scheme) ->
    URL = make_url(Node, Scheme, "/buckets?buckets=stream"),
    Result = ibrowse:send_req(URL, [], get),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    ?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body});
test_request(Node, ?TOKEN_LIST_BUCKETS_S = Class, Enabled, ClientType) ->
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    % 'rhc' and 'riakc_pb_socket' list_buckets always use stream_list_buckets
    Result = Mod:list_buckets(Conn),
    close_client(Client),
    case Enabled of
        true ->
            ?assertMatch({ok, L} when erlang:is_list(L), Result),
            {ok, Buckets} = Result,
            ?assertEqual(test_buckets(), lists:sort(Buckets));
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
% protobuf list-keys only does streams, so skip the non-stream test
test_request(_, ?TOKEN_LIST_KEYS = Class, Enabled, pbc = ClientType) ->
    lager:info(
        "non-streaming list-keys over protobufs is not implemented in Riak,"
        " skipping the ~s test.", [test_label(Class, Enabled, ClientType)]),
    ok;
test_request(Node, ?TOKEN_LIST_KEYS = Class, Enabled, http = Scheme) ->
    URL = make_url(Node, Scheme, ["/buckets/",
        erlang:binary_to_list(populated_bucket()), "/keys?keys=true"]),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            {struct, PList} = mochijson2:decode(
                unicode:characters_to_list(Body, utf8)),
            Keys = proplists:get_value(<<"keys">>, PList, []),
            ?assertEqual({"200", test_keys()}, {Code, lists:sort(Keys)});
        false ->
            ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
    end;
test_request(Node, ?TOKEN_LIST_KEYS_S = Class, false, http = Scheme) ->
    URL = make_url(Node, Scheme, ["/buckets/",
        erlang:binary_to_list(populated_bucket()), "/keys?keys=stream"]),
    Result = ibrowse:send_req(URL, [], get),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    ?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body});
test_request(Node, ?TOKEN_LIST_KEYS_S = Class, Enabled, ClientType) ->
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    % 'rhc' and 'riakc_pb_socket' list_keys always use stream_list_keys
    Result = Mod:list_keys(Conn, populated_bucket()),
    close_client(Client),
    case Enabled of
        true ->
            ?assertMatch({ok, _}, Result),
            {ok, Keys} = Result,
            ?assertEqual(test_keys(), lists:sort(Keys));
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
%% Map Reduce tests need a lot of love once code discriminates between term
%% and javascript MR requests.
% TODO: Change to discrete implementations so http error body is validated.
% TODO: Try both forms with the other enabled/disabled to check crossover.
test_request(Node, ?TOKEN_MAP_REDUCE = Class, Enabled, ClientType) ->
    Bucket = populated_bucket(),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:mapred(Conn, Bucket, []),
    close_client(Client),
    case Enabled of
        true ->
            ?assertMatch({ok, [{_, _}]}, Result),
            {ok, [{_, Pairs}]} = Result,
            Expect = case ClientType of
                pbc ->
                    [{Bucket, Key} || Key <- test_keys()];
                http ->
                    [[Bucket, Key] || Key <- test_keys()]
            end,
            ?assertEqual(Expect, lists:sort(Pairs));
        false ->
            case ClientType of
                pbc ->
                    ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result);
                http ->
                    ?assertMatch({error, {"403", _}}, Result)
            end
    end;
test_request(_Node, ?TOKEN_MAP_REDUCE_JS = Class, Enabled, ClientType) ->
    lager:info(
        "map-reduce javascript discrimination is not implemented in Riak,"
        " skipping the ~s test.", [test_label(Class, Enabled, ClientType)]),
    ok;
test_request(Node, ?TOKEN_SEC_INDEX = Class, Enabled, pbc = ClientType) ->
    Bucket = populated_bucket(),
    Index = index_2i(),
    Num = rt:random_uniform(num_keys()),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:get_index_eq(Conn, Bucket, Index, Num, [{stream, false}]),
    close_client(Client),
    case Enabled of
        true ->
            Key = bin_key(Num),
            ?assertMatch({ok, {index_results_v1, [Key], _, _}}, Result);
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
test_request(Node, ?TOKEN_SEC_INDEX = Class, Enabled, http = Scheme) ->
    Num = rt:random_uniform(num_keys()),
    URL = make_url(Node, Scheme, [
        "/buckets/", erlang:binary_to_list(populated_bucket()),
        "/index/", index_name(index_2i()), "/", erlang:integer_to_list(Num) ]),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            Key = bin_key(Num),
            {struct, PList} = mochijson2:decode(
                unicode:characters_to_list(Body, utf8)),
            Keys = proplists:get_value(<<"keys">>, PList, []),
            ?assertEqual({"200", [Key]}, {Code, Keys});
        false ->
            ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
    end;
test_request(Node, ?TOKEN_SEC_INDEX_S = Class, Enabled, pbc = ClientType) ->
    Lo = rt:random_uniform(num_keys() - 3),
    Hi = (Lo + 3),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    {ok, ReqId} = Mod:get_index_range(
        Conn, populated_bucket(), index_2i(), Lo, Hi, [{stream, true}]),
    %% on success result keys are sorted by receive_2i_stream/2
    Result = receive_2i_stream(ReqId, []),
    close_client(Client),
    case Enabled of
        true ->
            Expect = [bin_key(N) || N <- lists:seq(Lo, Hi)],
            ?assertEqual({ok, Expect}, Result);
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
test_request(Node, ?TOKEN_SEC_INDEX_S = Class, false, http = Scheme) ->
    Num = rt:random_uniform(num_keys()),
    URL = make_url(Node, Scheme, [
        "/buckets/", erlang:binary_to_list(populated_bucket()),
        "/index/", index_name(index_2i()), "/", erlang:integer_to_list(Num),
        "?stream=true" ]),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body});
test_request(Node, ?TOKEN_SEC_INDEX_S, true, http = ClientType) ->
    Bucket = populated_bucket(),
    Index = index_2i(),
    Num = rt:random_uniform(num_keys()),
    Key = bin_key(Num),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:get_index(Conn, Bucket, Index, Num),
    close_client(Client),
    ?assertMatch({ok, {index_results_v1, [Key], _, _}}, Result);
%% This requires that YZ be running and that
%% riakc_pb_socket:create_search_index(Connection, index_yz())
%% (or equivalent) has been successfully called before invoking this test.
%% This module's load_data/1 function DOES NOT do this for you by default.
test_request(Node, ?TOKEN_YZ_SEARCH = Class, Enabled, pbc = ClientType) ->
    Index = index_yz(),
    Bucket = populated_bucket(),
    Num = rt:random_uniform(num_keys()),
    Key = bin_key(Num),
    Query = <<"_yz_rb:", Bucket/binary, " AND _yz_rk:", Key/binary>>,
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:search(Conn, Index, Query),
    close_client(Client),
    case Enabled of
        true ->
            ?assertMatch({ok, #search_results{}}, Result);
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
test_request(Node, ?TOKEN_YZ_SEARCH = Class, Enabled, http) ->
    Bucket = populated_bucket(),
    Num = rt:random_uniform(num_keys()),
    Key = bin_key(Num),
    URL = make_url(Node, [
        "/search/query/", erlang:binary_to_list(index_yz()),
        "?wt=json&q=_yz_rb:", erlang:binary_to_list(Bucket),
        "%20AND%20_yz_rk:", erlang:binary_to_list(Key) ]),
    Result = ibrowse:send_req(URL, [], get),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            ?assertEqual("200", Code);
        false ->
            ?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body})
    end;
test_request(_Node, ?TOKEN_OLD_SEARCH = Class, Enabled, ClientType) ->
    lager:warning(
        "riak_search job switch test not implemented,"
        " skipping the ~s test.", [test_label(Class, Enabled, ClientType)]),
    ok.
%% ===================================================================
%% Internal Support
%% ===================================================================
%% @doc Accumulate bucket names Bucket_1 .. Bucket_N onto Acc, returning the
%% sorted combined list.
bin_buckets(0, Acc) ->
    lists:sort(Acc);
bin_buckets(N, Acc) ->
    bin_buckets(N - 1, [bin_bucket(N) | Acc]).
%% @doc Accumulate key names Key_1 .. Key_N onto Acc, returning the sorted
%% combined list.
bin_keys(0, Acc) ->
    lists:sort(Acc);
bin_keys(N, Acc) ->
    bin_keys(N - 1, [bin_key(N) | Acc]).
%% @doc Accumulate values Val_1 .. Val_N onto Acc, returning the sorted
%% combined list.
bin_vals(0, Acc) ->
    lists:sort(Acc);
bin_vals(N, Acc) ->
    bin_vals(N - 1, [bin_val(N) | Acc]).
%% @doc Write test objects into each bucket through PBConn.
%% The clause whose list head matches the populated bucket writes the full
%% Num/Key/Val data set, attaching a 2i index entry per object; every other
%% bucket receives a single marker object.
load_data(PBConn, Bucket, [Bucket | Buckets]) ->
    Index = index_2i(),
    Load = fun({Num, Key, Val}) ->
        Obj1 = riakc_obj:new(Bucket, Key, Val),
        Meta1 = riakc_obj:get_update_metadata(Obj1),
        Meta2 = riakc_obj:set_secondary_index(Meta1, [{Index, [Num]}]),
        Obj2 = riakc_obj:update_metadata(Obj1, Meta2),
        ?assertEqual(ok, riakc_pb_socket:put(PBConn, Obj2))
    end,
    lists:foreach(Load, lists:zip3(test_nums(), test_keys(), test_vals())),
    load_data(PBConn, Bucket, Buckets);
load_data(PBConn, PopBucket, [Bucket | Buckets]) ->
    ?assertEqual(ok, riakc_pb_socket:put(PBConn,
        riakc_obj:new(Bucket, <<"test_key">>, <<"test_value">>))),
    load_data(PBConn, PopBucket, Buckets);
load_data(_, _, []) ->
    ok.
%% @doc Build a URL from either an #rhc{} client record (scheme chosen from
%% its is_ssl option) or a node (defaulting to http).
make_url(#rhc{ip = Host, port = Port, options = Opts}, Parts) ->
    Scheme = case proplists:get_value(is_ssl, Opts) of
        true ->
            https;
        _ ->
            http
    end,
    make_url(Scheme, Host, Port, Parts);
make_url(Node, Parts) ->
    make_url(Node, http, Parts).
%% @doc Build a URL for Node with an explicit scheme.
%% Defect fixed: the in-function comment line had lost its leading '%%'
%% marker, breaking compilation; restored below.
make_url(Node, Scheme, Parts) ->
    %% getting host/port from the #rhc{} returned by rt:httpc/1
    %% seems to be more reliable than calling rt:http_url/1 directly
    #rhc{ip = IP, port = Port} = rt:httpc(Node),
    make_url(Scheme, IP, Port, Parts).
%% @doc Assemble "Scheme://Host:Port" plus the Parts iolist into a flat
%% string suitable for ibrowse.
make_url(Scheme, Host, Port, Parts) ->
    Base = io_lib:format("~s://~s:~b", [Scheme, Host, Port]),
    lists:flatten([Base | Parts]).
%% @doc Collect the results of a streaming 2i query identified by ReqId,
%% returning {ok, SortedKeys} on completion or {error, Reason}.
%% Defect fixed: one in-function comment line had lost its leading '%'
%% marker, breaking compilation; restored below.
%% NOTE(review): there is no 'after' clause, so this blocks forever if the
%% stream never terminates - acceptable for a test that is externally timed.
receive_2i_stream(ReqId, Result) ->
    receive
        {ReqId, {done, _}} ->
            {ok, lists:sort(lists:flatten(Result))};
        {ReqId, {error, Reason}} ->
            {error, Reason};
        {ReqId, {index_stream_result_v1, [Val], _}} ->
            receive_2i_stream(ReqId, [Val | Result]);
        % sent once before 'done'
        {ReqId, {index_stream_result_v1, [], _}} ->
            receive_2i_stream(ReqId, Result);
        % not clear if it can send more than one
        {ReqId, {index_stream_result_v1, Vals, _}} when erlang:is_list(Vals) ->
            receive_2i_stream(ReqId, Vals ++ Result)
    end.
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/job_enable_common.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
Public API for use by other tests
===================================================================
Test API
===================================================================
HTTP connections are constant records, so re-use them
create the YZ search index
===================================================================
Internal Operation Tests
===================================================================
The 'rhc' and 'riakc_pb_socket' hide a lot of implementation details,
including the command they actually issue, so we rely on the error message
in the response for disabled switches to confirm that the request got routed
to where we wanted it to on the receiving end.
This results in some odd head clause ordering below, as the approach differs
for each operation. All operations for a given ?TOKEN_XXX are clustered
together, but the order within the cluster varies as we match patterns as
dictated by the behavior of the client modules for each.
access to all available operations, so some are stubbed out unless/until we
dig deeper and implement them ourselves.
The 'rhc' module has the same problem, but compounds it by not returning the
response body on errors, so for tests where it doesn't give us what we want
we skip it and use 'ibrowse' directly, building the URL from scratch.
For some reason using rt:httpc(Node) and getting the host/port out of the
directly.
riakc_pb_socket always lists buckets with streams, so skip the non-stream
test unless/until we want to implement it directly.
'rhc' and 'riakc_pb_socket' list_buckets always use stream_list_buckets
protobuf list-keys only does streams, so skip the non-stream test
'rhc' and 'riakc_pb_socket' list_keys always use stream_list_keys
and javascript MR requests.
TODO: Change to discrete implementations so http error body is validated.
TODO: Try both forms with the other enabled/disabled to check crossover.
This requires that YZ be running and that
riakc_pb_socket:create_search_index(Connection, index_yz())
(or equivalent) has been successfully called before invoking this test.
===================================================================
Internal Support
===================================================================
sent once before 'done' | Copyright ( c ) 2016 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(job_enable_common).
-export([
bin_bucket/1,
bin_key/1,
bin_val/1,
close_client/1,
enabled_string/1,
get_enabled/2,
index_2i/0,
index_name/1,
index_yz/0,
load_data/1,
num_buckets/0, num_buckets/1,
num_keys/0, num_keys/1,
open_client/2,
populated_bucket/0,
set_enabled/3,
setup_cluster/1,
setup_yokozuna/1,
test_buckets/0,
test_keys/0,
test_label/3,
test_nums/0,
test_operation/4,
test_vals/0,
undefined_bucket/0
]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("riakc/include/riakc.hrl").
-include_lib("riakhttpc/include/rhc.hrl").
-include("job_enable_common.hrl").
-define(DEFAULT_NUM_BUCKETS, 7).
-define(DEFAULT_NUM_KEYS, 9).
enabled_string(true) ->
"enabled";
enabled_string(false) ->
"disabled".
test_label(Class, Enabled, ClientType) ->
io_lib:format("~s ~p ~s", [ClientType, Class, enabled_string(Enabled)]).
bin_bucket(Num) ->
erlang:list_to_binary(["Bucket_", erlang:integer_to_list(Num)]).
bin_key(Num) ->
erlang:list_to_binary(["Key_", erlang:integer_to_list(Num)]).
bin_val(Num) ->
erlang:list_to_binary(["Val_", erlang:integer_to_list(Num)]).
index_2i() ->
{integer_index, "valnum_index_2i"}.
index_yz() ->
<<"valnum_index_yz">>.
index_name(Name) when erlang:is_atom(Name) ->
erlang:atom_to_list(Name);
index_name(Name) when erlang:is_binary(Name) ->
erlang:binary_to_list(Name);
index_name(Name) when erlang:is_list(Name) ->
Name;
index_name({binary_index, Name}) ->
index_name(Name) ++ "_bin";
index_name({integer_index, Name}) ->
index_name(Name) ++ "_int";
index_name(Index) ->
erlang:error(badarg, [Index]).
num_buckets() ->
Key = {?MODULE, num_buckets},
case erlang:get(Key) of
undefined ->
Num = ?DEFAULT_NUM_BUCKETS,
erlang:put(Key, Num),
Num;
Val ->
Val
end.
num_buckets(Num) when erlang:is_integer(Num) andalso Num > 0 ->
Key = {?MODULE, num_buckets},
case erlang:get(Key) of
undefined ->
erlang:put(Key, Num),
Num;
Num ->
Num;
_ ->
erlang:erase({?MODULE, test_buckets}),
erlang:erase({?MODULE, populated_bucket}),
erlang:put(Key, Num),
Num
end.
num_keys() ->
Key = {?MODULE, num_keys},
case erlang:get(Key) of
undefined ->
Num = ?DEFAULT_NUM_KEYS,
erlang:put(Key, Num),
Num;
Val ->
Val
end.
num_keys(Num) when erlang:is_integer(Num) andalso Num > 0 ->
Key = {?MODULE, num_keys},
case erlang:get(Key) of
undefined ->
erlang:put(Key, Num),
Num;
Num ->
Num;
_ ->
erlang:erase({?MODULE, test_keys}),
erlang:erase({?MODULE, test_nums}),
erlang:erase({?MODULE, test_vals}),
erlang:put(Key, Num),
Num
end.
populated_bucket() ->
Key = {?MODULE, populated_bucket},
case erlang:get(Key) of
undefined ->
Buckets = test_buckets(),
Bucket = lists:nth(erlang:length(Buckets) div 2, Buckets),
erlang:put(Key, Bucket),
Bucket;
Val ->
Val
end.
undefined_bucket() ->
<<"Undefined_Bucket">>.
test_buckets() ->
Key = {?MODULE, test_buckets},
case erlang:get(Key) of
undefined ->
New = bin_buckets(num_buckets(), []),
erlang:put(Key, New),
New;
Val ->
Val
end.
test_keys() ->
Key = {?MODULE, test_keys},
case erlang:get(Key) of
undefined ->
New = bin_keys(num_keys(), []),
erlang:put(Key, New),
New;
Val ->
Val
end.
test_nums() ->
Key = {?MODULE, test_nums},
case erlang:get(Key) of
undefined ->
New = lists:seq(1, num_keys()),
erlang:put(Key, New),
New;
Val ->
Val
end.
test_vals() ->
Key = {?MODULE, test_vals},
case erlang:get(Key) of
undefined ->
New = bin_vals(num_keys(), []),
erlang:put(Key, New),
New;
Val ->
Val
end.
get_enabled(Nodes, Class) when erlang:is_list(Nodes) ->
[get_enabled(Node, Class) || Node <- Nodes];
get_enabled(Node, {App, Op}) ->
rpc:call(Node, riak_core_util, job_class_enabled, [App, Op]).
set_enabled([], _, _) ->
ok;
set_enabled([Node | Nodes], Class, Enabled) ->
?assertEqual(ok, set_enabled(Node, Class, Enabled)),
set_enabled(Nodes, Class, Enabled);
set_enabled(Node, {App, Op}, true) ->
rpc:call(Node, riak_core_util, enable_job_class, [App, Op]);
set_enabled(Node, {App, Op}, false) ->
rpc:call(Node, riak_core_util, disable_job_class, [App, Op]).
open_client(http = Type, Node) ->
Key = {?MODULE, httpc, Node},
case erlang:get(Key) of
undefined ->
New = {Type, rhc, rt:httpc(Node)},
erlang:put(Key, New),
New;
Conn ->
Conn
end;
open_client(pbc = Type, Node) ->
{Type, riakc_pb_socket, rt:pbc(Node)}.
close_client({http, _Mod, _RHC}) ->
ok;
close_client({pbc, Mod, PBC}) ->
Mod:stop(PBC).
setup_cluster([Node | _] = Nodes) ->
lager:info("Creating a cluster of ~b nodes ...", [erlang:length(Nodes)]),
?assertEqual(ok, rt:join_cluster(Nodes)),
load_data(Node),
?assertEqual(ok, rt:wait_until_transfers_complete(Nodes)).
setup_yokozuna([Node | _]) ->
setup_yokozuna(Node);
setup_yokozuna(Node) ->
{_, Mod, Conn} = Client = open_client(pbc, Node),
?assertEqual(ok, Mod:create_search_index(Conn, index_yz())),
close_client(Client).
load_data([Node | _]) ->
load_data(Node);
load_data(Node) ->
lager:info("Writing known data to node ~p ...", [Node]),
PBConn = rt:pbc(Node),
load_data(PBConn, populated_bucket(), test_buckets()),
riakc_pb_socket:stop(PBConn).
test_operation(Node, Class, Enabled, ClientType) ->
lager:info("Testing ~s on ~p",
[test_label(Class, Enabled, ClientType), Node]),
test_request(Node, Class, Enabled, ClientType).
Notes on test_request/4 implementation :
We currently uses ' riakc_pb_socket ' for protobufs , but that does n't give us
returned # rhc { } is more reliable than calling rt :
test_request(Node, ?TOKEN_LIST_BUCKETS = Class, Enabled, pbc = ClientType) ->
{_, Mod, _} = Client = open_client(ClientType, Node),
lager:warning(
"non-streaming list-buckets is not implemented in the ~p client,"
" skipping the ~s test.",
[Mod, test_label(Class, Enabled, ClientType)]),
close_client(Client),
ok;
test_request(Node, ?TOKEN_LIST_BUCKETS = Class, Enabled, http = Scheme) ->
URL = make_url(Node, Scheme, "/buckets?buckets=true"),
Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
?assertMatch({ok, _, _, _}, Result),
{_, Code, _, Body} = Result,
case Enabled of
true ->
{struct, PList} = mochijson2:decode(
unicode:characters_to_list(Body, utf8)),
Buckets = proplists:get_value(<<"buckets">>, PList, []),
?assertEqual({"200", test_buckets()}, {Code, lists:sort(Buckets)});
false ->
?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
end;
test_request(Node, ?TOKEN_LIST_BUCKETS_S = Class, false, http = Scheme) ->
URL = make_url(Node, Scheme, "/buckets?buckets=stream"),
Result = ibrowse:send_req(URL, [], get),
?assertMatch({ok, _, _, _}, Result),
{_, Code, _, Body} = Result,
?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body});
test_request(Node, ?TOKEN_LIST_BUCKETS_S = Class, Enabled, ClientType) ->
{_, Mod, Conn} = Client = open_client(ClientType, Node),
Result = Mod:list_buckets(Conn),
close_client(Client),
case Enabled of
true ->
?assertMatch({ok, L} when erlang:is_list(L), Result),
{ok, Buckets} = Result,
?assertEqual(test_buckets(), lists:sort(Buckets));
false ->
?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
end;
test_request(_, ?TOKEN_LIST_KEYS = Class, Enabled, pbc = ClientType) ->
lager:info(
"non-streaming list-keys over protobufs is not implemented in Riak,"
" skipping the ~s test.", [test_label(Class, Enabled, ClientType)]),
ok;
test_request(Node, ?TOKEN_LIST_KEYS = Class, Enabled, http = Scheme) ->
URL = make_url(Node, Scheme, ["/buckets/",
erlang:binary_to_list(populated_bucket()), "/keys?keys=true"]),
Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
?assertMatch({ok, _, _, _}, Result),
{_, Code, _, Body} = Result,
case Enabled of
true ->
{struct, PList} = mochijson2:decode(
unicode:characters_to_list(Body, utf8)),
Keys = proplists:get_value(<<"keys">>, PList, []),
?assertEqual({"200", test_keys()}, {Code, lists:sort(Keys)});
false ->
?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
end;
test_request(Node, ?TOKEN_LIST_KEYS_S = Class, false, http = Scheme) ->
URL = make_url(Node, Scheme, ["/buckets/",
erlang:binary_to_list(populated_bucket()), "/keys?keys=stream"]),
Result = ibrowse:send_req(URL, [], get),
?assertMatch({ok, _, _, _}, Result),
{_, Code, _, Body} = Result,
?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body});
test_request(Node, ?TOKEN_LIST_KEYS_S = Class, Enabled, ClientType) ->
{_, Mod, Conn} = Client = open_client(ClientType, Node),
Result = Mod:list_keys(Conn, populated_bucket()),
close_client(Client),
case Enabled of
true ->
?assertMatch({ok, _}, Result),
{ok, Keys} = Result,
?assertEqual(test_keys(), lists:sort(Keys));
false ->
?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
end;
Map Reduce tests need a lot of love once code discriminates between term
test_request(Node, ?TOKEN_MAP_REDUCE = Class, Enabled, ClientType) ->
Bucket = populated_bucket(),
{_, Mod, Conn} = Client = open_client(ClientType, Node),
Result = Mod:mapred(Conn, Bucket, []),
close_client(Client),
case Enabled of
true ->
?assertMatch({ok, [{_, _}]}, Result),
{ok, [{_, Pairs}]} = Result,
Expect = case ClientType of
pbc ->
[{Bucket, Key} || Key <- test_keys()];
http ->
[[Bucket, Key] || Key <- test_keys()]
end,
?assertEqual(Expect, lists:sort(Pairs));
false ->
case ClientType of
pbc ->
?assertEqual({error, ?ERRMSG_BIN(Class)}, Result);
http ->
?assertMatch({error, {"403", _}}, Result)
end
end;
test_request(_Node, ?TOKEN_MAP_REDUCE_JS = Class, Enabled, ClientType) ->
lager:info(
"map-reduce javascript discrimination is not implemented in Riak,"
" skipping the ~s test.", [test_label(Class, Enabled, ClientType)]),
ok;
test_request(Node, ?TOKEN_SEC_INDEX = Class, Enabled, pbc = ClientType) ->
Bucket = populated_bucket(),
Index = index_2i(),
Num = rt:random_uniform(num_keys()),
{_, Mod, Conn} = Client = open_client(ClientType, Node),
Result = Mod:get_index_eq(Conn, Bucket, Index, Num, [{stream, false}]),
close_client(Client),
case Enabled of
true ->
Key = bin_key(Num),
?assertMatch({ok, {index_results_v1, [Key], _, _}}, Result);
false ->
?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
end;
%% Non-streaming 2i equality query over raw HTTP: hits the
%% /buckets/<B>/index/<I>/<N> endpoint directly with ibrowse and decodes the
%% JSON "keys" list on success.
test_request(Node, ?TOKEN_SEC_INDEX = Class, Enabled, http = Scheme) ->
    Num = rt:random_uniform(num_keys()),
    URL = make_url(Node, Scheme, [
        "/buckets/", erlang:binary_to_list(populated_bucket()),
        "/index/", index_name(index_2i()), "/", erlang:integer_to_list(Num) ]),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            Key = bin_key(Num),
            %% Body is a binary; decode via a unicode list for mochijson2.
            {struct, PList} = mochijson2:decode(
                unicode:characters_to_list(Body, utf8)),
            Keys = proplists:get_value(<<"keys">>, PList, []),
            ?assertEqual({"200", [Key]}, {Code, Keys});
        false ->
            ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body})
    end;
%% Streaming 2i range query over the PB client across a four-key window.
%% Fix: the extraction had stripped the '%%' marker from the inline note,
%% leaving bare prose that would not compile; the comment is restored.
test_request(Node, ?TOKEN_SEC_INDEX_S = Class, Enabled, pbc = ClientType) ->
    Lo = rt:random_uniform(num_keys() - 3),
    Hi = (Lo + 3),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    {ok, ReqId} = Mod:get_index_range(
        Conn, populated_bucket(), index_2i(), Lo, Hi, [{stream, true}]),
    %% on success result keys are sorted by receive_2i_stream/2
    Result = receive_2i_stream(ReqId, []),
    close_client(Client),
    case Enabled of
        true ->
            Expect = [bin_key(N) || N <- lists:seq(Lo, Hi)],
            ?assertEqual({ok, Expect}, Result);
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
%% Streaming 2i query over raw HTTP, disabled case only: the server must
%% refuse the ?stream=true request with a 403 and the canned error body.
test_request(Node, ?TOKEN_SEC_INDEX_S = Class, false, http = Scheme) ->
    Num = rt:random_uniform(num_keys()),
    URL = make_url(Node, Scheme, [
        "/buckets/", erlang:binary_to_list(populated_bucket()),
        "/index/", index_name(index_2i()), "/", erlang:integer_to_list(Num),
        "?stream=true" ]),
    Result = ibrowse:send_req(URL, [], get, [], [{response_format, binary}]),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    ?assertEqual({"403", ?ERRMSG_BIN(Class)}, {Code, Body});
%% Streaming 2i query over HTTP, enabled case: goes through the client
%% module returned by open_client/2 rather than a raw ibrowse request.
test_request(Node, ?TOKEN_SEC_INDEX_S, true, http = ClientType) ->
    Bucket = populated_bucket(),
    Index = index_2i(),
    Num = rt:random_uniform(num_keys()),
    %% Key is bound first, so the assertMatch below checks for the exact
    %% key corresponding to index value Num.
    Key = bin_key(Num),
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:get_index(Conn, Bucket, Index, Num),
    close_client(Client),
    ?assertMatch({ok, {index_results_v1, [Key], _, _}}, Result);
%% This module's load_data/1 function DOES NOT do this for you by default.
%% Yokozuna search over the PB client: queries the YZ index for one random
%% bucket/key pair using the _yz_rb and _yz_rk fields.
test_request(Node, ?TOKEN_YZ_SEARCH = Class, Enabled, pbc = ClientType) ->
    Index = index_yz(),
    Bucket = populated_bucket(),
    Num = rt:random_uniform(num_keys()),
    Key = bin_key(Num),
    Query = <<"_yz_rb:", Bucket/binary, " AND _yz_rk:", Key/binary>>,
    {_, Mod, Conn} = Client = open_client(ClientType, Node),
    Result = Mod:search(Conn, Index, Query),
    close_client(Client),
    case Enabled of
        true ->
            %% Only the result shape is checked here, not the hit count.
            ?assertMatch({ok, #search_results{}}, Result);
        false ->
            ?assertEqual({error, ?ERRMSG_BIN(Class)}, Result)
    end;
%% Yokozuna search over raw HTTP: issues a GET against the /search/query
%% endpoint with a URL-encoded _yz_rb/_yz_rk query.
test_request(Node, ?TOKEN_YZ_SEARCH = Class, Enabled, http) ->
    Bucket = populated_bucket(),
    Num = rt:random_uniform(num_keys()),
    Key = bin_key(Num),
    URL = make_url(Node, [
        "/search/query/", erlang:binary_to_list(index_yz()),
        "?wt=json&q=_yz_rb:", erlang:binary_to_list(Bucket),
        "%20AND%20_yz_rk:", erlang:binary_to_list(Key) ]),
    Result = ibrowse:send_req(URL, [], get),
    ?assertMatch({ok, _, _, _}, Result),
    {_, Code, _, Body} = Result,
    case Enabled of
        true ->
            ?assertEqual("200", Code);
        false ->
            %% No response_format option here, so Body arrives as a list
            %% and is compared against the text form of the error message.
            ?assertEqual({"403", ?ERRMSG_TXT(Class)}, {Code, Body})
    end;
%% Legacy riak_search has no job-switch test coverage; log which
%% combination is being skipped and report success.
test_request(_Node, ?TOKEN_OLD_SEARCH = Class, Enabled, ClientType) ->
    Label = test_label(Class, Enabled, ClientType),
    lager:warning(
        "riak_search job switch test not implemented,"
        " skipping the ~s test.", [Label]),
    ok.
%% Prepends the generated names for buckets N..1 onto the accumulator and
%% returns the whole list sorted.
bin_buckets(0, Acc) ->
    lists:sort(Acc);
bin_buckets(N, Acc) ->
    bin_buckets(N - 1, [bin_bucket(N) | Acc]).
%% Prepends the generated keys for N..1 onto the accumulator and returns
%% the whole list sorted.
bin_keys(0, Acc) ->
    lists:sort(Acc);
bin_keys(N, Acc) ->
    bin_keys(N - 1, [bin_key(N) | Acc]).
%% Prepends the generated values for N..1 onto the accumulator and returns
%% the whole list sorted.
bin_vals(0, Acc) ->
    lists:sort(Acc);
bin_vals(N, Acc) ->
    bin_vals(N - 1, [bin_val(N) | Acc]).
%% Populates every bucket in the list over one PB connection.
%% The first clause uses a non-linear pattern - it fires only when the head
%% of the bucket list is the SAME bucket as the second argument (the
%% designated populated bucket) - and writes one 2i-indexed object per test
%% key. Every other bucket gets a single marker object via the second
%% clause.
load_data(PBConn, Bucket, [Bucket | Buckets]) ->
    Index = index_2i(),
    Load = fun({Num, Key, Val}) ->
        Obj1 = riakc_obj:new(Bucket, Key, Val),
        Meta1 = riakc_obj:get_update_metadata(Obj1),
        %% Each object is indexed under its ordinal number so the 2i
        %% queries above can address keys by integer.
        Meta2 = riakc_obj:set_secondary_index(Meta1, [{Index, [Num]}]),
        Obj2 = riakc_obj:update_metadata(Obj1, Meta2),
        ?assertEqual(ok, riakc_pb_socket:put(PBConn, Obj2))
    end,
    lists:foreach(Load, lists:zip3(test_nums(), test_keys(), test_vals())),
    load_data(PBConn, Bucket, Buckets);
load_data(PBConn, PopBucket, [Bucket | Buckets]) ->
    ?assertEqual(ok, riakc_pb_socket:put(PBConn,
        riakc_obj:new(Bucket, <<"test_key">>, <<"test_value">>))),
    load_data(PBConn, PopBucket, Buckets);
load_data(_, _, []) ->
    ok.
%% Builds a URL from either an rhc client record or a node. For a client
%% record the scheme follows the record's is_ssl option; for a node the
%% plain-http variant of make_url/3 is used.
make_url(#rhc{ip = IP, port = Port, options = Opts}, Parts) ->
    Scheme = case proplists:get_value(is_ssl, Opts) of
        true ->
            https;
        _ ->
            http
    end,
    make_url(Scheme, IP, Port, Parts);
make_url(Node, Parts) ->
    make_url(Node, http, Parts).
%% Resolves Node's HTTP listener address through its riakhttpc client
%% record and builds a URL with the requested scheme.
%% Fix: the extraction had stripped the '%%' marker from the inline note,
%% leaving bare prose that would not compile; the comment is restored.
make_url(Node, Scheme, Parts) ->
    %% Deriving the endpoint from rt:httpc/1 seems to be more reliable
    %% than calling the rt URL helpers directly.
    #rhc{ip = IP, port = Port} = rt:httpc(Node),
    make_url(Scheme, IP, Port, Parts).
%% Renders "Scheme://Host:Port" followed by the given path fragments as a
%% single flat string; ~s accepts atoms, strings, or binaries for the
%% scheme and host, and ~b formats the integer port.
make_url(Proto, Addr, PortNum, Fragments) ->
    Prefix = io_lib:format("~s://~s:~b", [Proto, Addr, PortNum]),
    lists:flatten([Prefix, Fragments]).
%% Drains a streaming 2i result set for ReqId, accumulating keys until a
%% 'done' or 'error' message arrives, and returns {ok, SortedKeys} or
%% {error, Reason}. Results arrive in stream order; the final sort (plus
%% flatten, for the multi-value case) makes the output deterministic for
%% the callers' assertions.
%% NOTE: there is no 'after' clause, so a stalled stream relies on the
%% harness-level test timeout.
%% Fix: the extraction had stripped the '%%' marker from the inline note,
%% leaving bare prose that would not compile; the comment is restored.
receive_2i_stream(ReqId, Result) ->
    receive
        {ReqId, {done, _}} ->
            {ok, lists:sort(lists:flatten(Result))};
        {ReqId, {error, Reason}} ->
            {error, Reason};
        {ReqId, {index_stream_result_v1, [Val], _}} ->
            receive_2i_stream(ReqId, [Val | Result]);
        {ReqId, {index_stream_result_v1, [], _}} ->
            receive_2i_stream(ReqId, Result);
        %% not clear if it can send more than one
        {ReqId, {index_stream_result_v1, Vals, _}} when erlang:is_list(Vals) ->
            receive_2i_stream(ReqId, Vals ++ Result)
    end.
|
50b1fa29081fd3e896bc147c86bae95aa5156cef29ae6016afd3c7c80338c9fa | fossas/fossa-cli | PackagesConfig.hs | # LANGUAGE RecordWildCards #
module Strategy.NuGet.PackagesConfig (
discover,
findProjects,
getDeps,
mkProject,
buildGraph,
PackagesConfig (..),
NuGetDependency (..),
) where
import App.Fossa.Analyze.Types (AnalyzeProject (analyzeProject'), analyzeProject)
import Control.Effect.Diagnostics (Diagnostics, Has, context)
import Control.Effect.Reader (Reader)
import Data.Aeson (ToJSON)
import Data.Foldable (find)
import Data.Map.Strict qualified as Map
import Data.Text (Text)
import DepTypes (
DepType (NuGetType),
Dependency (..),
VerConstraint (CEq),
)
import Discovery.Filters (AllFilters)
import Discovery.Simple (simpleDiscover)
import Discovery.Walk (
WalkStep (WalkContinue),
fileName,
walkWithFilters',
)
import Effect.ReadFS (ReadFS, readContentsXML)
import GHC.Generics (Generic)
import Graphing (Graphing)
import Graphing qualified
import Parse.XML (FromXML (..), attr, children)
import Path (Abs, Dir, File, Path, parent)
import Types (
DependencyResults (..),
DiscoveredProject (..),
DiscoveredProjectType (PackagesConfigProjectType),
GraphBreadth (Partial),
)
discover :: (Has ReadFS sig m, Has Diagnostics sig m, Has (Reader AllFilters) sig m) => Path Abs Dir -> m [DiscoveredProject PackagesConfigProject]
discover = simpleDiscover findProjects mkProject PackagesConfigProjectType
findProjects :: (Has ReadFS sig m, Has Diagnostics sig m, Has (Reader AllFilters) sig m) => Path Abs Dir -> m [PackagesConfigProject]
findProjects = walkWithFilters' $ \_ _ files -> do
case find (\f -> fileName f == "packages.config") files of
Nothing -> pure ([], WalkContinue)
Just file -> pure ([PackagesConfigProject file], WalkContinue)
newtype PackagesConfigProject = PackagesConfigProject
{ packagesConfigFile :: Path Abs File
}
deriving (Eq, Ord, Show, Generic)
instance ToJSON PackagesConfigProject
instance AnalyzeProject PackagesConfigProject where
analyzeProject _ = getDeps
analyzeProject' _ = getDeps
mkProject :: PackagesConfigProject -> DiscoveredProject PackagesConfigProject
mkProject project =
DiscoveredProject
{ projectType = PackagesConfigProjectType
, projectBuildTargets = mempty
, projectPath = parent $ packagesConfigFile project
, projectData = project
}
getDeps :: (Has ReadFS sig m, Has Diagnostics sig m) => PackagesConfigProject -> m DependencyResults
getDeps = context "PackagesConfig" . context "Static analysis" . analyze' . packagesConfigFile
analyze' :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m DependencyResults
analyze' file = do
config <- readContentsXML @PackagesConfig file
graph <- context "Building dependency graph" $ pure (buildGraph config)
pure $
DependencyResults
{ dependencyGraph = graph
, dependencyGraphBreadth = Partial
, dependencyManifestFiles = [file]
}
instance FromXML PackagesConfig where
parseElement el = PackagesConfig <$> children "package" el
instance FromXML NuGetDependency where
parseElement el =
NuGetDependency
<$> attr "id" el
<*> attr "version" el
newtype PackagesConfig = PackagesConfig
{ deps :: [NuGetDependency]
}
deriving (Eq, Ord, Show)
data NuGetDependency = NuGetDependency
{ depID :: Text
, depVersion :: Text
}
deriving (Eq, Ord, Show)
buildGraph :: PackagesConfig -> Graphing Dependency
buildGraph = Graphing.fromList . map toDependency . deps
where
toDependency NuGetDependency{..} =
Dependency
{ dependencyType = NuGetType
, dependencyName = depID
, dependencyVersion = Just (CEq depVersion)
, dependencyLocations = []
, dependencyEnvironments = mempty
, dependencyTags = Map.empty
}
| null | https://raw.githubusercontent.com/fossas/fossa-cli/187f19afec2133466d1998c89fc7f1c77107c2b0/src/Strategy/NuGet/PackagesConfig.hs | haskell | # LANGUAGE RecordWildCards #
module Strategy.NuGet.PackagesConfig (
discover,
findProjects,
getDeps,
mkProject,
buildGraph,
PackagesConfig (..),
NuGetDependency (..),
) where
import App.Fossa.Analyze.Types (AnalyzeProject (analyzeProject'), analyzeProject)
import Control.Effect.Diagnostics (Diagnostics, Has, context)
import Control.Effect.Reader (Reader)
import Data.Aeson (ToJSON)
import Data.Foldable (find)
import Data.Map.Strict qualified as Map
import Data.Text (Text)
import DepTypes (
DepType (NuGetType),
Dependency (..),
VerConstraint (CEq),
)
import Discovery.Filters (AllFilters)
import Discovery.Simple (simpleDiscover)
import Discovery.Walk (
WalkStep (WalkContinue),
fileName,
walkWithFilters',
)
import Effect.ReadFS (ReadFS, readContentsXML)
import GHC.Generics (Generic)
import Graphing (Graphing)
import Graphing qualified
import Parse.XML (FromXML (..), attr, children)
import Path (Abs, Dir, File, Path, parent)
import Types (
DependencyResults (..),
DiscoveredProject (..),
DiscoveredProjectType (PackagesConfigProjectType),
GraphBreadth (Partial),
)
discover :: (Has ReadFS sig m, Has Diagnostics sig m, Has (Reader AllFilters) sig m) => Path Abs Dir -> m [DiscoveredProject PackagesConfigProject]
discover = simpleDiscover findProjects mkProject PackagesConfigProjectType
findProjects :: (Has ReadFS sig m, Has Diagnostics sig m, Has (Reader AllFilters) sig m) => Path Abs Dir -> m [PackagesConfigProject]
findProjects = walkWithFilters' $ \_ _ files -> do
case find (\f -> fileName f == "packages.config") files of
Nothing -> pure ([], WalkContinue)
Just file -> pure ([PackagesConfigProject file], WalkContinue)
newtype PackagesConfigProject = PackagesConfigProject
{ packagesConfigFile :: Path Abs File
}
deriving (Eq, Ord, Show, Generic)
instance ToJSON PackagesConfigProject
instance AnalyzeProject PackagesConfigProject where
analyzeProject _ = getDeps
analyzeProject' _ = getDeps
mkProject :: PackagesConfigProject -> DiscoveredProject PackagesConfigProject
mkProject project =
DiscoveredProject
{ projectType = PackagesConfigProjectType
, projectBuildTargets = mempty
, projectPath = parent $ packagesConfigFile project
, projectData = project
}
getDeps :: (Has ReadFS sig m, Has Diagnostics sig m) => PackagesConfigProject -> m DependencyResults
getDeps = context "PackagesConfig" . context "Static analysis" . analyze' . packagesConfigFile
analyze' :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m DependencyResults
analyze' file = do
config <- readContentsXML @PackagesConfig file
graph <- context "Building dependency graph" $ pure (buildGraph config)
pure $
DependencyResults
{ dependencyGraph = graph
, dependencyGraphBreadth = Partial
, dependencyManifestFiles = [file]
}
instance FromXML PackagesConfig where
parseElement el = PackagesConfig <$> children "package" el
instance FromXML NuGetDependency where
parseElement el =
NuGetDependency
<$> attr "id" el
<*> attr "version" el
newtype PackagesConfig = PackagesConfig
{ deps :: [NuGetDependency]
}
deriving (Eq, Ord, Show)
data NuGetDependency = NuGetDependency
{ depID :: Text
, depVersion :: Text
}
deriving (Eq, Ord, Show)
buildGraph :: PackagesConfig -> Graphing Dependency
buildGraph = Graphing.fromList . map toDependency . deps
where
toDependency NuGetDependency{..} =
Dependency
{ dependencyType = NuGetType
, dependencyName = depID
, dependencyVersion = Just (CEq depVersion)
, dependencyLocations = []
, dependencyEnvironments = mempty
, dependencyTags = Map.empty
}
| |
df67900d8b1a56ce2688823a346db52087c7ded7de1df145ac4c67ef6e10fd2d | iu-parfunc/haskell_dsl_tour | Fusion.hs | {-# LANGUAGE GADTs #-}
-- | Fusion transformation
--
module Fusion where
import AST
import Substitution
| Implement operator fusion by rewriting the AST .
--
fuseAcc :: OpenAcc aenv a -> OpenAcc aenv a
fuseAcc acc =
case acc of
Use xs -> Use xs
Avar ix -> Avar ix
Generate sh f -> Generate sh f
Alet bnd body -> Alet (fuseAcc bnd) (fuseAcc body)
Map f a ->
case fuseAcc a of
Map g b -> Map (f `compose` g) b
Generate sh g -> Generate sh (f `compose` g)
a' -> Map f a'
| null | https://raw.githubusercontent.com/iu-parfunc/haskell_dsl_tour/f75a7e492a1e5d219a77fb128f70441d54a706eb/middle_end/GADT_transforms/src/Fusion.hs | haskell | # LANGUAGE GADTs #
| Fusion transformation
|
module Fusion where
import AST
import Substitution
| Implement operator fusion by rewriting the AST .
fuseAcc :: OpenAcc aenv a -> OpenAcc aenv a
fuseAcc acc =
case acc of
Use xs -> Use xs
Avar ix -> Avar ix
Generate sh f -> Generate sh f
Alet bnd body -> Alet (fuseAcc bnd) (fuseAcc body)
Map f a ->
case fuseAcc a of
Map g b -> Map (f `compose` g) b
Generate sh g -> Generate sh (f `compose` g)
a' -> Map f a'
|
af442cc2f54b21d90648ad3e937897ac370475e0fadf8a3bffd6c96bd1168823 | BinaryAnalysisPlatform/bap | bap_value.ml | open Bap_core_theory
open Core_kernel[@@warning "-D"]
open Regular.Std
open Format
module type S = sig
type t [@@deriving bin_io, compare, sexp]
val pp : Format.formatter -> t -> unit
end
module Uid = Type_equal.Id.Uid
module Typeid = String
type void
type literal = (void,void,void) format
type uid = Uid.t
type typeid = Typeid.t [@@deriving bin_io, compare, sexp]
type 'a tag = {
key : 'a Type_equal.Id.t;
slot : (Theory.program,'a option) KB.slot;
}
module Value = struct
type t = Univ_map.Packed.t = T : 'a Type_equal.Id.t * 'a -> t
end
module Equal = struct
type ('a,'b) t = ('a,'b) Type_equal.t = T : ('a,'a) t
let proof = Type_equal.Id.same_witness_exn
let try_prove = Type_equal.Id.same_witness
end
type type_info = {
pp : Format.formatter -> Value.t -> unit;
of_string : string -> Value.t;
to_string : Value.t -> string;
of_sexp : Sexp.t -> Value.t;
to_sexp : Value.t -> Sexp.t;
collect : Theory.Label.t -> Univ_map.t -> Univ_map.t KB.t;
compare : Value.t -> Value.t -> int;
}
let types : (typeid, type_info) Hashtbl.t =
Hashtbl.create ~size:128 (module Typeid)
let uid = Type_equal.Id.uid
type ('a,'b) eq = ('a,'b) Type_equal.t = T : ('a,'a) eq
let register_slot (type a) ?uuid slot
(module S : S with type t = a) : a tag =
let slot_name = KB.Slot.name slot in
let uuid = match uuid with
| None -> KB.Name.package slot_name
| Some uuid -> uuid in
let name = KB.Name.show @@
KB.Name.create ~package:uuid @@
KB.Name.unqualified slot_name in
let key = Type_equal.Id.create name S.sexp_of_t in
let pp ppf (Value.T (k,x)) =
let T = Equal.proof k key in
S.pp ppf x in
let of_string str =
Value.T (key, Binable.of_string (module S) str) in
let to_string (Value.T (k,x)) =
let T = Equal.proof k key in
Binable.to_string (module S) x in
let of_sexp str =
Value.T (key, S.t_of_sexp str) in
let to_sexp (Value.T (k,x)) =
let T = Equal.proof k key in
S.sexp_of_t x in
let compare (Value.T (kx,x)) (Value.T (ky,y)) =
match Equal.try_prove kx ky with
| None -> Uid.compare (uid kx) (uid ky)
| Some T ->
let T = Equal.proof kx key in
S.compare x y in
let collect obj dict =
let open KB.Syntax in
KB.collect slot obj >>| function
| None -> dict
| Some x -> Univ_map.set dict key x in
let info = {
pp;
of_sexp;
to_sexp;
of_string;
to_string;
collect;
compare;
} in
Hashtbl.add_exn types ~key:name ~data:info;
{key; slot}
let register (type a) ?public ?desc ?package ~name ~uuid
(module S : S with type t = a) =
let persistent = KB.Persistent.of_binable (module struct
type t = S.t option [@@deriving bin_io]
end) in
let equal x y = S.compare x y = 0 in
let domain = KB.Domain.optional ~equal name ~inspect:S.sexp_of_t in
let slot = KB.Class.property ?public ?desc ~persistent ?package
Theory.Program.cls name domain in
register_slot ~uuid slot (module S)
let key_name k =
KB.Name.unqualified @@ KB.Name.read @@ Type_equal.Id.name k
let key_typeid k = Type_equal.Id.name k
let tagname (Value.T (k,_)) = key_name k
let typeid (Value.T (k,_)) = key_typeid k
let info typeid =
Hashtbl.find_and_call types typeid
~if_found:Fn.id
~if_not_found:(fun typeid ->
invalid_argf "Can't deserialize type %s, \
as it is no longer known to the system"
typeid ())
let ops x = info (typeid x)
let compare_value x y = (ops x).compare x y
let compare = compare_value
let sexp_of_value x = Sexp.List [
Sexp.Atom (typeid x);
(ops x).to_sexp x;
]
let value_of_sexp = function
| Sexp.List [Atom typeid; repr] ->
(info typeid).of_sexp repr
| _ -> invalid_arg "Value.t_of_sexp: broken representation"
module Univ = struct
type t = Value.t
let sexp_of_t = sexp_of_value
let t_of_sexp = value_of_sexp
let compare = compare_value
module Repr = struct
type t = {
typeid : string;
data : string;
} [@@deriving bin_io]
end
include Binable.Of_binable(Repr)(struct
type t = Value.t
let to_binable x = Repr.{
typeid = typeid x;
data = (ops x).to_string x;
}
let of_binable {Repr.typeid; data} =
(info typeid).of_string data
end) [@@warning "-D"]
end
let create {key} x = Value.T (key,x)
let is {key} (Value.T (k,_)) = Type_equal.Id.same key k
let get
: type a. a tag -> Value.t -> a option =
fun {key} (Value.T (k,x)) ->
if Type_equal.Id.same key k
then
let T = Equal.proof key k in
Some x
else None
let get_exn
: type a. a tag -> Value.t -> a =
fun {key} (Value.T (k,x)) ->
let T = Equal.proof key k in
x
module Tag = struct
type 'a t = 'a tag
let name tag = key_name tag.key
let typeid tag = key_typeid tag.key
let key tag = tag.key
let uid tag = uid tag.key
let register (type a) ?public ?desc ?package ~name ~uuid
(typ : (module S with type t = a)) : a tag =
register ?public ?desc ?package ~name ~uuid typ
let register_slot = register_slot
let slot tag = tag.slot
let same_witness t1 t2 =
Option.try_with (fun () ->
Type_equal.Id.same_witness_exn t1.key t2.key)
let same_witness_exn t1 t2 =
Type_equal.Id.same_witness_exn t1.key t2.key
let same t1 t2 = Type_equal.Id.same t1.key t2.key
end
module Match = struct
type 's t = {
default : (unit -> 's);
handlers : (Value.t -> 's) Map.M(Uid).t;
}
let empty = Map.empty (module Uid)
let default default = {
handlers = empty;
default = default;
}
let case t f (tab : 's t) =
let h = Map.set tab.handlers (Tag.uid t) (fun v -> f (get_exn t v)) in
{tab with handlers = h}
let run (Value.T (k,_) as v) tab =
match Map.find tab.handlers (uid k) with
| Some f -> f v
| None -> tab.default ()
let switch = run
let select x y = switch y x
end
module Dict = struct
type t = Univ_map.t
let empty = Univ_map.empty
let is_empty = Univ_map.is_empty
let set dict {key} x = Univ_map.set dict key x
let remove dict {key} = Univ_map.remove dict key
let mem dict {key} = Univ_map.mem dict key
let find dict {key} = Univ_map.find dict key
let add dict {key} x = Univ_map.add dict key x
let change dict {key} f = Univ_map.change dict key ~f
let data dict =
Univ_map.to_alist dict |>
Seq.of_list
let to_sequence dict =
Seq.map (data dict) ~f:(fun v -> typeid v,v)
let filter t ~f =
data t |>
Seq.fold ~init:empty ~f:(fun dict (Value.T (k,x) as v) ->
if f v then Univ_map.set dict k x else dict)
let compare x y =
compare_list
compare_value
(Univ_map.to_alist x)
(Univ_map.to_alist y)
module Data = struct
type t = Univ.t list [@@deriving bin_io, sexp]
let of_dict = Univ_map.to_alist
let to_dict =
List.fold ~init:empty ~f:(fun dict (Value.T (k,x)) ->
Univ_map.set dict k x)
end
include Binable.Of_binable(Data)(struct
type t = Univ_map.t
let to_binable = Data.of_dict
let of_binable = Data.to_dict
end) [@@warning "-D"]
include Sexpable.Of_sexpable(Data)(struct
type t = Univ_map.t
let to_sexpable = Data.of_dict
let of_sexpable = Data.to_dict
end)
end
type dict = Dict.t [@@deriving bin_io, compare, sexp]
type t = Univ.t [@@deriving bin_io, compare, sexp]
include struct type value = Univ.t [@@deriving bin_io] end
include Regular.Make(struct
type t = Univ.t [@@deriving bin_io, compare, sexp]
let compare = Univ.compare
let hash = Hashtbl.hash
let pp ppf v = (ops v).pp ppf v
let module_name = Some "Bap.Std.Value"
let version = "2.0.0"
end)
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/253afc171bbfd0fe1b34f6442795dbf4b1798348/lib/bap_types/bap_value.ml | ocaml | open Bap_core_theory
open Core_kernel[@@warning "-D"]
open Regular.Std
open Format
module type S = sig
type t [@@deriving bin_io, compare, sexp]
val pp : Format.formatter -> t -> unit
end
module Uid = Type_equal.Id.Uid
module Typeid = String
type void
type literal = (void,void,void) format
type uid = Uid.t
type typeid = Typeid.t [@@deriving bin_io, compare, sexp]
type 'a tag = {
key : 'a Type_equal.Id.t;
slot : (Theory.program,'a option) KB.slot;
}
module Value = struct
type t = Univ_map.Packed.t = T : 'a Type_equal.Id.t * 'a -> t
end
module Equal = struct
type ('a,'b) t = ('a,'b) Type_equal.t = T : ('a,'a) t
let proof = Type_equal.Id.same_witness_exn
let try_prove = Type_equal.Id.same_witness
end
type type_info = {
pp : Format.formatter -> Value.t -> unit;
of_string : string -> Value.t;
to_string : Value.t -> string;
of_sexp : Sexp.t -> Value.t;
to_sexp : Value.t -> Sexp.t;
collect : Theory.Label.t -> Univ_map.t -> Univ_map.t KB.t;
compare : Value.t -> Value.t -> int;
}
let types : (typeid, type_info) Hashtbl.t =
Hashtbl.create ~size:128 (module Typeid)
let uid = Type_equal.Id.uid
type ('a,'b) eq = ('a,'b) Type_equal.t = T : ('a,'a) eq
let register_slot (type a) ?uuid slot
(module S : S with type t = a) : a tag =
let slot_name = KB.Slot.name slot in
let uuid = match uuid with
| None -> KB.Name.package slot_name
| Some uuid -> uuid in
let name = KB.Name.show @@
KB.Name.create ~package:uuid @@
KB.Name.unqualified slot_name in
let key = Type_equal.Id.create name S.sexp_of_t in
let pp ppf (Value.T (k,x)) =
let T = Equal.proof k key in
S.pp ppf x in
let of_string str =
Value.T (key, Binable.of_string (module S) str) in
let to_string (Value.T (k,x)) =
let T = Equal.proof k key in
Binable.to_string (module S) x in
let of_sexp str =
Value.T (key, S.t_of_sexp str) in
let to_sexp (Value.T (k,x)) =
let T = Equal.proof k key in
S.sexp_of_t x in
let compare (Value.T (kx,x)) (Value.T (ky,y)) =
match Equal.try_prove kx ky with
| None -> Uid.compare (uid kx) (uid ky)
| Some T ->
let T = Equal.proof kx key in
S.compare x y in
let collect obj dict =
let open KB.Syntax in
KB.collect slot obj >>| function
| None -> dict
| Some x -> Univ_map.set dict key x in
let info = {
pp;
of_sexp;
to_sexp;
of_string;
to_string;
collect;
compare;
} in
Hashtbl.add_exn types ~key:name ~data:info;
{key; slot}
let register (type a) ?public ?desc ?package ~name ~uuid
(module S : S with type t = a) =
let persistent = KB.Persistent.of_binable (module struct
type t = S.t option [@@deriving bin_io]
end) in
let equal x y = S.compare x y = 0 in
let domain = KB.Domain.optional ~equal name ~inspect:S.sexp_of_t in
let slot = KB.Class.property ?public ?desc ~persistent ?package
Theory.Program.cls name domain in
register_slot ~uuid slot (module S)
let key_name k =
KB.Name.unqualified @@ KB.Name.read @@ Type_equal.Id.name k
let key_typeid k = Type_equal.Id.name k
let tagname (Value.T (k,_)) = key_name k
let typeid (Value.T (k,_)) = key_typeid k
let info typeid =
Hashtbl.find_and_call types typeid
~if_found:Fn.id
~if_not_found:(fun typeid ->
invalid_argf "Can't deserialize type %s, \
as it is no longer known to the system"
typeid ())
let ops x = info (typeid x)
let compare_value x y = (ops x).compare x y
let compare = compare_value
let sexp_of_value x = Sexp.List [
Sexp.Atom (typeid x);
(ops x).to_sexp x;
]
let value_of_sexp = function
| Sexp.List [Atom typeid; repr] ->
(info typeid).of_sexp repr
| _ -> invalid_arg "Value.t_of_sexp: broken representation"
module Univ = struct
type t = Value.t
let sexp_of_t = sexp_of_value
let t_of_sexp = value_of_sexp
let compare = compare_value
module Repr = struct
type t = {
typeid : string;
data : string;
} [@@deriving bin_io]
end
include Binable.Of_binable(Repr)(struct
type t = Value.t
let to_binable x = Repr.{
typeid = typeid x;
data = (ops x).to_string x;
}
let of_binable {Repr.typeid; data} =
(info typeid).of_string data
end) [@@warning "-D"]
end
let create {key} x = Value.T (key,x)
let is {key} (Value.T (k,_)) = Type_equal.Id.same key k
let get
: type a. a tag -> Value.t -> a option =
fun {key} (Value.T (k,x)) ->
if Type_equal.Id.same key k
then
let T = Equal.proof key k in
Some x
else None
let get_exn
: type a. a tag -> Value.t -> a =
fun {key} (Value.T (k,x)) ->
let T = Equal.proof key k in
x
module Tag = struct
type 'a t = 'a tag
let name tag = key_name tag.key
let typeid tag = key_typeid tag.key
let key tag = tag.key
let uid tag = uid tag.key
let register (type a) ?public ?desc ?package ~name ~uuid
(typ : (module S with type t = a)) : a tag =
register ?public ?desc ?package ~name ~uuid typ
let register_slot = register_slot
let slot tag = tag.slot
let same_witness t1 t2 =
Option.try_with (fun () ->
Type_equal.Id.same_witness_exn t1.key t2.key)
let same_witness_exn t1 t2 =
Type_equal.Id.same_witness_exn t1.key t2.key
let same t1 t2 = Type_equal.Id.same t1.key t2.key
end
module Match = struct
type 's t = {
default : (unit -> 's);
handlers : (Value.t -> 's) Map.M(Uid).t;
}
let empty = Map.empty (module Uid)
let default default = {
handlers = empty;
default = default;
}
let case t f (tab : 's t) =
let h = Map.set tab.handlers (Tag.uid t) (fun v -> f (get_exn t v)) in
{tab with handlers = h}
let run (Value.T (k,_) as v) tab =
match Map.find tab.handlers (uid k) with
| Some f -> f v
| None -> tab.default ()
let switch = run
let select x y = switch y x
end
module Dict = struct
type t = Univ_map.t
let empty = Univ_map.empty
let is_empty = Univ_map.is_empty
let set dict {key} x = Univ_map.set dict key x
let remove dict {key} = Univ_map.remove dict key
let mem dict {key} = Univ_map.mem dict key
let find dict {key} = Univ_map.find dict key
let add dict {key} x = Univ_map.add dict key x
let change dict {key} f = Univ_map.change dict key ~f
let data dict =
Univ_map.to_alist dict |>
Seq.of_list
let to_sequence dict =
Seq.map (data dict) ~f:(fun v -> typeid v,v)
let filter t ~f =
data t |>
Seq.fold ~init:empty ~f:(fun dict (Value.T (k,x) as v) ->
if f v then Univ_map.set dict k x else dict)
let compare x y =
compare_list
compare_value
(Univ_map.to_alist x)
(Univ_map.to_alist y)
module Data = struct
type t = Univ.t list [@@deriving bin_io, sexp]
let of_dict = Univ_map.to_alist
let to_dict =
List.fold ~init:empty ~f:(fun dict (Value.T (k,x)) ->
Univ_map.set dict k x)
end
include Binable.Of_binable(Data)(struct
type t = Univ_map.t
let to_binable = Data.of_dict
let of_binable = Data.to_dict
end) [@@warning "-D"]
include Sexpable.Of_sexpable(Data)(struct
type t = Univ_map.t
let to_sexpable = Data.of_dict
let of_sexpable = Data.to_dict
end)
end
type dict = Dict.t [@@deriving bin_io, compare, sexp]
type t = Univ.t [@@deriving bin_io, compare, sexp]
include struct type value = Univ.t [@@deriving bin_io] end
include Regular.Make(struct
type t = Univ.t [@@deriving bin_io, compare, sexp]
let compare = Univ.compare
let hash = Hashtbl.hash
let pp ppf v = (ops v).pp ppf v
let module_name = Some "Bap.Std.Value"
let version = "2.0.0"
end)
| |
6f1f5f0053496772fd081cf1390b8309a3c14ab16429915710dedf4879bdf7b4 | mejgun/haskell-tdlib | PremiumStatePaymentOption.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.PremiumStatePaymentOption where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.PremiumPaymentOption as PremiumPaymentOption
import qualified Utils as U
-- |
| Describes an option for buying or upgrading Telegram Premium for self
PremiumStatePaymentOption
{ -- | Identifier of the last in-store transaction for the currently used option
last_transaction_id :: Maybe String,
| True , if the payment option can be used to upgrade the existing Telegram Premium subscription
is_upgrade :: Maybe Bool,
| True , if this is the currently used Telegram Premium subscription option
is_current :: Maybe Bool,
-- | Information about the payment option
payment_option :: Maybe PremiumPaymentOption.PremiumPaymentOption
}
deriving (Eq)
instance Show PremiumStatePaymentOption where
show
PremiumStatePaymentOption
{ last_transaction_id = last_transaction_id_,
is_upgrade = is_upgrade_,
is_current = is_current_,
payment_option = payment_option_
} =
"PremiumStatePaymentOption"
++ U.cc
[ U.p "last_transaction_id" last_transaction_id_,
U.p "is_upgrade" is_upgrade_,
U.p "is_current" is_current_,
U.p "payment_option" payment_option_
]
instance T.FromJSON PremiumStatePaymentOption where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"premiumStatePaymentOption" -> parsePremiumStatePaymentOption v
_ -> mempty
where
parsePremiumStatePaymentOption :: A.Value -> T.Parser PremiumStatePaymentOption
parsePremiumStatePaymentOption = A.withObject "PremiumStatePaymentOption" $ \o -> do
last_transaction_id_ <- o A..:? "last_transaction_id"
is_upgrade_ <- o A..:? "is_upgrade"
is_current_ <- o A..:? "is_current"
payment_option_ <- o A..:? "payment_option"
return $ PremiumStatePaymentOption {last_transaction_id = last_transaction_id_, is_upgrade = is_upgrade_, is_current = is_current_, payment_option = payment_option_}
parseJSON _ = mempty
instance T.ToJSON PremiumStatePaymentOption where
toJSON
PremiumStatePaymentOption
{ last_transaction_id = last_transaction_id_,
is_upgrade = is_upgrade_,
is_current = is_current_,
payment_option = payment_option_
} =
A.object
[ "@type" A..= T.String "premiumStatePaymentOption",
"last_transaction_id" A..= last_transaction_id_,
"is_upgrade" A..= is_upgrade_,
"is_current" A..= is_current_,
"payment_option" A..= payment_option_
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/b088f5062023fa201a68ebda128ab2fc489329ab/src/TD/Data/PremiumStatePaymentOption.hs | haskell | # LANGUAGE OverloadedStrings #
|
|
| Identifier of the last in-store transaction for the currently used option
| Information about the payment option |
module TD.Data.PremiumStatePaymentOption where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.PremiumPaymentOption as PremiumPaymentOption
import qualified Utils as U
| Describes an option for buying or upgrading Telegram Premium for self
PremiumStatePaymentOption
last_transaction_id :: Maybe String,
| True , if the payment option can be used to upgrade the existing Telegram Premium subscription
is_upgrade :: Maybe Bool,
| True , if this is the currently used Telegram Premium subscription option
is_current :: Maybe Bool,
payment_option :: Maybe PremiumPaymentOption.PremiumPaymentOption
}
deriving (Eq)
instance Show PremiumStatePaymentOption where
show
PremiumStatePaymentOption
{ last_transaction_id = last_transaction_id_,
is_upgrade = is_upgrade_,
is_current = is_current_,
payment_option = payment_option_
} =
"PremiumStatePaymentOption"
++ U.cc
[ U.p "last_transaction_id" last_transaction_id_,
U.p "is_upgrade" is_upgrade_,
U.p "is_current" is_current_,
U.p "payment_option" payment_option_
]
instance T.FromJSON PremiumStatePaymentOption where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"premiumStatePaymentOption" -> parsePremiumStatePaymentOption v
_ -> mempty
where
parsePremiumStatePaymentOption :: A.Value -> T.Parser PremiumStatePaymentOption
parsePremiumStatePaymentOption = A.withObject "PremiumStatePaymentOption" $ \o -> do
last_transaction_id_ <- o A..:? "last_transaction_id"
is_upgrade_ <- o A..:? "is_upgrade"
is_current_ <- o A..:? "is_current"
payment_option_ <- o A..:? "payment_option"
return $ PremiumStatePaymentOption {last_transaction_id = last_transaction_id_, is_upgrade = is_upgrade_, is_current = is_current_, payment_option = payment_option_}
parseJSON _ = mempty
instance T.ToJSON PremiumStatePaymentOption where
toJSON
PremiumStatePaymentOption
{ last_transaction_id = last_transaction_id_,
is_upgrade = is_upgrade_,
is_current = is_current_,
payment_option = payment_option_
} =
A.object
[ "@type" A..= T.String "premiumStatePaymentOption",
"last_transaction_id" A..= last_transaction_id_,
"is_upgrade" A..= is_upgrade_,
"is_current" A..= is_current_,
"payment_option" A..= payment_option_
]
|
c6864f6b2dadec50364ada2425d76f51e9d60072d2c7d50aa9112e6d08eafbb7 | MarcosPividori/push-notify | Gcm.hs | -- GSoC 2013 - Communicating with mobile devices.
-- | This library defines an API for communicating with Android powered devices, sending Push Notifications through Google Cloud Messaging (HTTP connection).
module Network.PushNotify.Gcm
(
-- * GCM Service
sendGCM
-- * GCM Settings
, GCMHttpConfig(..)
, RegId
-- * GCM Messages
, GCMmessage(..)
-- * GCM Result
, GCMresult(..)
) where
import Network.PushNotify.Gcm.Types
import Network.PushNotify.Gcm.Send
| null | https://raw.githubusercontent.com/MarcosPividori/push-notify/4c023c3fd731178d1d114774993a5e337225baa1/push-notify/Network/PushNotify/Gcm.hs | haskell | GSoC 2013 - Communicating with mobile devices.
| This library defines an API for communicating with Android powered devices, sending Push Notifications through Google Cloud Messaging (HTTP connection).
* GCM Service
* GCM Settings
* GCM Messages
* GCM Result |
module Network.PushNotify.Gcm
(
sendGCM
, GCMHttpConfig(..)
, RegId
, GCMmessage(..)
, GCMresult(..)
) where
import Network.PushNotify.Gcm.Types
import Network.PushNotify.Gcm.Send
|
1fc4dabb6c011f93851f6ccc2581a9feea86905de545df8a3f3d5684d4ab145b | aniketpant/fraskell | filtering.hs |
Use the functions mentioned in this section ( you will need two of them ) to
compute the number of lower - case letters in a string . For instance , on “ aBCde ”
it should return 3 .
Use the functions mentioned in this section (you will need two of them) to
compute the number of lower-case letters in a string. For instance, on “aBCde”
it should return 3.
-}
module Main where
import Data.Char
main = do
let input = "aBCde"
let output = length(filter isLower input)
print output | null | https://raw.githubusercontent.com/aniketpant/fraskell/e1c0f9a11bada28907980f08eff86106d67bf4f5/basics/filtering.hs | haskell |
Use the functions mentioned in this section ( you will need two of them ) to
compute the number of lower - case letters in a string . For instance , on “ aBCde ”
it should return 3 .
Use the functions mentioned in this section (you will need two of them) to
compute the number of lower-case letters in a string. For instance, on “aBCde”
it should return 3.
-}
module Main where
import Data.Char
main = do
let input = "aBCde"
let output = length(filter isLower input)
print output | |
019b719a9408c552cef910c0c74f6702e1f87a1e91b6d616b69107a645f8f83e | ralsei/graphite | col-test.rkt | #lang racket
(require data-frame graphite plot/utils)
(define df (make-data-frame))
(df-add-series! df (make-series "trt" #:data (vector "a" "b" "c")))
(df-add-series! df (make-series "outcome" #:data (vector 2.3 1.9 3.2)))
(graph #:data df
#:mapping (aes #:x "trt" #:y "outcome")
(col))
| null | https://raw.githubusercontent.com/ralsei/graphite/9b3de31156543dd135e6286132a88a48af2b8298/graphite-examples/col-test.rkt | racket | #lang racket
(require data-frame graphite plot/utils)
(define df (make-data-frame))
(df-add-series! df (make-series "trt" #:data (vector "a" "b" "c")))
(df-add-series! df (make-series "outcome" #:data (vector 2.3 1.9 3.2)))
(graph #:data df
#:mapping (aes #:x "trt" #:y "outcome")
(col))
| |
3376ebf620ddf4340f8a06ef5733e87338f486a2edc38d46a0c4c04005247e18 | xtdb/xtdb | console.clj | (ns xtdb.metrics.console
(:require [xtdb.metrics :as metrics]
[clojure.string :as string]
[xtdb.system :as sys])
(:import (com.codahale.metrics MetricRegistry ConsoleReporter ScheduledReporter)
(java.util Locale)
(java.util.concurrent TimeUnit)
(java.time Duration)
(java.io Closeable)))
(defn ->reporter {::sys/deps {:registry ::metrics/registry
:metrics ::metrics/metrics}
::sys/args {:report-frequency {:doc "Frequency of reporting metrics"
:default (Duration/ofSeconds 1)
:spec ::sys/duration}
:rate-unit {:doc "Set rate unit"
:required? false
:default TimeUnit/SECONDS
:spec ::sys/time-unit}
:duration-unit {:doc "Set duration unit"
:required? false
:default TimeUnit/MILLISECONDS
:spec ::sys/time-unit}}}
^com.codahale.metrics.ConsoleReporter
[{:keys [^MetricRegistry registry stream metric-filter locale clock report-frequency rate-unit duration-unit]}]
(-> (ConsoleReporter/forRegistry registry)
(cond-> stream (.outputTo stream)
locale (.formattedFor ^Locale locale)
clock (.withClock clock)
rate-unit (.convertRatesTo rate-unit)
duration-unit (.convertDurationsTo duration-unit)
metric-filter (.filter metric-filter))
(.build)
(doto (.start (.toMillis ^Duration report-frequency) TimeUnit/MILLISECONDS))))
| null | https://raw.githubusercontent.com/xtdb/xtdb/e2f51ed99fc2716faa8ad254c0b18166c937b134/modules/metrics/src/xtdb/metrics/console.clj | clojure | (ns xtdb.metrics.console
(:require [xtdb.metrics :as metrics]
[clojure.string :as string]
[xtdb.system :as sys])
(:import (com.codahale.metrics MetricRegistry ConsoleReporter ScheduledReporter)
(java.util Locale)
(java.util.concurrent TimeUnit)
(java.time Duration)
(java.io Closeable)))
(defn ->reporter {::sys/deps {:registry ::metrics/registry
:metrics ::metrics/metrics}
::sys/args {:report-frequency {:doc "Frequency of reporting metrics"
:default (Duration/ofSeconds 1)
:spec ::sys/duration}
:rate-unit {:doc "Set rate unit"
:required? false
:default TimeUnit/SECONDS
:spec ::sys/time-unit}
:duration-unit {:doc "Set duration unit"
:required? false
:default TimeUnit/MILLISECONDS
:spec ::sys/time-unit}}}
^com.codahale.metrics.ConsoleReporter
[{:keys [^MetricRegistry registry stream metric-filter locale clock report-frequency rate-unit duration-unit]}]
(-> (ConsoleReporter/forRegistry registry)
(cond-> stream (.outputTo stream)
locale (.formattedFor ^Locale locale)
clock (.withClock clock)
rate-unit (.convertRatesTo rate-unit)
duration-unit (.convertDurationsTo duration-unit)
metric-filter (.filter metric-filter))
(.build)
(doto (.start (.toMillis ^Duration report-frequency) TimeUnit/MILLISECONDS))))
| |
db50ddc053ac3f74bd3071a2629017386b37bd71225a34d403faf2171fd6d6e4 | weblocks-framework/weblocks | template-utils.lisp | (in-package :cm)
(defparameter *template-temporary-validation-errors* nil)
(defparameter *template-temporary-intermediate-values* nil)
(defparameter *out-of-band-template-vars* nil)
;============ utils
(defmacro string+ (&rest args)
`(concatenate 'string ,@args))
(defun fill-template-widget (name &key (language "it") assoc assoc2)
(declare (special *out-of-band-template-vars*))
(setf html-template:*string-modifier* #'CL:IDENTITY)
( warn ( format nil " ~A " assoc ) )
(warn (format nil "filltemplatewidget oob ~A" assoc2))
(let ((filename (merge-pathnames (make-pathname :directory '(:relative "templates") :name name :type language )
*public-files-path*)))
(make-instance 'html-template :file filename :vars (append assoc assoc2))))
(defun make-main-page-employee ()
(with-html (:p "You are an employee")))
from template-form-view.lisp put this into utils -- same as weblocks alist->plist , but converts clos object to obj - class - name
(defun my-alist->plist (alist)
"Converts an alist to plist."
(let ((keyword-package (find-package :keyword)))
(loop for i in alist
collect (if (symbolp (car i))
(intern (symbol-name (car i)) keyword-package)
"DONTCARE")
collect (cdr i))))
;; (if (symbolp (car i))
;; (intern (symbol-name (car i)) keyword-package)
;; (intern (string-upcase (car i)) keyword-package))
; was to be used in conjunction with (clos->string (car i))
; in the original alist->plist above (in place of string-upcase (car i))
(defun clos->string (some)
(ecase (class-of some)
(:templform-view-field (view-field-name some))))
| null | https://raw.githubusercontent.com/weblocks-framework/weblocks/fe96152458c8eb54d74751b3201db42dafe1708b/contrib/nunb/templates-crufty/template-utils.lisp | lisp | ============ utils
(if (symbolp (car i))
(intern (symbol-name (car i)) keyword-package)
(intern (string-upcase (car i)) keyword-package))
was to be used in conjunction with (clos->string (car i))
in the original alist->plist above (in place of string-upcase (car i)) | (in-package :cm)
(defparameter *template-temporary-validation-errors* nil)
(defparameter *template-temporary-intermediate-values* nil)
(defparameter *out-of-band-template-vars* nil)
(defmacro string+ (&rest args)
`(concatenate 'string ,@args))
(defun fill-template-widget (name &key (language "it") assoc assoc2)
(declare (special *out-of-band-template-vars*))
(setf html-template:*string-modifier* #'CL:IDENTITY)
( warn ( format nil " ~A " assoc ) )
(warn (format nil "filltemplatewidget oob ~A" assoc2))
(let ((filename (merge-pathnames (make-pathname :directory '(:relative "templates") :name name :type language )
*public-files-path*)))
(make-instance 'html-template :file filename :vars (append assoc assoc2))))
(defun make-main-page-employee ()
(with-html (:p "You are an employee")))
from template-form-view.lisp put this into utils -- same as weblocks alist->plist , but converts clos object to obj - class - name
(defun my-alist->plist (alist)
"Converts an alist to plist."
(let ((keyword-package (find-package :keyword)))
(loop for i in alist
collect (if (symbolp (car i))
(intern (symbol-name (car i)) keyword-package)
"DONTCARE")
collect (cdr i))))
(defun clos->string (some)
(ecase (class-of some)
(:templform-view-field (view-field-name some))))
|
d661633b47d38e4d0f75a96eb263ba8659e627cb68237b4589b0d1b952cdfcb7 | digitallyinduced/ihp | ParamSpec.hs | |
Module : Test . Controller . ParamSpec
Copyright : ( c ) digitally induced GmbH , 2020
Module: Test.Controller.ParamSpec
Copyright: (c) digitally induced GmbH, 2020
-}
module Test.Controller.ParamSpec where
import IHP.Prelude
import IHP.HaskellSupport
import Test.Hspec
import IHP.Controller.Param
import IHP.Controller.Context
import IHP.Controller.RequestContext
import IHP.ModelSupport
import qualified Data.Aeson as Aeson
import qualified Data.UUID as UUID
import qualified Data.TMap as TypeMap
import qualified Network.Wai as Wai
import qualified GHC.IO as IO
import Data.Scientific (Scientific)
tests = do
describe "IHP.Controller.Param" do
describe "param" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("page", "1")]
(param @Int "page") `shouldBe` 1
it "should fail on empty input" do
let ?context = createControllerContextWithParams [("page", "")]
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
it "should fail if param not provided" do
let ?context = createControllerContextWithParams []
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamNotFoundException { name = "page" })
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("page", "NaN")]
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
describe "paramOrNothing" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("referredBy", "776ab71d-327f-41b3-90a8-7b5a251c4b88")]
(paramOrNothing @UUID "referredBy") `shouldBe` (Just "776ab71d-327f-41b3-90a8-7b5a251c4b88")
it "should return Nothing on empty input" do
let ?context = createControllerContextWithParams [("referredBy", "")]
(paramOrNothing @UUID "referredBy") `shouldBe` Nothing
it "should return Nothing if param not provided" do
let ?context = createControllerContextWithParams []
(paramOrNothing @UUID "referredBy") `shouldBe` Nothing
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("referredBy", "not a uuid")]
(IO.evaluate (paramOrNothing @UUID "referredBy")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "referredBy", parserError = "has to be an UUID" })
describe "paramOrDefault" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("page", "1")]
(paramOrDefault @Int 0 "page") `shouldBe` 1
it "should return default value on empty input" do
let ?context = createControllerContextWithParams [("page", "")]
(paramOrDefault @Int 10 "page") `shouldBe` 10
it "should return default value if param not provided" do
let ?context = createControllerContextWithParams []
(paramOrDefault @Int 10 "page") `shouldBe` 10
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("page", "NaN")]
(IO.evaluate (paramOrDefault @Int 10 "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
describe "paramList" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("ingredients", "milk"), ("ingredients", "egg")]
(paramList @Text "ingredients") `shouldBe` ["milk", "egg"]
it "should fail on invalid input" do
let ?context = createControllerContextWithParams [("numbers", "1"), ("numbers", "NaN")]
(IO.evaluate (paramList @Int "numbers")) `shouldThrow` (errorCall "param: Parameter 'numbers' is invalid")
it "should deal with empty input" do
let ?context = createControllerContextWithParams []
(paramList @Int "numbers") `shouldBe` []
describe "paramListOrNothing" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("ingredients", "milk"), ("ingredients", ""), ("ingredients", "egg")]
(paramListOrNothing @Text "ingredients") `shouldBe` [Just "milk", Nothing, Just "egg"]
it "should not fail on invalid input" do
let ?context = createControllerContextWithParams [("numbers", "1"), ("numbers", "NaN")]
(paramListOrNothing @Int "numbers") `shouldBe` [Just 1, Nothing]
it "should deal with empty input" do
let ?context = createControllerContextWithParams []
(paramListOrNothing @Int "numbers") `shouldBe` []
describe "hasParam" do
it "returns True if param given" do
let ?context = createControllerContextWithParams [("a", "test")]
hasParam "a" `shouldBe` True
it "returns True if param given but empty" do
let ?context = createControllerContextWithParams [("a", "")]
hasParam "a" `shouldBe` True
it "returns False if param missing" do
let ?context = createControllerContextWithParams []
hasParam "a" `shouldBe` False
describe "ParamReader" do
describe "ByteString" do
it "should handle text input" do
(readParameter @ByteString "test") `shouldBe` (Right "test")
it "should handle JSON strings" do
(readParameterJSON @ByteString (json "\"test\"")) `shouldBe` (Right ("test" :: ByteString))
it "should fail on other JSON input" do
(readParameterJSON @ByteString (json "1")) `shouldBe` (Left ("Expected String" :: ByteString))
describe "Int" do
it "should accept numeric input" do
(readParameter @Int "1337") `shouldBe` (Right 1337)
it "should accept negative numbers" do
(readParameter @Int "-1337") `shouldBe` (Right (-1337))
it "should accept JSON numerics " do
(readParameterJSON @Int (json "1337")) `shouldBe` (Right 1337)
it "should fail on other JSON input " do
(readParameterJSON @Int (json "true")) `shouldBe` (Left "Expected Int")
describe "Integer" do
it "should accept numeric input" do
(readParameter @Integer "1337") `shouldBe` (Right 1337)
it "should accept negative numbers" do
(readParameter @Integer "-1337") `shouldBe` (Right (-1337))
it "should accept JSON numerics " do
(readParameterJSON @Integer (json "1337")) `shouldBe` (Right 1337)
it "should fail on other JSON input " do
(readParameterJSON @Integer (json "true")) `shouldBe` (Left "Expected Integer")
(readParameterJSON @Integer (json "\"1\"")) `shouldBe` (Left "Expected Integer")
describe "Double" do
it "should accept integer input" do
(readParameter @Double "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Double "1.2") `shouldBe` (Right 1.2)
(readParameter @Double "1.2345679") `shouldBe` (Right 1.2345679)
it "should accept JSON integer input" do
(readParameterJSON @Double (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Double (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Double (json "true")) `shouldBe` (Left "Expected Double")
(readParameterJSON @Double (json "\"1\"")) `shouldBe` (Left "Expected Double")
describe "Scientific" do
it "should accept integer input" do
(readParameter @Scientific "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Scientific "1.2") `shouldBe` (Right 1.2)
(readParameter @Scientific "1.2345679") `shouldBe` (Right 1.2345679)
let x = "1e-1024" -- -1024 is smaller than minimal Double exponent of -1021
y = "1.0e-1024"
(show <$> readParameter @Scientific x) `shouldBe` (Right y)
it "should accept JSON integer input" do
(readParameterJSON @Scientific (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Scientific (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Scientific (json "true")) `shouldBe` (Left "Expected Scientific")
(readParameterJSON @Scientific (json "\"1\"")) `shouldBe` (Left "Expected Scientific")
describe "Float" do
it "should accept integer input" do
(readParameter @Float "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Float "1.2") `shouldBe` (Right 1.2)
(readParameter @Float "1.2345679") `shouldBe` (Right 1.2345679)
it "should accept JSON integer input" do
(readParameterJSON @Float (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Float (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Float (json "true")) `shouldBe` (Left "Expected Float")
(readParameterJSON @Float (json "\"1\"")) `shouldBe` (Left "Expected Float")
describe "Point" do
it "should accept integer input" do
(readParameter @Point "1337,1338") `shouldBe` (Right Point { x = 1337, y = 1338 })
it "should accept floating point input" do
(readParameter @Point "1.2,1.3") `shouldBe` (Right Point { x = 1.2, y = 1.3 })
it "should accept JSON integer input" do
(readParameterJSON @Point (json "\"1337,1338\"")) `shouldBe` (Right Point { x = 1337, y = 1338 })
it "should accept JSON floating point input" do
(readParameterJSON @Point (json "\"1.2,1.3\"")) `shouldBe` (Right Point { x = 1.2, y = 1.3 })
it "should fail on other JSON input " do
(readParameterJSON @Point (json "true")) `shouldBe` (Left "Expected Point")
(readParameterJSON @Point (json "\"1\"")) `shouldBe` (Left "has to be two numbers with a comma, e.g. '1,2'")
(readParameterJSON @Point (json "\"1.2\"")) `shouldBe` (Left "has to be two numbers with a comma, e.g. '1,2'")
describe "Polygon" do
it "should accept integer input" do
(readParameter @Polygon "(100,200),(300,400)") `shouldBe`
(Right Polygon { points = [ Point { x = 100, y = 200 }, Point { x = 300, y = 400 } ] })
it "should accept floating-point input" do
(readParameter @Polygon "(100.1,200.2),(300.3,400.4)") `shouldBe`
(Right Polygon { points = [ Point { x = 100.1, y = 200.2 }, Point { x = 300.3, y = 400.4 } ] })
it "should accept JSON integer input" do
(readParameterJSON @Polygon (json "\"(100,200),(300,400)\"")) `shouldBe`
(Right Polygon { points = [ Point { x = 100, y = 200 }, Point { x = 300, y = 400 } ] })
it "should accept JSON floating-point input" do
(readParameterJSON @Polygon (json "\"(100.1,200.2),(300.3,400.4)\"")) `shouldBe`
(Right Polygon { points = [ Point { x = 100.1, y = 200.2 }, Point { x = 300.3, y = 400.4 } ] })
describe "Text" do
it "should handle text input" do
(readParameter @Text "test") `shouldBe` (Right "test")
it "should handle JSON strings" do
(readParameterJSON @Text (json "\"test\"")) `shouldBe` (Right ("test"))
it "should fail on other JSON input" do
(readParameterJSON @Text (json "1")) `shouldBe` (Left ("Expected String"))
describe "CSV" do
it "should handle empty input" do
(readParameter @[Int] "") `shouldBe` (Right [])
it "should handle a single value" do
(readParameter @[Int] "1") `shouldBe` (Right [1])
it "should handle comma separated values" do
(readParameter @[Int] "1,2,3") `shouldBe` (Right [1,2,3])
it "should fail if a single value is invalid" do
(readParameter @[Int] "1,a,3") `shouldBe` (Left "has to be an integer")
it "should handle JSON arrays" do
(readParameterJSON @[Int] (json "[1,2,3]")) `shouldBe` (Right [1,2,3])
it "should fail on JSON input that is not an array" do
(readParameterJSON @[Int] (json "true")) `shouldBe` (Left "Expected Array")
describe "Bool" do
it "should accept 'on' as True" do
(readParameter @Bool "on") `shouldBe` (Right True)
it "should accept 'true' as True" do
(readParameter @Bool "true") `shouldBe` (Right True)
(readParameter @Bool "TruE") `shouldBe` (Right True)
it "should accept everything else as false input" do
(readParameter @Bool "off") `shouldBe` (Right False)
(readParameter @Bool "false") `shouldBe` (Right False)
(readParameter @Bool "invalid") `shouldBe` (Right False)
describe "UUID" do
it "should accept UUID values" do
(readParameter @UUID "6188329c-6bad-47f6-800c-2fd19ce0b2df") `shouldBe` (Right "6188329c-6bad-47f6-800c-2fd19ce0b2df")
(readParameter @UUID "a020ba17-a94e-453f-9414-c54aa30caa54") `shouldBe` (Right "a020ba17-a94e-453f-9414-c54aa30caa54")
it "should fail on invalid values" do
(readParameter @UUID "not a uuid") `shouldBe` (Left "has to be an UUID")
it "should accept JSON UUIDs" do
(readParameterJSON @UUID (json "\"6188329c-6bad-47f6-800c-2fd19ce0b2df\"")) `shouldBe` (Right "6188329c-6bad-47f6-800c-2fd19ce0b2df")
it "should fail on invalid JSON input" do
(readParameterJSON @UUID (json "\"not a uuid\"")) `shouldBe` (Left "Invalid UUID")
(readParameterJSON @UUID (json "false")) `shouldBe` (Left "Expected String with an UUID")
describe "UTCTime" do
it "should accept timestamps" do
(tshow (readParameter @UTCTime "2020-11-08T12:03:35Z")) `shouldBe` ("Right 2020-11-08 12:03:35 UTC")
it "should accept dates" do
(tshow (readParameter @UTCTime "2020-11-08")) `shouldBe` ("Right 2020-11-08 00:00:00 UTC")
it "should fail on invalid inputs" do
(readParameter @UTCTime "not a timestamp") `shouldBe` (Left "has to be a valid date and time, e.g. 2020-11-08T12:03:35Z")
it "should accept JSON strings" do
(tshow (readParameterJSON @UTCTime (json "\"2020-11-08T12:03:35Z\""))) `shouldBe` ("Right 2020-11-08 12:03:35 UTC")
describe "LocalTime" do
it "should accept timestamps" do
(tshow (readParameter @LocalTime "2020-11-08T12:03:35Z")) `shouldBe` ("Right 2020-11-08 12:03:35")
it "should accept dates" do
(tshow (readParameter @LocalTime "2020-11-08")) `shouldBe` ("Right 2020-11-08 00:00:00")
it "should fail on invalid inputs" do
(readParameter @LocalTime "not a timestamp") `shouldBe` (Left "has to be a valid date and time, e.g. 2020-11-08T12:03:35Z")
it "should accept JSON strings" do
(tshow (readParameterJSON @LocalTime (json "\"2020-11-08T12:03:35Z\""))) `shouldBe` ("Right 2020-11-08 12:03:35")
describe "Day" do
it "should accept dates" do
(tshow (readParameter @Day "2020-11-08")) `shouldBe` ("Right 2020-11-08")
it "should fail on invalid inputs" do
(readParameter @Day "not a timestamp") `shouldBe` (Left "has to be a date, e.g. 2020-11-08")
it "should accept JSON strings" do
(tshow (readParameterJSON @Day (json "\"2020-11-08\""))) `shouldBe` ("Right 2020-11-08")
describe "TimeOfDay" do
it "should accept time values" do
(tshow (readParameter @TimeOfDay "12:00:00")) `shouldBe` ("Right 12:00:00")
it "should fail on invalid inputs" do
(readParameter @TimeOfDay "not a time") `shouldBe` (Left "has to be time in the format hh:mm:ss")
(readParameter @TimeOfDay "25:00:00") `shouldBe` (Left "has to be time in the format hh:mm:ss")
it "should accept JSON strings" do
(tshow (readParameterJSON @TimeOfDay (json "\"13:37:00\""))) `shouldBe` ("Right 13:37:00")
describe "Maybe" do
it "should accept values" do
(readParameter @(Maybe Int) "1") `shouldBe` (Right (Just 1))
(readParameter @(Maybe Text) "hello") `shouldBe` (Right (Just "hello"))
it "should handle empty input as Nothing" do
(readParameter @(Maybe Int) "") `shouldBe` (Right Nothing)
(readParameter @(Maybe UUID) "") `shouldBe` (Right Nothing)
(readParameterJSON @(Maybe Bool) "") `shouldBe` (Right Nothing)
it "should handle empty Text as Just" do
(readParameter @(Maybe Text) "") `shouldBe` (Right (Just ""))
(readParameter @(Maybe ByteString) "") `shouldBe` (Right (Just ""))
it "should handle empty Bool as False" do
(readParameter @(Maybe Bool) "") `shouldBe` (Right (Just False))
it "should deal with parser errors" do
(readParameter @(Maybe Int) "not a number") `shouldBe` (Left "has to be an integer")
describe "Enum" do
it "should accept values" do
(readParameter "Yellow") `shouldBe` (Right Yellow)
(readParameter "Red") `shouldBe` (Right Red)
(readParameter "Blue") `shouldBe` (Right Blue)
it "should fail on invalid values" do
(readParameter @Color "black") `shouldBe` (Left "Invalid value")
(readParameter @Color "") `shouldBe` (Left "Invalid value")
it "should deal with JSON" do
(readParameterJSON (json "\"Yellow\"")) `shouldBe` (Right Yellow)
(readParameterJSON (json "\"Red\"")) `shouldBe` (Right Red)
(readParameterJSON (json "\"Blue\"")) `shouldBe` (Right Blue)
it "should fail on invalid JSON" do
(readParameterJSON @Color (json "\"\"")) `shouldBe` (Left "Invalid value")
(readParameterJSON @Color (json "1337")) `shouldBe` (Left "enumParamReaderJSON: Invalid value, expected a string but got something else")
describe "fill" do
it "should fill provided values if valid" do
let ?context = createControllerContextWithParams [("boolField", "on"), ("colorField", "Red")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = True, colorField = Red, meta = def { touchedFields = ["colorField", "boolField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should not touch fields if a field is missing" do
let ?context = createControllerContextWithParams [("colorField", "Red")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Red, meta = def { touchedFields = ["colorField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should add validation errors if the parsing fails" do
let ?context = createControllerContextWithParams [("colorField", "invalid color")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Yellow, meta = def { annotations = [("colorField", TextViolation "Invalid value")] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should deal with json values" do
let ?context = createControllerContextWithJson "{\"colorField\":\"Red\",\"boolField\":true}"
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = True, colorField = Red, meta = def { touchedFields = ["colorField", "boolField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should deal with empty json values" do
let ?context = createControllerContextWithJson "{}"
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
createControllerContextWithParams params =
let
requestBody = FormBody { params, files = [] }
request = Wai.defaultRequest
requestContext = RequestContext { request, respond = error "respond", requestBody, vault = error "vault", frameworkConfig = error "frameworkConfig" }
in FrozenControllerContext { requestContext, customFields = TypeMap.empty }
createControllerContextWithJson params =
let
requestBody = JSONBody { jsonPayload = Just (json params), rawPayload = cs params }
request = Wai.defaultRequest
requestContext = RequestContext { request, respond = error "respond", requestBody, vault = error "vault", frameworkConfig = error "frameworkConfig" }
in FrozenControllerContext { requestContext, customFields = TypeMap.empty }
json :: Text -> Aeson.Value
json string =
let (Just value) :: Maybe Aeson.Value = Aeson.decode (cs string)
in value
data Color = Yellow | Red | Blue deriving (Enum, Show, Eq)
instance ParamReader Color where
readParameter = enumParamReader
readParameterJSON = enumParamReaderJSON
instance InputValue Color where inputValue = tshow
data FillRecord = FillRecord { boolField :: Bool, colorField :: Color, meta :: MetaBag }
deriving (Show, Eq)
instance SetField "boolField" FillRecord Bool where
setField value record = record { boolField = value } |> modify #meta (modify #touchedFields ("boolField":))
instance SetField "colorField" FillRecord Color where
setField value record = record { colorField = value } |> modify #meta (modify #touchedFields ("colorField":))
instance SetField "meta" FillRecord MetaBag where
setField value record = record { meta = value } | null | https://raw.githubusercontent.com/digitallyinduced/ihp/3cd00517ff3f6e97e1ca0d68a4ce61d61c66d4aa/Test/Controller/ParamSpec.hs | haskell | -1024 is smaller than minimal Double exponent of -1021 | |
Module : Test . Controller . ParamSpec
Copyright : ( c ) digitally induced GmbH , 2020
Module: Test.Controller.ParamSpec
Copyright: (c) digitally induced GmbH, 2020
-}
module Test.Controller.ParamSpec where
import IHP.Prelude
import IHP.HaskellSupport
import Test.Hspec
import IHP.Controller.Param
import IHP.Controller.Context
import IHP.Controller.RequestContext
import IHP.ModelSupport
import qualified Data.Aeson as Aeson
import qualified Data.UUID as UUID
import qualified Data.TMap as TypeMap
import qualified Network.Wai as Wai
import qualified GHC.IO as IO
import Data.Scientific (Scientific)
tests = do
describe "IHP.Controller.Param" do
describe "param" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("page", "1")]
(param @Int "page") `shouldBe` 1
it "should fail on empty input" do
let ?context = createControllerContextWithParams [("page", "")]
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
it "should fail if param not provided" do
let ?context = createControllerContextWithParams []
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamNotFoundException { name = "page" })
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("page", "NaN")]
(IO.evaluate (param @Int "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
describe "paramOrNothing" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("referredBy", "776ab71d-327f-41b3-90a8-7b5a251c4b88")]
(paramOrNothing @UUID "referredBy") `shouldBe` (Just "776ab71d-327f-41b3-90a8-7b5a251c4b88")
it "should return Nothing on empty input" do
let ?context = createControllerContextWithParams [("referredBy", "")]
(paramOrNothing @UUID "referredBy") `shouldBe` Nothing
it "should return Nothing if param not provided" do
let ?context = createControllerContextWithParams []
(paramOrNothing @UUID "referredBy") `shouldBe` Nothing
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("referredBy", "not a uuid")]
(IO.evaluate (paramOrNothing @UUID "referredBy")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "referredBy", parserError = "has to be an UUID" })
describe "paramOrDefault" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("page", "1")]
(paramOrDefault @Int 0 "page") `shouldBe` 1
it "should return default value on empty input" do
let ?context = createControllerContextWithParams [("page", "")]
(paramOrDefault @Int 10 "page") `shouldBe` 10
it "should return default value if param not provided" do
let ?context = createControllerContextWithParams []
(paramOrDefault @Int 10 "page") `shouldBe` 10
it "should fail with a parser error on invalid input" do
let ?context = createControllerContextWithParams [("page", "NaN")]
(IO.evaluate (paramOrDefault @Int 10 "page")) `shouldThrow` (== ParamCouldNotBeParsedException { name = "page", parserError = "has to be an integer" })
describe "paramList" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("ingredients", "milk"), ("ingredients", "egg")]
(paramList @Text "ingredients") `shouldBe` ["milk", "egg"]
it "should fail on invalid input" do
let ?context = createControllerContextWithParams [("numbers", "1"), ("numbers", "NaN")]
(IO.evaluate (paramList @Int "numbers")) `shouldThrow` (errorCall "param: Parameter 'numbers' is invalid")
it "should deal with empty input" do
let ?context = createControllerContextWithParams []
(paramList @Int "numbers") `shouldBe` []
describe "paramListOrNothing" do
it "should parse valid input" do
let ?context = createControllerContextWithParams [("ingredients", "milk"), ("ingredients", ""), ("ingredients", "egg")]
(paramListOrNothing @Text "ingredients") `shouldBe` [Just "milk", Nothing, Just "egg"]
it "should not fail on invalid input" do
let ?context = createControllerContextWithParams [("numbers", "1"), ("numbers", "NaN")]
(paramListOrNothing @Int "numbers") `shouldBe` [Just 1, Nothing]
it "should deal with empty input" do
let ?context = createControllerContextWithParams []
(paramListOrNothing @Int "numbers") `shouldBe` []
describe "hasParam" do
it "returns True if param given" do
let ?context = createControllerContextWithParams [("a", "test")]
hasParam "a" `shouldBe` True
it "returns True if param given but empty" do
let ?context = createControllerContextWithParams [("a", "")]
hasParam "a" `shouldBe` True
it "returns False if param missing" do
let ?context = createControllerContextWithParams []
hasParam "a" `shouldBe` False
describe "ParamReader" do
describe "ByteString" do
it "should handle text input" do
(readParameter @ByteString "test") `shouldBe` (Right "test")
it "should handle JSON strings" do
(readParameterJSON @ByteString (json "\"test\"")) `shouldBe` (Right ("test" :: ByteString))
it "should fail on other JSON input" do
(readParameterJSON @ByteString (json "1")) `shouldBe` (Left ("Expected String" :: ByteString))
describe "Int" do
it "should accept numeric input" do
(readParameter @Int "1337") `shouldBe` (Right 1337)
it "should accept negative numbers" do
(readParameter @Int "-1337") `shouldBe` (Right (-1337))
it "should accept JSON numerics " do
(readParameterJSON @Int (json "1337")) `shouldBe` (Right 1337)
it "should fail on other JSON input " do
(readParameterJSON @Int (json "true")) `shouldBe` (Left "Expected Int")
describe "Integer" do
it "should accept numeric input" do
(readParameter @Integer "1337") `shouldBe` (Right 1337)
it "should accept negative numbers" do
(readParameter @Integer "-1337") `shouldBe` (Right (-1337))
it "should accept JSON numerics " do
(readParameterJSON @Integer (json "1337")) `shouldBe` (Right 1337)
it "should fail on other JSON input " do
(readParameterJSON @Integer (json "true")) `shouldBe` (Left "Expected Integer")
(readParameterJSON @Integer (json "\"1\"")) `shouldBe` (Left "Expected Integer")
describe "Double" do
it "should accept integer input" do
(readParameter @Double "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Double "1.2") `shouldBe` (Right 1.2)
(readParameter @Double "1.2345679") `shouldBe` (Right 1.2345679)
it "should accept JSON integer input" do
(readParameterJSON @Double (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Double (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Double (json "true")) `shouldBe` (Left "Expected Double")
(readParameterJSON @Double (json "\"1\"")) `shouldBe` (Left "Expected Double")
describe "Scientific" do
it "should accept integer input" do
(readParameter @Scientific "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Scientific "1.2") `shouldBe` (Right 1.2)
(readParameter @Scientific "1.2345679") `shouldBe` (Right 1.2345679)
y = "1.0e-1024"
(show <$> readParameter @Scientific x) `shouldBe` (Right y)
it "should accept JSON integer input" do
(readParameterJSON @Scientific (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Scientific (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Scientific (json "true")) `shouldBe` (Left "Expected Scientific")
(readParameterJSON @Scientific (json "\"1\"")) `shouldBe` (Left "Expected Scientific")
describe "Float" do
it "should accept integer input" do
(readParameter @Float "1337") `shouldBe` (Right 1337)
it "should accept floating point input" do
(readParameter @Float "1.2") `shouldBe` (Right 1.2)
(readParameter @Float "1.2345679") `shouldBe` (Right 1.2345679)
it "should accept JSON integer input" do
(readParameterJSON @Float (json "1337")) `shouldBe` (Right 1337)
it "should accept JSON floating point input" do
(readParameterJSON @Float (json "1.2")) `shouldBe` (Right 1.2)
it "should fail on other JSON input " do
(readParameterJSON @Float (json "true")) `shouldBe` (Left "Expected Float")
(readParameterJSON @Float (json "\"1\"")) `shouldBe` (Left "Expected Float")
describe "Point" do
it "should accept integer input" do
(readParameter @Point "1337,1338") `shouldBe` (Right Point { x = 1337, y = 1338 })
it "should accept floating point input" do
(readParameter @Point "1.2,1.3") `shouldBe` (Right Point { x = 1.2, y = 1.3 })
it "should accept JSON integer input" do
(readParameterJSON @Point (json "\"1337,1338\"")) `shouldBe` (Right Point { x = 1337, y = 1338 })
it "should accept JSON floating point input" do
(readParameterJSON @Point (json "\"1.2,1.3\"")) `shouldBe` (Right Point { x = 1.2, y = 1.3 })
it "should fail on other JSON input " do
(readParameterJSON @Point (json "true")) `shouldBe` (Left "Expected Point")
(readParameterJSON @Point (json "\"1\"")) `shouldBe` (Left "has to be two numbers with a comma, e.g. '1,2'")
(readParameterJSON @Point (json "\"1.2\"")) `shouldBe` (Left "has to be two numbers with a comma, e.g. '1,2'")
describe "Polygon" do
it "should accept integer input" do
(readParameter @Polygon "(100,200),(300,400)") `shouldBe`
(Right Polygon { points = [ Point { x = 100, y = 200 }, Point { x = 300, y = 400 } ] })
it "should accept floating-point input" do
(readParameter @Polygon "(100.1,200.2),(300.3,400.4)") `shouldBe`
(Right Polygon { points = [ Point { x = 100.1, y = 200.2 }, Point { x = 300.3, y = 400.4 } ] })
it "should accept JSON integer input" do
(readParameterJSON @Polygon (json "\"(100,200),(300,400)\"")) `shouldBe`
(Right Polygon { points = [ Point { x = 100, y = 200 }, Point { x = 300, y = 400 } ] })
it "should accept JSON floating-point input" do
(readParameterJSON @Polygon (json "\"(100.1,200.2),(300.3,400.4)\"")) `shouldBe`
(Right Polygon { points = [ Point { x = 100.1, y = 200.2 }, Point { x = 300.3, y = 400.4 } ] })
describe "Text" do
it "should handle text input" do
(readParameter @Text "test") `shouldBe` (Right "test")
it "should handle JSON strings" do
(readParameterJSON @Text (json "\"test\"")) `shouldBe` (Right ("test"))
it "should fail on other JSON input" do
(readParameterJSON @Text (json "1")) `shouldBe` (Left ("Expected String"))
describe "CSV" do
it "should handle empty input" do
(readParameter @[Int] "") `shouldBe` (Right [])
it "should handle a single value" do
(readParameter @[Int] "1") `shouldBe` (Right [1])
it "should handle comma separated values" do
(readParameter @[Int] "1,2,3") `shouldBe` (Right [1,2,3])
it "should fail if a single value is invalid" do
(readParameter @[Int] "1,a,3") `shouldBe` (Left "has to be an integer")
it "should handle JSON arrays" do
(readParameterJSON @[Int] (json "[1,2,3]")) `shouldBe` (Right [1,2,3])
it "should fail on JSON input that is not an array" do
(readParameterJSON @[Int] (json "true")) `shouldBe` (Left "Expected Array")
describe "Bool" do
it "should accept 'on' as True" do
(readParameter @Bool "on") `shouldBe` (Right True)
it "should accept 'true' as True" do
(readParameter @Bool "true") `shouldBe` (Right True)
(readParameter @Bool "TruE") `shouldBe` (Right True)
it "should accept everything else as false input" do
(readParameter @Bool "off") `shouldBe` (Right False)
(readParameter @Bool "false") `shouldBe` (Right False)
(readParameter @Bool "invalid") `shouldBe` (Right False)
describe "UUID" do
it "should accept UUID values" do
(readParameter @UUID "6188329c-6bad-47f6-800c-2fd19ce0b2df") `shouldBe` (Right "6188329c-6bad-47f6-800c-2fd19ce0b2df")
(readParameter @UUID "a020ba17-a94e-453f-9414-c54aa30caa54") `shouldBe` (Right "a020ba17-a94e-453f-9414-c54aa30caa54")
it "should fail on invalid values" do
(readParameter @UUID "not a uuid") `shouldBe` (Left "has to be an UUID")
it "should accept JSON UUIDs" do
(readParameterJSON @UUID (json "\"6188329c-6bad-47f6-800c-2fd19ce0b2df\"")) `shouldBe` (Right "6188329c-6bad-47f6-800c-2fd19ce0b2df")
it "should fail on invalid JSON input" do
(readParameterJSON @UUID (json "\"not a uuid\"")) `shouldBe` (Left "Invalid UUID")
(readParameterJSON @UUID (json "false")) `shouldBe` (Left "Expected String with an UUID")
describe "UTCTime" do
it "should accept timestamps" do
(tshow (readParameter @UTCTime "2020-11-08T12:03:35Z")) `shouldBe` ("Right 2020-11-08 12:03:35 UTC")
it "should accept dates" do
(tshow (readParameter @UTCTime "2020-11-08")) `shouldBe` ("Right 2020-11-08 00:00:00 UTC")
it "should fail on invalid inputs" do
(readParameter @UTCTime "not a timestamp") `shouldBe` (Left "has to be a valid date and time, e.g. 2020-11-08T12:03:35Z")
it "should accept JSON strings" do
(tshow (readParameterJSON @UTCTime (json "\"2020-11-08T12:03:35Z\""))) `shouldBe` ("Right 2020-11-08 12:03:35 UTC")
describe "LocalTime" do
it "should accept timestamps" do
(tshow (readParameter @LocalTime "2020-11-08T12:03:35Z")) `shouldBe` ("Right 2020-11-08 12:03:35")
it "should accept dates" do
(tshow (readParameter @LocalTime "2020-11-08")) `shouldBe` ("Right 2020-11-08 00:00:00")
it "should fail on invalid inputs" do
(readParameter @LocalTime "not a timestamp") `shouldBe` (Left "has to be a valid date and time, e.g. 2020-11-08T12:03:35Z")
it "should accept JSON strings" do
(tshow (readParameterJSON @LocalTime (json "\"2020-11-08T12:03:35Z\""))) `shouldBe` ("Right 2020-11-08 12:03:35")
describe "Day" do
it "should accept dates" do
(tshow (readParameter @Day "2020-11-08")) `shouldBe` ("Right 2020-11-08")
it "should fail on invalid inputs" do
(readParameter @Day "not a timestamp") `shouldBe` (Left "has to be a date, e.g. 2020-11-08")
it "should accept JSON strings" do
(tshow (readParameterJSON @Day (json "\"2020-11-08\""))) `shouldBe` ("Right 2020-11-08")
describe "TimeOfDay" do
it "should accept time values" do
(tshow (readParameter @TimeOfDay "12:00:00")) `shouldBe` ("Right 12:00:00")
it "should fail on invalid inputs" do
(readParameter @TimeOfDay "not a time") `shouldBe` (Left "has to be time in the format hh:mm:ss")
(readParameter @TimeOfDay "25:00:00") `shouldBe` (Left "has to be time in the format hh:mm:ss")
it "should accept JSON strings" do
(tshow (readParameterJSON @TimeOfDay (json "\"13:37:00\""))) `shouldBe` ("Right 13:37:00")
describe "Maybe" do
it "should accept values" do
(readParameter @(Maybe Int) "1") `shouldBe` (Right (Just 1))
(readParameter @(Maybe Text) "hello") `shouldBe` (Right (Just "hello"))
it "should handle empty input as Nothing" do
(readParameter @(Maybe Int) "") `shouldBe` (Right Nothing)
(readParameter @(Maybe UUID) "") `shouldBe` (Right Nothing)
(readParameterJSON @(Maybe Bool) "") `shouldBe` (Right Nothing)
it "should handle empty Text as Just" do
(readParameter @(Maybe Text) "") `shouldBe` (Right (Just ""))
(readParameter @(Maybe ByteString) "") `shouldBe` (Right (Just ""))
it "should handle empty Bool as False" do
(readParameter @(Maybe Bool) "") `shouldBe` (Right (Just False))
it "should deal with parser errors" do
(readParameter @(Maybe Int) "not a number") `shouldBe` (Left "has to be an integer")
describe "Enum" do
it "should accept values" do
(readParameter "Yellow") `shouldBe` (Right Yellow)
(readParameter "Red") `shouldBe` (Right Red)
(readParameter "Blue") `shouldBe` (Right Blue)
it "should fail on invalid values" do
(readParameter @Color "black") `shouldBe` (Left "Invalid value")
(readParameter @Color "") `shouldBe` (Left "Invalid value")
it "should deal with JSON" do
(readParameterJSON (json "\"Yellow\"")) `shouldBe` (Right Yellow)
(readParameterJSON (json "\"Red\"")) `shouldBe` (Right Red)
(readParameterJSON (json "\"Blue\"")) `shouldBe` (Right Blue)
it "should fail on invalid JSON" do
(readParameterJSON @Color (json "\"\"")) `shouldBe` (Left "Invalid value")
(readParameterJSON @Color (json "1337")) `shouldBe` (Left "enumParamReaderJSON: Invalid value, expected a string but got something else")
describe "fill" do
it "should fill provided values if valid" do
let ?context = createControllerContextWithParams [("boolField", "on"), ("colorField", "Red")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = True, colorField = Red, meta = def { touchedFields = ["colorField", "boolField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should not touch fields if a field is missing" do
let ?context = createControllerContextWithParams [("colorField", "Red")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Red, meta = def { touchedFields = ["colorField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should add validation errors if the parsing fails" do
let ?context = createControllerContextWithParams [("colorField", "invalid color")]
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Yellow, meta = def { annotations = [("colorField", TextViolation "Invalid value")] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should deal with json values" do
let ?context = createControllerContextWithJson "{\"colorField\":\"Red\",\"boolField\":true}"
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = True, colorField = Red, meta = def { touchedFields = ["colorField", "boolField"] } }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
it "should deal with empty json values" do
let ?context = createControllerContextWithJson "{}"
let emptyRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let expectedRecord = FillRecord { boolField = False, colorField = Yellow, meta = def }
let filledRecord = emptyRecord |> fill @["boolField", "colorField"]
filledRecord `shouldBe` expectedRecord
-- | Build a frozen controller context whose request body carries the given
-- form parameters (key/value pairs, as an urlencoded POST would submit them).
-- All other request-context fields are 'error' thunks: tests only touch the
-- params, so forcing anything else fails loudly.
createControllerContextWithParams params =
    let
        requestBody = FormBody { params, files = [] }
        request = Wai.defaultRequest
        requestContext = RequestContext { request, respond = error "respond", requestBody, vault = error "vault", frameworkConfig = error "frameworkConfig" }
    in FrozenControllerContext { requestContext, customFields = TypeMap.empty }
-- | Build a frozen controller context whose request body is the given JSON
-- text. The text is decoded eagerly via 'json' (so invalid JSON errors here,
-- not inside the test body); unused fields are 'error' thunks as above.
createControllerContextWithJson params =
    let
        requestBody = JSONBody { jsonPayload = Just (json params), rawPayload = cs params }
        request = Wai.defaultRequest
        requestContext = RequestContext { request, respond = error "respond", requestBody, vault = error "vault", frameworkConfig = error "frameworkConfig" }
    in FrozenControllerContext { requestContext, customFields = TypeMap.empty }
-- | Decode a JSON literal for use in tests.
--
-- Partial: the irrefutable-looking @Just@ pattern fails (crashing the test)
-- when the input is not valid JSON — acceptable for test fixtures.
json :: Text -> Aeson.Value
json string =
    let (Just value) :: Maybe Aeson.Value = Aeson.decode (cs string)
    in value
-- | Test enum used to exercise 'enumParamReader' / 'enumParamReaderJSON'
-- in the "Enum" describe block above.
data Color = Yellow | Red | Blue deriving (Enum, Show, Eq)

instance ParamReader Color where
    readParameter = enumParamReader
    readParameterJSON = enumParamReaderJSON

-- Render a Color back to its constructor name for form fields.
instance InputValue Color where inputValue = tshow
-- | Minimal record (with a 'MetaBag') used to test 'fill'.
data FillRecord = FillRecord { boolField :: Bool, colorField :: Color, meta :: MetaBag }
    deriving (Show, Eq)

-- Each setter records the touched field name in the MetaBag, mirroring what
-- IHP's generated model code does, so 'fill' can track touched fields.
instance SetField "boolField" FillRecord Bool where
    setField value record = record { boolField = value } |> modify #meta (modify #touchedFields ("boolField":))

instance SetField "colorField" FillRecord Color where
    setField value record = record { colorField = value } |> modify #meta (modify #touchedFields ("colorField":))
instance SetField "meta" FillRecord MetaBag where
setField value record = record { meta = value } |
bf7ad48e4e54d6830b8e1d72e42ec1bc00624911754fe2912bccdf5af71a15bb | obsidiansystems/directory-contents | Zipper.hs | {-# Language LambdaCase #-}
{-|
Description:
  Cursor-based navigation and modification of 'DirTree's.

This module should be imported qualified due to the very short names it exports.
-}
module System.Directory.Contents.Zipper where
import Control.Applicative
import Data.Map (Map)
import qualified Data.Map as Map
import System.FilePath
import System.Directory.Contents.Types
-- * Zipper
-- | A zipper for a 'DirTree'. As you navigate the tree, this keeps track of where
-- you are (which node is "focused" under your cursor) and how to reach parent, child,
-- and sibling nodes.
data DirZipper a = DirZipper
  { _dirZipper_cursor :: DirTree a
    -- ^ Cursor position: the focused node (and its children)
  , _dirZipper_siblings :: Map FilePath (DirTree a) -- ^ Siblings
  , _dirZipper_elders :: [(DirTree a, Map FilePath (DirTree a))]
    -- ^ Parents and aunts/uncles, in reverse order (i.e., immediate ancestors first)
  }
  deriving (Show, Read, Eq, Ord)
-- | Wrap a 'DirTree' in a zipper positioned at its root, with no siblings
-- and no ancestors. Use 'focused' or 'unzipped' to get a 'DirTree' back out.
zipped :: DirTree a -> DirZipper a
zipped tree = DirZipper
  { _dirZipper_cursor = tree
  , _dirZipper_siblings = Map.empty
  , _dirZipper_elders = []
  }
-- | The node currently under the cursor (together with its children); in
-- other words, where you are in the directory hierarchy.
focused :: DirZipper a -> DirTree a
focused (DirZipper cursor _ _) = cursor
-- | Discard the cursor position and recover the complete 'DirTree' held by
-- the 'DirZipper': walk back to the root, then read off the focused tree.
--
-- > unzipped . zipped == id
--
unzipped :: DirZipper a -> DirTree a
unzipped z = focused (home z)
-- | Move down a level in the directory hierarchy. To move down to a specific child,
-- use 'downTo'.
--
-- Directories and external symlinks descend into their contents, focusing on
-- the child chosen by 'withFirstChild' and pushing the parent plus its
-- siblings onto '_dirZipper_elders' so 'up' can restore them. An internal
-- symlink is resolved by returning to the root ('home') and following its
-- relative target path. Leaf nodes yield 'Nothing'.
down :: DirZipper a -> Maybe (DirZipper a)
down dz = case dz of
  DirZipper p@(DirTree_Dir _ xs) siblings parents ->
    withFirstChild xs $ \firstChild children ->
      DirZipper firstChild children $ (p, siblings) : parents
  DirZipper p@(DirTree_Symlink _ (Symlink_External _ xs)) siblings parents ->
    withFirstChild xs $ \firstChild children ->
      DirZipper firstChild children $ (p, siblings) : parents
  DirZipper (DirTree_Symlink _ (Symlink_Internal _ ref)) _ _ ->
    followRelative ref $ home dz
  _ -> Nothing
-- | Move up a level in the directory hierarchy, back to the parent that you
-- previously moved 'down' through. The (possibly modified) cursor and its
-- siblings are written back into the parent's children before the parent
-- becomes the new cursor. Returns 'Nothing' at the root.
up :: DirZipper a -> Maybe (DirZipper a)
up = \case
  DirZipper c s ((parent, uncles):ps) ->
    Just $ DirZipper (update c s parent) uncles ps
  _ -> Nothing
  where
    -- Reattach the child and its siblings as the contents of the stored
    -- parent node. Node kinds that cannot carry children are returned as-is.
    update :: DirTree a -> Map FilePath (DirTree a) -> DirTree a -> DirTree a
    update child siblings parent = case parent of
      DirTree_Dir f _ -> DirTree_Dir f $ insertSibling child siblings
      DirTree_Symlink f (Symlink_External s _) ->
        DirTree_Symlink f $ Symlink_External s $ insertSibling child siblings
      _ -> parent
-- | Go to the top of the directory hierarchy by repeatedly moving 'up'
-- until there is no parent left.
home :: DirZipper a -> DirZipper a
home z = case up z of
  Nothing -> z
  Just parent -> home parent
-- | Navigation directions for sibling nodes (see 'nextSibling')
data NavSibling = NavLeft | NavRight
-- | Move to the sibling adjacent to the focused node, in the given direction.
-- Siblings are ordered by file name; returns 'Nothing' at either end of the
-- generation.
nextSibling :: NavSibling -> DirZipper a -> Maybe (DirZipper a)
nextSibling nav (DirZipper cursor siblings parents) =
  -- Reinsert the cursor so the ordered lookup sees the whole generation.
  let kids = insertSibling cursor siblings
      next = case nav of
        NavRight -> Map.lookupGT (fileName cursor) kids
        NavLeft -> Map.lookupLT (fileName cursor) kids
  in case next of
    Nothing -> Nothing
    Just (_, sibling) -> Just $
      DirZipper sibling (removeSibling sibling kids) parents
-- | Move to the sibling to the left of the focused node (the lexicographically
-- previous file name); 'Nothing' if the cursor is already leftmost.
left :: DirZipper a -> Maybe (DirZipper a)
left = nextSibling NavLeft
-- | Move to the sibling to the right of the focused node (the lexicographically
-- next file name); 'Nothing' if the cursor is already rightmost.
right :: DirZipper a -> Maybe (DirZipper a)
right = nextSibling NavRight
-- | Focus on the sibling with the given name, if one exists. The previous
-- cursor rejoins the sibling map in the target's place.
toSibling :: FileName -> DirZipper a -> Maybe (DirZipper a)
toSibling name (DirZipper cursor siblings parents) = do
  target <- Map.lookup name siblings
  let rest = insertSibling cursor (removeSibling target siblings)
  pure (DirZipper target rest parents)
-- | Move down in the directory hierarchy to the child with the given name:
-- descend to the first child, and sidestep to the named sibling if that is
-- not already the one we want.
downTo :: FileName -> DirZipper a -> Maybe (DirZipper a)
downTo name z =
  down z >>= \firstChild ->
    if fileName (focused firstChild) == name
      then Just firstChild
      else toSibling name firstChild
-- | Apply a function to the focused node, leaving siblings and ancestors
-- untouched.
mapCursor
  :: (DirTree a -> DirTree a)
  -> DirZipper a
  -> DirZipper a
mapCursor f z = z { _dirZipper_cursor = f (_dirZipper_cursor z) }
-- | Replace the focused node with the given tree, discarding whatever was
-- there before.
replaceCursor
  :: DirTree a
  -> DirZipper a
  -> DirZipper a
replaceCursor new z = mapCursor (const new) z
-- | Add a new node to the focused node's generation and shift focus onto it;
-- the old cursor becomes one of its siblings.
insert
  :: DirTree a
  -> DirZipper a
  -> DirZipper a
insert newNode (DirZipper cursor siblings parents) =
  DirZipper newNode (insertSibling cursor siblings) parents
-- | Remove the focused node. Focus moves to a neighboring sibling when one
-- exists; otherwise it moves to the parent, whose children are emptied
-- (the removed node was the only child). Returns 'Nothing' when trying to
-- remove the root.
remove
  :: DirZipper a
  -> Maybe (DirZipper a)
remove z@(DirZipper cursor _ _) =
  let rm (DirZipper c s p) =
        -- Drop the removed node from the new focus's sibling map.
        DirZipper c (removeSibling cursor s) p
  in case rm <$> (left z <|> right z) of
    Just s -> Just s
    Nothing -> case up z of
      Nothing -> Nothing
      -- No siblings: 'up' reattached the cursor into the parent, so replace
      -- the parent with a childless copy of itself to discard it.
      Just dz -> Just $ flip replaceCursor dz $
        case _dirZipper_cursor dz of
          DirTree_Dir f _ -> DirTree_Dir f Map.empty
          DirTree_Symlink f (Symlink_External s _) ->
            DirTree_Symlink f (Symlink_External s Map.empty)
          x -> x
-- | Try to navigate the provided (possibly relative) path, one component at a
-- time, starting from the current cursor. @"."@ stays put, @".."@ moves 'up',
-- and any other component is looked up first among the children ('downTo'),
-- then among the siblings ('toSibling'), of the current node. Fails with
-- 'Nothing' on the first component that cannot be resolved.
followRelative
  :: FilePath
  -> DirZipper a
  -> Maybe (DirZipper a)
followRelative path dz =
  let follow r z = case r of
        "." -> Just z
        ".." -> up z
        _ -> downTo r z <|> toSibling r z
      go rs z = case rs of
        [] -> Just z
        (r:more) -> go more =<< follow r z
  in go (splitDirectories path) dz
-- | If the focused node is an internal symlink (see 'Symlink'), try to move
-- the cursor to its target; 'Nothing' for any other kind of node.
followLink
  :: DirZipper a
  -> Maybe (DirZipper a)
followLink z@(DirZipper (DirTree_Symlink _ (Symlink_Internal target _)) _ _) =
  followRelative target z
followLink _ = Nothing
| null | https://raw.githubusercontent.com/obsidiansystems/directory-contents/579fc07d336da72a367871f0b45ab9107683327e/src/System/Directory/Contents/Zipper.hs | haskell | # Language LambdaCase #
* Zipper
| A zipper for a 'DirTree'. As you navigate the tree, this keeps track of where
you are (which node is "focused" under your cursor) and how to reach parent, child,
and sibling nodes.
^ Siblings
| Construct a zipper out of a 'DirTree'. Use 'focused' or 'unzipped' to get back
a 'DirTree'
| The currently focused/selected node (and its children). In other words,
where you are in the directory hierarchy.
| Throws away your current cursor information and returns the entire 'DirTree'
> unzipped . zipped == id
| Move down a level in the directory hierarchy. To move down to a specific child,
use 'downTo'.
| Move up a level in the directory hierarchy, back to the parent that you
previously moved 'down' through.
| Go to the top of the directory hierarchy.
| Navigation directions for sibling nodes
| Move to the sibling next to the focused node
| Move to the sibling to the left of the focused node
| Move to the sibling to the right of the focused node
| Go to a particular sibling
| Move down in the directory hierarchy to a particular child
| Modify the focused node
| Replace the focused node
| Add a new sibling to the focused node's generation and focus on it
| Remove the focused node
| Try to navigate the provided (possibly relative) path.
| If the focused node is an internal symlink (see 'Symlink'), try to get
to the target. | |
Description :
Cursor - based navigation and modification of .
This module should be imported qualified due to the very short names it exports .
Description:
Cursor-based navigation and modification of 'DirTree's.
This module should be imported qualified due to the very short names it exports.
-}
module System.Directory.Contents.Zipper where
import Control.Applicative
import Data.Map (Map)
import qualified Data.Map as Map
import System.FilePath
import System.Directory.Contents.Types
data DirZipper a = DirZipper
^ Cursor position
, _dirZipper_elders :: [(DirTree a, Map FilePath (DirTree a))]
^ Parents and aunts / uncles , in reverse order ( i.e. , immediate ancestors first )
}
deriving (Show, Read, Eq, Ord)
zipped :: DirTree a -> DirZipper a
zipped a = DirZipper a Map.empty []
focused :: DirZipper a -> DirTree a
focused = _dirZipper_cursor
contained by the ' DirZipper ' .
unzipped :: DirZipper a -> DirTree a
unzipped = focused . home
down :: DirZipper a -> Maybe (DirZipper a)
down dz = case dz of
DirZipper p@(DirTree_Dir _ xs) siblings parents ->
withFirstChild xs $ \firstChild children ->
DirZipper firstChild children $ (p, siblings) : parents
DirZipper p@(DirTree_Symlink _ (Symlink_External _ xs)) siblings parents ->
withFirstChild xs $ \firstChild children ->
DirZipper firstChild children $ (p, siblings) : parents
DirZipper (DirTree_Symlink _ (Symlink_Internal _ ref)) _ _ ->
followRelative ref $ home dz
_ -> Nothing
up :: DirZipper a -> Maybe (DirZipper a)
up = \case
DirZipper c s ((parent, uncles):ps) ->
Just $ DirZipper (update c s parent) uncles ps
_ -> Nothing
where
update :: DirTree a -> Map FilePath (DirTree a) -> DirTree a -> DirTree a
update child siblings parent = case parent of
DirTree_Dir f _ -> DirTree_Dir f $ insertSibling child siblings
DirTree_Symlink f (Symlink_External s _) ->
DirTree_Symlink f $ Symlink_External s $ insertSibling child siblings
_ -> parent
home :: DirZipper a -> DirZipper a
home dz =
let upmost z = maybe z upmost $ up z
in upmost dz
data NavSibling = NavLeft | NavRight
nextSibling :: NavSibling -> DirZipper a -> Maybe (DirZipper a)
nextSibling nav (DirZipper cursor siblings parents) =
let kids = insertSibling cursor siblings
next = case nav of
NavRight -> Map.lookupGT (fileName cursor) kids
NavLeft -> Map.lookupLT (fileName cursor) kids
in case next of
Nothing -> Nothing
Just (_, sibling) -> Just $
DirZipper sibling (removeSibling sibling kids) parents
left :: DirZipper a -> Maybe (DirZipper a)
left = nextSibling NavLeft
right :: DirZipper a -> Maybe (DirZipper a)
right = nextSibling NavRight
toSibling :: FileName -> DirZipper a -> Maybe (DirZipper a)
toSibling name (DirZipper cursor siblings parents) =
case Map.lookup name siblings of
Nothing -> Nothing
Just sibling ->
let otherSiblings = insertSibling cursor $
removeSibling sibling siblings
in Just $ DirZipper sibling otherSiblings parents
downTo :: FileName -> DirZipper a -> Maybe (DirZipper a)
downTo name z = do
d <- down z
if fileName (focused d) == name
then pure d
else toSibling name d
mapCursor
:: (DirTree a -> DirTree a)
-> DirZipper a
-> DirZipper a
mapCursor f (DirZipper cursor siblings parents) =
DirZipper (f cursor) siblings parents
replaceCursor
:: DirTree a
-> DirZipper a
-> DirZipper a
replaceCursor = mapCursor . const
insert
:: DirTree a
-> DirZipper a
-> DirZipper a
insert d (DirZipper cursor siblings parents) =
DirZipper
d
(insertSibling cursor siblings)
parents
remove
:: DirZipper a
-> Maybe (DirZipper a)
remove z@(DirZipper cursor _ _) =
let rm (DirZipper c s p) =
DirZipper c (removeSibling cursor s) p
in case rm <$> (left z <|> right z) of
Just s -> Just s
Nothing -> case up z of
Nothing -> Nothing
Just dz -> Just $ flip replaceCursor dz $
case _dirZipper_cursor dz of
DirTree_Dir f _ -> DirTree_Dir f Map.empty
DirTree_Symlink f (Symlink_External s _) ->
DirTree_Symlink f (Symlink_External s Map.empty)
x -> x
followRelative
:: FilePath
-> DirZipper a
-> Maybe (DirZipper a)
followRelative path dz =
let follow r z = case r of
"." -> Just z
".." -> up z
_ -> downTo r z <|> toSibling r z
go rs z = case rs of
[] -> Just z
(r:more) -> go more =<< follow r z
in go (splitDirectories path) dz
followLink
:: DirZipper a
-> Maybe (DirZipper a)
followLink z = case z of
DirZipper (DirTree_Symlink _ (Symlink_Internal s _)) _ _ -> followRelative s z
_ -> Nothing
|
4d960a71b8046c3e8bd14eef178147f183438b3b38593debe77278cc65daf3cf | ocsigen/lwt | test_mcast.ml | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details, or visit https://github.com/ocsigen/lwt. *)
open Lwt.Infix
open Test
(* When [true], print send/receive traces to stdout while the tests run. *)
let debug = false
(* Payload sent over the multicast group; receivers compare against it. *)
let hello = Bytes.unsafe_of_string "Hello, World!"
(* Allocate a fresh multicast group address ("225.0.0.N") on each call, so
   concurrently-run test cases never share a group. *)
let mcast_addr =
  let group_counter = ref 0 in
  fun () ->
    group_counter := !group_counter + 1;
    Printf.sprintf "225.0.0.%i" !group_counter
(* Allocate a fresh UDP port (4422, 4423, ...) on each call, so test cases
   never bind the same port. *)
let mcast_port =
  let port_counter = ref 4421 in
  fun () ->
    port_counter := !port_counter + 1;
    !port_counter
(* Receiver side: optionally join [mcast_addr] on [fd], then wait (at most 1s)
   for a datagram and check it equals [hello]. When [join] is false the read
   is expected to raise [Lwt_unix.Timeout], which the caller interprets. *)
let child mcast_addr join fd =
  if join then Lwt_unix.mcast_add_membership fd (Unix.inet_addr_of_string mcast_addr);
  (* 50 bytes is comfortably larger than the [hello] payload. *)
  let buf = Bytes.create 50 in
  Lwt_unix.with_timeout 1. (fun () -> Lwt_unix.read fd buf 0 (Bytes.length buf)) >>= fun n ->
  if debug then
    Printf.printf "\nReceived multicast message %S\n%!" (Bytes.unsafe_to_string (Bytes.sub buf 0 n));
  if Bytes.sub buf 0 n <> hello then
    Lwt.fail (Failure "unexpected multicast message")
  else
    Lwt.return_unit
(* Sender side: configure multicast loopback on [fd] per [set_loop] (loopback
   must be on for a receiver on the same host to see the datagram), then send
   [hello] to [mcast_addr]:[mcast_port]. *)
let parent mcast_addr mcast_port set_loop fd =
  Lwt_unix.mcast_set_loop fd set_loop;
  let addr = Lwt_unix.ADDR_INET (Unix.inet_addr_of_string mcast_addr, mcast_port) in
  Lwt_unix.sendto fd hello 0 (Bytes.length hello) [] addr >>= fun _ ->
  if debug then
    Printf.printf "\nSending multicast message %S to %s:%d\n%!" (Bytes.unsafe_to_string hello)
      mcast_addr mcast_port;
  Lwt.return_unit
(* Build one test case. A fresh group/port pair is allocated per test so cases
   cannot interfere. The message is expected to arrive only when the receiver
   joined the group AND sender loopback is enabled; otherwise the 1s read
   timeout must fire. Environments without working IPv4 multicast surface as
   EINVAL/ENODEV/ENETUNREACH and skip the test. Not run on Windows. *)
let test_mcast name join set_loop =
  test name ~only_if:(fun () -> not Sys.win32) begin fun () ->
    let mcast_addr = mcast_addr () in
    let mcast_port = mcast_port () in
    (* Delivery requires both joining the group and loopback being on. *)
    let should_timeout = not join || not set_loop in
    let fd1 = Lwt_unix.(socket PF_INET SOCK_DGRAM 0) in
    let fd2 = Lwt_unix.(socket PF_INET SOCK_DGRAM 0) in
    let t () =
      Lwt.catch
        (fun () ->
          Lwt_unix.(bind
            fd1 (ADDR_INET (Unix.inet_addr_any, mcast_port))) >>= fun () ->
          let t1 = child mcast_addr join fd1 in
          let t2 = parent mcast_addr mcast_port set_loop fd2 in
          Lwt.join [t1; t2] >>= fun () -> Lwt.return_true
        )
        (function
          | Lwt_unix.Timeout ->
            (* A timeout is the correct outcome when delivery is impossible. *)
            Lwt.return should_timeout
          | Unix.Unix_error (Unix.EINVAL, "send", _)
          | Unix.Unix_error (Unix.ENODEV, "setsockopt", _)
          | Unix.Unix_error (Unix.ENETUNREACH, "send", _) ->
            (* Host without multicast support: skip rather than fail. *)
            Lwt.fail Skip
          | e ->
            Lwt.fail e
        )
    in
    (* Always close both sockets, whatever the outcome. *)
    Lwt.finalize t (fun () -> Lwt.join [Lwt_unix.close fd1; Lwt_unix.close fd2])
  end
(* All four combinations of (receiver joins the group) x (sender loopback). *)
let suite =
  suite "unix_mcast"
    (List.map
       (fun (name, join, set_loop) -> test_mcast name join set_loop)
       [ ("mcast-join-loop", true, true);
         ("mcast-nojoin-loop", false, true);
         ("mcast-join-noloop", true, false);
         ("mcast-nojoin-noloop", false, false) ])
| null | https://raw.githubusercontent.com/ocsigen/lwt/aa9d18a550da444e1a889867dad52a32f162b262/test/unix/test_mcast.ml | ocaml | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
open Lwt.Infix
open Test
let debug = false
let hello = Bytes.unsafe_of_string "Hello, World!"
let mcast_addr =
let last_group = ref 0 in
fun () ->
incr last_group;
Printf.sprintf "225.0.0.%i" !last_group
let mcast_port =
let last_port = ref 4421 in
fun () ->
incr last_port;
!last_port
let child mcast_addr join fd =
if join then Lwt_unix.mcast_add_membership fd (Unix.inet_addr_of_string mcast_addr);
let buf = Bytes.create 50 in
Lwt_unix.with_timeout 1. (fun () -> Lwt_unix.read fd buf 0 (Bytes.length buf)) >>= fun n ->
if debug then
Printf.printf "\nReceived multicast message %S\n%!" (Bytes.unsafe_to_string (Bytes.sub buf 0 n));
if Bytes.sub buf 0 n <> hello then
Lwt.fail (Failure "unexpected multicast message")
else
Lwt.return_unit
let parent mcast_addr mcast_port set_loop fd =
Lwt_unix.mcast_set_loop fd set_loop;
let addr = Lwt_unix.ADDR_INET (Unix.inet_addr_of_string mcast_addr, mcast_port) in
Lwt_unix.sendto fd hello 0 (Bytes.length hello) [] addr >>= fun _ ->
if debug then
Printf.printf "\nSending multicast message %S to %s:%d\n%!" (Bytes.unsafe_to_string hello)
mcast_addr mcast_port;
Lwt.return_unit
let test_mcast name join set_loop =
test name ~only_if:(fun () -> not Sys.win32) begin fun () ->
let mcast_addr = mcast_addr () in
let mcast_port = mcast_port () in
let should_timeout = not join || not set_loop in
let fd1 = Lwt_unix.(socket PF_INET SOCK_DGRAM 0) in
let fd2 = Lwt_unix.(socket PF_INET SOCK_DGRAM 0) in
let t () =
Lwt.catch
(fun () ->
Lwt_unix.(bind
fd1 (ADDR_INET (Unix.inet_addr_any, mcast_port))) >>= fun () ->
let t1 = child mcast_addr join fd1 in
let t2 = parent mcast_addr mcast_port set_loop fd2 in
Lwt.join [t1; t2] >>= fun () -> Lwt.return_true
)
(function
| Lwt_unix.Timeout ->
Lwt.return should_timeout
| Unix.Unix_error (Unix.EINVAL, "send", _)
| Unix.Unix_error (Unix.ENODEV, "setsockopt", _)
| Unix.Unix_error (Unix.ENETUNREACH, "send", _) ->
Lwt.fail Skip
| e ->
Lwt.fail e
)
in
Lwt.finalize t (fun () -> Lwt.join [Lwt_unix.close fd1; Lwt_unix.close fd2])
end
let suite =
suite "unix_mcast"
[
test_mcast "mcast-join-loop" true true;
test_mcast "mcast-nojoin-loop" false true;
test_mcast "mcast-join-noloop" true false;
test_mcast "mcast-nojoin-noloop" false false;
]
| |
5d84a2f828c13b52bb38f7f236b62331c28f1002255cad7543ddffad76593278 | grin-compiler/ghc-wpc-sample-programs | Conversion.hs | # LANGUAGE NondecreasingIndentation #
module Agda.TypeChecking.Conversion where
import Control.Arrow (first, second)
import Control.Monad
import Control.Monad.Fail (MonadFail)
import Data.Function
import qualified Data.List as List
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.IntSet as IntSet
import Agda.Syntax.Abstract.Views (isSet)
import Agda.Syntax.Common
import Agda.Syntax.Internal
import Agda.Syntax.Internal.MetaVars
import Agda.Syntax.Translation.InternalToAbstract (reify)
import Agda.TypeChecking.Monad
import Agda.TypeChecking.Monad.Builtin
import Agda.TypeChecking.MetaVars
import Agda.TypeChecking.MetaVars.Occurs (killArgs,PruneResult(..),rigidVarsNotContainedIn)
import Agda.TypeChecking.Names
import Agda.TypeChecking.Reduce
import Agda.TypeChecking.Substitute
import qualified Agda.TypeChecking.SyntacticEquality as SynEq
import Agda.TypeChecking.Telescope
import Agda.TypeChecking.Constraints
import Agda.TypeChecking.Conversion.Pure (pureCompareAs)
import {-# SOURCE #-} Agda.TypeChecking.CheckInternal (infer)
import Agda.TypeChecking.Forcing (isForced, nextIsForced)
import Agda.TypeChecking.Free
import Agda.TypeChecking.Datatypes (getConType, getFullyAppliedConType)
import Agda.TypeChecking.Records
import Agda.TypeChecking.Pretty
import Agda.TypeChecking.Injectivity
import Agda.TypeChecking.Polarity
import Agda.TypeChecking.SizedTypes
import Agda.TypeChecking.Level
import Agda.TypeChecking.Implicit (implicitArgs)
import Agda.TypeChecking.Irrelevance
import Agda.TypeChecking.Primitive
import Agda.TypeChecking.Warnings (MonadWarning)
import Agda.Interaction.Options
import Agda.Utils.Except ( MonadError(catchError, throwError) )
import Agda.Utils.Functor
import Agda.Utils.Monad
import Agda.Utils.Maybe
import Agda.Utils.Permutation
import Agda.Utils.Size
import Agda.Utils.Tuple
import Agda.Utils.WithDefault
import Agda.Utils.Impossible
type MonadConversion m =
( MonadReduce m
, MonadAddContext m
, MonadConstraint m
, MonadMetaSolver m
, MonadError TCErr m
, MonadWarning m
, MonadDebug m
, MonadStatistics m
, MonadFresh ProblemId m
, MonadFresh Int m
, HasBuiltins m
, HasConstInfo m
, HasOptions m
, MonadFail m
)
-- | Try whether a computation runs without errors or new constraints
-- (may create new metas, though).
-- Restores state upon failure.
tryConversion
:: (MonadConstraint m, MonadWarning m, MonadError TCErr m, MonadFresh ProblemId m)
=> m () -> m Bool
tryConversion = isJust <.> tryConversion'
-- | Try whether a computation runs without errors or new constraints
-- (may create new metas, though).
-- Return 'Just' the result upon success.
-- Return 'Nothing' and restore state upon failure.
tryConversion'
:: (MonadConstraint m, MonadWarning m, MonadError TCErr m, MonadFresh ProblemId m)
=> m a -> m (Maybe a)
tryConversion' m = tryMaybe $ noConstraints m
-- | Check if to lists of arguments are the same (and all variables).
-- Precondition: the lists have the same length.
sameVars :: Elims -> Elims -> Bool
sameVars xs ys = and $ zipWith same xs ys
where
same (Apply (Arg _ (Var n []))) (Apply (Arg _ (Var m []))) = n == m
same _ _ = False
| @intersectVars us vs@ checks whether all relevant elements in @us@ and @vs@
are variables , and if yes , returns a prune list which says @True@ for
-- arguments which are different and can be pruned.
intersectVars :: Elims -> Elims -> Maybe [Bool]
intersectVars = zipWithM areVars where
-- ignore irrelevant args
areVars (Apply u) v | isIrrelevant u = Just False -- do not prune
areVars (Apply (Arg _ (Var n []))) (Apply (Arg _ (Var m []))) = Just $ n /= m -- prune different vars
areVars _ _ = Nothing
equalTerm :: MonadConversion m => Type -> Term -> Term -> m ()
equalTerm = compareTerm CmpEq
equalAtom :: MonadConversion m => CompareAs -> Term -> Term -> m ()
equalAtom = compareAtom CmpEq
equalType :: MonadConversion m => Type -> Type -> m ()
equalType = compareType CmpEq
{- Comparing in irrelevant context always succeeds.
However, we might want to dig for solutions of irrelevant metas.
To this end, we can just ignore errors during conversion checking.
-}
convError : : MonadTCM tcm = > TypeError - > tcm a
-- | Ignore errors in irrelevant context.
convError :: TypeError -> TCM ()
convError err = ifM ((==) Irrelevant <$> asksTC getRelevance) (return ()) $ typeError err
-- | Type directed equality on values.
--
compareTerm :: forall m. MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareTerm cmp a u v = compareAs cmp (AsTermsOf a) u v
-- | Type directed equality on terms or types.
compareAs :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
If one term is a meta , try to instantiate right away . This avoids unnecessary unfolding .
, 2012 - 02 - 14 : This is UNSOUND for subtyping !
compareAs cmp a u v = do
reportSDoc "tc.conv.term" 10 $ sep $
[ "compareTerm"
, nest 2 $ prettyTCM u <+> prettyTCM cmp <+> prettyTCM v
, nest 2 $ prettyTCM a
]
-- Check syntactic equality. This actually saves us quite a bit of work.
((u, v), equal) <- SynEq.checkSyntacticEquality u v
-- OLD CODE, traverses the *full* terms u v at each step, even if they
are different somewhere . Leads to infeasibility in issue 854 .
-- (u, v) <- instantiateFull (u, v)
-- let equal = u == v
if equal then verboseS "profile.sharing" 20 $ tick "equal terms" else do
verboseS "profile.sharing" 20 $ tick "unequal terms"
reportSDoc "tc.conv.term" 15 $ sep $
[ "compareTerm (not syntactically equal)"
, nest 2 $ prettyTCM u <+> prettyTCM cmp <+> prettyTCM v
, nest 2 $ prettyTCM a
]
If we are at type , we can not short - cut comparison
-- against metas by assignment.
, 2014 - 04 - 12 : this looks incomplete .
-- It seems to assume we are never comparing
at function types into Size .
let fallback = compareAs' cmp a u v
unlessSubtyping :: m () -> m ()
unlessSubtyping cont =
if cmp == CmpEq then cont else do
, 2014 - 04 - 12 do not short cut if type is blocked .
ifBlocked a (\ _ _ -> fallback) {-else-} $ \ _ a -> do
-- do not short circuit size comparison!
caseMaybeM (isSizeType a) cont (\ _ -> fallback)
dir = fromCmp cmp
rid = flipCmp dir -- The reverse direction. Bad name, I know.
case (u, v) of
(MetaV x us, MetaV y vs)
| x /= y -> unlessSubtyping $ solve1 `orelse` solve2 `orelse` fallback
| otherwise -> fallback
where
(solve1, solve2) | x > y = (assign dir x us v, assign rid y vs u)
| otherwise = (assign rid y vs u, assign dir x us v)
(MetaV x us, _) -> unlessSubtyping $ assign dir x us v `orelse` fallback
(_, MetaV y vs) -> unlessSubtyping $ assign rid y vs u `orelse` fallback
_ -> fallback
where
assign :: CompareDirection -> MetaId -> Elims -> Term -> m ()
assign dir x es v = do
, 2013 - 10 - 19 can only solve if no projections
reportSDoc "tc.conv.term.shortcut" 20 $ sep
[ "attempting shortcut"
, nest 2 $ prettyTCM (MetaV x es) <+> ":=" <+> prettyTCM v
]
whenM (isInstantiatedMeta x) patternViolation
assignE dir x es v a $ compareAsDir dir a
reportSDoc "tc.conv.term.shortcut" 50 $
"shortcut successful" $$ nest 2 ("result:" <+> (pretty =<< instantiate (MetaV x es)))
Should be ok with _ but is much safer since we do n't
-- rethrow errors.
orelse :: m () -> m () -> m ()
orelse m h = catchError m (\_ -> h)
-- | Try to assign meta. If meta is projected, try to eta-expand
-- and run conversion check again.
assignE :: (MonadConversion m)
=> CompareDirection -> MetaId -> Elims -> Term -> CompareAs -> (Term -> Term -> m ()) -> m ()
assignE dir x es v a comp = assignWrapper dir x es v $ do
case allApplyElims es of
Just vs -> assignV dir x vs v a
Nothing -> do
reportSDoc "tc.conv.assign" 30 $ sep
[ "assigning to projected meta "
, prettyTCM x <+> sep (map prettyTCM es) <+> text (":" ++ show dir) <+> prettyTCM v
]
etaExpandMeta [Records] x
res <- isInstantiatedMeta' x
case res of
Just u -> do
reportSDoc "tc.conv.assign" 30 $ sep
[ "seems like eta expansion instantiated meta "
, prettyTCM x <+> text (":" ++ show dir) <+> prettyTCM u
]
let w = u `applyE` es
comp w v
Nothing -> do
reportSLn "tc.conv.assign" 30 "eta expansion did not instantiate meta"
patternViolation -- nothing happened, give up
compareAsDir :: MonadConversion m => CompareDirection -> CompareAs -> Term -> Term -> m ()
compareAsDir dir a = dirToCmp (`compareAs'` a) dir
compareAs' :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
compareAs' cmp tt m n = case tt of
AsTermsOf a -> compareTerm' cmp a m n
AsSizes -> compareSizes cmp m n
AsTypes -> compareAtom cmp AsTypes m n
compareTerm' :: forall m. MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareTerm' cmp a m n =
verboseBracket "tc.conv.term" 20 "compareTerm" $ do
a' <- reduce a
(catchConstraint (ValueCmp cmp (AsTermsOf a') m n) :: m () -> m ()) $ do
reportSDoc "tc.conv.term" 30 $ fsep
[ "compareTerm", prettyTCM m, prettyTCM cmp, prettyTCM n, ":", prettyTCM a' ]
propIrr <- isPropEnabled
isSize <- isJust <$> isSizeType a'
s <- reduce $ getSort a'
mlvl <- getBuiltin' builtinLevel
reportSDoc "tc.conv.level" 60 $ nest 2 $ sep
[ "a' =" <+> pretty a'
, "mlvl =" <+> pretty mlvl
, text $ "(Just (unEl a') == mlvl) = " ++ show (Just (unEl a') == mlvl)
]
case s of
Prop{} | propIrr -> compareIrrelevant a' m n
_ | isSize -> compareSizes cmp m n
_ -> case unEl a' of
a | Just a == mlvl -> do
a <- levelView m
b <- levelView n
equalLevel a b
a@Pi{} -> equalFun s a m n
Lam _ _ -> __IMPOSSIBLE__
Def r es -> do
isrec <- isEtaRecord r
if isrec
then do
sig <- getSignature
let ps = fromMaybe __IMPOSSIBLE__ $ allApplyElims es
, 2010 - 10 - 11 : allowing neutrals to be blocked things does not seem
to change Agda 's behavior
-- isNeutral Blocked{} = False
isNeutral (NotBlocked _ Con{}) = return False
, 2013 - 09 - 18 / 2015 - 06 - 29 : a Def by copatterns is
-- not neutral if it is blocked (there can be missing projections
-- to trigger a reduction.
, 2014 - 12 - 06 optimize this using r ! !
not <$> usesCopatterns q -- a def by copattern can reduce if projected
isNeutral _ = return True
isMeta (NotBlocked _ MetaV{}) = True
isMeta _ = False
reportSDoc "tc.conv.term" 30 $ prettyTCM a <+> "is eta record type"
m <- reduceB m
mNeutral <- isNeutral m
n <- reduceB n
nNeutral <- isNeutral n
case (m, n) of
_ | isMeta m || isMeta n ->
compareAtom cmp (AsTermsOf a') (ignoreBlocking m) (ignoreBlocking n)
_ | mNeutral && nNeutral -> do
Andreas 2011 - 03 - 23 : ( fixing issue 396 )
-- if we are dealing with a singleton record,
-- we can succeed immediately
isSing <- isSingletonRecordModuloRelevance r ps
case isSing of
Right True -> return ()
do not eta - expand if comparing two neutrals
_ -> compareAtom cmp (AsTermsOf a') (ignoreBlocking m) (ignoreBlocking n)
_ -> do
(tel, m') <- etaExpandRecord r ps $ ignoreBlocking m
(_ , n') <- etaExpandRecord r ps $ ignoreBlocking n
-- No subtyping on record terms
c <- getRecordConstructor r
Record constructors are covariant ( see test / succeed / CovariantConstructors ) .
compareArgs (repeat $ polFromCmp cmp) [] (telePi_ tel __DUMMY_TYPE__) (Con c ConOSystem []) m' n'
else (do pathview <- pathView a'
equalPath pathview a' m n)
_ -> compareAtom cmp (AsTermsOf a') m n
where
-- equality at function type (accounts for eta)
equalFun :: (MonadConversion m) => Sort -> Term -> Term -> Term -> m ()
equalFun s a@(Pi dom b) m n | domFinite dom = do
mp <- fmap getPrimName <$> getBuiltin' builtinIsOne
case unEl $ unDom dom of
Def q [Apply phi]
| Just q == mp -> compareTermOnFace cmp (unArg phi) (El s (Pi (dom {domFinite = False}) b)) m n
_ -> equalFun s (Pi (dom{domFinite = False}) b) m n
equalFun _ (Pi dom@Dom{domInfo = info} b) m n | not $ domFinite dom = do
let name = suggests [ Suggestion b , Suggestion m , Suggestion n ]
addContext (name, dom) $ compareTerm cmp (absBody b) m' n'
where
(m',n') = raise 1 (m,n) `apply` [Arg info $ var 0]
equalFun _ _ _ _ = __IMPOSSIBLE__
equalPath :: (MonadConversion m) => PathView -> Type -> Term -> Term -> m ()
equalPath (PathType s _ l a x y) _ m n = do
let name = "i" :: String
interval <- el primInterval
let (m',n') = raise 1 (m, n) `applyE` [IApply (raise 1 $ unArg x) (raise 1 $ unArg y) (var 0)]
addContext (name, defaultDom interval) $ compareTerm cmp (El (raise 1 s) $ (raise 1 $ unArg a) `apply` [argN $ var 0]) m' n'
equalPath OType{} a' m n = cmpDef a' m n
cmpDef a'@(El s ty) m n = do
mI <- getBuiltinName' builtinInterval
mIsOne <- getBuiltinName' builtinIsOne
mGlue <- getPrimitiveName' builtinGlue
mHComp <- getPrimitiveName' builtinHComp
mSub <- getBuiltinName' builtinSub
case ty of
Def q es | Just q == mIsOne -> return ()
Def q es | Just q == mGlue, Just args@(l:_:a:phi:_) <- allApplyElims es -> do
ty <- el' (pure $ unArg l) (pure $ unArg a)
unglue <- prim_unglue
let mkUnglue m = apply unglue $ map (setHiding Hidden) args ++ [argN m]
reportSDoc "conv.glue" 20 $ prettyTCM (ty,mkUnglue m,mkUnglue n)
compareTermOnFace cmp (unArg phi) ty m n
compareTerm cmp ty (mkUnglue m) (mkUnglue n)
Def q es | Just q == mHComp, Just (sl:s:args@[phi,u,u0]) <- allApplyElims es
, Sort (Type lvl) <- unArg s -> do
let l = Level lvl
ty <- el' (pure $ l) (pure $ unArg u0)
unglueU <- prim_unglueU
subIn <- primSubIn
let bA = subIn `apply` [sl,s,phi,u0]
let mkUnglue m = apply unglueU $ [argH l] ++ map (setHiding Hidden) [phi,u] ++ [argH bA,argN m]
reportSDoc "conv.hcompU" 20 $ prettyTCM (ty,mkUnglue m,mkUnglue n)
compareTermOnFace cmp (unArg phi) ty m n
compareTerm cmp ty (mkUnglue m) (mkUnglue n)
Def q es | Just q == mSub, Just args@(l:a:_) <- allApplyElims es -> do
ty <- el' (pure $ unArg l) (pure $ unArg a)
out <- primSubOut
let mkOut m = apply out $ map (setHiding Hidden) args ++ [argN m]
compareTerm cmp ty (mkOut m) (mkOut n)
Def q [] | Just q == mI -> compareInterval cmp a' m n
_ -> compareAtom cmp (AsTermsOf a') m n
| @compareTel t1 t2 cmp tel1 tel1@ checks whether pointwise
-- @tel1 \`cmp\` tel2@ and complains that @t2 \`cmp\` t1@ failed if
-- not.
compareTel :: MonadConversion m => Type -> Type ->
Comparison -> Telescope -> Telescope -> m ()
compareTel t1 t2 cmp tel1 tel2 =
verboseBracket "tc.conv.tel" 20 "compareTel" $
catchConstraint (TelCmp t1 t2 cmp tel1 tel2) $ case (tel1, tel2) of
(EmptyTel, EmptyTel) -> return ()
(EmptyTel, _) -> bad
(_, EmptyTel) -> bad
(ExtendTel dom1{-@(Dom i1 a1)-} tel1, ExtendTel dom2{-@(Dom i2 a2)-} tel2) -> do
compareDom cmp dom1 dom2 tel1 tel2 bad bad bad bad $
compareTel t1 t2 cmp (absBody tel1) (absBody tel2)
where
, 2011 - 05 - 10 better report message about types
bad = typeError $ UnequalTypes cmp t2 t1
-- switch t2 and t1 because of contravariance!
compareAtomDir :: MonadConversion m => CompareDirection -> CompareAs -> Term -> Term -> m ()
compareAtomDir dir a = dirToCmp (`compareAtom` a) dir
-- | Compute the head type of an elimination. For projection-like functions
-- this requires inferring the type of the principal argument.
computeElimHeadType :: MonadConversion m => QName -> Elims -> Elims -> m Type
computeElimHeadType f es es' = do
def <- getConstInfo f
To compute the type @a@ of a projection - like @f@ ,
we have to infer the type of its first argument .
if projectionArgs (theDef def) <= 0 then return $ defType def else do
Find an first argument to @f@.
let arg = case (es, es') of
(Apply arg : _, _) -> arg
(_, Apply arg : _) -> arg
_ -> __IMPOSSIBLE__
-- Infer its type.
reportSDoc "tc.conv.infer" 30 $
"inferring type of internal arg: " <+> prettyTCM arg
targ <- infer $ unArg arg
reportSDoc "tc.conv.infer" 30 $
"inferred type: " <+> prettyTCM targ
-- getDefType wants the argument type reduced.
, 2016 - 02 - 09 , Issue 1825 : The type of arg might be
-- a meta-variable, e.g. in interactive development.
-- In this case, we postpone.
fromMaybeM patternViolation $ getDefType f =<< reduce targ
-- | Syntax directed equality on atomic values
--
compareAtom :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
compareAtom cmp t m n =
verboseBracket "tc.conv.atom" 20 "compareAtom" $
if a PatternErr is thrown , rebuild constraint !
(catchConstraint (ValueCmp cmp t m n) :: m () -> m ()) $ do
reportSDoc "tc.conv.atom" 50 $
"compareAtom" <+> fsep [ prettyTCM m <+> prettyTCM cmp
, prettyTCM n
, prettyTCM t
]
: what happens if I cut out the eta expansion here ?
Answer : Triggers issue 245 , does not resolve 348
(mb',nb') <- ifM (asksTC envCompareBlocked) ((notBlocked -*- notBlocked) <$> reduce (m,n)) $ do
mb' <- etaExpandBlocked =<< reduceB m
nb' <- etaExpandBlocked =<< reduceB n
return (mb', nb')
-- constructorForm changes literal to constructors
-- only needed if the other side is not a literal
(mb'', nb'') <- case (ignoreBlocking mb', ignoreBlocking nb') of
(Lit _, Lit _) -> return (mb', nb')
_ -> (,) <$> traverse constructorForm mb'
<*> traverse constructorForm nb'
mb <- traverse unLevel mb''
nb <- traverse unLevel nb''
cmpBlocked <- viewTC eCompareBlocked
let m = ignoreBlocking mb
n = ignoreBlocking nb
postpone = addConstraint $ ValueCmp cmp t m n
Jesper , 2019 - 05 - 14 , Issue # 3776 : If the type is blocked ,
-- the comparison could be solved by eta-expansion so we
-- cannot fail hard
postponeIfBlockedAs :: CompareAs -> (Blocked CompareAs -> m ()) -> m ()
postponeIfBlockedAs AsTypes f = f $ NotBlocked ReallyNotBlocked AsTypes
postponeIfBlockedAs AsSizes f = f $ NotBlocked ReallyNotBlocked AsSizes
postponeIfBlockedAs (AsTermsOf t) f = ifBlocked t
(\m t -> (f $ Blocked m $ AsTermsOf t) `catchError` \case
TypeError{} -> postpone
err -> throwError err)
(\nb t -> f $ NotBlocked nb $ AsTermsOf t)
checkDefinitionalEquality = unlessM (pureCompareAs CmpEq t m n) postpone
dir = fromCmp cmp
rid = flipCmp dir -- The reverse direction. Bad name, I know.
assign dir x es v = assignE dir x es v t $ compareAtomDir dir t
reportSDoc "tc.conv.atom" 30 $
"compareAtom" <+> fsep [ prettyTCM mb <+> prettyTCM cmp
, prettyTCM nb
, prettyTCM t
]
reportSDoc "tc.conv.atom" 80 $
"compareAtom" <+> fsep [ (text . show) mb <+> prettyTCM cmp
, (text . show) nb
, ":" <+> (text . show) t ]
case (mb, nb) of
equate two metas x and y. if y is the younger meta ,
try first y : = x and then x : = y
(NotBlocked _ (MetaV x xArgs), NotBlocked _ (MetaV y yArgs))
| x == y , cmpBlocked -> do
a <- metaType x
compareElims [] [] a (MetaV x []) xArgs yArgs
| x == y ->
case intersectVars xArgs yArgs of
-- all relevant arguments are variables
Just kills -> do
-- kills is a list with 'True' for each different var
killResult <- killArgs kills x
case killResult of
NothingToPrune -> return ()
PrunedEverything -> return ()
PrunedNothing -> postpone
PrunedSomething -> postpone
-- not all relevant arguments are variables
Nothing -> checkDefinitionalEquality -- Check definitional equality on meta-variables
-- (same as for blocked terms)
| otherwise -> do
[p1, p2] <- mapM getMetaPriority [x,y]
First try the one with the highest priority . If that does n't
-- work, try the low priority one.
let (solve1, solve2)
| (p1, x) > (p2, y) = (l1, r2)
| otherwise = (r1, l2)
where l1 = assign dir x xArgs n
r1 = assign rid y yArgs m
Careful : the first attempt might prune the low
priority meta ! ( Issue # 2978 )
l2 = ifM (isInstantiatedMeta x) (compareAsDir dir t m n) l1
r2 = ifM (isInstantiatedMeta y) (compareAsDir rid t n m) r1
catchPatternErr solve2 solve1
-- one side a meta, the other an unblocked term
(NotBlocked _ (MetaV x es), _) -> assign dir x es n
(_, NotBlocked _ (MetaV x es)) -> assign rid x es m
(Blocked{}, Blocked{}) -> checkDefinitionalEquality
The blocked term goes first
(_, Blocked{}) -> useInjectivity (flipCmp $ fromCmp cmp) t n m
_ -> postponeIfBlockedAs t $ \bt -> do
-- , 2013 - 10 - 20 put projection - like function
-- into the spine , to make work .
-- -- 'False' means: leave (Def f []) unchanged even for
-- -- proj-like funs.
-- m <- elimView False m
-- n <- elimView False n
, 2015 - 07 - 01 , actually , do n't put them into the spine .
-- Polarity cannot be communicated properly if projection-like
-- functions are post-fix.
case (m, n) of
(Pi{}, Pi{}) -> equalFun m n
(Sort s1, Sort s2) ->
ifM (optCumulativity <$> pragmaOptions)
(compareSort cmp s1 s2)
(equalSort s1 s2)
(Lit l1, Lit l2) | l1 == l2 -> return ()
(Var i es, Var i' es') | i == i' -> do
a <- typeOfBV i
-- Variables are invariant in their arguments
compareElims [] [] a (var i) es es'
-- The case of definition application:
(Def f es, Def f' es') -> do
1 . All absurd lambdas are equal .
unlessM (bothAbsurd f f') $ do
2 . If the heads are unequal , the only chance is subtyping between SIZE and SIZELT .
if f /= f' then trySizeUniv cmp t m n f es f' es' else do
3 . If the heads are equal :
3a . If there are no arguments , we are done .
unless (null es && null es') $ do
3b . If some cubical magic kicks in , we are done .
unlessM (compareEtaPrims f es es') $ do
3c . Oh no , we actually have to work and compare the eliminations !
a <- computeElimHeadType f es es'
-- The polarity vector of projection-like functions
-- does not include the parameters.
pol <- getPolarity' cmp f
compareElims pol [] a (Def f []) es es'
-- Due to eta-expansion, these constructors are fully applied.
(Con x ci xArgs, Con y _ yArgs)
| x == y -> do
-- Get the type of the constructor instantiated to the datatype parameters.
a' <- case t of
AsTermsOf a -> conType x a
AsSizes -> __IMPOSSIBLE__
AsTypes -> __IMPOSSIBLE__
forcedArgs <- getForcedArgs $ conName x
-- Constructors are covariant in their arguments
( see test / succeed / CovariantConstructors ) .
compareElims (repeat $ polFromCmp cmp) forcedArgs a' (Con x ci []) xArgs yArgs
_ -> typeError $ UnequalTerms cmp m n $ ignoreBlocking bt
where
-- returns True in case we handled the comparison already.
compareEtaPrims :: MonadConversion m => QName -> Elims -> Elims -> m Bool
compareEtaPrims q es es' = do
munglue <- getPrimitiveName' builtin_unglue
munglueU <- getPrimitiveName' builtin_unglueU
msubout <- getPrimitiveName' builtinSubOut
case () of
_ | Just q == munglue -> compareUnglueApp q es es'
_ | Just q == munglueU -> compareUnglueUApp q es es'
_ | Just q == msubout -> compareSubApp q es es'
_ -> return False
compareSubApp q es es' = do
let (as,bs) = splitAt 5 es; (as',bs') = splitAt 5 es'
case (allApplyElims as, allApplyElims as') of
(Just [a,bA,phi,u,x], Just [a',bA',phi',u',x']) -> do
tSub <- primSub
, 28 - 07 - 16 :
-- comparing the types is most probably wasteful,
-- since b and b' should be neutral terms, but it's a
-- precondition for the compareAtom call to make
-- sense.
equalType (El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA,phi,u])
(El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA',phi',u'])
compareAtom cmp (AsTermsOf $ El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA,phi,u])
(unArg x) (unArg x')
compareElims [] [] (El (tmSort (unArg a)) (unArg bA)) (Def q as) bs bs'
return True
_ -> return False
compareUnglueApp q es es' = do
let (as,bs) = splitAt 7 es; (as',bs') = splitAt 7 es'
case (allApplyElims as, allApplyElims as') of
(Just [la,lb,bA,phi,bT,e,b], Just [la',lb',bA',phi',bT',e',b']) -> do
tGlue <- getPrimitiveTerm builtinGlue
, 28 - 07 - 16 :
-- comparing the types is most probably wasteful,
-- since b and b' should be neutral terms, but it's a
-- precondition for the compareAtom call to make
-- sense.
equalType ( El ( tmSort ( unArg lb ) ) $ apply tGlue $ [ la , lb ] + + map ( setHiding NotHidden ) [ bA , phi , bT , e ] )
( El ( tmSort ( unArg lb ' ) ) $ apply tGlue $ [ la',lb ' ] + + map ( setHiding NotHidden ) [ bA',phi',bT',e ' ] )
compareAtom cmp (AsTermsOf $ El (tmSort (unArg lb)) $ apply tGlue $ [la,lb] ++ map (setHiding NotHidden) [bA,phi,bT,e])
(unArg b) (unArg b')
compareElims [] [] (El (tmSort (unArg la)) (unArg bA)) (Def q as) bs bs'
return True
_ -> return False
compareUnglueUApp :: MonadConversion m => QName -> Elims -> Elims -> m Bool
compareUnglueUApp q es es' = do
let (as,bs) = splitAt 5 es; (as',bs') = splitAt 5 es'
case (allApplyElims as, allApplyElims as') of
(Just [la,phi,bT,bAS,b], Just [la',phi',bT',bA',b']) -> do
tHComp <- primHComp
tLSuc <- primLevelSuc
tSubOut <- primSubOut
iz <- primIZero
let lsuc t = tLSuc `apply` [argN t]
s = tmSort $ unArg la
sucla = lsuc <$> la
bA <- runNamesT [] $ do
[la,phi,bT,bAS] <- mapM (open . unArg) [la,phi,bT,bAS]
(pure tSubOut <#> (pure tLSuc <@> la) <#> (Sort . tmSort <$> la) <#> phi <#> (bT <@> primIZero) <@> bAS)
compareAtom cmp (AsTermsOf $ El (tmSort . unArg $ sucla) $ apply tHComp $ [sucla, argH (Sort s), phi] ++ [argH (unArg bT), argH bA])
(unArg b) (unArg b')
compareElims [] [] (El s bA) (Def q as) bs bs'
return True
_ -> return False
, 2013 - 05 - 15 due to new postponement strategy , type can now be blocked
conType c t = ifBlocked t (\ _ _ -> patternViolation) $ \ _ t -> do
let impossible = do
reportSDoc "impossible" 10 $
"expected data/record type, found " <+> prettyTCM t
reportSDoc "impossible" 70 $ nest 2 $ "raw =" <+> pretty t
-- __IMPOSSIBLE__
, 2013 - 10 - 20 : in case termination checking fails
-- we might get some unreduced types here.
In issue 921 , this happens during the final attempt
-- to solve left-over constraints.
-- Thus, instead of crashing, just give up gracefully.
patternViolation
maybe impossible (return . snd) =<< getFullyAppliedConType c t
equalFun t1 t2 = case (t1, t2) of
(Pi dom1 b1, Pi dom2 b2) -> do
verboseBracket "tc.conv.fun" 15 "compare function types" $ do
reportSDoc "tc.conv.fun" 20 $ nest 2 $ vcat
[ "t1 =" <+> prettyTCM t1
, "t2 =" <+> prettyTCM t2
]
compareDom cmp dom2 dom1 b1 b2 errH errR errQ errC $
compareType cmp (absBody b1) (absBody b2)
where
errH = typeError $ UnequalHiding t1 t2
errR = typeError $ UnequalRelevance cmp t1 t2
errQ = typeError $ UnequalQuantity cmp t1 t2
errC = typeError $ UnequalCohesion cmp t1 t2
_ -> __IMPOSSIBLE__
-- | Check whether @a1 `cmp` a2@ and continue in context extended by @a1@.
compareDom :: (MonadConversion m , Free c)
=> Comparison -- ^ @cmp@ The comparison direction
^ @a1@ The smaller domain .
-> Dom Type -- ^ @a2@ The other domain.
-> Abs b -- ^ @b1@ The smaller codomain.
-> Abs c -- ^ @b2@ The bigger codomain.
-> m () -- ^ Continuation if mismatch in 'Hiding'.
-> m () -- ^ Continuation if mismatch in 'Relevance'.
-> m () -- ^ Continuation if mismatch in 'Quantity'.
-> m () -- ^ Continuation if mismatch in 'Cohesion'.
-> m () -- ^ Continuation if comparison is successful.
-> m ()
compareDom cmp0
dom1@(Dom{domInfo = i1, unDom = a1})
dom2@(Dom{domInfo = i2, unDom = a2})
b1 b2 errH errR errQ errC cont = do
hasSubtyping <- collapseDefault . optSubtyping <$> pragmaOptions
let cmp = if hasSubtyping then cmp0 else CmpEq
if | not $ sameHiding dom1 dom2 -> errH
| not $ compareRelevance cmp (getRelevance dom1) (getRelevance dom2) -> errR
| not $ compareQuantity cmp (getQuantity dom1) (getQuantity dom2) -> errQ
| not $ compareCohesion cmp (getCohesion dom1) (getCohesion dom2) -> errC
| otherwise -> do
let r = max (getRelevance dom1) (getRelevance dom2)
-- take "most irrelevant"
dependent = (r /= Irrelevant) && isBinderUsed b2
pid <- newProblem_ $ compareType cmp0 a1 a2
dom <- if dependent
then (\ a -> dom1 {unDom = a}) <$> blockTypeOnProblem a1 pid
else return dom1
-- We only need to require a1 == a2 if b2 is dependent
-- If it's non-dependent it doesn't matter what we add to the context.
let name = suggests [ Suggestion b1 , Suggestion b2 ]
addContext (name, dom) $ cont
stealConstraints pid
, 2013 - 05 - 15 Now , comparison of codomains is not
-- blocked any more by getting stuck on domains.
-- Only the domain type in context will be blocked.
But see issue # 1258 .
compareRelevance :: Comparison -> Relevance -> Relevance -> Bool
compareRelevance CmpEq = (==)
compareRelevance CmpLeq = (<=)
compareQuantity :: Comparison -> Quantity -> Quantity -> Bool
compareQuantity CmpEq = sameQuantity
compareQuantity CmpLeq = moreQuantity
compareCohesion :: Comparison -> Cohesion -> Cohesion -> Bool
compareCohesion CmpEq = sameCohesion
compareCohesion CmpLeq = moreCohesion
-- | When comparing argument spines (in 'compareElims') where the first arguments
-- don't match, we keep going, substituting the anti-unification of the two
-- terms in the telescope. More precisely:
--
-- @@
--    (u = v : A)[pid]   w = antiUnify A u v   us = vs : Δ[w/x]
--    -------------------------------------------------------------
--    u us = v vs : (x : A) Δ
-- @@
--
-- The simplest case of anti-unification is to return a fresh metavariable
-- (created by blockTermOnProblem), but if there's shared structure between
-- the two terms we can expose that.
--
-- This is really a crutch that lets us get away with things that otherwise
-- would require heterogenous conversion checking. See for instance issue
-- #2384.
antiUnify :: MonadConversion m => ProblemId -> Type -> Term -> Term -> m Term
antiUnify pid a u v = do
  -- Fast path: syntactically equal terms anti-unify to themselves.
  ((u, v), eq) <- SynEq.checkSyntacticEquality u v
  if eq then return u else do
    (u, v) <- reduce (u, v)
    reportSDoc "tc.conv.antiUnify" 30 $ vcat
      [ "antiUnify"
      , "a =" <+> prettyTCM a
      , "u =" <+> prettyTCM u
      , "v =" <+> prettyTCM v
      ]
    case (u, v) of
      -- Matching Pi types: anti-unify domain and codomain pointwise.
      (Pi ua ub, Pi va vb) -> do
        wa0 <- antiUnifyType pid (unDom ua) (unDom va)
        let wa = wa0 <$ ua
        wb <- addContext wa $ antiUnifyType pid (absBody ub) (absBody vb)
        return $ Pi wa (mkAbs (absName ub) wb)
      -- Matching lambdas: anti-unify the bodies under the function domain.
      (Lam i u, Lam _ v) ->
        reduce (unEl a) >>= \case
          Pi a b -> Lam i . (mkAbs (absName u)) <$> addContext a (antiUnify pid (absBody b) (absBody u) (absBody v))
          _ -> fallback
      -- Same variable head: anti-unify the spines.
      (Var i us, Var j vs) | i == j -> maybeGiveUp $ do
        a <- typeOfBV i
        antiUnifyElims pid a (var i) us vs
      -- Andreas, 2017-07-27:
      -- It seems that nothing guarantees here that the constructors are fully
      -- applied!? Thus, @a@ could be a function type and we need the robust
      -- @getConType@ here.
      -- (Note that @patternViolation@ swallows exceptions coming from @getConType@
      -- thus, we would not see clearly if we used @getFullyAppliedConType@ instead.)
      (Con x ci us, Con y _ vs) | x == y -> maybeGiveUp $ do
        a <- maybe patternViolation (return . snd) =<< getConType x a
        antiUnifyElims pid a (Con x ci []) us vs
      -- Same defined head with spines of equal length: anti-unify the spines.
      (Def f us, Def g vs) | f == g, length us == length vs -> maybeGiveUp $ do
        a <- computeElimHeadType f us vs
        antiUnifyElims pid a (Def f []) us vs
      _ -> fallback
  where
    -- Convert a pattern violation from the recursive calls into the fallback.
    maybeGiveUp = catchPatternErr fallback
    -- Fallback: a fresh metavariable blocked on the given problem.
    fallback = blockTermOnProblem a u pid
-- | Anti-unify two arguments of a function application.
--   Raises a pattern violation if the modalities differ; irrelevant or
--   Prop-typed arguments are not compared (the first is returned as-is).
--   The 'Arg' wrapper of the first argument is kept (via @<$ u@).
antiUnifyArgs :: MonadConversion m => ProblemId -> Dom Type -> Arg Term -> Arg Term -> m (Arg Term)
antiUnifyArgs pid dom u v
  | getModality u /= getModality v = patternViolation
  | otherwise = applyModalityToContext u $
      ifM (isIrrelevantOrPropM dom)
      {-then-} (return u)
      {-else-} ((<$ u) <$> antiUnify pid (unDom dom) (unArg u) (unArg v))
-- | Anti-unify two types; the sort of the first type is kept.
antiUnifyType :: MonadConversion m => ProblemId -> Type -> Type -> m Type
antiUnifyType pid (El s a) (El _ b) = workOnTypes $ El s <$> antiUnify pid (sort s) a b
-- | Anti-unify two eliminator spines applied to a common head @self : a@.
--   The spines must have the same shape (matching projections, matching
--   eliminator kinds); any mismatch raises a pattern violation, which is
--   turned into a fallback meta by 'antiUnify'.
antiUnifyElims :: MonadConversion m => ProblemId -> Type -> Term -> Elims -> Elims -> m Term
antiUnifyElims pid a self [] [] = return self
antiUnifyElims pid a self (Proj o f : es1) (Proj _ g : es2) | f == g = do
  res <- projectTyped self a o f
  case res of
    Just (_, self, a) -> antiUnifyElims pid a self es1 es2
    Nothing -> patternViolation -- can fail for projection like
antiUnifyElims pid a self (Apply u : es1) (Apply v : es2) = do
  -- The head's type must reduce to a Pi to type the application.
  reduce (unEl a) >>= \case
    Pi a b -> do
      w <- antiUnifyArgs pid a u v
      antiUnifyElims pid (b `lazyAbsApp` unArg w) (apply self [w]) es1 es2
    _ -> patternViolation
antiUnifyElims _ _ _ _ _ = patternViolation -- trigger maybeGiveUp in antiUnify
-- | @compareElims pols a v els1 els2@ performs type-directed equality on eliminator spines.
--   @t@ is the type of the head @v@.
--
--   NOTE(review): transmission of this file lost the @([], Proj{}:_)@ and
--   @([], Apply{}:_)@ case alternatives and the recursive @compareElims@
--   call in the PathType branch; they are restored below (marked) — confirm
--   against the upstream source.
compareElims :: forall m. MonadConversion m => [Polarity] -> [IsForced] -> Type -> Term -> [Elim] -> [Elim] -> m ()
compareElims pols0 fors0 a v els01 els02 = (catchConstraint (ElimCmp pols0 fors0 a v els01 els02) :: m () -> m ()) $ do
  let v1 = applyE v els01
      v2 = applyE v els02
      failure = typeError $ UnequalTerms CmpEq v1 v2 (AsTermsOf a)
        -- Andreas, 2013-03-15 since one of the spines is empty, @a@
        -- is the correct type here.
  unless (null els01) $ do
    reportSDoc "tc.conv.elim" 25 $ "compareElims" $$ do
      nest 2 $ vcat
        [ "a =" <+> prettyTCM a
        , "pols0 (truncated to 10) =" <+> hsep (map prettyTCM $ take 10 pols0)
        , "fors0 (truncated to 10) =" <+> hsep (map prettyTCM $ take 10 fors0)
        , "v =" <+> prettyTCM v
        , "els01 =" <+> prettyTCM els01
        , "els02 =" <+> prettyTCM els02
        ]
  case (els01, els02) of
    ([] , [] ) -> return ()
    -- not impossible, see issue 821  (case restored — TODO confirm upstream)
    ([] , Proj{} : _) -> failure
    (Proj{} : _, [] ) -> failure -- could be x.p =?= x for projection p
    -- not impossible, see issue 878  (case restored — TODO confirm upstream)
    ([] , Apply{} : _) -> failure
    (Apply{} : _, [] ) -> failure
    ([] , IApply{} : _) -> failure
    (IApply{} : _, [] ) -> failure
    -- NB: popped up in issue 889
    -- but should be impossible (but again in issue 1467)
    (IApply{} : _, Proj{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (Proj{} : _, IApply{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (IApply{} : _, Apply{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (Apply{} : _, IApply{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    -- Interval applications: compare the endpoints' path.
    (e@(IApply x1 y1 r1) : els1, IApply x2 y2 r2 : els2) -> do
      reportSDoc "tc.conv.elim" 25 $ "compareElims IApply"
      -- Andrea: copying stuff from the Apply case..
      let (pol, pols) = nextPolarity pols0
      ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
        va <- pathView a
        reportSDoc "tc.conv.elim.iapply" 60 $ "compareElims IApply" $$ do
          nest 2 $ "va =" <+> text (show (isPathType va))
        case va of
          PathType s path l bA x y -> do
            b <- elInf primInterval
            compareWithPol pol (flip compareTerm b)
              r1 r2
            -- TODO: compare (x1,x2) and (y1,y2) ?
            let r = r1 -- TODO Andrea: do blocking
            codom <- el' (pure . unArg $ l) ((pure . unArg $ bA) <@> pure r)
            -- (call restored — TODO confirm upstream)
            compareElims pols [] codom -- Path non-dependent (codom `lazyAbsApp` unArg arg)
              (applyE v [e]) els1 els2
          -- We allow for functions (i : I) -> ... to also be heads of a IApply,
          -- because @etaContract@ can produce such terms
          OType t@(El _ Pi{}) -> compareElims pols0 fors0 t v (Apply (defaultArg r1) : els1) (Apply (defaultArg r2) : els2)
          OType{} -> patternViolation
    -- Ordinary applications: compare the arguments at the domain type,
    -- then continue in the (possibly dependent) codomain.
    (Apply arg1 : els1, Apply arg2 : els2) ->
      (verboseBracket "tc.conv.elim" 20 "compare Apply" :: m () -> m ()) $ do
        reportSDoc "tc.conv.elim" 10 $ nest 2 $ vcat
          [ "a =" <+> prettyTCM a
          , "v =" <+> prettyTCM v
          , "arg1 =" <+> prettyTCM arg1
          , "arg2 =" <+> prettyTCM arg2
          ]
        reportSDoc "tc.conv.elim" 50 $ nest 2 $ vcat
          [ "raw:"
          , "a =" <+> pretty a
          , "v =" <+> pretty v
          , "arg1 =" <+> pretty arg1
          , "arg2 =" <+> pretty arg2
          ]
        let (pol, pols) = nextPolarity pols0
            (for, fors) = nextIsForced fors0
        ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
          reportSLn "tc.conv.elim" 90 $ "type is not blocked"
          case unEl a of
            (Pi (Dom{domInfo = info, unDom = b}) codom) -> do
              reportSLn "tc.conv.elim" 90 $ "type is a function type"
              mlvl <- tryMaybe primLevel
              let freeInCoDom (Abs _ c) = 0 `freeInIgnoringSorts` c
                  freeInCoDom _ = False
                  dependent = (Just (unEl b) /= mlvl) && freeInCoDom codom
                    -- Level-polymorphism (x : Level) -> ... does not count as dependency here
                    -- NB: we could drop the free variable test and still be sound.
                    -- It is a trade-off between the administrative effort of
                    -- creating a blocking and traversing a term for free variables.
                    -- Apparently, it is believed that checking free vars is cheaper.
                    -- Andreas, 2013-05-15

              -- NEW, Andreas, 2013-05-15
              -- compare arg1 and arg2
              pid <- newProblem_ $ applyModalityToContext info $
                if isForced for then
                  reportSLn "tc.conv.elim" 90 $ "argument is forced"
                else if isIrrelevant info then do
                  reportSLn "tc.conv.elim" 90 $ "argument is irrelevant"
                  compareIrrelevant b (unArg arg1) (unArg arg2)
                else do
                  reportSLn "tc.conv.elim" 90 $ "argument has polarity " ++ show pol
                  compareWithPol pol (flip compareTerm b)
                    (unArg arg1) (unArg arg2)
              -- if comparison got stuck and function type is dependent, block arg
              solved <- isProblemSolved pid
              reportSLn "tc.conv.elim" 90 $ "solved = " ++ show solved
              arg <- if dependent && not solved
                then applyModalityToContext info $ do
                  reportSDoc "tc.conv.elims" 30 $ vcat $
                    [ "Trying antiUnify:"
                    , nest 2 $ "b =" <+> prettyTCM b
                    , nest 2 $ "arg1 =" <+> prettyTCM arg1
                    , nest 2 $ "arg2 =" <+> prettyTCM arg2
                    ]
                  arg <- (arg1 $>) <$> antiUnify pid b (unArg arg1) (unArg arg2)
                  reportSDoc "tc.conv.elims" 30 $ hang "Anti-unification:" 2 (prettyTCM arg)
                  reportSDoc "tc.conv.elims" 70 $ nest 2 $ "raw:" <+> pretty arg
                  return arg
                else return arg1
              -- continue, possibly with blocked instantiation
              compareElims pols fors (codom `lazyAbsApp` unArg arg) (apply v [arg]) els1 els2
              -- any left over constraints of arg are associated to the comparison
              reportSLn "tc.conv.elim" 90 $ "stealing constraints from problem " ++ show pid
              stealConstraints pid
              {- Stealing solves this issue:
                 Does not create enough blocked tc-problems,
                 see test/fail/DontPrune.
                 (There are remaining problems which do not show up as yellow.)
                 Need to find a way to associate pid also to result of compareElims.
              -}
            a -> do
              reportSDoc "impossible" 10 $
                "unexpected type when comparing apply eliminations " <+> prettyTCM a
              reportSDoc "impossible" 50 $ "raw type:" <+> pretty a
              patternViolation
              -- Andreas, 2013-10-22
              -- in case of disabled reductions (due to failing termination check)
              -- we might get stuck, so do not crash, but fail gently.
              -- __IMPOSSIBLE__

    -- case: f == f' are projections
    (Proj o f : els1, Proj _ f' : els2)
      | f /= f' -> typeError . GenericError . show =<< prettyTCM f <+> "/=" <+> prettyTCM f'
      | otherwise -> ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
          res <- projectTyped v a o f -- fails only if f is proj.like but parameters cannot be retrieved
          case res of
            Just (_, u, t) -> do
              -- Andreas, 2015-07-01:
              -- The arguments following the principal argument of a projection
              -- are invariant. (At least as long as we have no explicit polarity
              -- annotations.)
              compareElims [] [] t u els1 els2
            Nothing -> do
              reportSDoc "tc.conv.elims" 30 $ sep
                [ text $ "projection " ++ show f
                , text "applied to value " <+> prettyTCM v
                , text "of unexpected type " <+> prettyTCM a
                ]
              patternViolation
| " Compare " two terms in irrelevant position . This always succeeds .
-- However, we can dig for solutions of irrelevant metas in the
-- terms we compare.
-- (Certainly not the systematic solution, that'd be proof search...)
compareIrrelevant :: MonadConversion m => Type -> Term -> Term -> m ()
2012 - 04 - 02 DontCare no longer present
compareIrrelevant t ( DontCare v ) w = compareIrrelevant t v w
compareIrrelevant t v ( DontCare w ) = compareIrrelevant t v w
compareIrrelevant t (DontCare v) w = compareIrrelevant t v w
compareIrrelevant t v (DontCare w) = compareIrrelevant t v w
-}
compareIrrelevant t v0 w0 = do
let v = stripDontCare v0
w = stripDontCare w0
reportSDoc "tc.conv.irr" 20 $ vcat
[ "compareIrrelevant"
, nest 2 $ "v =" <+> prettyTCM v
, nest 2 $ "w =" <+> prettyTCM w
]
reportSDoc "tc.conv.irr" 50 $ vcat
[ nest 2 $ "v =" <+> pretty v
, nest 2 $ "w =" <+> pretty w
]
try v w $ try w v $ return ()
where
try (MetaV x es) w fallback = do
mv <- lookupMeta x
let rel = getMetaRelevance mv
inst = case mvInstantiation mv of
InstV{} -> True
_ -> False
reportSDoc "tc.conv.irr" 20 $ vcat
[ nest 2 $ text $ "rel = " ++ show rel
, nest 2 $ "inst =" <+> pretty inst
]
if not (isIrrelevant rel) || inst
then fallback
, 2016 - 08 - 08 , issue # 2131 :
-- Mining for solutions for irrelevant metas is not definite.
-- Thus, in case of error, leave meta unsolved.
else (assignE DirEq x es w (AsTermsOf t) $ compareIrrelevant t) `catchError` \ _ -> fallback
-- the value of irrelevant or unused meta does not matter
try v w fallback = fallback
-- | Run a comparison according to a polarity: invariant positions compare
--   for equality, covariant for inequality, contravariant for inequality
--   with the operands swapped, and nonvariant positions are not compared.
compareWithPol :: MonadConversion m => Polarity -> (Comparison -> a -> a -> m ()) -> a -> a -> m ()
compareWithPol pol cmp x y = case pol of
  Invariant     -> cmp CmpEq x y
  Covariant     -> cmp CmpLeq x y
  Contravariant -> cmp CmpLeq y x
  Nonvariant    -> return ()
-- | The polarity corresponding to a comparison direction.
polFromCmp :: Comparison -> Polarity
polFromCmp cmp = case cmp of
  CmpLeq -> Covariant
  CmpEq  -> Invariant
-- | Type-directed equality on argument lists
--
--   Wraps each argument as an 'Apply' eliminator and delegates to
--   'compareElims'.
compareArgs :: MonadConversion m => [Polarity] -> [IsForced] -> Type -> Term -> Args -> Args -> m ()
compareArgs pol for a v args1 args2 =
  compareElims pol for a v (Apply <$> args1) (Apply <$> args2)
---------------------------------------------------------------------------
-- * Types
---------------------------------------------------------------------------
-- | Equality on Types
compareType :: MonadConversion m => Comparison -> Type -> Type -> m ()
compareType cmp ty1@(El s1 a1) ty2@(El s2 a2) =
  workOnTypes $
  verboseBracket "tc.conv.type" 20 "compareType" $ do
    reportSDoc "tc.conv.type" 50 $ vcat
      [ "compareType" <+> sep [ prettyTCM ty1 <+> prettyTCM cmp
                              , prettyTCM ty2 ]
      , hsep [ " sorts:", prettyTCM s1, " and ", prettyTCM s2 ]
      ]
    compareAs cmp AsTypes a1 a2
    -- Compare the sorts too, unless cumulativity is on or sort
    -- comparison has been disabled by the pragma options.
    unlessM ((optCumulativity <$> pragmaOptions) `or2M`
             (not . optCompareSorts <$> pragmaOptions)) $
      compareSort CmpEq s1 s2
    return ()
-- | @leqType t1 t2@ checks @t1 <= t2@ (subtyping as 'CmpLeq' comparison).
leqType :: MonadConversion m => Type -> Type -> m ()
leqType t1 t2 = compareType CmpLeq t1 t2
-- | @coerce v a b@ coerces @v : a@ to type @b@, returning a @v' : b@
--   with maybe extra hidden applications or hidden abstractions.
--
--   In principle, this function can host coercive subtyping, but
--   currently it only tries to fix problems with hidden function types.
--
coerce :: (MonadConversion m, MonadTCM m) => Comparison -> Term -> Type -> Type -> m Term
coerce cmp v t1 t2 = blockTerm t2 $ do
  verboseS "tc.conv.coerce" 10 $ do
    (a1,a2) <- reify (t1,t2)
    let dbglvl = if isSet a1 && isSet a2 then 50 else 10
    reportSDoc "tc.conv.coerce" dbglvl $
      "coerce" <+> vcat
        [ "term v =" <+> prettyTCM v
        , "from type t1 =" <+> prettyTCM a1
        , "to type t2 =" <+> prettyTCM a2
        , "comparison =" <+> prettyTCM cmp
        ]
    reportSDoc "tc.conv.coerce" 70 $
      "coerce" <+> vcat
        [ "term v =" <+> pretty v
        , "from type t1 =" <+> pretty t1
        , "to type t2 =" <+> pretty t2
        , "comparison =" <+> pretty cmp
        ]
  -- v <$ do workOnTypes $ leqType t1 t2
  -- take off hidden/instance domains from t1 and t2
  TelV tel1 b1 <- telViewUpTo' (-1) notVisible t1
  TelV tel2 b2 <- telViewUpTo' (-1) notVisible t2
  let n = size tel1 - size tel2
  -- the crude solution would be
  --   v' = λ {tel2} → v {tel1}
  -- however, that may introduce unneccessary many function types
  -- If n > 0 and b2 is not blocked, it is safe to
  -- insert n many hidden args
  if n <= 0 then fallback else do
    ifBlocked b2 (\ _ _ -> fallback) $ \ _ _ -> do
      (args, t1') <- implicitArgs n notVisible t1
      let v' = v `apply` args
      v' <$ coerceSize (compareType cmp) v' t1' t2
  where
    -- No hidden arguments to insert: just check the size-aware subtyping.
    fallback = v <$ coerceSize (compareType cmp) v t1 t2
-- | Account for situations like @k : (Size< j) <= (Size< k + 1)@
--
--   Actually, the semantics is
--   @(Size<= k) ∩ (Size< j) ⊆ rhs@
--   which gives a disjunctive constraint. Mmmh, looks like stuff
--   TODO.
--
--   For now, we do a cheap heuristics.
--
coerceSize :: MonadConversion m => (Type -> Type -> m ()) -> Term -> Type -> Type -> m ()
coerceSize leqType v t1 t2 = verboseBracket "tc.conv.size.coerce" 45 "coerceSize" $
  workOnTypes $ do
    reportSDoc "tc.conv.size.coerce" 70 $
      "coerceSize" <+> vcat
        [ "term v =" <+> pretty v
        , "from type t1 =" <+> pretty t1
        , "to type t2 =" <+> pretty t2
        ]
    let fallback = leqType t1 t2
        done = caseMaybeM (isSizeType =<< reduce t1) fallback $ \ _ -> return ()
    -- Andreas, 2015-07-22, Issue 1615:
    -- If t1 is a meta and t2 a type like Size< v2, we need to make sure we do not miss
    -- the constraint v < v2!
    caseMaybeM (isSizeType =<< reduce t2) fallback $ \ b2 -> do
      -- Andreas, 2017-01-20, issue #2329:
      -- If v is not a size suitable for the solver, like a neutral term,
      -- we can only rely on the type.
      mv <- sizeMaxView v
      if any (\case{ DOtherSize{} -> True; _ -> False }) mv then fallback else do
        -- Andreas, 2015-02-11 do not instantiate metas here (triggers issue 1203).
        unlessM (tryConversion $ dontAssignMetas $ leqType t1 t2) $ do
          -- A (most probably weaker) alternative is to just check syn.eq.
          -- ifM (snd <$> checkSyntacticEquality t1 t2) (return v) $ {- else -} do
          reportSDoc "tc.conv.size.coerce" 20 $ "coercing to a size type"
          case b2 of
            -- @t2 = Size@. We are done!
            BoundedNo -> done
            -- @t2 = Size< v2@
            BoundedLt v2 -> do
              sv2 <- sizeView v2
              case sv2 of
                SizeInf -> done
                OtherSize{} -> do
                  -- Andreas, 2014-06-16:
                  -- Issue 1203: For now, just treat v < v2 as suc v <= v2
                  -- TODO: Need proper < comparison
                  vinc <- sizeSuc 1 v
                  compareSizes CmpLeq vinc v2
                  done
                -- @v2 = a2 + 1@: In this case, we can try @v <= a2@
                SizeSuc a2 -> do
                  compareSizes CmpLeq v a2
                  -- (comment salvaged: "to pass Issue 1136")
                  -- NOTE(review): a trailing @done@ may have been lost from this
                  -- branch in transmission — confirm against upstream source.
---------------------------------------------------------------------------
-- * Sorts and levels
---------------------------------------------------------------------------
-- | Dispatch a level comparison: '<=' goes to 'leqLevel',
--   '==' goes to 'equalLevel'.
compareLevel :: MonadConversion m => Comparison -> Level -> Level -> m ()
compareLevel cmp u v = case cmp of
  CmpLeq -> leqLevel u v
  CmpEq  -> equalLevel u v
-- | Dispatch a sort comparison: equality goes to 'equalSort',
--   inequality to 'leqSort'.
compareSort :: MonadConversion m => Comparison -> Sort -> Sort -> m ()
compareSort cmp = case cmp of
  CmpEq  -> equalSort
  CmpLeq -> leqSort
-- | Check that the first sort is less or equal to the second.
--
--   We can put @SizeUniv@ below @Inf@, but otherwise, it is
--   unrelated to the other universes.
--
leqSort :: forall m. MonadConversion m => Sort -> Sort -> m ()
leqSort s1 s2 = (catchConstraint (SortCmp CmpLeq s1 s2) :: m () -> m ()) $ do
  (s1,s2) <- reduce (s1,s2)
  let postpone = addConstraint (SortCmp CmpLeq s1 s2)
      no = typeError $ NotLeqSort s1 s2
      yes = return ()
      -- Last resort: check syntactic equality if enabled, else postpone.
      synEq = ifNotM (optSyntacticEquality <$> pragmaOptions) postpone $ do
        ((s1,s2) , equal) <- SynEq.checkSyntacticEquality s1 s2
        if | equal -> yes
           | otherwise -> postpone
  reportSDoc "tc.conv.sort" 30 $
    sep [ "leqSort"
        , nest 2 $ fsep [ prettyTCM s1 <+> "=<"
                        , prettyTCM s2 ]
        ]
  propEnabled <- isPropEnabled
  let fvsRHS = (`IntSet.member` allFreeVars s2)
  badRigid <- s1 `rigidVarsNotContainedIn` fvsRHS
  case (s1, s2) of
    -- Andreas, 2018-09-03: crash on dummy sort
    (DummyS s, _) -> impossibleSort s
    (_, DummyS s) -> impossibleSort s
    -- The most basic rule: @Set l =< Set l'@ iff @l =< l'@
    (Type a , Type b ) -> leqLevel a b
    -- Likewise for @Prop@
    (Prop a , Prop b ) -> leqLevel a b
    -- @Prop l@ is below @Set l@
    (Prop a , Type b ) -> leqLevel a b
    (Type a , Prop b ) -> no
    -- Setω is the top sort
    (_ , Inf ) -> yes
    (Inf , _ ) -> equalSort s1 s2
    -- @SizeUniv@ and @Prop0@ are bottom sorts.
    -- So is @Set0@ if @Prop@ is not enabled.
    (_ , SizeUniv) -> equalSort s1 s2
    (_ , Prop (Max 0 [])) -> equalSort s1 s2
    (_ , Type (Max 0 []))
      | not propEnabled -> equalSort s1 s2
    -- SizeUniv is unrelated to any @Set l@ or @Prop l@
    (SizeUniv, Type{} ) -> no
    (SizeUniv, Prop{} ) -> no
    -- If the first sort rigidly depends on a variable and the second
    -- sort does not mention this variable, the second sort must be Inf.
    (_ , _ ) | badRigid -> equalSort s2 Inf
    -- This shouldn't be necessary
    (UnivSort Inf , UnivSort Inf) -> yes
    -- PiSort, FunSort, UnivSort and MetaS might reduce once we instantiate
    -- more metas, so we postpone.
    (PiSort{}, _ ) -> synEq
    (_ , PiSort{}) -> synEq
    (FunSort{}, _ ) -> synEq
    (_ , FunSort{}) -> synEq
    (UnivSort{}, _ ) -> synEq
    (_ , UnivSort{}) -> synEq
    (MetaS{} , _ ) -> synEq
    (_ , MetaS{} ) -> synEq
    -- DefS are postulated sorts, so they do not reduce.
    (DefS{} , _ ) -> synEq
    (_ , DefS{}) -> synEq
  where
    -- Encountering a dummy sort here is an internal invariant violation.
    impossibleSort s = do
      reportS "impossible" 10
        [ "leqSort: found dummy sort with description:"
        , s
        ]
      __IMPOSSIBLE__
-- | Check that the first level is less or equal to the second.
leqLevel :: MonadConversion m => Level -> Level -> m ()
leqLevel a b = do
  reportSDoc "tc.conv.nat" 30 $
    "compareLevel" <+>
      sep [ prettyTCM a <+> "=<"
          , prettyTCM b ]
  -- Andreas, 2015-12-28 Issue 1757
  -- We normalize both sides to make the syntactic equality check (==) stronger.
  -- See case for `same term` below.
  a <- normalise a
  b <- normalise b
  leqView a b
  where
    -- Andreas, 2016-09-28
    -- If we have to postpone a constraint, then its simplified form!
    leqView :: MonadConversion m => Level -> Level -> m ()
    leqView a b = catchConstraint (LevelCmp CmpLeq a b) $ do
      reportSDoc "tc.conv.level" 30 $
        "compareLevelView" <+>
          sep [ pretty a <+> "=<"
              , pretty b ]
      cumulativity <- optCumulativity <$> pragmaOptions
      reportSDoc "tc.conv.level" 40 $
        "compareLevelView" <+>
          sep [ prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ levelMaxView a)
              , "=<"
              , prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ levelMaxView b)
              ]
      wrap $ case (levelMaxView a, levelMaxView b) of
        -- same term
        _ | a == b -> ok
        -- 0 ≤ any
        (SingleClosed 0 :| [] , _) -> ok
        -- any ≤ 0
        (as , SingleClosed 0 :| []) ->
          sequence_ [ equalLevel (unSingleLevel a') (ClosedLevel 0) | a' <- NonEmpty.toList as ]
        -- closed ≤ closed
        (SingleClosed m :| [], SingleClosed n :| []) -> if m <= n then ok else notok
        -- closed ≤ b
        (SingleClosed m :| [] , _)
          | m <= levelLowerBound b -> ok
        -- as ≤ neutral/closed
        (as, bs)
          | all neutralOrClosed bs , levelLowerBound a > levelLowerBound b -> notok
        -- ⊔ as ≤ single
        (as@(_:|_:_), b :| []) ->
          sequence_ [ leqView (unSingleLevel a') (unSingleLevel b) | a' <- NonEmpty.toList as ]
        -- reduce constants
        (as, bs)
          | let minN = min (fst $ levelPlusView a) (fst $ levelPlusView b)
                a' = fromMaybe __IMPOSSIBLE__ $ subLevel minN a
                b' = fromMaybe __IMPOSSIBLE__ $ subLevel minN b
          , minN > 0 -> leqView a' b'
        -- remove subsumed
        -- Andreas, 2014-04-07: This is ok if we do not go back to equalLevel
        (as, bs)
          | (subsumed@(_:_) , as') <- List.partition isSubsumed (NonEmpty.toList as)
          -> leqView (unSingleLevels as') b
          where
            isSubsumed a = any (`subsumes` a) (NonEmpty.toList bs)
            subsumes :: SingleLevel -> SingleLevel -> Bool
            subsumes (SingleClosed m) (SingleClosed n) = m >= n
            subsumes (SinglePlus (Plus m _)) (SingleClosed n) = m >= n
            subsumes (SinglePlus (Plus m a)) (SinglePlus (Plus n b)) = a == b && m >= n
            subsumes _ _ = False
        -- as ≤ _l x₁ .. xₙ ⊔ bs
        -- We can solve _l := λ x₁ .. xₙ -> as ⊔ (_l' x₁ .. xₙ)
        -- (where _l' is a new metavariable)
        (as , bs)
          | cumulativity
          , Just (mb@(MetaLevel x es) , bs') <- singleMetaView (NonEmpty.toList bs)
          , null bs' || noMetas (Level a , unSingleLevels bs') -> do
              mv <- lookupMeta x
              -- Jesper, 2019-10-13: abort if this is an interaction
              -- meta or a generalizable meta
              abort <- (isJust <$> isInteractionMeta x) `or2M`
                       ((== YesGeneralize) <$> isGeneralizableMeta x)
              if | abort -> postpone
                 | otherwise -> do
                     x' <- case mvJudgement mv of
                       IsSort{} -> __IMPOSSIBLE__
                       HasType _ cmp t -> do
                         TelV tel t' <- telView t
                         newMeta Instantiable (mvInfo mv) normalMetaPriority (idP $ size tel) $ HasType () cmp t
                     reportSDoc "tc.conv.level" 20 $ fsep
                       [ "attempting to solve" , prettyTCM (MetaV x es) , "to the maximum of"
                       , prettyTCM (Level a) , "and the fresh meta" , prettyTCM (MetaV x' es)
                       ]
                     equalLevel (atomicLevel mb) $ levelLub a (atomicLevel $ MetaLevel x' es)
        -- Andreas, 2016-09-28: This simplification loses the solution lzero.
        -- Thus, it is invalid.
        -- See test/Succeed/LevelMetaLeqNeutralLevel.agda.
        -- -- [a] ≤ [neutral]
        -- ([a@(Plus n _)], [b@(Plus m NeutralLevel{})])
        --   | m == n -> equalLevel' ([a]) ([b])
        --   -- Andreas, 2014-04-07: This call to equalLevel is ok even if we removed
        --   -- subsumed terms from the lhs.
        -- anything else
        _ | noMetas (Level a , Level b) -> notok
          | otherwise -> postpone
      where
        ok = return ()
        notok = unlessM typeInType $ typeError $ NotLeqSort (Type a) (Type b)
        postpone = patternViolation
        -- Turn hard type errors into a definite failure; re-raise the rest.
        wrap m = m `catchError` \case
          TypeError{} -> notok
          err -> throwError err
        neutralOrClosed (SingleClosed _) = True
        neutralOrClosed (SinglePlus (Plus _ NeutralLevel{})) = True
        neutralOrClosed _ = False
        -- Is there exactly one @MetaLevel@ in the list of single levels?
        singleMetaView :: [SingleLevel] -> Maybe (LevelAtom, [SingleLevel])
        singleMetaView (SinglePlus (Plus 0 l@(MetaLevel m es)) : ls)
          | all (not . isMetaLevel) ls = Just (l,ls)
        singleMetaView (l : ls)
          | not $ isMetaLevel l = second (l:) <$> singleMetaView ls
        singleMetaView _ = Nothing
        isMetaLevel :: SingleLevel -> Bool
        isMetaLevel (SinglePlus (Plus _ MetaLevel{})) = True
        isMetaLevel (SinglePlus (Plus _ UnreducedLevel{})) = __IMPOSSIBLE__
        isMetaLevel _ = False
-- | Level equality: normalise both sides, then compare via 'equalLevel''.
equalLevel :: MonadConversion m => Level -> Level -> m ()
equalLevel a b = do
  -- Andreas, 2013-10-31 Use normalization to make syntactic equality stronger
  (a, b) <- normalise (a, b)
  equalLevel' a b
-- | Precondition: levels are 'normalise'd.
equalLevel' :: forall m. MonadConversion m => Level -> Level -> m ()
equalLevel' a b = do
  reportSDoc "tc.conv.level" 50 $ sep [ "equalLevel", nest 2 $ parens $ pretty a, nest 2 $ parens $ pretty b ]
  -- Andreas, 2013-10-31 remove common terms (that don't contain metas!)
  -- THAT's actually UNSOUND when metas are instantiated, because
  --   max a b == max a c  does not imply  b == c
  -- as <- return $ Set.fromList $ closed0 as
  -- bs <- return $ Set.fromList $ closed0 bs
  -- let cs = Set.filter (not . hasMeta) $ Set.intersection as bs
  -- as <- return $ Set.toList $ as Set.\\ cs
  -- bs <- return $ Set.toList $ bs Set.\\ cs
  reportSDoc "tc.conv.level" 40 $
    sep [ "equalLevel"
        , vcat [ nest 2 $ sep [ prettyTCM a <+> "=="
                              , prettyTCM b
                              ]
               ]
        ]
  -- Jesper, 2014-02-02 remove terms that certainly do not contribute
  -- to the maximum
  let (a',b') = removeSubsumed a b
  reportSDoc "tc.conv.level" 50 $
    sep [ "equalLevel (w/o subsumed)"
        , vcat [ nest 2 $ sep [ prettyTCM a' <+> "=="
                              , prettyTCM b'
                              ]
               ]
        ]
  let as = levelMaxView a'
      bs = levelMaxView b'
  reportSDoc "tc.conv.level" 50 $
    sep [ text "equalLevel"
        , vcat [ nest 2 $ sep [ prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ as)
                              , "=="
                              , prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ bs)
                              ]
               ]
        ]
  reportSDoc "tc.conv.level" 80 $
    sep [ text "equalLevel"
        , vcat [ nest 2 $ sep [ prettyList_ (map (text . show . unSingleLevel) $ NonEmpty.toList $ as)
                              , "=="
                              , prettyList_ (map (text . show . unSingleLevel) $ NonEmpty.toList $ bs)
                              ]
               ]
        ]
  catchConstraint (LevelCmp CmpEq a b) $ case (as, bs) of
    -- equal levels
    _ | a == b -> ok
    -- closed == closed
    (SingleClosed m :| [], SingleClosed n :| [])
      | m == n -> ok
      | otherwise -> notok
    -- closed == neutral
    (SingleClosed m :| [] , bs) | any isNeutral bs -> notok
    (as , SingleClosed n :| []) | any isNeutral as -> notok
    -- closed == b
    (SingleClosed m :| [] , _) | m < levelLowerBound b -> notok
    (_ , SingleClosed n :| []) | n < levelLowerBound a -> notok
    -- 0 == a ⊔ b
    (SingleClosed 0 :| [] , bs@(_:|_:_)) ->
      sequence_ [ equalLevel' (ClosedLevel 0) (unSingleLevel b') | b' <- NonEmpty.toList bs ]
    (as@(_:|_:_) , SingleClosed 0 :| []) ->
      sequence_ [ equalLevel' (unSingleLevel a') (ClosedLevel 0) | a' <- NonEmpty.toList as ]
    -- meta == any
    (SinglePlus (Plus k (MetaLevel x as)) :| [] , bs)
      | any (isThisMeta x) bs -> postpone
    (as , SinglePlus (Plus k (MetaLevel x bs)) :| [])
      | any (isThisMeta x) as -> postpone
    (SinglePlus (Plus k (MetaLevel x as')) :| [] , SinglePlus (Plus l (MetaLevel y bs')) :| [])
      -- there is only a potential choice when k == l
      | k == l -> if
          | y < x -> meta x as' $ atomicLevel $ MetaLevel y bs'
          | otherwise -> meta y bs' $ atomicLevel $ MetaLevel x as'
    (SinglePlus (Plus k (MetaLevel x as')) :| [] , _)
      | Just b' <- subLevel k b -> meta x as' b'
    (_ , SinglePlus (Plus l (MetaLevel y bs')) :| [])
      | Just a' <- subLevel l a -> meta y bs' a'
    -- a' ⊔ b == b
    _ | Just a' <- levelMaxDiff a b
      , b /= ClosedLevel 0 -> leqLevel a' b
    -- a == b' ⊔ a
    _ | Just b' <- levelMaxDiff b a
      , a /= ClosedLevel 0 -> leqLevel b' a
    -- neutral/closed == neutral/closed
    (as , bs)
      | all isNeutralOrClosed (NonEmpty.toList as ++ NonEmpty.toList bs)
      -- Andreas, 2013-10-31: There could be metas in neutral levels (see Issue 930).
      -- Should not we postpone there as well? Yes!
      , not (any hasMeta (NonEmpty.toList as ++ NonEmpty.toList bs))
      , length as == length bs -> do
          reportSLn "tc.conv.level" 60 $ "equalLevel: all are neutral or closed"
          zipWithM_ ((===) `on` levelTm . unSingleLevel) (NonEmpty.toList as) (NonEmpty.toList bs)
    -- more cases?
    _ | noMetas (Level a , Level b) -> notok
      | otherwise -> postpone
  where
    -- Compare two level atoms as ordinary terms at type Level.
    a === b = unlessM typeInType $ do
      lvl <- levelType
      equalAtom (AsTermsOf lvl) a b
    ok = return ()
    notok = unlessM typeInType notOk
    notOk = typeError $ UnequalLevel CmpEq a b
    postpone = do
      reportSDoc "tc.conv.level" 30 $ hang "postponing:" 2 $ hang (pretty a <+> "==") 0 (pretty b)
      patternViolation
    -- perform assignment (MetaLevel x as) := b
    meta x as b = do
      reportSLn "tc.meta.level" 30 $ "Assigning meta level"
      reportSDoc "tc.meta.level" 50 $ "meta" <+> sep [prettyList $ map pretty as, pretty b]
      lvl <- levelType
      assignE DirEq x as (levelTm b) (AsTermsOf lvl) (===) -- fallback: check equality as atoms
    -- Make sure to give a sensible error message
    wrap m = m `catchError` \case
      TypeError{} -> notok
      err -> throwError err
    isNeutral (SinglePlus (Plus _ NeutralLevel{})) = True
    isNeutral _ = False
    isNeutralOrClosed (SingleClosed _) = True
    isNeutralOrClosed (SinglePlus (Plus _ NeutralLevel{})) = True
    isNeutralOrClosed _ = False
    -- Does a single level mention a metavariable anywhere?
    hasMeta (SinglePlus a) = case a of
      Plus _ MetaLevel{} -> True
      Plus _ (BlockedLevel _ v) -> isJust $ firstMeta v
      Plus _ (NeutralLevel _ v) -> isJust $ firstMeta v
      Plus _ (UnreducedLevel v) -> isJust $ firstMeta v
    hasMeta (SingleClosed _) = False
    isThisMeta x (SinglePlus (Plus _ (MetaLevel y _))) = x == y
    isThisMeta _ _ = False
    -- Drop maximum components strictly dominated by one on the other side.
    removeSubsumed a b =
      let as = NonEmpty.toList $ levelMaxView a
          bs = NonEmpty.toList $ levelMaxView b
          a' = unSingleLevels $ filter (not . (`isStrictlySubsumedBy` bs)) as
          b' = unSingleLevels $ filter (not . (`isStrictlySubsumedBy` as)) bs
      in (a',b')
    x `isStrictlySubsumedBy` ys = any (`strictlySubsumes` x) ys
    SingleClosed m `strictlySubsumes` SingleClosed n = m > n
    SinglePlus (Plus m a) `strictlySubsumes` SingleClosed n = m > n
    SinglePlus (Plus m a) `strictlySubsumes` SinglePlus (Plus n b) = a == b && m > n
    _ `strictlySubsumes` _ = False
-- | Check that the first sort is equal to the second.
equalSort :: forall m. MonadConversion m => Sort -> Sort -> m ()
equalSort s1 s2 = do
catchConstraint (SortCmp CmpEq s1 s2) $ do
(s1,s2) <- reduce (s1,s2)
let yes = return ()
no = typeError $ UnequalSorts s1 s2
reportSDoc "tc.conv.sort" 30 $ sep
[ "equalSort"
, vcat [ nest 2 $ fsep [ prettyTCM s1 <+> "=="
, prettyTCM s2 ]
, nest 2 $ fsep [ pretty s1 <+> "=="
, pretty s2 ]
]
]
propEnabled <- isPropEnabled
typeInTypeEnabled <- typeInType
case (s1, s2) of
, 2018 - 09 - 03 : crash on dummy sort
(DummyS s, _) -> impossibleSort s
(_, DummyS s) -> impossibleSort s
-- one side is a meta sort: try to instantiate
-- In case both sides are meta sorts, instantiate the
-- bigger (i.e. more recent) one.
(MetaS x es , MetaS y es')
| x == y -> synEq s1 s2
| x < y -> meta y es' s1
| otherwise -> meta x es s2
(MetaS x es , _ ) -> meta x es s2
(_ , MetaS x es ) -> meta x es s1
-- diagonal cases for rigid sorts
(Type a , Type b ) -> equalLevel a b `catchInequalLevel` no
(SizeUniv , SizeUniv ) -> yes
(Prop a , Prop b ) -> equalLevel a b `catchInequalLevel` no
(Inf , Inf ) -> yes
if --type - in - type is enabled , Setω is equal to any Set ℓ ( see # 3439 )
(Type{} , Inf )
| typeInTypeEnabled -> yes
(Inf , Type{} )
| typeInTypeEnabled -> yes
equating @PiSort a b@ to another sort
(s1 , PiSort a b) -> piSortEquals s1 a b
(PiSort a b , s2) -> piSortEquals s2 a b
-- equating @FunSort a b@ to another sort
(s1 , FunSort a b) -> funSortEquals s1 a b
(FunSort a b , s2) -> funSortEquals s2 a b
-- equating @UnivSort s@ to another sort
(s1 , UnivSort s2) -> univSortEquals s1 s2
(UnivSort s1 , s2 ) -> univSortEquals s2 s1
-- postulated sorts can only be equal if they have the same head
(DefS d es , DefS d' es')
| d == d' -> synEq s1 s2
| otherwise -> no
-- any other combinations of sorts are not equal
(_ , _ ) -> no
where
perform assignment ( MetaS x es ) : = s
meta :: MetaId -> [Elim' Term] -> Sort -> m ()
meta x es s = do
reportSLn "tc.meta.sort" 30 $ "Assigning meta sort"
reportSDoc "tc.meta.sort" 50 $ "meta" <+> sep [pretty x, prettyList $ map pretty es, pretty s]
assignE DirEq x es (Sort s) AsTypes __IMPOSSIBLE__
-- fall back to syntactic equality check, postpone if it fails
synEq :: Sort -> Sort -> m ()
synEq s1 s2 = do
let postpone = addConstraint $ SortCmp CmpEq s1 s2
doSynEq <- optSyntacticEquality <$> pragmaOptions
if | doSynEq -> do
((s1,s2) , equal) <- SynEq.checkSyntacticEquality s1 s2
if | equal -> return ()
| otherwise -> postpone
| otherwise -> postpone
set0 = mkType 0
prop0 = mkProp 0
Equate a sort @s1@ to @univSort s2@
-- Precondition: @s1@ and @univSort s2@ are already reduced.
univSortEquals :: Sort -> Sort -> m ()
univSortEquals s1 s2 = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "univSortEquals"
, " s1 =" <+> prettyTCM s1
, " s2 =" <+> prettyTCM s2
]
let no = typeError $ UnequalSorts s1 (UnivSort s2)
case s1 of
-- @Set l1@ is the successor sort of either @Set l2@ or
-- @Prop l2@ where @l1 == lsuc l2@.
Type l1 -> do
propEnabled <- isPropEnabled
-- @s2@ is definitely not @Inf@ or @SizeUniv@
if | Inf <- s2 -> no
| SizeUniv <- s2 -> no
-- If @Prop@ is not used, then @s2@ must be of the form
-- @Set l2@
| not propEnabled -> do
l2 <- case subLevel 1 l1 of
Just l2 -> return l2
Nothing -> do
l2 <- newLevelMeta
equalLevel l1 (levelSuc l2)
return l2
equalSort (Type l2) s2
-- Otherwise we postpone
| otherwise -> synEq (Type l1) (UnivSort s2)
-- @Setω@ is only a successor sort if --type-in-type or
-- --omega-in-omega is enabled.
Inf -> do
infInInf <- (optOmegaInOmega <$> pragmaOptions) `or2M` typeInType
if | infInInf -> equalSort Inf s2
| otherwise -> no
-- @Prop l@ and @SizeUniv@ are not successor sorts
Prop{} -> no
SizeUniv{} -> no
-- Anything else: postpone
_ -> synEq s1 (UnivSort s2)
Equate a sort @s@ to @piSort a b@
Precondition : @s@ and @piSort a b@ are already reduced .
piSortEquals :: Sort -> Dom Type -> Abs Sort -> m ()
piSortEquals s a NoAbs{} = __IMPOSSIBLE__
piSortEquals s a bAbs@(Abs x b) = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "piSortEquals"
, " s =" <+> prettyTCM s
, " a =" <+> prettyTCM a
, " b =" <+> addContext (x,a) (prettyTCM b)
]
propEnabled <- isPropEnabled
If @b@ is dependent , then @piSort a b@ computes to
-- @Setω@. Hence, if @s@ is definitely not @Setω@, then @b@
-- cannot be dependent.
if | definitelyNotInf s -> do
-- We force @b@ to be non-dependent by unifying it with
a fresh meta that does not depend on @x : a@
b' <- newSortMeta
addContext (x,a) $ equalSort b (raise 1 b')
funSortEquals s (getSort a) b'
-- Otherwise: postpone
| otherwise -> synEq (PiSort a bAbs) s
Equate a sort @s@ to @funSort s1 s2@
-- Precondition: @s@ and @funSort s1 s2@ are already reduced
funSortEquals :: Sort -> Sort -> Sort -> m ()
funSortEquals s0 s1 s2 = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "funSortEquals"
, " s0 =" <+> prettyTCM s0
, " s1 =" <+> prettyTCM s1
, " s2 =" <+> prettyTCM s2
]
propEnabled <- isPropEnabled
sizedTypesEnabled <- sizedTypesOption
case s0 of
-- If @Setω == funSort s1 s2@, then either @s1@ or @s2@ must
-- be @Setω@.
Inf | definitelyNotInf s1 && definitelyNotInf s2 -> do
typeError $ UnequalSorts s0 (FunSort s1 s2)
| definitelyNotInf s1 -> equalSort Inf s2
| definitelyNotInf s2 -> equalSort Inf s1
| otherwise -> synEq s0 (FunSort s1 s2)
-- If @Set l == funSort s1 s2@, then @s2@ must be of the
-- form @Set l2@. @s1@ can be one of @Set l1@, @Prop l1@, or
-- @SizeUniv@.
Type l -> do
l2 <- forceType s2
-- We must have @l2 =< l@, this might help us to solve
-- more constraints (in particular when @l == 0@).
leqLevel l2 l
Jesper , 2019 - 12 - 27 : SizeUniv is disabled at the moment .
if | {- sizedTypesEnabled || -} propEnabled -> case funSort' s1 (Type l2) of
-- If the work we did makes the @funSort@ compute,
-- continue working.
Just s -> equalSort (Type l) s
-- Otherwise: postpone
Nothing -> synEq (Type l) (FunSort s1 $ Type l2)
If both Prop and sized types are disabled , only the
case @s1 = = Set l1@ remains .
| otherwise -> do
l1 <- forceType s1
equalLevel l (levelLub l1 l2)
-- If @Prop l == funSort s1 s2@, then @s2@ must be of the
-- form @Prop l2@, and @s1@ can be one of @Set l1@, Prop
-- l1@, or @SizeUniv@.
Prop l -> do
l2 <- forceProp s2
leqLevel l2 l
case funSort' s1 (Prop l2) of
-- If the work we did makes the @funSort@ compute,
-- continue working.
Just s -> equalSort (Prop l) s
-- Otherwise: postpone
Nothing -> synEq (Prop l) (FunSort s1 $ Prop l2)
-- We have @SizeUniv == funSort s1 s2@ iff @s2 == SizeUniv@
SizeUniv -> equalSort SizeUniv s2
-- Anything else: postpone
_ -> synEq s0 (FunSort s1 s2)
-- check if the given sort @s0@ is a (closed) bottom sort
i.e. @piSort a b = = s0@ implies @b = = s0@.
isBottomSort :: Bool -> Sort -> Bool
isBottomSort propEnabled (Prop (ClosedLevel 0)) = True
isBottomSort propEnabled (Type (ClosedLevel 0)) = not propEnabled
isBottomSort propEnabled _ = False
definitelyNotInf :: Sort -> Bool
definitelyNotInf = \case
Inf -> False
Type{} -> True
Prop{} -> True
SizeUniv -> True
PiSort{} -> False
FunSort{} -> False
UnivSort{} -> False
MetaS{} -> False
DefS{} -> False
DummyS{} -> False
forceType :: Sort -> m Level
forceType (Type l) = return l
forceType s = do
l <- newLevelMeta
equalSort s (Type l)
return l
forceProp :: Sort -> m Level
forceProp (Prop l) = return l
forceProp s = do
l <- newLevelMeta
equalSort s (Prop l)
return l
impossibleSort s = do
reportS "impossible" 10
[ "equalSort: found dummy sort with description:"
, s
]
__IMPOSSIBLE__
catchInequalLevel m fail = m `catchError` \case
TypeError{} -> fail
err -> throwError err
-- -- This should probably represent face maps with a more precise type
-- toFaceMaps :: Term -> TCM [[(Int,Term)]]
-- toFaceMaps t = do
--   view <- intervalView'
--   iz <- primIZero
--   io <- primIOne
--   ineg <- (\ q t -> Def q [Apply $ Arg defaultArgInfo t]) <$> fromMaybe __IMPOSSIBLE__ <$> getPrimitiveName' "primINeg"
--   let f IZero = mzero
--       f IOne  = return []
--       f (IMin x y) = do xs <- (f . view . unArg) x; ys <- (f . view . unArg) y; return (xs ++ ys)
--       f (IMax x y) = msum $ map (f . view . unArg) [x,y]
--       f (INeg x)   = map (id -*- not) <$> (f . view . unArg) x
--       f (OTerm (Var i [])) = return [(i,True)]
--       f (OTerm _) = return [] -- what about metas? we should suspend? maybe no metas is a precondition?
--   let isConsistent xs = all (\ xs -> length xs == 1) . map nub . Map.elems $ xs -- optimize by not doing generate + filter
--   let as = map (map (id -*- head) . Map.toAscList) . filter isConsistent . map (Map.fromListWith (++) . map (id -*- (:[]))) $ (f (view t))
--   xs <- mapM (mapM (\ (i,b) -> (,) i <$> intervalUnview (if b then IOne else IZero))) as
--   return xs
-- | Run the continuation @k@ once for every face map obtained by
--   decomposing the interval term @t@ (via 'decomposeInterval'); each run
--   happens in a context restricted by the corresponding substitution.
--   Decompositions whose leftover terms are blocked are handed to @kb@
--   together with the variable assignment and the blocking meta.
forallFaceMaps :: MonadConversion m => Term -> (Map.Map Int Bool -> MetaId -> Term -> m a) -> (Substitution -> m a) -> m [a]
forallFaceMaps t kb k = do
  reportSDoc "conv.forall" 20 $
    fsep ["forallFaceMaps"
         , prettyTCM t
         ]
  as <- decomposeInterval t
  -- Translate the Bool polarity of a variable into an interval endpoint.
  boolToI <- do
    io <- primIOne
    iz <- primIZero
    return (\b -> if b then io else iz)
  forM as $ \ (ms,ts) -> do
    ifBlockeds ts (kb ms) $ \ _ _ -> do
      let xs = map (id -*- boolToI) $ Map.toAscList ms
      cxt <- getContext
      reportSDoc "conv.forall" 20 $
        fsep ["substContextN"
             , prettyTCM cxt
             , prettyTCM xs
             ]
      -- Substitute the endpoints for the assigned variables in the context,
      -- then re-introduce them as let-bindings so the continuation can
      -- still refer to them.
      (cxt',sigma) <- substContextN cxt xs
      resolved <- forM xs (\ (i,t) -> (,) <$> lookupBV i <*> return (applySubst sigma t))
      updateContext sigma (const cxt') $
        addBindings resolved $ do
          cl <- buildClosure ()
          tel <- getContextTelescope
          m <- currentModule
          sub <- getModuleParameterSub m
          reportS "conv.forall" 10
            [ replicate 10 '-'
            , show (envCurrentModule $ clEnv cl)
            , show (envLetBindings $ clEnv cl)
            , show tel -- (toTelescope $ envContext $ clEnv cl)
            , show sigma
            , show m
            , show sub
            ]
          k sigma
  where
    -- TODO Andrea: inefficient because we try to reduce the ts which we know are in whnf
    ifBlockeds ts blocked unblocked = do
      and <- getPrimitiveTerm "primIMin"
      io <- primIOne
      -- Conjoin the leftover terms and test the result for blockage.
      let t = foldr (\ x r -> and `apply` [argN x,argN r]) io ts
      ifBlocked t blocked unblocked
    -- Re-bind the substituted-away context entries as let-bindings.
    addBindings [] m = m
    addBindings ((Dom{domInfo = info,unDom = (nm,ty)},t):bs) m = addLetBinding info nm t ty (addBindings bs m)
-- | Substitute each @(deBruijn index, term)@ pair into the context in turn,
--   shifting the remaining indices after each removal and composing the
--   resulting substitutions.
substContextN :: MonadConversion m => Context -> [(Int,Term)] -> m (Context , Substitution)
substContextN c [] = return (c, idS)
substContextN c ((i,t):xs) = do
  (c', sigma) <- substContext i t c
  -- Indices above the removed entry shift down by one.
  (c'', sigma') <- substContextN c' (map (subtract 1 -*- applySubst sigma) xs)
  return (c'', applySubst sigma' sigma)
-- | Remove the context entry at deBruijn index @i@ and substitute @t@ for it,
--   returning the shortened context and the corresponding substitution.
-- assumes the term can be typed in the shorter telescope
-- the terms we get from toFaceMaps are closed.
substContext :: MonadConversion m => Int -> Term -> Context -> m (Context , Substitution)
substContext i t [] = __IMPOSSIBLE__
substContext i t (x:xs) | i == 0 = return $ (xs , singletonS 0 t)
substContext i t (x:xs) | i > 0 = do
  reportSDoc "conv.forall" 20 $
    fsep ["substContext"
         , text (show (i-1))
         , prettyTCM t
         , prettyTCM xs
         ]
  (c,sigma) <- substContext (i-1) t xs
  -- Apply the substitution to the entry we are keeping, then lift it
  -- over that entry.
  let e = applySubst sigma x
  return (e:c, liftS 1 sigma)
substContext i t (x:xs) = __IMPOSSIBLE__  -- i < 0 is impossible
-- | Compare two terms of the interval type by decomposing both sides
--   (via 'decomposeInterval'') and checking inclusion in both directions
--   with 'leqInterval'.
compareInterval :: MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareInterval cmp i t u = do
  reportSDoc "tc.conv.interval" 15 $
    sep [ "{ compareInterval" <+> prettyTCM t <+> "=" <+> prettyTCM u ]
  tb <- reduceB t
  ub <- reduceB u
  let t = ignoreBlocking tb
      u = ignoreBlocking ub
  it <- decomposeInterval' t
  iu <- decomposeInterval' u
  case () of
    _ | blockedOrMeta tb || blockedOrMeta ub -> do
      -- in case of metas we wouldn't be able to make progress by decomposition
      -- (because the constraints generated by decomposition are sufficient but not necessary).
      -- but we could still prune/solve some metas by comparing the terms as atoms.
      -- also if blocked we won't find the terms conclusively unequal(?) so compareAtom
      -- won't report type errors when we should accept.
      interval <- elInf $ primInterval
      compareAtom CmpEq (AsTermsOf interval) t u
    _ | otherwise -> do
      x <- leqInterval it iu
      y <- leqInterval iu it
      -- Only a fully canonical decomposition lets us reject conclusively.
      let final = isCanonical it && isCanonical iu
      if x && y then reportSDoc "tc.conv.interval" 15 $ "Ok! }" else
        if final then typeError $ UnequalTerms cmp t u (AsTermsOf i)
        else do
          reportSDoc "tc.conv.interval" 15 $ "Giving up! }"
          patternViolation
  where
    blockedOrMeta Blocked{} = True
    blockedOrMeta (NotBlocked _ (MetaV{})) = True
    blockedOrMeta _ = False
-- | One conjunct of a decomposed interval term: an assignment of interval
--   variables (the Bool presumably records polarity — TODO confirm against
--   'decomposeInterval'), plus a list of leftover (non-variable) terms.
type Conj = (Map.Map Int (Set.Set Bool),[Term])

-- | A decomposition is canonical when no conjunct has leftover terms.
isCanonical :: [Conj] -> Bool
isCanonical = all (null . snd)
-- | @leqInterval r q@ checks @r ≤ q@ in the I lattice.
-- ' (∨ r_i) ≤ (∨ q_j)  iff  ∀ i. ∃ j. r_i ≤ q_j
--
-- Each conjunct of @r@ must be below some conjunct of @q@.
leqInterval :: MonadConversion m => [Conj] -> [Conj] -> m Bool
leqInterval r q =
  and <$> forM r (\ r_i ->
    or <$> forM q (\ q_j -> leqConj r_i q_j))  -- TODO shortcut
-- | @leqConj r q@ checks @r ≤ q@ in the I lattice, when @r@ and @q@ are conjunctions.
-- ' (∧ r_i) ≤ (∧ q_j) iff
-- ' (∧ r_i) ∧ (∧ q_j) = (∧ r_i) iff
-- ' {r_i | i} ∪ {q_j | j} = {r_i | i} iff
-- ' {q_j | j} ⊆ {r_i | i}
leqConj :: MonadConversion m => Conj -> Conj -> m Bool
leqConj (rs,rst) (qs,qst) = do
  -- The variable assignments of @q@ must be a subset of those of @r@ ...
  case toSet qs `Set.isSubsetOf` toSet rs of
    False -> return False
    True  -> do
      interval <- elInf $ fromMaybe __IMPOSSIBLE__ <$> getBuiltin' builtinInterval
      -- we don't want to generate new constraints here because
      -- 1) in some situations the same constraint would get generated twice.
      -- 2) unless things are completely accepted we are going to
      --    throw patternViolation in compareInterval.
      let eqT t u = tryConversion (compareAtom CmpEq (AsTermsOf interval) t u)
      -- ... and each leftover term of @q@ must be convertible to some
      -- leftover term of @r@.
      let listSubset ts us = and <$> forM ts (\ t ->
            or <$> forM us (\ u -> eqT t u))  -- TODO shortcut
      listSubset qst rst
  where
    -- Flatten the assignment map into a set of (variable, polarity) pairs.
    toSet m = Set.fromList [ (i,b) | (i,bs) <- Map.toList m, b <- Set.toList bs ]
-- | @equalTermOnFace φ A u v@ checks @Γ, φ ⊢ u = v : A@.
equalTermOnFace :: MonadConversion m => Term -> Type -> Term -> Term -> m ()
equalTermOnFace = compareTermOnFace CmpEq
-- | Compare two terms under the face constraint @φ@, using 'compareTerm'
--   on each face map.
compareTermOnFace :: MonadConversion m => Comparison -> Term -> Type -> Term -> Term -> m ()
compareTermOnFace = compareTermOnFace' compareTerm
-- | Worker for 'compareTermOnFace', parametrized by the comparison @k@ to
--   run under each face map of @phi@.  Blocked face maps are postponed as
--   a 'ValueCmpOnFace' constraint with the face restricted by the already
--   decided variable assignment.
compareTermOnFace' :: MonadConversion m => (Comparison -> Type -> Term -> Term -> m ()) -> Comparison -> Term -> Type -> Term -> Term -> m ()
compareTermOnFace' k cmp phi ty u v = do
  phi <- reduce phi
  _ <- forallFaceMaps phi postponed
         $ \ alpha -> k cmp (applySubst alpha ty) (applySubst alpha u) (applySubst alpha v)
  return ()
  where
    -- Rebuild the (restricted) face from the assignment @ms@ and the
    -- leftover term @psi@, and postpone as a constraint.
    postponed ms i psi = do
      phi <- runNamesT [] $ do
        imin <- cl $ getPrimitiveTerm "primIMin"
        ineg <- cl $ getPrimitiveTerm "primINeg"
        psi <- open psi
        let phi = foldr (\ (i,b) r -> do i <- open (var i); pure imin <@> (if b then i else pure ineg <@> i) <@> r)
                    psi (Map.toList ms) -- TODO Andrea: make a view?
        phi
      addConstraint (ValueCmpOnFace cmp phi ty u v)
---------------------------------------------------------------------------
-- * Definitions
---------------------------------------------------------------------------
-- | Are both names absurd lambdas?
--   Besides the name check, we verify that both definitions are functions
--   with a single bodyless clause.
bothAbsurd :: MonadConversion m => QName -> QName -> m Bool
bothAbsurd f f'
  | not (isAbsurdLambdaName f) || not (isAbsurdLambdaName f') = return False
  | otherwise = do
      -- Double check we are really dealing with absurd lambdas:
      -- their functions should not have bodies.
      defs <- mapM (fmap theDef . getConstInfo) [f, f']
      return $ all bodylessFunction defs
  where
    bodylessFunction Function{ funClauses = [Clause{ clauseBody = Nothing }] } = True
    bodylessFunction _ = False
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/Agda-2.6.1/src/full/Agda/TypeChecking/Conversion.hs | haskell | # SOURCE #
| Try whether a computation runs without errors or new constraints
(may create new metas, though).
Restores state upon failure.
| Try whether a computation runs without errors or new constraints
(may create new metas, though).
Return 'Just' the result upon success.
Return 'Nothing' and restore state upon failure.
| Check if to lists of arguments are the same (and all variables).
Precondition: the lists have the same length.
arguments which are different and can be pruned.
ignore irrelevant args
do not prune
prune different vars
Comparing in irrelevant context always succeeds.
However, we might want to dig for solutions of irrelevant metas.
To this end, we can just ignore errors during conversion checking.
| Ignore errors in irrelevant context.
| Type directed equality on values.
| Type directed equality on terms or types.
Check syntactic equality. This actually saves us quite a bit of work.
OLD CODE, traverses the *full* terms u v at each step, even if they
(u, v) <- instantiateFull (u, v)
let equal = u == v
against metas by assignment.
It seems to assume we are never comparing
else
do not short circuit size comparison!
The reverse direction. Bad name, I know.
rethrow errors.
| Try to assign meta. If meta is projected, try to eta-expand
and run conversion check again.
nothing happened, give up
isNeutral Blocked{} = False
not neutral if it is blocked (there can be missing projections
to trigger a reduction.
a def by copattern can reduce if projected
if we are dealing with a singleton record,
we can succeed immediately
No subtyping on record terms
equality at function type (accounts for eta)
@tel1 \`cmp\` tel2@ and complains that @t2 \`cmp\` t1@ failed if
not.
@(Dom i1 a1)
@(Dom i2 a2)
switch t2 and t1 because of contravariance!
| Compute the head type of an elimination. For projection-like functions
this requires inferring the type of the principal argument.
Infer its type.
getDefType wants the argument type reduced.
a meta-variable, e.g. in interactive development.
In this case, we postpone.
| Syntax directed equality on atomic values
constructorForm changes literal to constructors
only needed if the other side is not a literal
the comparison could be solved by eta-expansion so we
cannot fail hard
The reverse direction. Bad name, I know.
all relevant arguments are variables
kills is a list with 'True' for each different var
not all relevant arguments are variables
Check definitional equality on meta-variables
(same as for blocked terms)
work, try the low priority one.
one side a meta, the other an unblocked term
, 2013 - 10 - 20 put projection - like function
into the spine , to make work .
-- 'False' means: leave (Def f []) unchanged even for
-- proj-like funs.
m <- elimView False m
n <- elimView False n
Polarity cannot be communicated properly if projection-like
functions are post-fix.
Variables are invariant in their arguments
The case of definition application:
The polarity vector of projection-like functions
does not include the parameters.
Due to eta-expansion, these constructors are fully applied.
Get the type of the constructor instantiated to the datatype parameters.
Constructors are covariant in their arguments
returns True in case we handled the comparison already.
comparing the types is most probably wasteful,
since b and b' should be neutral terms, but it's a
precondition for the compareAtom call to make
sense.
comparing the types is most probably wasteful,
since b and b' should be neutral terms, but it's a
precondition for the compareAtom call to make
sense.
__IMPOSSIBLE__
we might get some unreduced types here.
to solve left-over constraints.
Thus, instead of crashing, just give up gracefully.
| Check whether @a1 `cmp` a2@ and continue in context extended by @a1@.
^ @cmp@ The comparison direction
^ @a2@ The other domain.
^ @b1@ The smaller codomain.
^ @b2@ The bigger codomain.
^ Continuation if mismatch in 'Hiding'.
^ Continuation if mismatch in 'Relevance'.
^ Continuation if mismatch in 'Quantity'.
^ Continuation if mismatch in 'Cohesion'.
^ Continuation if comparison is successful.
take "most irrelevant"
We only need to require a1 == a2 if b2 is dependent
If it's non-dependent it doesn't matter what we add to the context.
blocked any more by getting stuck on domains.
Only the domain type in context will be blocked.
terms in the telescope. More precisely:
@@
-------------------------------------------------------------
u us = v vs : (x : A) Δ
@@
The simplest case of anti-unification is to return a fresh metavariable
(created by blockTermOnProblem), but if there's shared structure between
This is really a crutch that lets us get away with things that otherwise
would require heterogenous conversion checking. See for instance issue
It seems that nothing guarantees here that the constructors are fully
applied!? Thus, @a@ could be a function type and we need the robust
thus, we would not see clearly if we used @getFullyAppliedConType@ instead.)
then
else
can fail for projection like
trigger maybeGiveUp in antiUnify
| @compareElims pols a v els1 els2@ performs type-directed equality on eliminator spines.
@t@ is the type of the head @v@.
is the correct type here.
could be x.p =?= x for projection p
TODO Andrea: do blocking
We allow for functions (i : I) -> ... to also be heads of a IApply,
because @etaContract@ can produce such terms
Level-polymorphism (x : Level) -> ... does not count as dependency here
It is a trade-off between the administrative effort of
creating a blocking and traversing a term for free variables.
Apparently, it is believed that checking free vars is cheaper.
if comparison got stuck and function type is dependent, block arg
continue, possibly with blocked instantiation
any left over constraints of arg are associated to the comparison
in case of disabled reductions (due to failing termination check)
we might get stuck, so do not crash, but fail gently.
__IMPOSSIBLE__
case: f == f' are projections
fails only if f is proj.like but parameters cannot be retrieved
The arguments following the principal argument of a projection
are invariant. (At least as long as we have no explicit polarity
annotations.)
However, we can dig for solutions of irrelevant metas in the
terms we compare.
(Certainly not the systematic solution, that'd be proof search...)
Mining for solutions for irrelevant metas is not definite.
Thus, in case of error, leave meta unsolved.
the value of irrelevant or unused meta does not matter
| Type-directed equality on argument lists
-------------------------------------------------------------------------
* Types
-------------------------------------------------------------------------
| Equality on Types
with maybe extra hidden applications or hidden abstractions.
In principle, this function can host coercive subtyping, but
currently it only tries to fix problems with hidden function types.
v <$ do workOnTypes $ leqType t1 t2
take off hidden/instance domains from t1 and t2
the crude solution would be
however, that may introduce unneccessary many function types
If n > 0 and b2 is not blocked, it is safe to
insert n many hidden args
| Account for situations like @k : (Size< j) <= (Size< k + 1)@
Actually, the semantics is
@(Size<= k) ∩ (Size< j) ⊆ rhs@
which gives a disjunctive constraint. Mmmh, looks like stuff
For now, we do a cheap heuristics.
If t1 is a meta and t2 a type like Size< v2, we need to make sure we do not miss
the constraint v < v2!
If v is not a size suitable for the solver, like a neutral term,
we can only rely on the type.
@t2 = Size< v2@
Issue 1203: For now, just treat v < v2 as suc v <= v2
TODO: Need proper < comparison
-------------------------------------------------------------------------
* Sorts and levels
-------------------------------------------------------------------------
unrelated to the other universes.
The most basic rule: @Set l =< Set l'@ iff @l =< l'@
Likewise for @Prop@
@Prop l@ is below @Set l@
@SizeUniv@ and @Prop0@ are bottom sorts.
This shouldn't be necessary
more metas, so we postpone.
DefS are postulated sorts, so they do not reduce.
We normalize both sides to make the syntactic equality check (==) stronger.
See case for `same term` below.
If we have to postpone a constraint, then its simplified form!
same term
0 ≤ any
any ≤ 0
closed ≤ closed
closed ≤ b
as ≤ neutral/closed
⊔ as ≤ single
reduce constants
remove subsumed
as ≤ _l x₁ .. xₙ ⊔ bs
We can solve _l := λ x₁ .. xₙ -> as ⊔ (_l' x₁ .. xₙ)
(where _l' is a new metavariable)
meta or a generalizable meta
Thus, it is invalid.
See test/Succeed/LevelMetaLeqNeutralLevel.agda.
-- [a] ≤ [neutral]
([a@(Plus n _)], [b@(Plus m NeutralLevel{})])
, 2014 - 04 - 07 : This call to equalLevel is ok even if we removed
subsumed terms from the lhs .
anything else
| Precondition: levels are 'normalise'd.
max a b == max a c does not imply b == c
to the maximum
equal levels
closed == closed
closed == neutral
closed == b
0 == a ⊔ b
meta == any
there is only a potential choice when k == l
a' ⊔ b == b
a == b' ⊔ a
neutral/closed == neutral/closed
Should not we postpone there as well? Yes!
more cases?
fallback: check equality as atoms
Make sure to give a sensible error message
one side is a meta sort: try to instantiate
In case both sides are meta sorts, instantiate the
bigger (i.e. more recent) one.
diagonal cases for rigid sorts
type - in - type is enabled , Setω is equal to any Set ℓ ( see # 3439 )
equating @FunSort a b@ to another sort
equating @UnivSort s@ to another sort
postulated sorts can only be equal if they have the same head
any other combinations of sorts are not equal
fall back to syntactic equality check, postpone if it fails
Precondition: @s1@ and @univSort s2@ are already reduced.
@Set l1@ is the successor sort of either @Set l2@ or
@Prop l2@ where @l1 == lsuc l2@.
@s2@ is definitely not @Inf@ or @SizeUniv@
If @Prop@ is not used, then @s2@ must be of the form
@Set l2@
Otherwise we postpone
@Setω@ is only a successor sort if --type-in-type or
--omega-in-omega is enabled.
@Prop l@ and @SizeUniv@ are not successor sorts
Anything else: postpone
@Setω@. Hence, if @s@ is definitely not @Setω@, then @b@
cannot be dependent.
We force @b@ to be non-dependent by unifying it with
Otherwise: postpone
Precondition: @s@ and @funSort s1 s2@ are already reduced
If @Setω == funSort s1 s2@, then either @s1@ or @s2@ must
be @Setω@.
If @Set l == funSort s1 s2@, then @s2@ must be of the
form @Set l2@. @s1@ can be one of @Set l1@, @Prop l1@, or
@SizeUniv@.
We must have @l2 =< l@, this might help us to solve
more constraints (in particular when @l == 0@).
sizedTypesEnabled ||
If the work we did makes the @funSort@ compute,
continue working.
Otherwise: postpone
If @Prop l == funSort s1 s2@, then @s2@ must be of the
form @Prop l2@, and @s1@ can be one of @Set l1@, Prop
l1@, or @SizeUniv@.
If the work we did makes the @funSort@ compute,
continue working.
Otherwise: postpone
We have @SizeUniv == funSort s1 s2@ iff @s2 == SizeUniv@
Anything else: postpone
check if the given sort @s0@ is a (closed) bottom sort
-- This should probably represent face maps with a more precise type
toFaceMaps :: Term -> TCM [[(Int,Term)]]
toFaceMaps t = do
view <- intervalView'
iz <- primIZero
io <- primIOne
let f IZero = mzero
f IOne = return []
f (IMin x y) = do xs <- (f . view . unArg) x; ys <- (f . view . unArg) y; return (xs ++ ys)
f (INeg x) = map (id -*- not) <$> (f . view . unArg) x
f (OTerm _) = return [] -- what about metas? we should suspend? maybe no metas is a precondition?
optimize by not doing generate + filter
as = map (map (id -*- head) . Map.toAscList) . filter isConsistent . map (Map.fromListWith (++) . map (id -*- (:[]))) $ (f (view t))
return xs
(toTelescope $ envContext $ clEnv cl)
TODO Andrea: inefficient because we try to reduce the ts which we know are in whnf
assumes the term can be typed in the shorter telescope
the terms we get from toFaceMaps are closed.
(because the constraints generated by decomposition are sufficient but not necessary).
but we could still prune/solve some metas by comparing the terms as atoms.
also if blocked we won't find the terms conclusively unequal(?) so compareAtom
won't report type errors when we should accept.
| leqInterval r q = r ≤ q in the I lattice.
| leqConj r q = r ≤ q in the I lattice, when r and q are conjuctions.
' (∧ r_i) ≤ (∧ q_j) iff
' (∧ r_i) ∧ (∧ q_j) = (∧ r_i) iff
' {q_j | j} ⊆ {r_i | i}
we don't want to generate new constraints here because
throw patternViolation in compareInterval.
TODO Andrea: make a view?
-------------------------------------------------------------------------
* Definitions
-------------------------------------------------------------------------
Their functions should not have bodies. | # LANGUAGE NondecreasingIndentation #
module Agda.TypeChecking.Conversion where
import Control.Arrow (first, second)
import Control.Monad
import Control.Monad.Fail (MonadFail)
import Data.Function
import qualified Data.List as List
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.IntSet as IntSet
import Agda.Syntax.Abstract.Views (isSet)
import Agda.Syntax.Common
import Agda.Syntax.Internal
import Agda.Syntax.Internal.MetaVars
import Agda.Syntax.Translation.InternalToAbstract (reify)
import Agda.TypeChecking.Monad
import Agda.TypeChecking.Monad.Builtin
import Agda.TypeChecking.MetaVars
import Agda.TypeChecking.MetaVars.Occurs (killArgs,PruneResult(..),rigidVarsNotContainedIn)
import Agda.TypeChecking.Names
import Agda.TypeChecking.Reduce
import Agda.TypeChecking.Substitute
import qualified Agda.TypeChecking.SyntacticEquality as SynEq
import Agda.TypeChecking.Telescope
import Agda.TypeChecking.Constraints
import Agda.TypeChecking.Conversion.Pure (pureCompareAs)
import Agda.TypeChecking.Forcing (isForced, nextIsForced)
import Agda.TypeChecking.Free
import Agda.TypeChecking.Datatypes (getConType, getFullyAppliedConType)
import Agda.TypeChecking.Records
import Agda.TypeChecking.Pretty
import Agda.TypeChecking.Injectivity
import Agda.TypeChecking.Polarity
import Agda.TypeChecking.SizedTypes
import Agda.TypeChecking.Level
import Agda.TypeChecking.Implicit (implicitArgs)
import Agda.TypeChecking.Irrelevance
import Agda.TypeChecking.Primitive
import Agda.TypeChecking.Warnings (MonadWarning)
import Agda.Interaction.Options
import Agda.Utils.Except ( MonadError(catchError, throwError) )
import Agda.Utils.Functor
import Agda.Utils.Monad
import Agda.Utils.Maybe
import Agda.Utils.Permutation
import Agda.Utils.Size
import Agda.Utils.Tuple
import Agda.Utils.WithDefault
import Agda.Utils.Impossible
-- | The collection of capabilities a monad must provide in order to run
--   conversion checking.
type MonadConversion m =
  ( MonadReduce m
  , MonadAddContext m
  , MonadConstraint m
  , MonadMetaSolver m
  , MonadError TCErr m
  , MonadWarning m
  , MonadDebug m
  , MonadStatistics m
  , MonadFresh ProblemId m
  , MonadFresh Int m
  , HasBuiltins m
  , HasConstInfo m
  , HasOptions m
  , MonadFail m
  )
-- | Try whether a computation runs without errors or new constraints
--   (may create new metas, though).
--   Restores state upon failure.
tryConversion
  :: (MonadConstraint m, MonadWarning m, MonadError TCErr m, MonadFresh ProblemId m)
  => m () -> m Bool
tryConversion = isJust <.> tryConversion'
-- | Try whether a computation runs without errors or new constraints
--   (may create new metas, though).
--   Return 'Just' the result upon success.
--   Return 'Nothing' and restore state upon failure.
tryConversion'
  :: (MonadConstraint m, MonadWarning m, MonadError TCErr m, MonadFresh ProblemId m)
  => m a -> m (Maybe a)
tryConversion' m = tryMaybe $ noConstraints m
-- | Check if two lists of eliminations consist of pairwise-equal variables.
--   Pairs beyond the shorter list are ignored (zip semantics).
sameVars :: Elims -> Elims -> Bool
sameVars xs ys = and (zipWith bothSameVar xs ys)
  where
    bothSameVar (Apply (Arg _ (Var i []))) (Apply (Arg _ (Var j []))) = i == j
    bothSameVar _ _ = False
-- | @intersectVars us vs@ checks whether all relevant elements in @us@ and @vs@
--   are variables, and if yes, returns a prune list which says @True@ for
--   arguments which are different and can be pruned.
intersectVars :: Elims -> Elims -> Maybe [Bool]
intersectVars = zipWithM areVars where
    -- ignore irrelevant args
    areVars (Apply u) v
      | isIrrelevant u = Just False -- do not prune
    areVars (Apply (Arg _ (Var n []))) (Apply (Arg _ (Var m []))) = Just $ n /= m -- prune different vars
    areVars _ _ = Nothing
-- | Equality of terms at a given type.
equalTerm :: MonadConversion m => Type -> Term -> Term -> m ()
equalTerm = compareTerm CmpEq

-- | Syntax-directed (atomic) equality.
equalAtom :: MonadConversion m => CompareAs -> Term -> Term -> m ()
equalAtom = compareAtom CmpEq

-- | Equality of types.
equalType :: MonadConversion m => Type -> Type -> m ()
equalType = compareType CmpEq
{- convError ::  MonadTCM tcm => TypeError -> tcm a -}

-- | Ignore errors in irrelevant context.
convError :: TypeError -> TCM ()
convError err = ifM ((==) Irrelevant <$> asksTC getRelevance) (return ()) $ typeError err
-- | Type directed equality on values.
compareTerm :: forall m. MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareTerm cmp a u v = compareAs cmp (AsTermsOf a) u v
-- | Type directed equality on terms or types.
--   If one term is a meta, try to instantiate right away.
--   This avoids unnecessary unfolding.
--   Andreas, 2012-02-14: This is UNSOUND for subtyping!
compareAs :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
compareAs cmp a u v = do
  reportSDoc "tc.conv.term" 10 $ sep $
    [ "compareTerm"
    , nest 2 $ prettyTCM u <+> prettyTCM cmp <+> prettyTCM v
    , nest 2 $ prettyTCM a
    ]
  -- Check syntactic equality first; this saves quite a bit of work.
  ((u, v), equal) <- SynEq.checkSyntacticEquality u v
  if equal then verboseS "profile.sharing" 20 $ tick "equal terms" else do
    verboseS "profile.sharing" 20 $ tick "unequal terms"
    reportSDoc "tc.conv.term" 15 $ sep $
      [ "compareTerm (not syntactically equal)"
      , nest 2 $ prettyTCM u <+> prettyTCM cmp <+> prettyTCM v
      , nest 2 $ prettyTCM a
      ]
    -- If we are at a size type, we cannot short-cut comparison
    -- against metas by assignment.
    -- Andreas, 2014-04-12: this looks incomplete.
    -- It seems to assume we are never comparing
    -- at function types into Size.
    let fallback = compareAs' cmp a u v
        unlessSubtyping :: m () -> m ()
        unlessSubtyping cont =
          if cmp == CmpEq then cont else do
            -- Andreas, 2014-04-12 do not short cut if type is blocked.
            ifBlocked a (\ _ _ -> fallback) {-else-} $ \ _ a -> do
              -- do not short circuit size comparison!
              caseMaybeM (isSizeType a) cont (\ _ -> fallback)

        dir = fromCmp cmp
        rid = flipCmp dir     -- The reverse direction.  Bad name, I know.
    case (u, v) of
      (MetaV x us, MetaV y vs)
        | x /= y    -> unlessSubtyping $ solve1 `orelse` solve2 `orelse` fallback
        | otherwise -> fallback
        where
          -- Prefer solving the newer (higher-numbered) meta first.
          (solve1, solve2) | x > y     = (assign dir x us v, assign rid y vs u)
                           | otherwise = (assign rid y vs u, assign dir x us v)
      (MetaV x us, _) -> unlessSubtyping $ assign dir x us v `orelse` fallback
      (_, MetaV y vs) -> unlessSubtyping $ assign rid y vs u `orelse` fallback
      _               -> fallback
  where
    -- Attempt the meta-assignment short-cut @MetaV x es := v@.
    assign :: CompareDirection -> MetaId -> Elims -> Term -> m ()
    assign dir x es v = do
      -- Andreas, 2013-10-19 can only solve if no projections
      reportSDoc "tc.conv.term.shortcut" 20 $ sep
        [ "attempting shortcut"
        , nest 2 $ prettyTCM (MetaV x es) <+> ":=" <+> prettyTCM v
        ]
      whenM (isInstantiatedMeta x) patternViolation
      assignE dir x es v a $ compareAsDir dir a
      reportSDoc "tc.conv.term.shortcut" 50 $
        "shortcut successful" $$ nest 2 ("result:" <+> (pretty =<< instantiate (MetaV x es)))
    -- Catch any error and fall through to the alternative; we do not
    -- rethrow errors here.
    orelse :: m () -> m () -> m ()
    orelse m h = catchError m (\_ -> h)
-- | Try to assign meta.  If meta is projected, try to eta-expand
--   and run conversion check again.
assignE :: (MonadConversion m)
        => CompareDirection -> MetaId -> Elims -> Term -> CompareAs -> (Term -> Term -> m ()) -> m ()
assignE dir x es v a comp = assignWrapper dir x es v $ do
  case allApplyElims es of
    Just vs -> assignV dir x vs v a
    Nothing -> do
      reportSDoc "tc.conv.assign" 30 $ sep
        [ "assigning to projected meta "
        , prettyTCM x <+> sep (map prettyTCM es) <+> text (":" ++ show dir) <+> prettyTCM v
        ]
      etaExpandMeta [Records] x
      res <- isInstantiatedMeta' x
      case res of
        Just u -> do
          reportSDoc "tc.conv.assign" 30 $ sep
            [ "seems like eta expansion instantiated meta "
            , prettyTCM x <+> text (":" ++ show dir) <+> prettyTCM u
            ]
          let w = u `applyE` es
          comp w v
        Nothing -> do
          reportSLn "tc.conv.assign" 30 "eta expansion did not instantiate meta"
          patternViolation  -- nothing happened, give up
-- | Compare two terms under a 'CompareDirection' at the given 'CompareAs',
--   by converting the direction into a 'Comparison' (flipping arguments as
--   needed) and delegating to 'compareAs''.
compareAsDir :: MonadConversion m => CompareDirection -> CompareAs -> Term -> Term -> m ()
compareAsDir dir a u v = dirToCmp (\ cmp -> compareAs' cmp a) dir u v
-- | Dispatch a comparison on the kind of thing being compared:
--   terms at a type, size expressions, or types themselves.
compareAs' :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
compareAs' cmp (AsTermsOf a) m n = compareTerm' cmp a m n
compareAs' cmp AsSizes       m n = compareSizes cmp m n
compareAs' cmp AsTypes       m n = compareAtom cmp AsTypes m n
-- | Type-directed equality/subtyping on terms.  Reduces the type and then
--   dispatches: irrelevant comparison in @Prop@, size comparison for size
--   types, level comparison for @Level@, eta rules for function, path and
--   eta-record types, and 'compareAtom' otherwise.
compareTerm' :: forall m. MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareTerm' cmp a m n =
  verboseBracket "tc.conv.term" 20 "compareTerm" $ do
  a' <- reduce a
  (catchConstraint (ValueCmp cmp (AsTermsOf a') m n) :: m () -> m ()) $ do
    reportSDoc "tc.conv.term" 30 $ fsep
      [ "compareTerm", prettyTCM m, prettyTCM cmp, prettyTCM n, ":", prettyTCM a' ]
    propIrr <- isPropEnabled
    isSize  <- isJust <$> isSizeType a'
    s       <- reduce $ getSort a'
    mlvl    <- getBuiltin' builtinLevel
    reportSDoc "tc.conv.level" 60 $ nest 2 $ sep
      [ "a' =" <+> pretty a'
      , "mlvl =" <+> pretty mlvl
      , text $ "(Just (unEl a') == mlvl) = " ++ show (Just (unEl a') == mlvl)
      ]
    case s of
      Prop{} | propIrr -> compareIrrelevant a' m n
      _ | isSize       -> compareSizes cmp m n
      _ -> case unEl a' of
        a | Just a == mlvl -> do
          a <- levelView m
          b <- levelView n
          equalLevel a b
        a@Pi{}  -> equalFun s a m n
        Lam _ _ -> __IMPOSSIBLE__
        Def r es -> do
          isrec <- isEtaRecord r
          if isrec
            then do
              sig <- getSignature
              let ps = fromMaybe __IMPOSSIBLE__ $ allApplyElims es
              -- Andreas, 2010-10-11: allowing neutrals to be blocked things does not seem
              -- to change Agda's behavior
              let isNeutral (NotBlocked _ Con{}) = return False
                  -- Andreas, 2013-09-18 / 2015-06-29: a Def by copatterns is
                  -- not neutral if it can still reduce once projected.
                  -- Andreas, 2014-12-06 optimize this using r !!
                  -- NOTE(review): a clause for @NotBlocked _ (Def ...)@ seems to
                  -- have been dropped in this copy — confirm against upstream.
                  isNeutral _ = return True
                  isMeta (NotBlocked _ MetaV{}) = True
                  isMeta _                      = False
              reportSDoc "tc.conv.term" 30 $ prettyTCM a <+> "is eta record type"
              m <- reduceB m
              mNeutral <- isNeutral m
              n <- reduceB n
              nNeutral <- isNeutral n
              case (m, n) of
                _ | isMeta m || isMeta n ->
                    compareAtom cmp (AsTermsOf a') (ignoreBlocking m) (ignoreBlocking n)
                _ | mNeutral && nNeutral -> do
                    -- Andreas 2011-03-23: (fixing issue 396)
                    -- if we are dealing with a singleton record, we succeed at once
                    isSing <- isSingletonRecordModuloRelevance r ps
                    case isSing of
                      Right True -> return ()
                      -- do not eta-expand if comparing two neutrals
                      _ -> compareAtom cmp (AsTermsOf a') (ignoreBlocking m) (ignoreBlocking n)
                _ -> do
                  (tel, m') <- etaExpandRecord r ps $ ignoreBlocking m
                  (_  , n') <- etaExpandRecord r ps $ ignoreBlocking n
                  c <- getRecordConstructor r
                  -- Record constructors are covariant (see test/succeed/CovariantConstructors).
                  compareArgs (repeat $ polFromCmp cmp) [] (telePi_ tel __DUMMY_TYPE__) (Con c ConOSystem []) m' n'
            else (do pathview <- pathView a'
                     equalPath pathview a' m n)
        _ -> compareAtom cmp (AsTermsOf a') m n
  where
    -- Eta for function types; a @domFinite@ (partial-element) domain gets
    -- special treatment via 'compareTermOnFace'.
    equalFun :: (MonadConversion m) => Sort -> Term -> Term -> Term -> m ()
    equalFun s a@(Pi dom b) m n | domFinite dom = do
        mp <- fmap getPrimName <$> getBuiltin' builtinIsOne
        case unEl $ unDom dom of
          Def q [Apply phi]
            | Just q == mp -> compareTermOnFace cmp (unArg phi) (El s (Pi (dom {domFinite = False}) b)) m n
          _ -> equalFun s (Pi (dom{domFinite = False}) b) m n
    equalFun _ (Pi dom@Dom{domInfo = info} b) m n | not $ domFinite dom = do
        let name = suggests [ Suggestion b , Suggestion m , Suggestion n ]
        addContext (name, dom) $ compareTerm cmp (absBody b) m' n'
      where
        (m',n') = raise 1 (m,n) `apply` [Arg info $ var 0]
    equalFun _ _ _ _ = __IMPOSSIBLE__

    -- Eta for path types: compare under a fresh interval variable.
    equalPath :: (MonadConversion m) => PathView -> Type -> Term -> Term -> m ()
    equalPath (PathType s _ l a x y) _ m n = do
        let name = "i" :: String
        interval <- el primInterval
        let (m',n') = raise 1 (m, n) `applyE` [IApply (raise 1 $ unArg x) (raise 1 $ unArg y) (var 0)]
        addContext (name, defaultDom interval) $ compareTerm cmp (El (raise 1 s) $ (raise 1 $ unArg a) `apply` [argN $ var 0]) m' n'
    equalPath OType{} a' m n = cmpDef a' m n

    -- Special eta rules for cubical primitives (Glue, hcomp on universes, Sub).
    cmpDef a'@(El s ty) m n = do
      mI     <- getBuiltinName'   builtinInterval
      mIsOne <- getBuiltinName'   builtinIsOne
      mGlue  <- getPrimitiveName' builtinGlue
      mHComp <- getPrimitiveName' builtinHComp
      mSub   <- getBuiltinName'   builtinSub
      case ty of
        Def q es | Just q == mIsOne -> return ()
        Def q es | Just q == mGlue, Just args@(l:_:a:phi:_) <- allApplyElims es -> do
          ty <- el' (pure $ unArg l) (pure $ unArg a)
          unglue <- prim_unglue
          let mkUnglue m = apply unglue $ map (setHiding Hidden) args ++ [argN m]
          reportSDoc "conv.glue" 20 $ prettyTCM (ty,mkUnglue m,mkUnglue n)
          compareTermOnFace cmp (unArg phi) ty m n
          compareTerm cmp ty (mkUnglue m) (mkUnglue n)
        Def q es | Just q == mHComp, Just (sl:s:args@[phi,u,u0]) <- allApplyElims es
                 , Sort (Type lvl) <- unArg s -> do
          let l = Level lvl
          ty <- el' (pure $ l) (pure $ unArg u0)
          unglueU <- prim_unglueU
          subIn <- primSubIn
          let bA = subIn `apply` [sl,s,phi,u0]
          let mkUnglue m = apply unglueU $ [argH l] ++ map (setHiding Hidden) [phi,u] ++ [argH bA,argN m]
          reportSDoc "conv.hcompU" 20 $ prettyTCM (ty,mkUnglue m,mkUnglue n)
          compareTermOnFace cmp (unArg phi) ty m n
          compareTerm cmp ty (mkUnglue m) (mkUnglue n)
        Def q es | Just q == mSub, Just args@(l:a:_) <- allApplyElims es -> do
          ty <- el' (pure $ unArg l) (pure $ unArg a)
          out <- primSubOut
          let mkOut m = apply out $ map (setHiding Hidden) args ++ [argN m]
          compareTerm cmp ty (mkOut m) (mkOut n)
        Def q [] | Just q == mI -> compareInterval cmp a' m n
        _ -> compareAtom cmp (AsTermsOf a') m n
-- | @compareTel t1 t2 cmp tel1 tel2@ checks whether pointwise
--   @tel1 \`cmp\` tel2@; on failure it reports @t2 \`cmp\` t1@ unequal
--   (the error is phrased in terms of the enclosing types).
compareTel :: MonadConversion m => Type -> Type ->
  Comparison -> Telescope -> Telescope -> m ()
compareTel t1 t2 cmp tel1 tel2 =
  verboseBracket "tc.conv.tel" 20 "compareTel" $
  catchConstraint (TelCmp t1 t2 cmp tel1 tel2) $ case (tel1, tel2) of
    (EmptyTel, EmptyTel) -> return ()
    (EmptyTel, _)        -> bad
    (_, EmptyTel)        -> bad
    -- NOTE(review): this ExtendTel pattern line appears dropped in this copy;
    -- restored so the compareDom call has its scrutinees bound.
    (ExtendTel dom1 tel1, ExtendTel dom2 tel2) ->
      compareDom cmp dom1 dom2 tel1 tel2 bad bad bad bad $
        compareTel t1 t2 cmp (absBody tel1) (absBody tel2)
  where
    -- Andreas, 2011-05-10 better report message about types
    bad = typeError $ UnequalTypes cmp t2 t1
-- | 'compareAtom' lifted to a 'CompareDirection' (arguments are flipped
--   by 'dirToCmp' when the direction demands it).
compareAtomDir :: MonadConversion m => CompareDirection -> CompareAs -> Term -> Term -> m ()
compareAtomDir dir a u v = dirToCmp (\ cmp -> compareAtom cmp a) dir u v
-- | Compute the type of the head @f@ of an elimination spine.  For a
--   projection-like @f@ this requires inferring the type of its first
--   (principal) argument, taken from either spine.
computeElimHeadType :: MonadConversion m => QName -> Elims -> Elims -> m Type
computeElimHeadType f es es' = do
  def <- getConstInfo f
  -- To compute the type @a@ of a projection-like @f@,
  -- we have to infer the type of its first argument.
  if projectionArgs (theDef def) <= 0 then return $ defType def else do
    -- Find a first argument to @f@.
    let arg = case (es, es') of
          (Apply arg : _, _) -> arg
          (_, Apply arg : _) -> arg
          _ -> __IMPOSSIBLE__
    reportSDoc "tc.conv.infer" 30 $
      "inferring type of internal arg: " <+> prettyTCM arg
    targ <- infer $ unArg arg
    reportSDoc "tc.conv.infer" 30 $
      "inferred type: " <+> prettyTCM targ
    -- Andreas, 2016-02-09, Issue 1825: The type of arg might be
    -- still unreduced or a meta; postpone (patternViolation) if the
    -- parameters cannot be retrieved.
    fromMaybeM patternViolation $ getDefType f =<< reduce targ
-- | Syntax-directed equality on atomic values: metas, variables, defined
--   symbols, constructors, sorts, literals and function types.
compareAtom :: forall m. MonadConversion m => Comparison -> CompareAs -> Term -> Term -> m ()
compareAtom cmp t m n =
  verboseBracket "tc.conv.atom" 20 "compareAtom" $
  -- if a PatternErr is thrown, rebuild constraint!
  (catchConstraint (ValueCmp cmp t m n) :: m () -> m ()) $ do
    reportSDoc "tc.conv.atom" 50 $
      "compareAtom" <+> fsep [ prettyTCM m <+> prettyTCM cmp
                             , prettyTCM n
                             , prettyTCM t
                             ]
    -- Andreas: what happens if I cut out the eta expansion here?
    -- Answer: Triggers issue 245, does not resolve 348
    (mb',nb') <- ifM (asksTC envCompareBlocked) ((notBlocked -*- notBlocked) <$> reduce (m,n)) $ do
      mb' <- etaExpandBlocked =<< reduceB m
      nb' <- etaExpandBlocked =<< reduceB n
      return (mb', nb')
    -- Convert literals to constructor form unless both sides are literals.
    (mb'', nb'') <- case (ignoreBlocking mb', ignoreBlocking nb') of
      (Lit _, Lit _) -> return (mb', nb')
      _ -> (,) <$> traverse constructorForm mb'
               <*> traverse constructorForm nb'
    mb <- traverse unLevel mb''
    nb <- traverse unLevel nb''
    cmpBlocked <- viewTC eCompareBlocked
    let m = ignoreBlocking mb
        n = ignoreBlocking nb
        postpone = addConstraint $ ValueCmp cmp t m n
        -- Jesper, 2019-05-14, Issue #3776: If the type is blocked we may
        -- not raise a type error yet; postpone instead.
        postponeIfBlockedAs :: CompareAs -> (Blocked CompareAs -> m ()) -> m ()
        postponeIfBlockedAs AsTypes f = f $ NotBlocked ReallyNotBlocked AsTypes
        postponeIfBlockedAs AsSizes f = f $ NotBlocked ReallyNotBlocked AsSizes
        postponeIfBlockedAs (AsTermsOf t) f = ifBlocked t
          (\m t -> (f $ Blocked m $ AsTermsOf t) `catchError` \case
            TypeError{} -> postpone
            err -> throwError err)
          (\nb t -> f $ NotBlocked nb $ AsTermsOf t)
        checkDefinitionalEquality = unlessM (pureCompareAs CmpEq t m n) postpone
        dir = fromCmp cmp
        -- NOTE(review): @rid@ is used below but its binding appears dropped
        -- in this copy; restored as the reverse direction.
        rid = flipCmp dir
        assign dir x es v = assignE dir x es v t $ compareAtomDir dir t
    reportSDoc "tc.conv.atom" 30 $
      "compareAtom" <+> fsep [ prettyTCM mb <+> prettyTCM cmp
                             , prettyTCM nb
                             , prettyTCM t
                             ]
    reportSDoc "tc.conv.atom" 80 $
      "compareAtom" <+> fsep [ (text . show) mb <+> prettyTCM cmp
                             , (text . show) nb
                             , ":" <+> (text . show) t ]
    case (mb, nb) of
      -- equate two metas x and y.  if y is the younger meta,
      -- try first y := x and then x := y
      (NotBlocked _ (MetaV x xArgs), NotBlocked _ (MetaV y yArgs))
          | x == y , cmpBlocked -> do
            a <- metaType x
            compareElims [] [] a (MetaV x []) xArgs yArgs
          | x == y ->
            case intersectVars xArgs yArgs of
              -- all relevant arguments are variables: prune differing ones
              Just kills -> do
                killResult <- killArgs kills x
                case killResult of
                  NothingToPrune   -> return ()
                  PrunedEverything -> return ()
                  PrunedNothing    -> postpone
                  PrunedSomething  -> postpone
              -- NOTE(review): the @Nothing@ branch appears dropped in this
              -- copy; restored to fall back to definitional equality.
              Nothing -> checkDefinitionalEquality
          | otherwise -> do
              [p1, p2] <- mapM getMetaPriority [x,y]
              -- First try the one with the highest priority.  If that doesn't
              -- work, try the other.
              let (solve1, solve2)
                    | (p1, x) > (p2, y) = (l1, r2)
                    | otherwise         = (r1, l2)
                    where l1 = assign dir x xArgs n
                          r1 = assign rid y yArgs m
                          -- Careful: the first attempt might prune the low
                          -- priority meta! (Issue #2978)
                          l2 = ifM (isInstantiatedMeta x) (compareAsDir dir t m n) l1
                          r2 = ifM (isInstantiatedMeta y) (compareAsDir rid t n m) r1
              catchPatternErr solve2 solve1
      (NotBlocked _ (MetaV x es), _) -> assign dir x es n
      (_, NotBlocked _ (MetaV x es)) -> assign rid x es m
      (Blocked{}, Blocked{}) -> checkDefinitionalEquality
      -- The blocked term goes first
      -- NOTE(review): the symmetric @(Blocked{}, _)@ clause appears dropped
      -- in this copy; restored.
      (Blocked{}, _) -> useInjectivity (fromCmp cmp) t m n
      (_, Blocked{}) -> useInjectivity (flipCmp $ fromCmp cmp) t n m
      _ -> postponeIfBlockedAs t $ \bt -> do
        -- Andreas, 2015-07-01, actually, don't put proj-like functions
        -- into the spine.
        case (m, n) of
          (Pi{}, Pi{}) -> equalFun m n
          (Sort s1, Sort s2) ->
            ifM (optCumulativity <$> pragmaOptions)
              (compareSort cmp s1 s2)
              (equalSort s1 s2)
          (Lit l1, Lit l2) | l1 == l2 -> return ()
          (Var i es, Var i' es') | i == i' -> do
            a <- typeOfBV i
            compareElims [] [] a (var i) es es'
          (Def f es, Def f' es') -> do
            -- 1. All absurd lambdas are equal.
            unlessM (bothAbsurd f f') $ do
              -- 2. If the heads are unequal, the only chance is subtyping between SIZE and SIZELT.
              if f /= f' then trySizeUniv cmp t m n f es f' es' else do
                -- 3. If the heads are equal:
                -- 3a. If there are no arguments, we are done.
                unless (null es && null es') $ do
                  -- 3b. If some cubical magic kicks in, we are done.
                  unlessM (compareEtaPrims f es es') $ do
                    -- 3c. Oh no, we actually have to work and compare the eliminations!
                    a <- computeElimHeadType f es es'
                    pol <- getPolarity' cmp f
                    compareElims pol [] a (Def f []) es es'
          (Con x ci xArgs, Con y _ yArgs)
              | x == y -> do
                  -- Get the constructor type at the datatype's parameters.
                  a' <- case t of
                    AsTermsOf a -> conType x a
                    AsSizes     -> __IMPOSSIBLE__
                    AsTypes     -> __IMPOSSIBLE__
                  forcedArgs <- getForcedArgs $ conName x
                  -- Constructors are covariant in their arguments
                  -- (see test/succeed/CovariantConstructors).
                  compareElims (repeat $ polFromCmp cmp) forcedArgs a' (Con x ci []) xArgs yArgs
          _ -> typeError $ UnequalTerms cmp m n $ ignoreBlocking bt
  where
    -- Returns True when a cubical eta rule handled the comparison already.
    compareEtaPrims :: MonadConversion m => QName -> Elims -> Elims -> m Bool
    compareEtaPrims q es es' = do
      munglue  <- getPrimitiveName' builtin_unglue
      munglueU <- getPrimitiveName' builtin_unglueU
      msubout  <- getPrimitiveName' builtinSubOut
      case () of
        _ | Just q == munglue  -> compareUnglueApp q es es'
        _ | Just q == munglueU -> compareUnglueUApp q es es'
        _ | Just q == msubout  -> compareSubApp q es es'
        _ -> return False
    compareSubApp q es es' = do
      let (as,bs) = splitAt 5 es; (as',bs') = splitAt 5 es'
      case (allApplyElims as, allApplyElims as') of
        (Just [a,bA,phi,u,x], Just [a',bA',phi',u',x']) -> do
          tSub <- primSub
          -- Andrea, 28-07-16: comparing the types first is a precondition
          -- for the compareAtom call to make sense.
          equalType (El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA,phi,u])
                    (El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA',phi',u'])
          compareAtom cmp (AsTermsOf $ El Inf $ apply tSub $ [a] ++ map (setHiding NotHidden) [bA,phi,u])
                          (unArg x) (unArg x')
          compareElims [] [] (El (tmSort (unArg a)) (unArg bA)) (Def q as) bs bs'
          return True
        _ -> return False
    compareUnglueApp q es es' = do
      let (as,bs) = splitAt 7 es; (as',bs') = splitAt 7 es'
      case (allApplyElims as, allApplyElims as') of
        (Just [la,lb,bA,phi,bT,e,b], Just [la',lb',bA',phi',bT',e',b']) -> do
          tGlue <- getPrimitiveTerm builtinGlue
          -- Andrea, 28-07-16: the type comparison below is left disabled:
          -- equalType (El (tmSort (unArg lb)) $ apply tGlue $ [la,lb] ++ map (setHiding NotHidden) [bA,phi,bT,e])
          --           (El (tmSort (unArg lb')) $ apply tGlue $ [la',lb'] ++ map (setHiding NotHidden) [bA',phi',bT',e'])
          compareAtom cmp (AsTermsOf $ El (tmSort (unArg lb)) $ apply tGlue $ [la,lb] ++ map (setHiding NotHidden) [bA,phi,bT,e])
                          (unArg b) (unArg b')
          compareElims [] [] (El (tmSort (unArg la)) (unArg bA)) (Def q as) bs bs'
          return True
        _ -> return False
    compareUnglueUApp :: MonadConversion m => QName -> Elims -> Elims -> m Bool
    compareUnglueUApp q es es' = do
      let (as,bs) = splitAt 5 es; (as',bs') = splitAt 5 es'
      case (allApplyElims as, allApplyElims as') of
        (Just [la,phi,bT,bAS,b], Just [la',phi',bT',bA',b']) -> do
          tHComp  <- primHComp
          tLSuc   <- primLevelSuc
          tSubOut <- primSubOut
          iz      <- primIZero
          let lsuc t = tLSuc `apply` [argN t]
              s      = tmSort $ unArg la
              sucla  = lsuc <$> la
          bA <- runNamesT [] $ do
            [la,phi,bT,bAS] <- mapM (open . unArg) [la,phi,bT,bAS]
            (pure tSubOut <#> (pure tLSuc <@> la) <#> (Sort . tmSort <$> la) <#> phi <#> (bT <@> primIZero) <@> bAS)
          compareAtom cmp (AsTermsOf $ El (tmSort . unArg $ sucla) $ apply tHComp $ [sucla, argH (Sort s), phi] ++ [argH (unArg bT), argH bA])
                          (unArg b) (unArg b')
          compareElims [] [] (El s bA) (Def q as) bs bs'
          return True
        _ -> return False
    -- Andreas, 2013-05-15 due to new postponement strategy, type can now be blocked
    conType c t = ifBlocked t (\ _ _ -> patternViolation) $ \ _ t -> do
      let impossible = do
            reportSDoc "impossible" 10 $
              "expected data/record type, found " <+> prettyTCM t
            reportSDoc "impossible" 70 $ nest 2 $ "raw =" <+> pretty t
            -- Andreas, 2013-10-20: in case termination checking fails we may
            -- get unreduced types here (issue 921, during the final attempt
            -- to solve left-over constraints); give up gracefully.
            patternViolation
      maybe impossible (return . snd) =<< getFullyAppliedConType c t
    equalFun t1 t2 = case (t1, t2) of
      (Pi dom1 b1, Pi dom2 b2) -> do
        verboseBracket "tc.conv.fun" 15 "compare function types" $ do
          reportSDoc "tc.conv.fun" 20 $ nest 2 $ vcat
            [ "t1 =" <+> prettyTCM t1
            , "t2 =" <+> prettyTCM t2
            ]
          compareDom cmp dom2 dom1 b1 b2 errH errR errQ errC $
            compareType cmp (absBody b1) (absBody b2)
        where
          errH = typeError $ UnequalHiding t1 t2
          errR = typeError $ UnequalRelevance cmp t1 t2
          errQ = typeError $ UnequalQuantity cmp t1 t2
          errC = typeError $ UnequalCohesion cmp t1 t2
      _ -> __IMPOSSIBLE__
-- | Check whether @a1 \`cmp\` a2@ and continue in a context extended by @a1@.
--   NOTE(review): most of this signature's argument lines appear dropped in
--   this copy; reconstructed to match the call sites (four error
--   continuations followed by a success continuation).
compareDom :: (MonadConversion m , Free c)
  => Comparison  -- ^ @cmp@ The comparison direction.
  -> Dom Type    -- ^ @a1@  The smaller domain.
  -> Dom Type    -- ^ @a2@  The other domain.
  -> Abs b       -- ^ @b1@  The smaller codomain.
  -> Abs c       -- ^ @b2@  The bigger codomain.
  -> m ()        -- ^ Continuation if mismatch in 'Hiding'.
  -> m ()        -- ^ Continuation if mismatch in 'Relevance'.
  -> m ()        -- ^ Continuation if mismatch in 'Quantity'.
  -> m ()        -- ^ Continuation if mismatch in 'Cohesion'.
  -> m ()        -- ^ Continuation if comparison is successful.
  -> m ()
compareDom cmp0
  dom1@(Dom{domInfo = i1, unDom = a1})
  dom2@(Dom{domInfo = i2, unDom = a2})
  b1 b2 errH errR errQ errC cont = do
  hasSubtyping <- collapseDefault . optSubtyping <$> pragmaOptions
  let cmp = if hasSubtyping then cmp0 else CmpEq
  if | not $ sameHiding dom1 dom2 -> errH
     | not $ compareRelevance cmp (getRelevance dom1) (getRelevance dom2) -> errR
     | not $ compareQuantity  cmp (getQuantity  dom1) (getQuantity  dom2) -> errQ
     | not $ compareCohesion  cmp (getCohesion  dom1) (getCohesion  dom2) -> errC
     | otherwise -> do
        -- The domain only counts as dependent if it is used relevantly.
        let r = max (getRelevance dom1) (getRelevance dom2)
            dependent = (r /= Irrelevant) && isBinderUsed b2
        pid <- newProblem_ $ compareType cmp0 a1 a2
        dom <- if dependent
               then (\ a -> dom1 {unDom = a}) <$> blockTypeOnProblem a1 pid
               else return dom1
        let name = suggests [ Suggestion b1 , Suggestion b2 ]
        addContext (name, dom) $ cont
        stealConstraints pid
        -- Andreas, 2013-05-15 Now, comparison of codomains proceeds even
        -- when domain comparison is postponed.  But see issue #1258.
-- | 'Relevance' compatibility: exact equality under 'CmpEq',
--   the 'Relevance' ordering under 'CmpLeq'.
compareRelevance :: Comparison -> Relevance -> Relevance -> Bool
compareRelevance CmpEq  r1 r2 = r1 == r2
compareRelevance CmpLeq r1 r2 = r1 <= r2
-- | 'Quantity' compatibility: sameness under 'CmpEq',
--   subsumption under 'CmpLeq'.
compareQuantity :: Comparison -> Quantity -> Quantity -> Bool
compareQuantity CmpEq  q1 q2 = sameQuantity q1 q2
compareQuantity CmpLeq q1 q2 = moreQuantity q1 q2
-- | 'Cohesion' compatibility: sameness under 'CmpEq',
--   subsumption under 'CmpLeq'.
compareCohesion :: Comparison -> Cohesion -> Cohesion -> Bool
compareCohesion CmpEq  c1 c2 = sameCohesion c1 c2
compareCohesion CmpLeq c1 c2 = moreCohesion c1 c2
-- | When comparing argument spines (in 'compareElims') where the first
--   arguments don't match, we keep going, substituting the anti-unification
--   of the two terms in the telescope:
--
--   @(u = v : A)[pid]   w = antiUnify pid A u v   us = vs : Δ[w/x]@
--
--   The fallback is a fresh blocked term, but if there is shared structure
--   between the two terms we expose it.  See issue #2384.
antiUnify :: MonadConversion m => ProblemId -> Type -> Term -> Term -> m Term
antiUnify pid a u v = do
  ((u, v), eq) <- SynEq.checkSyntacticEquality u v
  if eq then return u else do
  (u, v) <- reduce (u, v)
  reportSDoc "tc.conv.antiUnify" 30 $ vcat
    [ "antiUnify"
    , "a =" <+> prettyTCM a
    , "u =" <+> prettyTCM u
    , "v =" <+> prettyTCM v
    ]
  case (u, v) of
    (Pi ua ub, Pi va vb) -> do
      wa0 <- antiUnifyType pid (unDom ua) (unDom va)
      let wa = wa0 <$ ua
      wb <- addContext wa $ antiUnifyType pid (absBody ub) (absBody vb)
      return $ Pi wa (mkAbs (absName ub) wb)
    (Lam i u, Lam _ v) ->
      reduce (unEl a) >>= \case
        Pi a b -> Lam i . (mkAbs (absName u)) <$> addContext a (antiUnify pid (absBody b) (absBody u) (absBody v))
        _ -> fallback
    (Var i us, Var j vs) | i == j -> maybeGiveUp $ do
      a <- typeOfBV i
      antiUnifyElims pid a (var i) us vs
    -- Andreas, 2017-07-27: nothing guarantees the constructors are fully
    -- applied, so use the robust @getConType@ here.
    -- (Note that @patternViolation@ swallows exceptions coming from @getConType@.)
    (Con x ci us, Con y _ vs) | x == y -> maybeGiveUp $ do
      a <- maybe patternViolation (return . snd) =<< getConType x a
      antiUnifyElims pid a (Con x ci []) us vs
    (Def f us, Def g vs) | f == g, length us == length vs -> maybeGiveUp $ do
      a <- computeElimHeadType f us vs
      antiUnifyElims pid a (Def f []) us vs
    _ -> fallback
  where
    maybeGiveUp = catchPatternErr fallback
    fallback = blockTermOnProblem a u pid
-- | Anti-unify two arguments at the given domain type.  Fails (pattern
--   violation) if their modalities disagree; irrelevant arguments need no
--   anti-unification.
antiUnifyArgs :: MonadConversion m => ProblemId -> Dom Type -> Arg Term -> Arg Term -> m (Arg Term)
antiUnifyArgs pid dom u v
  | getModality u /= getModality v = patternViolation
  | otherwise = applyModalityToContext u $
      ifM (isIrrelevantOrPropM dom)
      -- NOTE(review): both branches of this @ifM@ appear dropped in this
      -- copy; restored — irrelevant args are returned as-is, otherwise
      -- anti-unify the payloads.
      {-then-} (return u)
      {-else-} ((u $>) <$> antiUnify pid (unDom dom) (unArg u) (unArg v))
-- | Anti-unify two types, keeping the sort of the first.
antiUnifyType :: MonadConversion m => ProblemId -> Type -> Type -> m Type
antiUnifyType pid (El s a) (El _ b) =
  workOnTypes $ fmap (El s) (antiUnify pid (sort s) a b)
-- | Anti-unify two elimination spines applied to the same head @self : a@.
antiUnifyElims :: MonadConversion m => ProblemId -> Type -> Term -> Elims -> Elims -> m Term
antiUnifyElims pid a self [] [] = return self
antiUnifyElims pid a self (Proj o f : es1) (Proj _ g : es2) | f == g = do
  res <- projectTyped self a o f
  case res of
    Just (_, self, a) -> antiUnifyElims pid a self es1 es2
    -- NOTE(review): this branch appears dropped in this copy; restored so
    -- the case analysis is total.
    Nothing -> patternViolation
antiUnifyElims pid a self (Apply u : es1) (Apply v : es2) = do
  reduce (unEl a) >>= \case
    Pi a b -> do
      w <- antiUnifyArgs pid a u v
      antiUnifyElims pid (b `lazyAbsApp` unArg w) (apply self [w]) es1 es2
    _ -> patternViolation
-- NOTE(review): final catch-all appears dropped in this copy; restored —
-- mismatching spines trigger the caller's fallback.
antiUnifyElims _ _ _ _ _ = patternViolation
-- | @compareElims pols fors a v els1 els2@ performs type-directed equality
--   on two eliminator spines applied to the same head @v : a@, consuming
--   polarity and forcing information as it goes.
compareElims :: forall m. MonadConversion m => [Polarity] -> [IsForced] -> Type -> Term -> [Elim] -> [Elim] -> m ()
compareElims pols0 fors0 a v els01 els02 = (catchConstraint (ElimCmp pols0 fors0 a v els01 els02) :: m () -> m ()) $ do
  let v1 = applyE v els01
      v2 = applyE v els02
      failure = typeError $ UnequalTerms CmpEq v1 v2 (AsTermsOf a)
        -- Andreas, 2013-03-15 since one of the spines is empty, @a@
        -- is the correct type here.
  unless (null els01) $ do
    reportSDoc "tc.conv.elim" 25 $ "compareElims" $$ do
      nest 2 $ vcat
        [ "a     =" <+> prettyTCM a
        , "pols0 (truncated to 10) =" <+> hsep (map prettyTCM $ take 10 pols0)
        , "fors0 (truncated to 10) =" <+> hsep (map prettyTCM $ take 10 fors0)
        , "v     =" <+> prettyTCM v
        , "els01 =" <+> prettyTCM els01
        , "els02 =" <+> prettyTCM els02
        ]
  case (els01, els02) of
    ([]          , []          ) -> return ()
    -- NOTE(review): several of the length-mismatch clauses below appear
    -- dropped in this copy (only their trailing comments survive);
    -- restored conservatively.
    ([]          , Proj{}   : _) -> failure -- not impossible, see issue 821
    (Proj{}   : _, []          ) -> failure -- could be x.p =?= x for projection p
    ([]          , Apply{}  : _) -> failure -- not impossible, see issue 878
    (Apply{}  : _, []          ) -> failure
    ([]          , IApply{} : _) -> failure
    (IApply{} : _, []          ) -> failure
    -- NB: popped up in issue 889
    -- but should be impossible (but again in issue 1467)
    (Apply{}  : _, Proj{}   : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (Proj{}   : _, Apply{}  : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (IApply{} : _, Proj{}   : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (Proj{}   : _, IApply{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (IApply{} : _, Apply{}  : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (Apply{}  : _, IApply{} : _) -> __IMPOSSIBLE__ <$ solveAwakeConstraints' True
    (e@(IApply x1 y1 r1) : els1, IApply x2 y2 r2 : els2) -> do
      reportSDoc "tc.conv.elim" 25 $ "compareElims IApply"
      -- Andrea: copying stuff from the Apply case..
      let (pol, pols) = nextPolarity pols0
      ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
        va <- pathView a
        reportSDoc "tc.conv.elim.iapply" 60 $ "compareElims IApply" $$ do
          nest 2 $ "va =" <+> text (show (isPathType va))
        case va of
          PathType s path l bA x y -> do
            b <- elInf primInterval
            compareWithPol pol (flip compareTerm b)
              r1 r2
            -- TODO: compare (x1,x2) and (y1,y2) ?
            -- NOTE(review): the binding of @r@ and the recursive call head
            -- appear dropped in this copy; restored.
            let r = r1
            codom <- el' (pure . unArg $ l) ((pure . unArg $ bA) <@> pure r)
            compareElims pols [] codom -- Path non-dependent (codom `lazyAbsApp` unArg arg)
              (applyE v [e]) els1 els2
          OType t@(El _ Pi{}) -> compareElims pols0 fors0 t v (Apply (defaultArg r1) : els1) (Apply (defaultArg r2) : els2)
          OType{} -> patternViolation
    (Apply arg1 : els1, Apply arg2 : els2) ->
      (verboseBracket "tc.conv.elim" 20 "compare Apply" :: m () -> m ()) $ do
      reportSDoc "tc.conv.elim" 10 $ nest 2 $ vcat
        [ "a    =" <+> prettyTCM a
        , "v    =" <+> prettyTCM v
        , "arg1 =" <+> prettyTCM arg1
        , "arg2 =" <+> prettyTCM arg2
        ]
      reportSDoc "tc.conv.elim" 50 $ nest 2 $ vcat
        [ "raw:"
        , "a    =" <+> pretty a
        , "v    =" <+> pretty v
        , "arg1 =" <+> pretty arg1
        , "arg2 =" <+> pretty arg2
        ]
      let (pol, pols) = nextPolarity pols0
          (for, fors) = nextIsForced fors0
      ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
        reportSLn "tc.conv.elim" 90 $ "type is not blocked"
        case unEl a of
          (Pi (Dom{domInfo = info, unDom = b}) codom) -> do
            reportSLn "tc.conv.elim" 90 $ "type is a function type"
            mlvl <- tryMaybe primLevel
            let freeInCoDom (Abs _ c) = 0 `freeInIgnoringSorts` c
                freeInCoDom _         = False
                dependent = (Just (unEl b) /= mlvl) && freeInCoDom codom
                -- NB: we could drop the free variable test and still be sound.
            -- NEW, Andreas, 2013-05-15:
            -- compare arg1 and arg2 as a separate problem.
            pid <- newProblem_ $ applyModalityToContext info $
                if isForced for then
                  reportSLn "tc.conv.elim" 90 $ "argument is forced"
                else if isIrrelevant info then do
                  reportSLn "tc.conv.elim" 90 $ "argument is irrelevant"
                  compareIrrelevant b (unArg arg1) (unArg arg2)
                else do
                  reportSLn "tc.conv.elim" 90 $ "argument has polarity " ++ show pol
                  compareWithPol pol (flip compareTerm b)
                    (unArg arg1) (unArg arg2)
            solved <- isProblemSolved pid
            reportSLn "tc.conv.elim" 90 $ "solved = " ++ show solved
            -- If the comparison is still open and the codomain depends on
            -- the argument, continue with the anti-unifier of the two args.
            arg <- if dependent && not solved
                   then applyModalityToContext info $ do
                     reportSDoc "tc.conv.elims" 30 $ vcat $
                       [ "Trying antiUnify:"
                       , nest 2 $ "b    =" <+> prettyTCM b
                       , nest 2 $ "arg1 =" <+> prettyTCM arg1
                       , nest 2 $ "arg2 =" <+> prettyTCM arg2
                       ]
                     arg <- (arg1 $>) <$> antiUnify pid b (unArg arg1) (unArg arg2)
                     reportSDoc "tc.conv.elims" 30 $ hang "Anti-unification:" 2 (prettyTCM arg)
                     reportSDoc "tc.conv.elims" 70 $ nest 2 $ "raw:" <+> pretty arg
                     return arg
                   else return arg1
            compareElims pols fors (codom `lazyAbsApp` unArg arg) (apply v [arg]) els1 els2
            reportSLn "tc.conv.elim" 90 $ "stealing constraints from problem " ++ show pid
            stealConstraints pid
            {- Stealing solves this issue:

               Does not create enough blocked tc-problems,
               see test/fail/DontPrune.
               (There are remaining problems which do not show up as yellow.)
               Need to find a way to associate pid also to result of compareElims.
            -}
          a -> do
            reportSDoc "impossible" 10 $
              "unexpected type when comparing apply eliminations " <+> prettyTCM a
            reportSDoc "impossible" 50 $ "raw type:" <+> pretty a
            patternViolation
            -- Andreas, 2013-10-22
            -- in case of a meta variable, carry on with a constraint
            -- (instead of crashing).
    (Proj o f : els1, Proj _ f' : els2)
      | f /= f'   -> typeError . GenericError . show =<< prettyTCM f <+> "/=" <+> prettyTCM f'
      | otherwise -> ifBlocked a (\ m t -> patternViolation) $ \ _ a -> do
        -- NOTE(review): the binding of @res@ appears dropped in this copy;
        -- restored as a projectTyped call — confirm against upstream.
        res <- projectTyped v a o f
        case res of
          Just (_, u, t) -> do
            -- Andreas, 2015-07-01:
            -- The arguments following the principal argument of a projection
            -- are not necessarily "parallel" (one needs to compute the type
            -- as one goes along), so we recurse with the projected head.
            compareElims [] [] t u els1 els2
          Nothing -> do
            reportSDoc "tc.conv.elims" 30 $ sep
              [ text $ "projection " ++ show f
              , text "applied to value " <+> prettyTCM v
              , text "of unexpected type " <+> prettyTCM a
              ]
            patternViolation
-- | "Compare" two terms in irrelevant position.  This always succeeds;
--   however, we can dig for solutions of irrelevant metas in the terms
--   we compare.
compareIrrelevant :: MonadConversion m => Type -> Term -> Term -> m ()
{- Andreas, 2012-04-02 DontCare no longer present
compareIrrelevant t (DontCare v) w = compareIrrelevant t v w
compareIrrelevant t v (DontCare w) = compareIrrelevant t v w
-}
compareIrrelevant t v0 w0 = do
  let v = stripDontCare v0
      w = stripDontCare w0
  reportSDoc "tc.conv.irr" 20 $ vcat
    [ "compareIrrelevant"
    , nest 2 $ "v =" <+> prettyTCM v
    , nest 2 $ "w =" <+> prettyTCM w
    ]
  reportSDoc "tc.conv.irr" 50 $ vcat
    [ nest 2 $ "v =" <+> pretty v
    , nest 2 $ "w =" <+> pretty w
    ]
  -- Try to solve an irrelevant meta on either side; succeed regardless.
  try v w $ try w v $ return ()
  where
    try (MetaV x es) w fallback = do
      mv <- lookupMeta x
      let rel  = getMetaRelevance mv
          inst = case mvInstantiation mv of
            InstV{} -> True
            _       -> False
      reportSDoc "tc.conv.irr" 20 $ vcat
        [ nest 2 $ text $ "rel = " ++ show rel
        , nest 2 $ "inst =" <+> pretty inst
        ]
      if not (isIrrelevant rel) || inst
        then fallback
        -- Andreas, 2016-08-08, issue #2131:
        -- mining for solutions of irrelevant metas is not definite;
        -- in case of error, leave the meta unsolved.
        else (assignE DirEq x es w (AsTermsOf t) $ compareIrrelevant t) `catchError` \ _ -> fallback
    try v w fallback = fallback
-- | Run a comparison according to the polarity of the current position:
--   invariant positions compare for equality, covariant/contravariant for
--   (possibly flipped) inequality, and nonvariant positions are skipped.
compareWithPol :: MonadConversion m => Polarity -> (Comparison -> a -> a -> m ()) -> a -> a -> m ()
compareWithPol pol cmp x y = case pol of
  Invariant     -> cmp CmpEq x y
  Covariant     -> cmp CmpLeq x y
  Contravariant -> cmp CmpLeq y x
  Nonvariant    -> return ()
-- | The polarity corresponding to a comparison direction.
polFromCmp :: Comparison -> Polarity
polFromCmp cmp = case cmp of
  CmpLeq -> Covariant
  CmpEq  -> Invariant
-- | Compare two argument lists, by lifting them to eliminator spines and
--   delegating to 'compareElims'.
compareArgs :: MonadConversion m => [Polarity] -> [IsForced] -> Type -> Term -> Args -> Args -> m ()
compareArgs pol for a v args1 args2 =
  compareElims pol for a v (Apply <$> args1) (Apply <$> args2)
-- | Equality (or subtyping) on types.  Compares the underlying terms as
--   types; the sorts are additionally compared for equality unless
--   cumulativity is on or sort comparison is disabled.
compareType :: MonadConversion m => Comparison -> Type -> Type -> m ()
compareType cmp ty1@(El s1 a1) ty2@(El s2 a2) =
  workOnTypes $ verboseBracket "tc.conv.type" 20 "compareType" $ do
    reportSDoc "tc.conv.type" 50 $ vcat
      [ "compareType" <+> sep [ prettyTCM ty1 <+> prettyTCM cmp
                              , prettyTCM ty2 ]
      , hsep [ "   sorts:", prettyTCM s1, " and ", prettyTCM s2 ]
      ]
    compareAs cmp AsTypes a1 a2
    unlessM ((optCumulativity <$> pragmaOptions) `or2M`
             (not . optCompareSorts <$> pragmaOptions)) $
      compareSort CmpEq s1 s2
-- | @leqType a b@ checks that @a@ is a subtype of @b@.
leqType :: MonadConversion m => Type -> Type -> m ()
leqType a b = compareType CmpLeq a b
-- | @coerce v a b@ coerces @v : a@ to type @b@, returning a @v' : b@ with
--   possibly extra hidden applications inserted.  Currently this only fixes
--   mismatches in the number of leading non-visible function domains.
coerce :: (MonadConversion m, MonadTCM m) => Comparison -> Term -> Type -> Type -> m Term
coerce cmp v t1 t2 = blockTerm t2 $ do
  verboseS "tc.conv.coerce" 10 $ do
    (a1,a2) <- reify (t1,t2)
    let dbglvl = if isSet a1 && isSet a2 then 50 else 10
    reportSDoc "tc.conv.coerce" dbglvl $
      "coerce" <+> vcat
        [ "term      v  =" <+> prettyTCM v
        , "from type t1 =" <+> prettyTCM a1
        , "to type   t2 =" <+> prettyTCM a2
        , "comparison   =" <+> prettyTCM cmp
        ]
    reportSDoc "tc.conv.coerce" 70 $
      "coerce" <+> vcat
        [ "term      v  =" <+> pretty v
        , "from type t1 =" <+> pretty t1
        , "to type   t2 =" <+> pretty t2
        , "comparison   =" <+> pretty cmp
        ]
  -- Take off the non-visible domains of t1 and t2.
  TelV tel1 b1 <- telViewUpTo' (-1) notVisible t1
  TelV tel2 b2 <- telViewUpTo' (-1) notVisible t2
  let n = size tel1 - size tel2
  -- The crude solution would be @v' = λ {tel2} → v {tel1}@; instead, if
  -- @n > 0@ and @b2@ is not blocked, insert @n@ hidden arguments.
  if n <= 0 then fallback else do
    ifBlocked b2 (\ _ _ -> fallback) $ \ _ _ -> do
      (args, t1') <- implicitArgs n notVisible t1
      let v' = v `apply` args
      v' <$ coerceSize (compareType cmp) v' t1' t2
  where
    fallback = v <$ coerceSize (compareType cmp) v t1 t2
-- | Size-aware subtyping check: accounts for situations like
--   @k : (Size< j) <= (Size< k + 1)@ with a cheap heuristic.
--   TODO: the full semantics gives a disjunctive constraint.
coerceSize :: MonadConversion m => (Type -> Type -> m ()) -> Term -> Type -> Type -> m ()
coerceSize leqType v t1 t2 = verboseBracket "tc.conv.size.coerce" 45 "coerceSize" $
  workOnTypes $ do
    reportSDoc "tc.conv.size.coerce" 70 $
      "coerceSize" <+> vcat
        [ "term      v  =" <+> pretty v
        , "from type t1 =" <+> pretty t1
        , "to type   t2 =" <+> pretty t2
        ]
    let fallback = leqType t1 t2
        done = caseMaybeM (isSizeType =<< reduce t1) fallback $ \ _ -> return ()
    -- Andreas, 2015-07-22, Issue 1615: only apply the heuristic when t2
    -- is a size type.
    caseMaybeM (isSizeType =<< reduce t2) fallback $ \ b2 -> do
      -- Andreas, 2017-01-20, issue #2329: if v is not a size suitable for
      -- the solver, we can only rely on the type.
      mv <- sizeMaxView v
      if any (\case{ DOtherSize{} -> True; _ -> False }) mv then fallback else do
        -- Andreas, 2015-02-11 do not instantiate metas here (triggers issue 1203).
        unlessM (tryConversion $ dontAssignMetas $ leqType t1 t2) $ do
          -- A (most probably weaker) alternative is to just check syn.eq.
          -- ifM (snd <$> checkSyntacticEquality t1 t2) (return v) $ {- else -} do
          reportSDoc "tc.conv.size.coerce" 20 $ "coercing to a size type"
          case b2 of
            -- @t2 = Size@.  We are done!
            BoundedNo -> done
            -- @t2 = Size< v2@.
            BoundedLt v2 -> do
              sv2 <- sizeView v2
              case sv2 of
                SizeInf -> done
                OtherSize{} -> do
                  -- Andreas, 2014-06-16: postpone @v + 1 <= v2@ (issue 1203).
                  vinc <- sizeSuc 1 v
                  compareSizes CmpLeq vinc v2
                  done
                -- @v2 = a2 + 1@: In this case, we can try @v <= a2@.
                SizeSuc a2 -> do
                  compareSizes CmpLeq v a2
                  -- NOTE(review): this @done@ appears dropped in this copy;
                  -- restored (its trailing comment survived).
                  done -- to pass Issue 1136
-- | Compare two levels according to the given comparison.
compareLevel :: MonadConversion m => Comparison -> Level -> Level -> m ()
compareLevel cmp u v = case cmp of
  CmpLeq -> leqLevel u v
  CmpEq  -> equalLevel u v
-- | Compare two sorts according to the given comparison.
compareSort :: MonadConversion m => Comparison -> Sort -> Sort -> m ()
compareSort cmp = case cmp of
  CmpEq  -> equalSort
  CmpLeq -> leqSort
-- | Check that the first sort is less or equal to the second.
--
--   We can put @SizeUniv@ below @Inf@, but otherwise, it is
--   unrelated to the other universes.
leqSort :: forall m. MonadConversion m => Sort -> Sort -> m ()
leqSort s1 s2 = (catchConstraint (SortCmp CmpLeq s1 s2) :: m () -> m ()) $ do
  (s1,s2) <- reduce (s1,s2)
  let postpone = addConstraint (SortCmp CmpLeq s1 s2)
      no       = typeError $ NotLeqSort s1 s2
      yes      = return ()
      -- Postpone unless syntactic equality discharges the problem.
      synEq    = ifNotM (optSyntacticEquality <$> pragmaOptions) postpone $ do
        ((s1,s2) , equal) <- SynEq.checkSyntacticEquality s1 s2
        if | equal     -> yes
           | otherwise -> postpone
  reportSDoc "tc.conv.sort" 30 $
    sep [ "leqSort"
        , nest 2 $ fsep [ prettyTCM s1 <+> "=<"
                        , prettyTCM s2 ]
        ]
  propEnabled <- isPropEnabled
  let fvsRHS = (`IntSet.member` allFreeVars s2)
  badRigid <- s1 `rigidVarsNotContainedIn` fvsRHS
  case (s1, s2) of
      -- Andreas, 2018-09-03: crash on dummy sort
      (DummyS s, _) -> impossibleSort s
      (_, DummyS s) -> impossibleSort s
      (Type a  , Type b  ) -> leqLevel a b
      (Prop a  , Prop b  ) -> leqLevel a b
      -- @Prop l@ is below @Set l@.
      (Prop a  , Type b  ) -> leqLevel a b
      (Type a  , Prop b  ) -> no
      -- Setω is the top sort
      (_       , Inf     ) -> yes
      (Inf     , _       ) -> equalSort s1 s2
      -- @SizeUniv@ and @Prop0@ are bottom sorts.
      -- So is @Set0@ if @Prop@ is not enabled.
      (_       , SizeUniv) -> equalSort s1 s2
      (_       , Prop (Max 0 [])) -> equalSort s1 s2
      (_       , Type (Max 0 []))
        | not propEnabled  -> equalSort s1 s2
      -- SizeUniv is unrelated to any @Set l@ or @Prop l@
      (SizeUniv, Type{}  ) -> no
      (SizeUniv, Prop{}  ) -> no
      -- If the first sort rigidly depends on a variable and the second
      -- sort does not mention this variable, the second sort must be Inf.
      (_       , _       ) | badRigid -> equalSort s2 Inf
      (UnivSort Inf , UnivSort Inf) -> yes
      -- PiSort, FunSort, UnivSort and MetaS might reduce once we instantiate
      -- metas, so only syntactic equality can decide now.
      (PiSort{}, _       ) -> synEq
      (_       , PiSort{}) -> synEq
      (FunSort{}, _      ) -> synEq
      (_      , FunSort{}) -> synEq
      (UnivSort{}, _     ) -> synEq
      (_     , UnivSort{}) -> synEq
      (MetaS{} , _       ) -> synEq
      (_       , MetaS{} ) -> synEq
      (DefS{}  , _       ) -> synEq
      (_       , DefS{}  ) -> synEq
  where
  impossibleSort s = do
    reportS "impossible" 10
      [ "leqSort: found dummy sort with description:"
      , s
      ]
    __IMPOSSIBLE__
leqLevel :: MonadConversion m => Level -> Level -> m ()
leqLevel a b = do
reportSDoc "tc.conv.nat" 30 $
"compareLevel" <+>
sep [ prettyTCM a <+> "=<"
, prettyTCM b ]
, 2015 - 12 - 28 Issue 1757
a <- normalise a
b <- normalise b
leqView a b
where
, 2016 - 09 - 28
leqView :: MonadConversion m => Level -> Level -> m ()
leqView a b = catchConstraint (LevelCmp CmpLeq a b) $ do
reportSDoc "tc.conv.level" 30 $
"compareLevelView" <+>
sep [ pretty a <+> "=<"
, pretty b ]
cumulativity <- optCumulativity <$> pragmaOptions
reportSDoc "tc.conv.level" 40 $
"compareLevelView" <+>
sep [ prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ levelMaxView a)
, "=<"
, prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ levelMaxView b)
]
wrap $ case (levelMaxView a, levelMaxView b) of
_ | a == b -> ok
(SingleClosed 0 :| [] , _) -> ok
(as , SingleClosed 0 :| []) ->
sequence_ [ equalLevel (unSingleLevel a') (ClosedLevel 0) | a' <- NonEmpty.toList as ]
(SingleClosed m :| [], SingleClosed n :| []) -> if m <= n then ok else notok
(SingleClosed m :| [] , _)
| m <= levelLowerBound b -> ok
(as, bs)
| all neutralOrClosed bs , levelLowerBound a > levelLowerBound b -> notok
(as@(_:|_:_), b :| []) ->
sequence_ [ leqView (unSingleLevel a') (unSingleLevel b) | a' <- NonEmpty.toList as ]
(as, bs)
| let minN = min (fst $ levelPlusView a) (fst $ levelPlusView b)
a' = fromMaybe __IMPOSSIBLE__ $ subLevel minN a
b' = fromMaybe __IMPOSSIBLE__ $ subLevel minN b
, minN > 0 -> leqView a' b'
, 2014 - 04 - 07 : This is ok if we do not go back to equalLevel
(as, bs)
| (subsumed@(_:_) , as') <- List.partition isSubsumed (NonEmpty.toList as)
-> leqView (unSingleLevels as') b
where
isSubsumed a = any (`subsumes` a) (NonEmpty.toList bs)
subsumes :: SingleLevel -> SingleLevel -> Bool
subsumes (SingleClosed m) (SingleClosed n) = m >= n
subsumes (SinglePlus (Plus m _)) (SingleClosed n) = m >= n
subsumes (SinglePlus (Plus m a)) (SinglePlus (Plus n b)) = a == b && m >= n
subsumes _ _ = False
(as , bs)
| cumulativity
, Just (mb@(MetaLevel x es) , bs') <- singleMetaView (NonEmpty.toList bs)
, null bs' || noMetas (Level a , unSingleLevels bs') -> do
mv <- lookupMeta x
Jesper , 2019 - 10 - 13 : abort if this is an interaction
abort <- (isJust <$> isInteractionMeta x) `or2M`
((== YesGeneralize) <$> isGeneralizableMeta x)
if | abort -> postpone
| otherwise -> do
x' <- case mvJudgement mv of
IsSort{} -> __IMPOSSIBLE__
HasType _ cmp t -> do
TelV tel t' <- telView t
newMeta Instantiable (mvInfo mv) normalMetaPriority (idP $ size tel) $ HasType () cmp t
reportSDoc "tc.conv.level" 20 $ fsep
[ "attempting to solve" , prettyTCM (MetaV x es) , "to the maximum of"
, prettyTCM (Level a) , "and the fresh meta" , prettyTCM (MetaV x' es)
]
equalLevel (atomicLevel mb) $ levelLub a (atomicLevel $ MetaLevel x' es)
, 2016 - 09 - 28 : This simplification loses the solution lzero .
| m = = n - > equalLevel ' ( [ a ] ) ( [ b ] )
_ | noMetas (Level a , Level b) -> notok
| otherwise -> postpone
where
ok = return ()
notok = unlessM typeInType $ typeError $ NotLeqSort (Type a) (Type b)
postpone = patternViolation
wrap m = m `catchError` \case
TypeError{} -> notok
err -> throwError err
neutralOrClosed (SingleClosed _) = True
neutralOrClosed (SinglePlus (Plus _ NeutralLevel{})) = True
neutralOrClosed _ = False
Is there exactly one @MetaLevel@ in the list of single levels ?
singleMetaView :: [SingleLevel] -> Maybe (LevelAtom, [SingleLevel])
singleMetaView (SinglePlus (Plus 0 l@(MetaLevel m es)) : ls)
| all (not . isMetaLevel) ls = Just (l,ls)
singleMetaView (l : ls)
| not $ isMetaLevel l = second (l:) <$> singleMetaView ls
singleMetaView _ = Nothing
isMetaLevel :: SingleLevel -> Bool
isMetaLevel (SinglePlus (Plus _ MetaLevel{})) = True
isMetaLevel (SinglePlus (Plus _ UnreducedLevel{})) = __IMPOSSIBLE__
isMetaLevel _ = False
equalLevel :: MonadConversion m => Level -> Level -> m ()
equalLevel a b = do
, 2013 - 10 - 31 Use normalization to make syntactic equality stronger
(a, b) <- normalise (a, b)
equalLevel' a b
equalLevel' :: forall m. MonadConversion m => Level -> Level -> m ()
equalLevel' a b = do
reportSDoc "tc.conv.level" 50 $ sep [ "equalLevel", nest 2 $ parens $ pretty a, nest 2 $ parens $ pretty b ]
, 2013 - 10 - 31 remove common terms ( that do n't contain metas ! )
THAT 's actually UNSOUND when metas are instantiated , because
as < - return $ Set.fromList $ closed0 as
bs < - return $ Set.fromList $ closed0 bs
let cs = Set.filter ( not . ) $ Set.intersection as bs
as < - return $ Set.toList $ as Set.\\ cs
bs < - return $ Set.toList $ bs Set.\\ cs
reportSDoc "tc.conv.level" 40 $
sep [ "equalLevel"
, vcat [ nest 2 $ sep [ prettyTCM a <+> "=="
, prettyTCM b
]
]
]
Jesper , 2014 - 02 - 02 remove terms that certainly do not contribute
let (a',b') = removeSubsumed a b
reportSDoc "tc.conv.level" 50 $
sep [ "equalLevel (w/o subsumed)"
, vcat [ nest 2 $ sep [ prettyTCM a' <+> "=="
, prettyTCM b'
]
]
]
let as = levelMaxView a'
bs = levelMaxView b'
reportSDoc "tc.conv.level" 50 $
sep [ text "equalLevel"
, vcat [ nest 2 $ sep [ prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ as)
, "=="
, prettyList_ (map (pretty . unSingleLevel) $ NonEmpty.toList $ bs)
]
]
]
reportSDoc "tc.conv.level" 80 $
sep [ text "equalLevel"
, vcat [ nest 2 $ sep [ prettyList_ (map (text . show . unSingleLevel) $ NonEmpty.toList $ as)
, "=="
, prettyList_ (map (text . show . unSingleLevel) $ NonEmpty.toList $ bs)
]
]
]
catchConstraint (LevelCmp CmpEq a b) $ case (as, bs) of
_ | a == b -> ok
(SingleClosed m :| [], SingleClosed n :| [])
| m == n -> ok
| otherwise -> notok
(SingleClosed m :| [] , bs) | any isNeutral bs -> notok
(as , SingleClosed n :| []) | any isNeutral as -> notok
(SingleClosed m :| [] , _) | m < levelLowerBound b -> notok
(_ , SingleClosed n :| []) | n < levelLowerBound a -> notok
(SingleClosed 0 :| [] , bs@(_:|_:_)) ->
sequence_ [ equalLevel' (ClosedLevel 0) (unSingleLevel b') | b' <- NonEmpty.toList bs ]
(as@(_:|_:_) , SingleClosed 0 :| []) ->
sequence_ [ equalLevel' (unSingleLevel a') (ClosedLevel 0) | a' <- NonEmpty.toList as ]
(SinglePlus (Plus k (MetaLevel x as)) :| [] , bs)
| any (isThisMeta x) bs -> postpone
(as , SinglePlus (Plus k (MetaLevel x bs)) :| [])
| any (isThisMeta x) as -> postpone
(SinglePlus (Plus k (MetaLevel x as')) :| [] , SinglePlus (Plus l (MetaLevel y bs')) :| [])
| k == l -> if
| y < x -> meta x as' $ atomicLevel $ MetaLevel y bs'
| otherwise -> meta y bs' $ atomicLevel $ MetaLevel x as'
(SinglePlus (Plus k (MetaLevel x as')) :| [] , _)
| Just b' <- subLevel k b -> meta x as' b'
(_ , SinglePlus (Plus l (MetaLevel y bs')) :| [])
| Just a' <- subLevel l a -> meta y bs' a'
_ | Just a' <- levelMaxDiff a b
, b /= ClosedLevel 0 -> leqLevel a' b
_ | Just b' <- levelMaxDiff b a
, a /= ClosedLevel 0 -> leqLevel b' a
(as , bs)
| all isNeutralOrClosed (NonEmpty.toList as ++ NonEmpty.toList bs)
, 2013 - 10 - 31 : There could be metas in neutral levels ( see Issue 930 ) .
, not (any hasMeta (NonEmpty.toList as ++ NonEmpty.toList bs))
, length as == length bs -> do
reportSLn "tc.conv.level" 60 $ "equalLevel: all are neutral or closed"
zipWithM_ ((===) `on` levelTm . unSingleLevel) (NonEmpty.toList as) (NonEmpty.toList bs)
_ | noMetas (Level a , Level b) -> notok
| otherwise -> postpone
where
a === b = unlessM typeInType $ do
lvl <- levelType
equalAtom (AsTermsOf lvl) a b
ok = return ()
notok = unlessM typeInType notOk
notOk = typeError $ UnequalLevel CmpEq a b
postpone = do
reportSDoc "tc.conv.level" 30 $ hang "postponing:" 2 $ hang (pretty a <+> "==") 0 (pretty b)
patternViolation
perform assignment ( MetaLevel x as ) : = b
meta x as b = do
reportSLn "tc.meta.level" 30 $ "Assigning meta level"
reportSDoc "tc.meta.level" 50 $ "meta" <+> sep [prettyList $ map pretty as, pretty b]
lvl <- levelType
wrap m = m `catchError` \case
TypeError{} -> notok
err -> throwError err
isNeutral (SinglePlus (Plus _ NeutralLevel{})) = True
isNeutral _ = False
isNeutralOrClosed (SingleClosed _) = True
isNeutralOrClosed (SinglePlus (Plus _ NeutralLevel{})) = True
isNeutralOrClosed _ = False
hasMeta (SinglePlus a) = case a of
Plus _ MetaLevel{} -> True
Plus _ (BlockedLevel _ v) -> isJust $ firstMeta v
Plus _ (NeutralLevel _ v) -> isJust $ firstMeta v
Plus _ (UnreducedLevel v) -> isJust $ firstMeta v
hasMeta (SingleClosed _) = False
isThisMeta x (SinglePlus (Plus _ (MetaLevel y _))) = x == y
isThisMeta _ _ = False
removeSubsumed a b =
let as = NonEmpty.toList $ levelMaxView a
bs = NonEmpty.toList $ levelMaxView b
a' = unSingleLevels $ filter (not . (`isStrictlySubsumedBy` bs)) as
b' = unSingleLevels $ filter (not . (`isStrictlySubsumedBy` as)) bs
in (a',b')
x `isStrictlySubsumedBy` ys = any (`strictlySubsumes` x) ys
SingleClosed m `strictlySubsumes` SingleClosed n = m > n
SinglePlus (Plus m a) `strictlySubsumes` SingleClosed n = m > n
SinglePlus (Plus m a) `strictlySubsumes` SinglePlus (Plus n b) = a == b && m > n
_ `strictlySubsumes` _ = False
| Check that the first sort equal to the second .
equalSort :: forall m. MonadConversion m => Sort -> Sort -> m ()
equalSort s1 s2 = do
catchConstraint (SortCmp CmpEq s1 s2) $ do
(s1,s2) <- reduce (s1,s2)
let yes = return ()
no = typeError $ UnequalSorts s1 s2
reportSDoc "tc.conv.sort" 30 $ sep
[ "equalSort"
, vcat [ nest 2 $ fsep [ prettyTCM s1 <+> "=="
, prettyTCM s2 ]
, nest 2 $ fsep [ pretty s1 <+> "=="
, pretty s2 ]
]
]
propEnabled <- isPropEnabled
typeInTypeEnabled <- typeInType
case (s1, s2) of
, 2018 - 09 - 03 : crash on dummy sort
(DummyS s, _) -> impossibleSort s
(_, DummyS s) -> impossibleSort s
(MetaS x es , MetaS y es')
| x == y -> synEq s1 s2
| x < y -> meta y es' s1
| otherwise -> meta x es s2
(MetaS x es , _ ) -> meta x es s2
(_ , MetaS x es ) -> meta x es s1
(Type a , Type b ) -> equalLevel a b `catchInequalLevel` no
(SizeUniv , SizeUniv ) -> yes
(Prop a , Prop b ) -> equalLevel a b `catchInequalLevel` no
(Inf , Inf ) -> yes
(Type{} , Inf )
| typeInTypeEnabled -> yes
(Inf , Type{} )
| typeInTypeEnabled -> yes
equating @PiSort a b@ to another sort
(s1 , PiSort a b) -> piSortEquals s1 a b
(PiSort a b , s2) -> piSortEquals s2 a b
(s1 , FunSort a b) -> funSortEquals s1 a b
(FunSort a b , s2) -> funSortEquals s2 a b
(s1 , UnivSort s2) -> univSortEquals s1 s2
(UnivSort s1 , s2 ) -> univSortEquals s2 s1
(DefS d es , DefS d' es')
| d == d' -> synEq s1 s2
| otherwise -> no
(_ , _ ) -> no
where
perform assignment ( MetaS x es ) : = s
meta :: MetaId -> [Elim' Term] -> Sort -> m ()
meta x es s = do
reportSLn "tc.meta.sort" 30 $ "Assigning meta sort"
reportSDoc "tc.meta.sort" 50 $ "meta" <+> sep [pretty x, prettyList $ map pretty es, pretty s]
assignE DirEq x es (Sort s) AsTypes __IMPOSSIBLE__
synEq :: Sort -> Sort -> m ()
synEq s1 s2 = do
let postpone = addConstraint $ SortCmp CmpEq s1 s2
doSynEq <- optSyntacticEquality <$> pragmaOptions
if | doSynEq -> do
((s1,s2) , equal) <- SynEq.checkSyntacticEquality s1 s2
if | equal -> return ()
| otherwise -> postpone
| otherwise -> postpone
set0 = mkType 0
prop0 = mkProp 0
Equate a sort @s1@ to @univSort s2@
univSortEquals :: Sort -> Sort -> m ()
univSortEquals s1 s2 = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "univSortEquals"
, " s1 =" <+> prettyTCM s1
, " s2 =" <+> prettyTCM s2
]
let no = typeError $ UnequalSorts s1 (UnivSort s2)
case s1 of
Type l1 -> do
propEnabled <- isPropEnabled
if | Inf <- s2 -> no
| SizeUniv <- s2 -> no
| not propEnabled -> do
l2 <- case subLevel 1 l1 of
Just l2 -> return l2
Nothing -> do
l2 <- newLevelMeta
equalLevel l1 (levelSuc l2)
return l2
equalSort (Type l2) s2
| otherwise -> synEq (Type l1) (UnivSort s2)
Inf -> do
infInInf <- (optOmegaInOmega <$> pragmaOptions) `or2M` typeInType
if | infInInf -> equalSort Inf s2
| otherwise -> no
Prop{} -> no
SizeUniv{} -> no
_ -> synEq s1 (UnivSort s2)
Equate a sort @s@ to @piSort a b@
Precondition : @s@ and @piSort a b@ are already reduced .
piSortEquals :: Sort -> Dom Type -> Abs Sort -> m ()
piSortEquals s a NoAbs{} = __IMPOSSIBLE__
piSortEquals s a bAbs@(Abs x b) = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "piSortEquals"
, " s =" <+> prettyTCM s
, " a =" <+> prettyTCM a
, " b =" <+> addContext (x,a) (prettyTCM b)
]
propEnabled <- isPropEnabled
If @b@ is dependent , then @piSort a b@ computes to
if | definitelyNotInf s -> do
a fresh meta that does not depend on @x : a@
b' <- newSortMeta
addContext (x,a) $ equalSort b (raise 1 b')
funSortEquals s (getSort a) b'
| otherwise -> synEq (PiSort a bAbs) s
Equate a sort @s@ to @funSort s1 s2@
funSortEquals :: Sort -> Sort -> Sort -> m ()
funSortEquals s0 s1 s2 = do
reportSDoc "tc.conv.sort" 35 $ vcat
[ "funSortEquals"
, " s0 =" <+> prettyTCM s0
, " s1 =" <+> prettyTCM s1
, " s2 =" <+> prettyTCM s2
]
propEnabled <- isPropEnabled
sizedTypesEnabled <- sizedTypesOption
case s0 of
Inf | definitelyNotInf s1 && definitelyNotInf s2 -> do
typeError $ UnequalSorts s0 (FunSort s1 s2)
| definitelyNotInf s1 -> equalSort Inf s2
| definitelyNotInf s2 -> equalSort Inf s1
| otherwise -> synEq s0 (FunSort s1 s2)
Type l -> do
l2 <- forceType s2
leqLevel l2 l
Jesper , 2019 - 12 - 27 : SizeUniv is disabled at the moment .
Just s -> equalSort (Type l) s
Nothing -> synEq (Type l) (FunSort s1 $ Type l2)
If both Prop and sized types are disabled , only the
case @s1 = = Set l1@ remains .
| otherwise -> do
l1 <- forceType s1
equalLevel l (levelLub l1 l2)
Prop l -> do
l2 <- forceProp s2
leqLevel l2 l
case funSort' s1 (Prop l2) of
Just s -> equalSort (Prop l) s
Nothing -> synEq (Prop l) (FunSort s1 $ Prop l2)
SizeUniv -> equalSort SizeUniv s2
_ -> synEq s0 (FunSort s1 s2)
i.e. @piSort a b = = s0@ implies @b = = s0@.
isBottomSort :: Bool -> Sort -> Bool
isBottomSort propEnabled (Prop (ClosedLevel 0)) = True
isBottomSort propEnabled (Type (ClosedLevel 0)) = not propEnabled
isBottomSort propEnabled _ = False
definitelyNotInf :: Sort -> Bool
definitelyNotInf = \case
Inf -> False
Type{} -> True
Prop{} -> True
SizeUniv -> True
PiSort{} -> False
FunSort{} -> False
UnivSort{} -> False
MetaS{} -> False
DefS{} -> False
DummyS{} -> False
forceType :: Sort -> m Level
forceType (Type l) = return l
forceType s = do
l <- newLevelMeta
equalSort s (Type l)
return l
forceProp :: Sort -> m Level
forceProp (Prop l) = return l
forceProp s = do
l <- newLevelMeta
equalSort s (Prop l)
return l
impossibleSort s = do
reportS "impossible" 10
[ "equalSort: found dummy sort with description:"
, s
]
__IMPOSSIBLE__
catchInequalLevel m fail = m `catchError` \case
TypeError{} -> fail
err -> throwError err
< - ( \ q t - > Def q [ Apply $ Arg defaultArgInfo t ] ) < $ > fromMaybe _ _ IMPOSSIBLE _ _ < $ > getPrimitiveName ' " primINeg "
f ( ) = msum $ map ( f . view . unArg ) [ x , y ]
f ( OTerm ( Var i [ ] ) ) = return [ ( i , True ) ]
xs < - mapM ( mapM ( \ ( i , b ) - > ( , ) i < $ > intervalUnview ( if b then IOne else ) ) ) as
forallFaceMaps :: MonadConversion m => Term -> (Map.Map Int Bool -> MetaId -> Term -> m a) -> (Substitution -> m a) -> m [a]
forallFaceMaps t kb k = do
reportSDoc "conv.forall" 20 $
fsep ["forallFaceMaps"
, prettyTCM t
]
as <- decomposeInterval t
boolToI <- do
io <- primIOne
iz <- primIZero
return (\b -> if b then io else iz)
forM as $ \ (ms,ts) -> do
ifBlockeds ts (kb ms) $ \ _ _ -> do
let xs = map (id -*- boolToI) $ Map.toAscList ms
cxt <- getContext
reportSDoc "conv.forall" 20 $
fsep ["substContextN"
, prettyTCM cxt
, prettyTCM xs
]
(cxt',sigma) <- substContextN cxt xs
resolved <- forM xs (\ (i,t) -> (,) <$> lookupBV i <*> return (applySubst sigma t))
updateContext sigma (const cxt') $
addBindings resolved $ do
cl <- buildClosure ()
tel <- getContextTelescope
m <- currentModule
sub <- getModuleParameterSub m
reportS "conv.forall" 10
[ replicate 10 '-'
, show (envCurrentModule $ clEnv cl)
, show (envLetBindings $ clEnv cl)
, show sigma
, show m
, show sub
]
k sigma
where
ifBlockeds ts blocked unblocked = do
and <- getPrimitiveTerm "primIMin"
io <- primIOne
let t = foldr (\ x r -> and `apply` [argN x,argN r]) io ts
ifBlocked t blocked unblocked
addBindings [] m = m
addBindings ((Dom{domInfo = info,unDom = (nm,ty)},t):bs) m = addLetBinding info nm t ty (addBindings bs m)
substContextN :: MonadConversion m => Context -> [(Int,Term)] -> m (Context , Substitution)
substContextN c [] = return (c, idS)
substContextN c ((i,t):xs) = do
(c', sigma) <- substContext i t c
(c'', sigma') <- substContextN c' (map (subtract 1 -*- applySubst sigma) xs)
return (c'', applySubst sigma' sigma)
substContext :: MonadConversion m => Int -> Term -> Context -> m (Context , Substitution)
substContext i t [] = __IMPOSSIBLE__
substContext i t (x:xs) | i == 0 = return $ (xs , singletonS 0 t)
substContext i t (x:xs) | i > 0 = do
reportSDoc "conv.forall" 20 $
fsep ["substContext"
, text (show (i-1))
, prettyTCM t
, prettyTCM xs
]
(c,sigma) <- substContext (i-1) t xs
let e = applySubst sigma x
return (e:c, liftS 1 sigma)
substContext i t (x:xs) = __IMPOSSIBLE__
compareInterval :: MonadConversion m => Comparison -> Type -> Term -> Term -> m ()
compareInterval cmp i t u = do
reportSDoc "tc.conv.interval" 15 $
sep [ "{ compareInterval" <+> prettyTCM t <+> "=" <+> prettyTCM u ]
tb <- reduceB t
ub <- reduceB u
let t = ignoreBlocking tb
u = ignoreBlocking ub
it <- decomposeInterval' t
iu <- decomposeInterval' u
case () of
_ | blockedOrMeta tb || blockedOrMeta ub -> do
in case of metas we would n't be able to make progress by how we deal with laws .
interval <- elInf $ primInterval
compareAtom CmpEq (AsTermsOf interval) t u
_ | otherwise -> do
x <- leqInterval it iu
y <- leqInterval iu it
let final = isCanonical it && isCanonical iu
if x && y then reportSDoc "tc.conv.interval" 15 $ "Ok! }" else
if final then typeError $ UnequalTerms cmp t u (AsTermsOf i)
else do
reportSDoc "tc.conv.interval" 15 $ "Giving up! }"
patternViolation
where
blockedOrMeta Blocked{} = True
blockedOrMeta (NotBlocked _ (MetaV{})) = True
blockedOrMeta _ = False
type Conj = (Map.Map Int (Set.Set Bool),[Term])
isCanonical :: [Conj] -> Bool
isCanonical = all (null . snd)
( ∨ r_i ) ≤ ( ∨ q_j ) q_j
leqInterval :: MonadConversion m => [Conj] -> [Conj] -> m Bool
leqInterval r q =
and <$> forM r (\ r_i ->
TODO shortcut
' { r_i | i } { q_j | j } = { r_i | i } iff
leqConj :: MonadConversion m => Conj -> Conj -> m Bool
leqConj (rs,rst) (qs,qst) = do
case toSet qs `Set.isSubsetOf` toSet rs of
False -> return False
True -> do
interval <- elInf $ fromMaybe __IMPOSSIBLE__ <$> getBuiltin' builtinInterval
1 ) in some situations the same constraint would get generated twice .
2 ) unless things are completely accepted we are going to
let eqT t u = tryConversion (compareAtom CmpEq (AsTermsOf interval) t u)
let listSubset ts us = and <$> forM ts (\ t ->
TODO shortcut
listSubset qst rst
where
toSet m = Set.fromList [ (i,b) | (i,bs) <- Map.toList m, b <- Set.toList bs]
| equalTermOnFace φ A u v = _ , φ ⊢ u = v : A
equalTermOnFace :: MonadConversion m => Term -> Type -> Term -> Term -> m ()
equalTermOnFace = compareTermOnFace CmpEq
compareTermOnFace :: MonadConversion m => Comparison -> Term -> Type -> Term -> Term -> m ()
compareTermOnFace = compareTermOnFace' compareTerm
compareTermOnFace' :: MonadConversion m => (Comparison -> Type -> Term -> Term -> m ()) -> Comparison -> Term -> Type -> Term -> Term -> m ()
compareTermOnFace' k cmp phi ty u v = do
phi <- reduce phi
_ <- forallFaceMaps phi postponed
$ \ alpha -> k cmp (applySubst alpha ty) (applySubst alpha u) (applySubst alpha v)
return ()
where
postponed ms i psi = do
phi <- runNamesT [] $ do
imin <- cl $ getPrimitiveTerm "primIMin"
ineg <- cl $ getPrimitiveTerm "primINeg"
psi <- open psi
let phi = foldr (\ (i,b) r -> do i <- open (var i); pure imin <@> (if b then i else pure ineg <@> i) <@> r)
phi
addConstraint (ValueCmpOnFace cmp phi ty u v)
bothAbsurd :: MonadConversion m => QName -> QName -> m Bool
bothAbsurd f f'
| isAbsurdLambdaName f, isAbsurdLambdaName f' = do
Double check we are really dealing with absurd :
def <- getConstInfo f
def' <- getConstInfo f'
case (theDef def, theDef def') of
(Function{ funClauses = [Clause{ clauseBody = Nothing }] },
Function{ funClauses = [Clause{ clauseBody = Nothing }] }) -> return True
_ -> return False
| otherwise = return False
|
9bcc6b78018e4acbce47ae9f7849df3478b283a05ef1b4bd1bb8e2691bee3389 | martijnbastiaan/doctest-parallel | Fixity.hs | module Fixity where
foo :: Int
foo = 23 + 42
| null | https://raw.githubusercontent.com/martijnbastiaan/doctest-parallel/f70d6a1c946cc0ada88571b90a39a7cd4d065452/test/extract/regression/Fixity.hs | haskell | module Fixity where
foo :: Int
foo = 23 + 42
| |
63f733a98f4d379a0728f33fd9211b6035e51f2af0b84400b6482d7886706e9f | TyOverby/mono | to_incr_dom.ml | open! Core
open! Async_kernel
open! Import
open Incr.Let_syntax
include To_incr_dom_intf
module State = struct
type t = { mutable last_lifecycle : Bonsai.Private.Lifecycle.Collection.t }
let create () = { last_lifecycle = Bonsai.Private.Lifecycle.Collection.empty }
end
module Action = struct
type ('dynamic_action, 'static_action) t =
| Dynamic of 'dynamic_action
| Static of 'static_action
[@@deriving sexp_of]
end
module Action_unshadowed = Action
let create_generic
computation
~fresh
~input
~model
~inject_dynamic
~inject_static
~apply_static
=
let environment =
Bonsai.Private.Environment.(empty |> add_exn ~key:fresh ~data:input)
in
let snapshot =
Bonsai.Private.eval
~environment
~path:Bonsai.Private.Path.empty
~clock:Incr.clock
~model
~inject_dynamic
~inject_static
computation
in
let%map view, extra = Bonsai.Private.Snapshot.result snapshot
and dynamic_apply_action =
Bonsai.Private.Apply_action.to_incremental
(Bonsai.Private.Snapshot.apply_action snapshot)
and lifecycle = Bonsai.Private.Snapshot.lifecycle_or_empty snapshot
and model = model in
let schedule_event = Vdom.Effect.Expert.handle_non_dom_event_exn in
let apply_action action _state ~schedule_action:_ =
match action with
| Action.Dynamic action -> dynamic_apply_action model action ~schedule_event
| Action.Static action ->
apply_static ~inject:inject_static ~schedule_event model action
in
let on_display state ~schedule_action:_ =
let diff =
Bonsai.Private.Lifecycle.Collection.diff state.State.last_lifecycle lifecycle
in
state.State.last_lifecycle <- lifecycle;
Vdom.Effect.Expert.handle_non_dom_event_exn diff
in
Incr_dom.Component.create_with_extra ~on_display ~extra ~apply_action model view
;;
let convert_generic
(type input model dynamic_action static_action extra)
~fresh
~(computation :
( model
, dynamic_action
, static_action
, Vdom.Node.t * extra )
Bonsai.Private.Computation.t)
~default_model
~(dynamic_action_type_id : dynamic_action Type_equal.Id.t)
~(static_action_type_id : static_action Type_equal.Id.t)
~apply_static
~equal_model
~sexp_of_model
~model_of_sexp
: (module S with type Input.t = input and type Extra.t = extra)
=
(module struct
module Input = struct
type t = input
end
module Model = struct
type t = model [@@deriving equal, sexp]
let default = default_model
end
module Action = struct
let sexp_of_dynamic_action = Type_equal.Id.to_sexp dynamic_action_type_id
let sexp_of_static_action = Type_equal.Id.to_sexp static_action_type_id
type t = (dynamic_action, static_action) Action.t [@@deriving sexp_of]
end
module Extra = struct
type t = extra
end
module State = State
type t = (Action.t, Model.t, State.t, Extra.t) Incr_dom.Component.with_extra
let create ~input ~old_model:_ ~model ~inject =
let inject_dynamic a = inject (Action_unshadowed.Dynamic a) in
let inject_static a = inject (Action_unshadowed.Static a) in
create_generic
computation
~fresh
~input
~model
~inject_dynamic
~inject_static
~apply_static
;;
end)
;;
let convert_with_extra component =
let fresh = Type_equal.Id.create ~name:"" sexp_of_opaque in
let var = Bonsai.Private.(Value.named fresh |> conceal_value) in
let component = component var |> Bonsai.Private.reveal_computation in
let (Bonsai.Private.Computation.T
{ t; model; dynamic_action; static_action; apply_static })
=
component
in
convert_generic
~computation:t
~fresh
~dynamic_action_type_id:dynamic_action
~static_action_type_id:static_action
~apply_static
~default_model:model.default
~equal_model:model.equal
~sexp_of_model:model.sexp_of
~model_of_sexp:model.of_sexp
;;
let convert component =
convert_with_extra (Bonsai.Arrow_deprecated.map component ~f:(fun r -> r, ()))
;;
| null | https://raw.githubusercontent.com/TyOverby/mono/7666c0328d194bf9a569fb65babc0486f2aaa40d/vendor/janestreet-bonsai/web/to_incr_dom.ml | ocaml | open! Core
open! Async_kernel
open! Import
open Incr.Let_syntax
include To_incr_dom_intf
module State = struct
type t = { mutable last_lifecycle : Bonsai.Private.Lifecycle.Collection.t }
let create () = { last_lifecycle = Bonsai.Private.Lifecycle.Collection.empty }
end
module Action = struct
type ('dynamic_action, 'static_action) t =
| Dynamic of 'dynamic_action
| Static of 'static_action
[@@deriving sexp_of]
end
module Action_unshadowed = Action
let create_generic
computation
~fresh
~input
~model
~inject_dynamic
~inject_static
~apply_static
=
let environment =
Bonsai.Private.Environment.(empty |> add_exn ~key:fresh ~data:input)
in
let snapshot =
Bonsai.Private.eval
~environment
~path:Bonsai.Private.Path.empty
~clock:Incr.clock
~model
~inject_dynamic
~inject_static
computation
in
let%map view, extra = Bonsai.Private.Snapshot.result snapshot
and dynamic_apply_action =
Bonsai.Private.Apply_action.to_incremental
(Bonsai.Private.Snapshot.apply_action snapshot)
and lifecycle = Bonsai.Private.Snapshot.lifecycle_or_empty snapshot
and model = model in
let schedule_event = Vdom.Effect.Expert.handle_non_dom_event_exn in
let apply_action action _state ~schedule_action:_ =
match action with
| Action.Dynamic action -> dynamic_apply_action model action ~schedule_event
| Action.Static action ->
apply_static ~inject:inject_static ~schedule_event model action
in
let on_display state ~schedule_action:_ =
let diff =
Bonsai.Private.Lifecycle.Collection.diff state.State.last_lifecycle lifecycle
in
state.State.last_lifecycle <- lifecycle;
Vdom.Effect.Expert.handle_non_dom_event_exn diff
in
Incr_dom.Component.create_with_extra ~on_display ~extra ~apply_action model view
;;
let convert_generic
(type input model dynamic_action static_action extra)
~fresh
~(computation :
( model
, dynamic_action
, static_action
, Vdom.Node.t * extra )
Bonsai.Private.Computation.t)
~default_model
~(dynamic_action_type_id : dynamic_action Type_equal.Id.t)
~(static_action_type_id : static_action Type_equal.Id.t)
~apply_static
~equal_model
~sexp_of_model
~model_of_sexp
: (module S with type Input.t = input and type Extra.t = extra)
=
(module struct
module Input = struct
type t = input
end
module Model = struct
type t = model [@@deriving equal, sexp]
let default = default_model
end
module Action = struct
let sexp_of_dynamic_action = Type_equal.Id.to_sexp dynamic_action_type_id
let sexp_of_static_action = Type_equal.Id.to_sexp static_action_type_id
type t = (dynamic_action, static_action) Action.t [@@deriving sexp_of]
end
module Extra = struct
type t = extra
end
module State = State
type t = (Action.t, Model.t, State.t, Extra.t) Incr_dom.Component.with_extra
let create ~input ~old_model:_ ~model ~inject =
let inject_dynamic a = inject (Action_unshadowed.Dynamic a) in
let inject_static a = inject (Action_unshadowed.Static a) in
create_generic
computation
~fresh
~input
~model
~inject_dynamic
~inject_static
~apply_static
;;
end)
;;
let convert_with_extra component =
let fresh = Type_equal.Id.create ~name:"" sexp_of_opaque in
let var = Bonsai.Private.(Value.named fresh |> conceal_value) in
let component = component var |> Bonsai.Private.reveal_computation in
let (Bonsai.Private.Computation.T
{ t; model; dynamic_action; static_action; apply_static })
=
component
in
convert_generic
~computation:t
~fresh
~dynamic_action_type_id:dynamic_action
~static_action_type_id:static_action
~apply_static
~default_model:model.default
~equal_model:model.equal
~sexp_of_model:model.sexp_of
~model_of_sexp:model.of_sexp
;;
let convert component =
convert_with_extra (Bonsai.Arrow_deprecated.map component ~f:(fun r -> r, ()))
;;
| |
475a8af4b3e49d91e5e78e36758c3da5ef2cc262d79d1b323651109daeca1407 | scrintal/heroicons-reagent | minus_circle.cljs | (ns com.scrintal.heroicons.outline.minus-circle)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M15 12H9m12 0a9 9 0 11-18 0 9 9 0 0118 0z"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/outline/minus_circle.cljs | clojure | (ns com.scrintal.heroicons.outline.minus-circle)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M15 12H9m12 0a9 9 0 11-18 0 9 9 0 0118 0z"}]]) | |
5abb40a340a08de12171523a137c5755c4fca3dfbb060ba079e598ecdf7c8b57 | PacktWorkshops/The-Clojure-Workshop | monitored.clj | (ns packt-clj.monitored)
(defn wrap-fn-body [fn-name tx-fn b]
(let [arg-list (first b)
fn-body (rest b)]
(when-not (first (filter #(= % 'client-id) arg-list))
(throw (ex-info "Missing client-id argument" {})))
`(~arg-list
(let [start-time# (System/nanoTime)]
(try
(let [result# (do ~@fn-body)]
(~tx-fn {:name ~(name fn-name)
:client-id ~'client-id
:status :complete
:start-time start-time#
:end-time (System/nanoTime)})
result#)
(catch Exception e#
(~tx-fn {:name ~(name fn-name)
:client-id ~'client-id
:status :error
:start-time start-time#
:end-time (System/nanoTime)})
(throw e#)))))))
(defmacro defmonitored
[fn-name tx-fn & args-and-body]
(let [pre-arg-list (take-while (complement sequential?) args-and-body)
fn-content (drop-while (complement sequential?) args-and-body)
fn-bodies (if (vector? (first fn-content))
`(~fn-content)
fn-content)]
`(defn ~fn-name ~@pre-arg-list
~@(map (partial wrap-fn-body fn-name tx-fn) fn-bodies))))
| null | https://raw.githubusercontent.com/PacktWorkshops/The-Clojure-Workshop/3d309bb0e46a41ce2c93737870433b47ce0ba6a2/Chapter11/Exercise11.04/monitored.clj | clojure | (ns packt-clj.monitored)
(defn wrap-fn-body [fn-name tx-fn b]
(let [arg-list (first b)
fn-body (rest b)]
(when-not (first (filter #(= % 'client-id) arg-list))
(throw (ex-info "Missing client-id argument" {})))
`(~arg-list
(let [start-time# (System/nanoTime)]
(try
(let [result# (do ~@fn-body)]
(~tx-fn {:name ~(name fn-name)
:client-id ~'client-id
:status :complete
:start-time start-time#
:end-time (System/nanoTime)})
result#)
(catch Exception e#
(~tx-fn {:name ~(name fn-name)
:client-id ~'client-id
:status :error
:start-time start-time#
:end-time (System/nanoTime)})
(throw e#)))))))
(defmacro defmonitored
[fn-name tx-fn & args-and-body]
(let [pre-arg-list (take-while (complement sequential?) args-and-body)
fn-content (drop-while (complement sequential?) args-and-body)
fn-bodies (if (vector? (first fn-content))
`(~fn-content)
fn-content)]
`(defn ~fn-name ~@pre-arg-list
~@(map (partial wrap-fn-body fn-name tx-fn) fn-bodies))))
| |
b071ca097f49b2286d6148942c188b69ea679ea624744488cd16f55eaf2c7142 | kawu/concraft-pl | Polish.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module NLP.Concraft.Polish
(
-- * Model
C.Concraft
, C.saveModel
, C.loadModel
-- * Tagging
, tag
, marginals
-- * Analysis
, macaPar
-- * Training
, TrainConf (..)
, train
-- * Pruning
, C.prune
-- -- * Analysis
-- , anaSent
-- , reAnaPar
) where
import Prelude hiding (Word)
import Control.Applicative ((<$>))
import qualified Data.Text.Lazy as L
import qualified Data.Set as S
import qualified Data.Tagset.Positional as P
import qualified Numeric.SGD as SGD
import qualified NLP.Concraft.Morphosyntax as X
import qualified NLP.Concraft.Schema as S
import NLP.Concraft.Schema (SchemaConf(..), entry, entryWith)
import qualified NLP.Concraft.Guess as G
import qualified NLP.Concraft.Disamb as D
import qualified NLP.Concraft as C
import qualified NLP.Concraft . Analysis as A
import NLP.Concraft.Polish.Morphosyntax hiding (tag)
import NLP.Concraft.Polish.Maca
-------------------------------------------------
-- Default configuration
-------------------------------------------------
-- | Default configuration for the guessing observation schema.
guessSchemaDefault :: SchemaConf
guessSchemaDefault = S.nullConf
{ lowPrefixesC = entryWith [1, 2] [0]
, lowSuffixesC = entryWith [1, 2] [0]
, knownC = entry [0]
, begPackedC = entry [0] }
-- | Default configuration for the guessing observation schema.
disambSchemaDefault :: SchemaConf
disambSchemaDefault = S.nullConf
{ lowOrthC = entry [-2, -1, 0, 1]
, lowPrefixesC = oov $ entryWith [1, 2, 3] [0]
, lowSuffixesC = oov $ entryWith [1, 2, 3] [0]
, begPackedC = oov $ entry [0] }
where
oov (Just body) = Just $ body { S.oovOnly = True }
oov Nothing = Nothing
-- | Default tiered tagging configuration.
tiersDefault :: [D.Tier]
tiersDefault =
[tier1, tier2]
where
tier1 = D.Tier True False $ S.fromList ["cas", "per"]
tier2 = D.Tier False False $ S.fromList
[ "nmb", "gnd", "deg", "asp" , "ngt", "acm"
, "acn", "ppr", "agg", "vlc", "dot" ]
-------------------------------------------------
-- Tagging
-------------------------------------------------
-- | Tag the analysed sentence.
tag :: C.Concraft -> Sent Tag -> Sent Tag
tag concraft sent =
[ select' gs t seg
| (seg, gs, t) <- zip3 sent gss ts ]
where
tagset = C.tagset concraft
packed = packSent tagset sent
tagged = C.tag concraft packed
gss = map (map showTag . S.toList . fst) tagged
ts = map (showTag . snd) tagged
showTag = P.showTag tagset
-- | Tag the sentence with marginal probabilities.
marginals :: C.Concraft -> Sent Tag -> Sent Tag
marginals concraft sent
= map (uncurry selectWMap)
$ zip wmaps sent
where
tagset = C.tagset concraft
packed = packSent tagset sent
wmaps = map
(X.mapWMap showTag)
(C.marginals concraft packed)
showTag = P.showTag tagset
-------------------------------------------------
-- Training
-------------------------------------------------
-- | Training configuration.
data TrainConf = TrainConf {
-- | Tagset.
tagset :: P.Tagset
-- | SGD parameters.
, sgdArgs :: SGD.SgdArgs
-- | Perform reanalysis.
, reana :: Bool
-- | Store SGD dataset on disk.
, onDisk :: Bool
-- | Numer of guessed tags for each word.
, guessNum :: Int
-- | `G.r0T` parameter.
, r0 :: G.R0T }
-- | Train concraft model.
TODO : It should be possible to supply the two training procedures with
-- different SGD arguments.
train
:: TrainConf
-> IO [SentO Tag] -- ^ Training data
-> IO [SentO Tag] -- ^ Evaluation data
-> IO C.Concraft
train TrainConf{..} train0 eval0 = do
pool <- newMacaPool 1
let ana = anaSent tagset pool
train1 = map (packSentO tagset) <$> train0
eval1 = map (packSentO tagset) <$> eval0
if reana
then doReana ana train1 eval1
else noReana train1 eval1
where
doReana ana = C.reAnaTrain tagset ana guessNum guessConf disambConf
noReana tr ev = C.train tagset guessNum guessConf disambConf
(map X.segs <$> tr) (map X.segs <$> ev)
guessConf = G.TrainConf guessSchemaDefault sgdArgs onDisk r0
disambConf = D.TrainConf tiersDefault disambSchemaDefault sgdArgs onDisk
-------------------------------------------------
-- Re-analysis
-------------------------------------------------
| Analyse the given sentence with .
anaSent : : MacaPool - > L.Text - > IO ( Sent Tag )
anaSent :: P.Tagset -> MacaPool -> L.Text -> IO (X.Sent Word P.Tag)
anaSent tagset pool
= fmap (packSent tagset . concat)
. macaPar pool . L.toStrict
-- -- | Reanalyse the input paragraph (lazy IO).
-- reAnaPar :: P.Tagset -> [SentO Tag] -> IO [Sent Tag]
-- reAnaPar tagset inp = do
-- pool <- newMacaPool 1
A.reAnaPar tagset ( anaSent pool ) inp
| null | https://raw.githubusercontent.com/kawu/concraft-pl/1fca1d1e51da751a8ae566395b0ebbfe7ce69bab/src/NLP/Concraft/Polish.hs | haskell | # LANGUAGE OverloadedStrings #
* Model
* Tagging
* Analysis
* Training
* Pruning
-- * Analysis
, anaSent
, reAnaPar
-----------------------------------------------
Default configuration
-----------------------------------------------
| Default configuration for the guessing observation schema.
| Default configuration for the guessing observation schema.
| Default tiered tagging configuration.
-----------------------------------------------
Tagging
-----------------------------------------------
| Tag the analysed sentence.
| Tag the sentence with marginal probabilities.
-----------------------------------------------
Training
-----------------------------------------------
| Training configuration.
| Tagset.
| SGD parameters.
| Perform reanalysis.
| Store SGD dataset on disk.
| Numer of guessed tags for each word.
| `G.r0T` parameter.
| Train concraft model.
different SGD arguments.
^ Training data
^ Evaluation data
-----------------------------------------------
Re-analysis
-----------------------------------------------
-- | Reanalyse the input paragraph (lazy IO).
reAnaPar :: P.Tagset -> [SentO Tag] -> IO [Sent Tag]
reAnaPar tagset inp = do
pool <- newMacaPool 1 | # LANGUAGE RecordWildCards #
module NLP.Concraft.Polish
(
C.Concraft
, C.saveModel
, C.loadModel
, tag
, marginals
, macaPar
, TrainConf (..)
, train
, C.prune
) where
import Prelude hiding (Word)
import Control.Applicative ((<$>))
import qualified Data.Text.Lazy as L
import qualified Data.Set as S
import qualified Data.Tagset.Positional as P
import qualified Numeric.SGD as SGD
import qualified NLP.Concraft.Morphosyntax as X
import qualified NLP.Concraft.Schema as S
import NLP.Concraft.Schema (SchemaConf(..), entry, entryWith)
import qualified NLP.Concraft.Guess as G
import qualified NLP.Concraft.Disamb as D
import qualified NLP.Concraft as C
import qualified NLP.Concraft . Analysis as A
import NLP.Concraft.Polish.Morphosyntax hiding (tag)
import NLP.Concraft.Polish.Maca
guessSchemaDefault :: SchemaConf
guessSchemaDefault = S.nullConf
{ lowPrefixesC = entryWith [1, 2] [0]
, lowSuffixesC = entryWith [1, 2] [0]
, knownC = entry [0]
, begPackedC = entry [0] }
disambSchemaDefault :: SchemaConf
disambSchemaDefault = S.nullConf
{ lowOrthC = entry [-2, -1, 0, 1]
, lowPrefixesC = oov $ entryWith [1, 2, 3] [0]
, lowSuffixesC = oov $ entryWith [1, 2, 3] [0]
, begPackedC = oov $ entry [0] }
where
oov (Just body) = Just $ body { S.oovOnly = True }
oov Nothing = Nothing
tiersDefault :: [D.Tier]
tiersDefault =
[tier1, tier2]
where
tier1 = D.Tier True False $ S.fromList ["cas", "per"]
tier2 = D.Tier False False $ S.fromList
[ "nmb", "gnd", "deg", "asp" , "ngt", "acm"
, "acn", "ppr", "agg", "vlc", "dot" ]
tag :: C.Concraft -> Sent Tag -> Sent Tag
tag concraft sent =
[ select' gs t seg
| (seg, gs, t) <- zip3 sent gss ts ]
where
tagset = C.tagset concraft
packed = packSent tagset sent
tagged = C.tag concraft packed
gss = map (map showTag . S.toList . fst) tagged
ts = map (showTag . snd) tagged
showTag = P.showTag tagset
marginals :: C.Concraft -> Sent Tag -> Sent Tag
marginals concraft sent
= map (uncurry selectWMap)
$ zip wmaps sent
where
tagset = C.tagset concraft
packed = packSent tagset sent
wmaps = map
(X.mapWMap showTag)
(C.marginals concraft packed)
showTag = P.showTag tagset
data TrainConf = TrainConf {
tagset :: P.Tagset
, sgdArgs :: SGD.SgdArgs
, reana :: Bool
, onDisk :: Bool
, guessNum :: Int
, r0 :: G.R0T }
TODO : It should be possible to supply the two training procedures with
train
:: TrainConf
-> IO C.Concraft
train TrainConf{..} train0 eval0 = do
pool <- newMacaPool 1
let ana = anaSent tagset pool
train1 = map (packSentO tagset) <$> train0
eval1 = map (packSentO tagset) <$> eval0
if reana
then doReana ana train1 eval1
else noReana train1 eval1
where
doReana ana = C.reAnaTrain tagset ana guessNum guessConf disambConf
noReana tr ev = C.train tagset guessNum guessConf disambConf
(map X.segs <$> tr) (map X.segs <$> ev)
guessConf = G.TrainConf guessSchemaDefault sgdArgs onDisk r0
disambConf = D.TrainConf tiersDefault disambSchemaDefault sgdArgs onDisk
| Analyse the given sentence with .
anaSent : : MacaPool - > L.Text - > IO ( Sent Tag )
anaSent :: P.Tagset -> MacaPool -> L.Text -> IO (X.Sent Word P.Tag)
anaSent tagset pool
= fmap (packSent tagset . concat)
. macaPar pool . L.toStrict
A.reAnaPar tagset ( anaSent pool ) inp
|
fa86e3a5f9654418e1afa1a7e4189d2d00c7332ba6097a7085270c47ac0dcbf4 | futurice/haskell-mega-repo | Haxl.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
module Google.Haxl where
import Control.Concurrent.ParallelIO.Local (parallel_, withPool)
import Futurice.Prelude
import Google
import Haxl.Core
import Prelude ()
newtype GoogleRequest a = GR (Req a) deriving (Show,Eq)
instance ShowP GoogleRequest where showp = show
instance Hashable (GoogleRequest a) where
hashWithSalt salt (GR req) = hashWithSalt salt req
instance StateKey GoogleRequest where
data State GoogleRequest = GoogleState GoogleCredentials Manager
initDataSource
:: GoogleCredentials -- ^ Credentials to Google API
-> Manager -- ^ HTTP Manager
-> State GoogleRequest
initDataSource = GoogleState
instance DataSourceName GoogleRequest where
dataSourceName _ = "GoogleDataSource"
instance DataSource u GoogleRequest where
fetch = googleFetch
googleFetch :: State GoogleRequest -> Flags -> u -> PerformFetch GoogleRequest
googleFetch (GoogleState cred mgr) _f _u = SyncFetch $ batchFetch cred mgr
batchFetch :: GoogleCredentials -> Manager -> [BlockedFetch GoogleRequest] -> IO ()
batchFetch cred mgr fetches =
withPool 10 $ \pool ->
parallel_ pool (doFetch cred mgr <$> fetches)
doFetch :: GoogleCredentials -> Manager -> BlockedFetch GoogleRequest -> IO ()
doFetch cred mgr (BlockedFetch (GR r) v) = do
res <- evalGoogleReqIO cred mgr r
putSuccess v res
events :: ReadOnlyScope -> Day -> Day -> Text -> GenHaxl u w [Event]
events readonly x y z = dataFetch $ GR $ ReqEvents readonly x y z
calendarResources :: ReadOnlyScope -> GenHaxl u w [CalendarResource]
calendarResources = dataFetch . GR . ReqCalendarResources
request :: (Show a, Typeable a) => Req a -> GenHaxl u w a
request = dataFetch . GR
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/haxl-fxtra/src/Google/Haxl.hs | haskell | ^ Credentials to Google API
^ HTTP Manager | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
module Google.Haxl where
import Control.Concurrent.ParallelIO.Local (parallel_, withPool)
import Futurice.Prelude
import Google
import Haxl.Core
import Prelude ()
newtype GoogleRequest a = GR (Req a) deriving (Show,Eq)
instance ShowP GoogleRequest where showp = show
instance Hashable (GoogleRequest a) where
hashWithSalt salt (GR req) = hashWithSalt salt req
instance StateKey GoogleRequest where
data State GoogleRequest = GoogleState GoogleCredentials Manager
initDataSource
-> State GoogleRequest
initDataSource = GoogleState
instance DataSourceName GoogleRequest where
dataSourceName _ = "GoogleDataSource"
instance DataSource u GoogleRequest where
fetch = googleFetch
googleFetch :: State GoogleRequest -> Flags -> u -> PerformFetch GoogleRequest
googleFetch (GoogleState cred mgr) _f _u = SyncFetch $ batchFetch cred mgr
batchFetch :: GoogleCredentials -> Manager -> [BlockedFetch GoogleRequest] -> IO ()
batchFetch cred mgr fetches =
withPool 10 $ \pool ->
parallel_ pool (doFetch cred mgr <$> fetches)
doFetch :: GoogleCredentials -> Manager -> BlockedFetch GoogleRequest -> IO ()
doFetch cred mgr (BlockedFetch (GR r) v) = do
res <- evalGoogleReqIO cred mgr r
putSuccess v res
events :: ReadOnlyScope -> Day -> Day -> Text -> GenHaxl u w [Event]
events readonly x y z = dataFetch $ GR $ ReqEvents readonly x y z
calendarResources :: ReadOnlyScope -> GenHaxl u w [CalendarResource]
calendarResources = dataFetch . GR . ReqCalendarResources
request :: (Show a, Typeable a) => Req a -> GenHaxl u w a
request = dataFetch . GR
|
81ab3633bd1ad68c1af2d449ba603d253e7808e09ebdb4c6f40c0187a5081d5c | originrose/cortex | traverse_test.clj | (ns cortex.nn.traverse-test
(:require [clojure.test :refer :all]
[clojure.data :as data]
[clojure.core.matrix :as m]
[cortex.graph :as graph]
[cortex.loss.core :as loss]
[cortex.nn.traverse :as traverse]
[cortex.nn.layers :as layers]
[cortex.nn.network :as network]
[cortex.verify.nn.data :refer [CORN-DATA CORN-LABELS]]))
(def mnist-basic
[(layers/input 28 28 1)
(layers/linear 200)
(layers/relu)
(layers/linear 10)
(layers/softmax)])
(def mnist-description-with-toys
[(layers/input 28 28 1 :id :data)
(layers/multiplicative-dropout 0.1)
(layers/convolutional 5 0 1 20 :weights {:l1-regularization 0.001})
(layers/max-pooling 2 0 2)
(layers/relu)
(layers/dropout 0.75)
(layers/convolutional 5 0 1 50 :l2-regularization 0.01)
(layers/max-pooling 2 0 2)
(layers/relu)
(layers/dropout 0.75)
(layers/batch-normalization)
If you use this description put that at 1000
(layers/relu :id :feature :center-loss {:labels {:stream :output}
:label-indexes {:stream :output}
:label-inverse-counts {:stream :output}
:lambda 0.05
:alpha 0.9})
(layers/dropout 0.5)
(layers/linear 10)
(layers/softmax :id :output)])
(defn minimal-diff
[lhs rhs]
(->> (data/diff lhs rhs)
(take 2)
vec))
(defn build-big-description
[]
(network/linear-network mnist-description-with-toys))
(deftest big-description
(let [network (build-big-description)
training-traversal (traverse/training-traversal network)
inference-traversal (traverse/inference-traversal network)]
Adding in the parameters required for the center loss centers . 10 * 500 = 5000
extra parameters to a network with 434280 parameters
(is (= 439280 (graph/parameter-count (get network :compute-graph))))
(is (= 439280 (->> (get-in network [:compute-graph :buffers])
(map (comp m/ecount :buffer second))
(reduce +))))
(is (= :node-argument (-> (network/network->graph network)
(graph/get-node :l1-regularization-1)
(graph/get-node-argument :output)
(get :type))))
(is (= [nil nil]
(minimal-diff
[{:id :dropout-1, :incoming [{:stream :data}], :outgoing [{:id :dropout-1}]}
{:id :convolutional-1,
:incoming [{:id :dropout-1}],
:outgoing [{:id :convolutional-1}]}
{:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
{:id :relu-1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :relu-1}]}
{:id :dropout-2, :incoming [{:id :relu-1}], :outgoing [{:id :dropout-2}]}
{:id :convolutional-2, :incoming [{:id :dropout-2}], :outgoing [{:id :convolutional-2}]}
{:id :max-pooling-2, :incoming [{:id :convolutional-2}], :outgoing [{:id :max-pooling-2}]}
{:id :relu-2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :relu-2}]}
{:id :dropout-3, :incoming [{:id :relu-2}], :outgoing [{:id :dropout-3}]}
{:id :batch-normalization-1, :incoming [{:id :dropout-3}], :outgoing [{:id :batch-normalization-1}]}
{:id :linear-1, :incoming [{:id :batch-normalization-1}], :outgoing [{:id :linear-1}]}
{:id :feature, :incoming [{:id :linear-1}], :outgoing [{:id :feature}]}
{:id :dropout-4, :incoming [{:id :feature}], :outgoing [{:id :dropout-4}]}
{:id :linear-2, :incoming [{:id :dropout-4}], :outgoing [{:id :linear-2}]}
{:id :output, :incoming [{:id :linear-2}], :outgoing [{:id :output}]}]
(get training-traversal :forward))))
(is (= [nil nil]
(minimal-diff
{{:id :batch-normalization-1} {:dimension {:channels 50, :height 4, :width 4}},
{:id :convolutional-1} {:dimension {:channels 20, :height 24, :width 24}},
{:id :convolutional-2} {:dimension {:channels 50, :height 8, :width 8}},
{:id :dropout-1} {:dimension {:channels 1, :height 28, :width 28}},
{:id :dropout-2} {:dimension {:channels 20, :height 12, :width 12}},
{:id :dropout-3} {:dimension {:channels 50, :height 4, :width 4}},
{:id :dropout-4} {:dimension {:channels 1, :height 1, :width 500}},
{:id :feature} {:dimension {:channels 1, :height 1, :width 500}},
{:id :linear-1} {:dimension {:channels 1, :height 1, :width 500}},
{:id :linear-2} {:dimension {:channels 1, :height 1, :width 10}},
{:id :max-pooling-1} {:dimension {:channels 20, :height 12, :width 12}},
{:id :max-pooling-2} {:dimension {:channels 50, :height 4, :width 4}},
{:id :output} {:dimension {:channels 1, :height 1, :width 10}},
{:id :relu-1} {:dimension {:channels 20, :height 12, :width 12}},
{:id :relu-2} {:dimension {:channels 50, :height 4, :width 4}},
{:stream :data} {:dimension {:channels 1, :height 28, :width 28}}
{:stream :output} {:dimension {:channels 1, :height 1, :width 10}}
{:stream {:stream :output,
:augmentation :cortex.loss.center/labels->inverse-counts}} {:dimension {}},
{:stream
{:stream :output,
:augmentation :cortex.loss.center/labels->indexes}} {:dimension {}}}
(get training-traversal :buffers))))
;;Using set below to make the output order independent. Loss terms are added so the definition
;;of the loss function is independent of order.
(is (= [nil nil]
(minimal-diff
(set [{:type :softmax-loss,
:output {:type :node-output, :node-id :output},
:labels {:type :stream, :stream :output},
:id :softmax-loss-1}
{:type :center-loss,
:alpha 0.9,
:labels {:stream :output},
:label-indexes {:stream :output},
:label-inverse-counts {:stream :output},
:lambda 0.05,
:output {:type :node-output, :node-id :feature},
:id :center-loss-1,
:centers {:buffer-id :center-loss-1-centers-1}}
{:type :l2-regularization,
:lambda 0.01,
:output {:type :node-output, :node-id :convolutional-2},
:id :l2-regularization-1}
{:type :l1-regularization,
:lambda 0.001,
:output
{:type :node-argument, :node-id :convolutional-1, :argument :weights},
:id :l1-regularization-1}])
(network/loss-function network))))
(is #{{:type :softmax-loss,
:output {:type :node-output, :node-id :output},
:labels {:type :stream, :stream :output},
:id :softmax-loss-1}
{:type :center-loss,
:alpha 0.9,
:labels {:stream :output},
:label-indexes {:stream :output},
:label-inverse-counts {:stream :output},
:lambda 0.05,
:output {:type :node-output, :node-id :feature},
:id :center-loss-1,
:centers {:buffer-id :center-loss-1-centers-1}}
{:type :l2-regularization,
:lambda 0.01,
:output {:type :node-output, :node-id :convolutional-2},
:id :l2-regularization-1}
{:type :l1-regularization,
:lambda 0.001,
:output
{:type :node-argument, :node-id :convolutional-1, :argument :weights},
:id :l1-regularization-1}}
(traverse/gradient-loss-function network training-traversal))
(is (= [nil nil]
(minimal-diff
[{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
{:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
{:id :relu-1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :relu-1}]}
{:id :convolutional-2, :incoming [{:id :relu-1}], :outgoing [{:id :convolutional-2}]}
{:id :max-pooling-2, :incoming [{:id :convolutional-2}], :outgoing [{:id :max-pooling-2}]}
{:id :relu-2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :relu-2}]}
{:id :batch-normalization-1, :incoming [{:id :relu-2}], :outgoing [{:id :batch-normalization-1}]}
{:id :linear-1, :incoming [{:id :batch-normalization-1}], :outgoing [{:id :linear-1}]}
{:id :feature, :incoming [{:id :linear-1}], :outgoing [{:id :feature}]}
{:id :linear-2, :incoming [{:id :feature}], :outgoing [{:id :linear-2}]}
{:id :output, :incoming [{:id :linear-2}], :outgoing [{:id :output}]}]
(get inference-traversal :forward))))
(is (= [nil nil]
(minimal-diff
{{:id :batch-normalization-1} {:dimension {:channels 50, :height 4, :width 4}},
{:id :convolutional-1} {:dimension {:channels 20, :height 24, :width 24}},
{:id :convolutional-2} {:dimension {:channels 50, :height 8, :width 8}},
{:id :feature} {:dimension {:channels 1, :height 1, :width 500}},
{:id :linear-1} {:dimension {:channels 1, :height 1, :width 500}},
{:id :linear-2} {:dimension {:channels 1, :height 1, :width 10}},
{:id :max-pooling-1} {:dimension {:channels 20, :height 12, :width 12}},
{:id :max-pooling-2} {:dimension {:channels 50, :height 4, :width 4}},
{:id :output} {:dimension {:channels 1, :height 1, :width 10}},
{:id :relu-1} {:dimension {:channels 20, :height 12, :width 12}},
{:id :relu-2} {:dimension {:channels 50, :height 4, :width 4}},
{:stream :data} {:dimension {:channels 1, :height 28, :width 28}}}
(get inference-traversal :buffers))))
(is (= [nil nil]
(minimal-diff
{0 {:buf-list #{{{:id :convolutional-1} :buffer} {{:stream :data} :buffer}}, :max-size 11520},
1 {:buf-list #{{{:id :dropout-1} :buffer}}, :max-size 784},
2 {:buf-list #{{{:id :max-pooling-1} :buffer}}, :max-size 2880},
3 {:buf-list #{{{:id :relu-1} :buffer}}, :max-size 2880},
4 {:buf-list #{{{:id :dropout-1} :gradient} {{:id :dropout-2} :buffer} {{:id :max-pooling-1} :gradient}}, :max-size 2880},
5 {:buf-list #{{{:id :convolutional-2} :buffer}}, :max-size 3200},
6 {:buf-list #{{{:id :max-pooling-2} :buffer}}, :max-size 800},
7 {:buf-list #{{{:id :convolutional-1} :gradient} {{:id :convolutional-2} :gradient} {{:id :relu-1} :gradient} {{:id :relu-2} :buffer}}, :max-size 11520},
8 {:buf-list #{{{:id :dropout-3} :buffer}}, :max-size 800},
9 {:buf-list #{{{:id :batch-normalization-1} :buffer}}, :max-size 800},
10 {:buf-list #{{{:id :linear-1} :buffer}}, :max-size 500},
11 {:buf-list #{{{:id :feature} :buffer}}, :max-size 500},
12 {:buf-list #{{{:id :dropout-4} :buffer}}, :max-size 500},
13 {:buf-list #{{{:id :batch-normalization-1} :gradient} {{:id :feature} :gradient} {{:id :linear-2} :buffer} {{:id :relu-2} :gradient}}, :max-size 800},
14 {:buf-list #{{{:id :output} :buffer}}, :max-size 10},
15 {:buf-list #{{{:id :dropout-2} :gradient}
{{:id :dropout-3} :gradient}
{{:id :dropout-4} :gradient}
{{:id :linear-1} :gradient}
{{:id :max-pooling-2} :gradient}
{{:id :output} :gradient}},
:max-size 2880},
16 {:buf-list #{{{:id :linear-2} :gradient}}, :max-size 10}}
(:pools (traverse/generate-traversal-buffer-pools training-traversal)))))
(is (= [nil nil]
(minimal-diff
{0 {:buf-list #{{{:id :convolutional-2} :buffer}
{{:id :linear-1} :buffer}
{{:id :linear-2} :buffer}
{{:id :max-pooling-1} :buffer}
{{:id :relu-2} :buffer}
{{:stream :data} :buffer}},
:max-size 3200},
1 {:buf-list #{{{:id :batch-normalization-1} :buffer}
{{:id :convolutional-1} :buffer}
{{:id :feature} :buffer}
{{:id :max-pooling-2} :buffer}
{{:id :output} :buffer}
{{:id :relu-1} :buffer}},
:max-size 11520}}
(:pools (traverse/generate-traversal-buffer-pools inference-traversal)))))))
(deftest non-trainable-zero-attenuation
(let [num-non-trainable 9
src-desc (flatten mnist-description-with-toys)
non-trainable-layers (take num-non-trainable src-desc)
trainable-layers (drop num-non-trainable src-desc)
new-desc (concat (map (fn [layer] (assoc layer :learning-attenuation 0))
non-trainable-layers)
trainable-layers)
network (network/linear-network new-desc)
traversal (traverse/training-traversal network)]
(is (= [nil nil]
(minimal-diff
[{:id :output, :incoming [{:id :output}], :outgoing [{:id :linear-2}]}
{:id :linear-2, :incoming [{:id :linear-2}], :outgoing [{:id :dropout-4}]}
{:id :dropout-4, :incoming [{:id :dropout-4}], :outgoing [{:id :feature}]}
{:id :feature, :incoming [{:id :feature}], :outgoing [{:id :linear-1}]}
{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :batch-normalization-1}]}
{:id :batch-normalization-1,
:incoming [{:id :batch-normalization-1}],
:outgoing [{:id :dropout-3}]}]
(get traversal :backward))))
;;Note that loss functions on non-trainable 'parameters' do not survive however
;;loss functions on non-trainable 'layers' do because they change the input gradients
;;for previous layers.
(is (= #{{:type :softmax-loss,
:output {:type :node-output, :node-id :output},
:labels {:type :stream, :stream :output},
:id :softmax-loss-1}
{:type :center-loss,
:alpha 0.9,
:labels {:stream :output},
:label-indexes {:stream :output},
:label-inverse-counts {:stream :output},
:lambda 0.05,
:output {:type :node-output, :node-id :feature},
:id :center-loss-1,
:centers {:buffer-id :center-loss-1-centers-1}}}
(traverse/gradient-loss-function network traversal)))))
(deftest non-trainable-node-non-trainable
(let [num-non-trainable 9
src-desc (flatten mnist-description-with-toys)
non-trainable-layers (take num-non-trainable src-desc)
trainable-layers (drop num-non-trainable src-desc)
new-desc (concat (map (fn [layer] (assoc layer :non-trainable? true)) non-trainable-layers)
trainable-layers)
network (network/linear-network new-desc)
traversal (traverse/training-traversal network)]
(is (= [nil nil]
(minimal-diff
[{:id :output, :incoming [{:id :output}], :outgoing [{:id :linear-2}]}
{:id :linear-2, :incoming [{:id :linear-2}], :outgoing [{:id :dropout-4}]}
{:id :dropout-4, :incoming [{:id :dropout-4}], :outgoing [{:id :feature}]}
{:id :feature, :incoming [{:id :feature}], :outgoing [{:id :linear-1}]}
{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :batch-normalization-1}]}
{:id :batch-normalization-1,
:incoming [{:id :batch-normalization-1}],
:outgoing [{:id :dropout-3}]}]
(get traversal :backward))))
(is (= #{{:type :softmax-loss,
:output {:type :node-output, :node-id :output},
:labels {:type :stream, :stream :output},
:id :softmax-loss-1}
{:type :center-loss,
:alpha 0.9,
:labels {:stream :output},
:label-indexes {:stream :output},
:label-inverse-counts {:stream :output},
:lambda 0.05,
:id :center-loss-1,
:output {:type :node-output, :node-id :feature},
:centers {:buffer-id :center-loss-1-centers-1}}}
(traverse/gradient-loss-function network traversal)))))
(deftest appending-layers-to-network
(testing
"Ensures that a network built by piecing together a built-network and set of layers is
effectively equal to building a network with a complete description"
(let [layer-split 9
src-desc (flatten mnist-description-with-toys)
bottom-layers (take layer-split src-desc)
bottom-network (network/linear-network bottom-layers)
;; Added io binding and traversals to make sure that when
the network is modified and rebuilt , these 2 steps are also rebuilt correctly
top-layers (drop layer-split src-desc)
top-network (network/assoc-layers-to-network bottom-network top-layers)
traversal-after-stacking (traverse/training-traversal top-network)
original-network (network/linear-network mnist-description-with-toys)
original-traversal (traverse/training-traversal original-network)
inference-traversal-top (traverse/inference-traversal top-network)
inference-traversal-original (traverse/inference-traversal original-network)
training-traversal-top (traverse/training-traversal top-network)
training-traversal-original (traverse/training-traversal original-network)
compute-graph->buffer-id-size-fn #(reduce (fn [m [id {:keys [buffer]}]]
(assoc m id (m/ecount buffer))) {} %)]
(is (= [nil nil]
(minimal-diff
(get original-traversal :backward)
(get traversal-after-stacking :backward))))
(is (= [nil nil]
(minimal-diff
(get original-traversal :forward)
(get traversal-after-stacking :forward))))
(is (= [nil nil]
(minimal-diff
(get inference-traversal-top :buffers)
(get inference-traversal-original :buffers))))
(is (= [nil nil]
(minimal-diff
(get training-traversal-top :buffers)
(get training-traversal-original :buffers))))
(is (nil? (:verification-failures top-network)))
(is (= (graph/parameter-count (network/network->graph top-network))
(graph/parameter-count (network/network->graph original-network))))
(is (= (compute-graph->buffer-id-size-fn (get-in top-network [:compute-graph :buffers]))
(compute-graph->buffer-id-size-fn (get-in original-network [:compute-graph :buffers])))))))
(deftest remove-layers-from-network
(let [mnist-net (network/linear-network mnist-description-with-toys)
chopped-net (network/dissoc-layers-from-network mnist-net :dropout-4)]
(is (= #{:output :linear-2 :dropout-4 :softmax-loss-1}
(clojure.set/difference
(set (keys (get-in mnist-net [:compute-graph :nodes])))
(set (keys (get-in chopped-net [:compute-graph :nodes]))))))
(is (= #{[:feature :dropout-4] [:dropout-4 :linear-2] [:linear-2 :output]
[:output :softmax-loss-1]}
(clojure.set/difference
(set (get-in mnist-net [:compute-graph :edges]))
(set (get-in chopped-net [:compute-graph :edges])))))
(is (= #{:linear-2-bias-1 :linear-2-weights-1}
(clojure.set/difference
(set (keys (get-in mnist-net [:compute-graph :buffers])))
(set (keys (get-in chopped-net [:compute-graph :buffers]))))))))
(deftest inference-after-train
(let [network (build-big-description)]
(is (= #{:output}
(network/output-node-ids network :inference)))
(is (= #{:output :feature :convolutional-2}
(network/output-node-ids network :training)))))
(deftest concatenate-traversal-1
(let [network (-> (network/linear-network [(layers/input 10 10 10)
(layers/linear 500 :id :right)
(layers/input 500 1 1 :parents [] :id :left)
(layers/concatenate :parents [:left :right]
:id :concat)
(layers/linear 10)]))
train-traversal (traverse/training-traversal network)
inference-traversal (traverse/inference-traversal network)]
(is (= [nil nil]
(minimal-diff
[{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :concat}]}
{:id :concat, :incoming [{:id :concat}], :outgoing [{:stream :left} {:id :right}]}
{:id :right, :incoming [{:id :right}], :outgoing [{:stream :input-1}]}]
(get train-traversal :backward))))
(is (= [nil nil]
(minimal-diff
{{:id :concat} {:dimension {:channels 1, :height 1, :width 1000}},
{:id :linear-1} {:dimension {:channels 1, :height 1, :width 10}},
{:id :right} {:dimension {:channels 1, :height 1, :width 500}},
{:stream :input-1} {:dimension {:channels 10, :height 10, :width 10}},
{:stream :left} {:dimension {:channels 1, :height 1, :width 500}}
{:stream :linear-1} {:dimension {:channels 1, :height 1, :width 10}}}
(get train-traversal :buffers))))
(is (= [nil nil]
(minimal-diff
[{:id :right, :incoming [{:stream :input-1}], :outgoing [{:id :right}]}
{:id :concat, :incoming [{:stream :left} {:id :right}], :outgoing [{:id :concat}]}
{:id :linear-1, :incoming [{:id :concat}], :outgoing [{:id :linear-1}]}]
(get inference-traversal :forward))))))
(deftest concatenate-traversal-2
(let [train-traversal (-> (network/linear-network [(layers/input 10 10 10)
(layers/linear 500 :id :right)
(layers/input 500 1 1 :parents [] :id :left)
Switch the left and right nodes . Attempting to
;;ensure we don't have some hidden dependency upon
;;order of layer declaration.
(layers/concatenate :parents [:right :left]
:id :concat)
(layers/linear 10)])
traverse/training-traversal)]
(is (= [nil nil]
(minimal-diff
[{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :concat}]}
{:id :concat, :incoming [{:id :concat}], :outgoing [{:id :right} {:stream :left}]}
{:id :right, :incoming [{:id :right}], :outgoing [{:stream :input-1}]}]
(get train-traversal :backward))))
(is (= [nil nil]
(minimal-diff
{{:id :concat} {:dimension {:channels 1, :height 1, :width 1000}},
{:id :linear-1} {:dimension {:channels 1, :height 1, :width 10}},
{:id :right} {:dimension {:channels 1, :height 1, :width 500}},
{:stream :input-1} {:dimension {:channels 10, :height 10, :width 10}},
{:stream :left} {:dimension {:channels 1, :height 1, :width 500}}
{:stream :linear-1} {:dimension {:channels 1, :height 1, :width 10}}},
(get train-traversal :buffers))))))
;; Verifies split-layer traversal: an explicit split (:split) followed by an
;; auto-named split, checking that generated buffer ids (:split-1, :split-1-1,
;; :split-2, ...) do not collide and that each branch gets its own buffer.
(deftest split-traversal
  (let [train-traversal (-> (network/linear-network [(layers/input 50)
                                                     (layers/split :id :split)
                                                     ;;Check for buffer id collision
                                                     (layers/split)
                                                     (layers/linear 10 :id :double-split)
                                                     (layers/linear 20
                                                                    :parents [:split]
                                                                    :id :single-split)])
                            traverse/training-traversal)]
    ;; Forward pass: :split fans out to both the nested split and :single-split.
    (is (= [nil nil]
           (minimal-diff
            [{:id :split, :incoming [{:stream :input-1}], :outgoing [{:id :split-1} {:id :split-2}]}
             {:id :split-1, :incoming [{:id :split-1}], :outgoing [{:id :split-1-1}]}
             {:id :double-split, :incoming [{:id :split-1-1}], :outgoing [{:id :double-split}]}
             {:id :single-split, :incoming [{:id :split-2}], :outgoing [{:id :single-split}]}]
            (get train-traversal :forward))))
    ;; Buffer map: each generated split output carries the consumer's id in its dimension.
    (is (= [nil nil]
           (minimal-diff
            {{:id :double-split} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :single-split} {:dimension {:channels 1, :height 1, :width 20}},
             {:id :split-1} {:dimension {:channels 1, :height 1, :id :split-1, :width 50}},
             {:id :split-1-1} {:dimension {:channels 1, :height 1, :id :double-split, :width 50}},
             {:id :split-2} {:dimension {:channels 1, :height 1, :id :single-split, :width 50}},
             {:stream :input-1} {:dimension {:channels 1, :height 1, :width 50}}
             {:stream :single-split} {:dimension {:channels 1, :height 1, :width 20}},
             {:stream :double-split} {:dimension {:channels 1, :height 1, :width 10}}}
            (get train-traversal :buffers))))))
;; Convolution/pooling description used by the input-resize tests below.
;; The final 1x1 convolution (:chop-here) is the chop point for transfer
;; learning; :label is the softmax output.
(def resizable-net
  [(layers/input 28 28 1 :id :data)
   (layers/convolutional 5 2 1 20)
   (layers/max-pooling 2 0 2)
   (layers/dropout 0.9)
   (layers/convolutional 3 1 1 50)
   (layers/max-pooling 2 0 2)
   (layers/convolutional 3 1 1 100)
   (layers/relu)
   (layers/convolutional 7 0 7 400)
   (layers/dropout 0.7)
   (layers/relu)
   (layers/convolutional 1 0 1 10 :id :chop-here)
   (layers/softmax :id :label)])
(defn mnist-yolo-network
  "Builds the yolo test network: resizes `resizable-net`'s input to 118x118x1,
  removes everything from :chop-here onward, marks every remaining node
  :non-trainable?, and appends a yolo2 linear head with id :label."
  []
  (let [resized (network/resize-input (network/linear-network resizable-net) 118 118 1)
        chopped (network/dissoc-layers-from-network resized :chop-here)
        ;; Freeze every surviving compute-graph node so only the new head trains.
        freeze-all (fn [graph-nodes]
                     (into {}
                           (map (fn [[node-id node]]
                                  [node-id (assoc node :non-trainable? true)]))
                           graph-nodes))
        frozen (update-in chopped [:compute-graph :nodes] freeze-all)
        head-layers (map first [(layers/linear 480
                                               :id :label
                                               :yolo2 {:grid-x 2
                                                       :grid-y 2
                                                       :anchors [[1 1] [2 2]]
                                                       :labels {:type :stream
                                                                :stream :label}})])]
    (network/assoc-layers-to-network frozen head-layers)))
;; Every frozen (non-trainable) node should be tagged :pass :inference in the
;; training traversal; only the appended :label head runs a full training pass.
(deftest freeze-network
  (let [test-net (mnist-yolo-network)
        traversal (traverse/training-traversal test-net)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1 :incoming [{:stream :data}] :outgoing [{:id :convolutional-1}] :pass :inference}
             {:id :max-pooling-1 :incoming [{:id :convolutional-1}] :outgoing [{:id :max-pooling-1}] :pass :inference}
             {:id :convolutional-2 :incoming [{:id :max-pooling-1}] :outgoing [{:id :convolutional-2}] :pass :inference}
             {:id :max-pooling-2 :incoming [{:id :convolutional-2}] :outgoing [{:id :max-pooling-2}] :pass :inference}
             {:id :convolutional-3 :incoming [{:id :max-pooling-2}] :outgoing [{:id :convolutional-3}] :pass :inference}
             {:id :relu-1 :incoming [{:id :convolutional-3}] :outgoing [{:id :relu-1}] :pass :inference}
             {:id :convolutional-4 :incoming [{:id :relu-1}] :outgoing [{:id :convolutional-4}] :pass :inference}
             {:id :relu-2 :incoming [{:id :convolutional-4}] :outgoing [{:id :relu-2}] :pass :inference}
             {:id :label :incoming [{:id :relu-2}] :outgoing [{:id :label}]}]
            (get traversal :forward))))))
;; Residual-style description: three split/conv/relu/join skip-connection
;; blocks (:s1/:r1, :s2/:r2, :s3/:r3) separated by max-pooling, ending in a
;; linear (:chop-here) + softmax head.
(def resnet-like-net
  [(layers/input 28 28 1 :id :data)
   (layers/convolutional 3 0 1 20)
   (layers/max-pooling 2 0 2)
   (layers/split :id :s1)
   (layers/convolutional 3 1 1 20)
   (layers/relu :id :r1)
   (layers/join :parents [:r1 :s1])
   (layers/max-pooling 2 0 2)
   (layers/split :id :s2)
   (layers/convolutional 3 1 1 20)
   (layers/relu :id :r2)
   (layers/join :parents [:r2 :s2])
   (layers/max-pooling 2 0 2)
   (layers/split :id :s3)
   (layers/convolutional 3 1 1 20)
   (layers/relu :id :r3)
   (layers/join :parents [:r3 :s3])
   (layers/linear 10 :id :chop-here)
   (layers/softmax)])
;; Compares inference vs. training traversals of the resnet-like net:
;; inference elides the split nodes entirely (joins read the producer
;; directly), while training materializes :s1/:s2/:s3 so gradients can fan out.
(deftest basic-resnet-traverse
  (let [test-net (network/linear-network resnet-like-net)
        infer-traversal (traverse/inference-traversal test-net)
        train-traversal (traverse/training-traversal test-net)]
    ;;Note the lack of any splits in the forward pass. If we aren't generating gradients then the splits aren't
    ;;necessary.
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
             {:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
             {:id :convolutional-2, :incoming [{:id :max-pooling-1}], :outgoing [{:id :convolutional-2}]}
             {:id :r1, :incoming [{:id :convolutional-2}], :outgoing [{:id :r1}]}
             {:id :join-1, :incoming [{:id :r1} {:id :max-pooling-1}], :outgoing [{:id :join-1}]}
             {:id :max-pooling-2, :incoming [{:id :join-1}], :outgoing [{:id :max-pooling-2}]}
             {:id :convolutional-3, :incoming [{:id :max-pooling-2}], :outgoing [{:id :convolutional-3}]}
             {:id :r2, :incoming [{:id :convolutional-3}], :outgoing [{:id :r2}]}
             {:id :join-2, :incoming [{:id :r2} {:id :max-pooling-2}], :outgoing [{:id :join-2}]}
             {:id :max-pooling-3, :incoming [{:id :join-2}], :outgoing [{:id :max-pooling-3}]}
             {:id :convolutional-4, :incoming [{:id :max-pooling-3}], :outgoing [{:id :convolutional-4}]}
             {:id :r3, :incoming [{:id :convolutional-4}], :outgoing [{:id :r3}]}
             {:id :join-3, :incoming [{:id :r3} {:id :max-pooling-3}], :outgoing [{:id :join-3}]}
             {:id :chop-here, :incoming [{:id :join-3}], :outgoing [{:id :chop-here}]}
             {:id :softmax-1, :incoming [{:id :chop-here}], :outgoing [{:id :softmax-1}]}]
            (get infer-traversal :forward))))
    ;; Training forward pass includes the split nodes with generated branch ids.
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
             {:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
             {:id :s1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :s1-1} {:id :s1-2}]}
             {:id :convolutional-2, :incoming [{:id :s1-1}], :outgoing [{:id :convolutional-2}]}
             {:id :r1, :incoming [{:id :convolutional-2}], :outgoing [{:id :r1}]}
             {:id :join-1, :incoming [{:id :r1} {:id :s1-2}], :outgoing [{:id :join-1}]}
             {:id :max-pooling-2, :incoming [{:id :join-1}], :outgoing [{:id :max-pooling-2}]}
             {:id :s2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :s2-1} {:id :s2-2}]}
             {:id :convolutional-3, :incoming [{:id :s2-1}], :outgoing [{:id :convolutional-3}]}
             {:id :r2, :incoming [{:id :convolutional-3}], :outgoing [{:id :r2}]}
             {:id :join-2, :incoming [{:id :r2} {:id :s2-2}], :outgoing [{:id :join-2}]}
             {:id :max-pooling-3, :incoming [{:id :join-2}], :outgoing [{:id :max-pooling-3}]}
             {:id :s3, :incoming [{:id :max-pooling-3}], :outgoing [{:id :s3-1} {:id :s3-2}]}
             {:id :convolutional-4, :incoming [{:id :s3-1}], :outgoing [{:id :convolutional-4}]}
             {:id :r3, :incoming [{:id :convolutional-4}], :outgoing [{:id :r3}]}
             {:id :join-3, :incoming [{:id :r3} {:id :s3-2}], :outgoing [{:id :join-3}]}
             {:id :chop-here, :incoming [{:id :join-3}], :outgoing [{:id :chop-here}]}
             {:id :softmax-1, :incoming [{:id :chop-here}], :outgoing [{:id :softmax-1}]}]
            (get train-traversal :forward))))))
(defn resnet-retrain-net
  "Builds the resnet-like test network, drops everything from :chop-here
  onward, freezes all remaining nodes (:non-trainable? true), and appends a
  fresh linear(50)+softmax head for retraining."
  []
  (let [base-net (network/linear-network resnet-like-net)
        chopped-net (network/dissoc-layers-from-network base-net :chop-here)
        ;;Freeze all the nodes
        frozen-net (update-in chopped-net [:compute-graph :nodes]
                              (fn [nodes]
                                (into {}
                                      (map (fn [[node-id node]]
                                             [node-id (assoc node :non-trainable? true)]))
                                      nodes)))
        head-layers (map first [(layers/linear 50)
                                (layers/softmax)])]
    (network/assoc-layers-to-network frozen-net head-layers)))
;; Frozen resnet body should run as :pass :inference; only the appended
;; linear/softmax head trains. Also pins the shared-buffer pool assignment
;; produced by generate-traversal-buffer-pools for this traversal.
(deftest resnet-retrain-traverse
  (let [test-net (resnet-retrain-net)
        train-traversal (traverse/training-traversal test-net)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1 :incoming [{:stream :data}]
              :outgoing [{:id :convolutional-1}] :pass :inference}
             {:id :max-pooling-1 :incoming [{:id :convolutional-1}]
              :outgoing [{:id :max-pooling-1}] :pass :inference}
             {:id :convolutional-2 :incoming [{:id :max-pooling-1}]
              :outgoing [{:id :convolutional-2}] :pass :inference}
             {:id :r1 :incoming [{:id :convolutional-2}] :outgoing [{:id :r1}] :pass :inference}
             {:id :join-1 :incoming [{:id :r1} {:id :max-pooling-1}]
              :outgoing [{:id :join-1}] :pass :inference}
             {:id :max-pooling-2 :incoming [{:id :join-1}]
              :outgoing [{:id :max-pooling-2}] :pass :inference}
             {:id :convolutional-3 :incoming [{:id :max-pooling-2}]
              :outgoing [{:id :convolutional-3}] :pass :inference}
             {:id :r2 :incoming [{:id :convolutional-3}] :outgoing [{:id :r2}] :pass :inference}
             {:id :join-2 :incoming [{:id :r2} {:id :max-pooling-2}]
              :outgoing [{:id :join-2}] :pass :inference}
             {:id :max-pooling-3 :incoming [{:id :join-2}]
              :outgoing [{:id :max-pooling-3}] :pass :inference}
             {:id :convolutional-4 :incoming [{:id :max-pooling-3}]
              :outgoing [{:id :convolutional-4}] :pass :inference}
             {:id :r3 :incoming [{:id :convolutional-4}]
              :outgoing [{:id :r3}] :pass :inference}
             {:id :join-3 :incoming [{:id :r3} {:id :max-pooling-3}]
              :outgoing [{:id :join-3}] :pass :inference}
             {:id :linear-1 :incoming [{:id :join-3}]
              :outgoing [{:id :linear-1}]}
             {:id :softmax-1 :incoming [{:id :linear-1}]
              :outgoing [{:id :softmax-1}]}]
            (get train-traversal :forward))))
    ;; Buffer pools: pool key -> set of {buffer-key kind} members sharing one
    ;; allocation, sized by the largest member (:max-size, in elements).
    (is (= [nil nil]
           (minimal-diff
            {0 {:buf-list #{{{:id :linear-1} :buffer} {{:id :max-pooling-1} :buffer}
                            {{:id :max-pooling-2} :buffer} {{:id :max-pooling-3} :buffer}
                            {{:stream :data} :buffer}}
                :max-size 3380}
             1 {:buf-list #{{{:id :convolutional-1} :buffer}
                            {{:id :convolutional-2} :buffer}
                            {{:id :convolutional-3} :buffer}
                            {{:id :convolutional-4} :buffer}
                            {{:id :join-1} :buffer}
                            {{:id :join-2} :buffer}
                            {{:id :join-3} :buffer}}
                :max-size 13520}
             2 {:buf-list #{{{:id :r1} :buffer} {{:id :r2} :buffer} {{:id :r3} :buffer} {{:id :softmax-1} :buffer}} :max-size 3380}
             3 {:buf-list #{{{:id :join-3} :gradient} {{:id :softmax-1} :gradient}} :max-size 320}
             4 {:buf-list #{{{:id :linear-1} :gradient}} :max-size 50}}
            (:pools (traverse/generate-traversal-buffer-pools train-traversal)))))))
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/test/clj/cortex/nn/traverse_test.clj | clojure | Using set below to make the output order independent. Loss terms are added so the definition
of the loss function is independent of order.
Note that loss functions on non-trainable 'parameters' do not survive however
loss functions on non-trainable 'layers' do because they change the input gradients
for previous layers.
Added io binding and traversals to make sure that when
ensure we don't have some hidden dependency upon
order of layer declaration.
Check for buffer id collision
Note the lack of any splits in the forward pass. If we aren't generating gradients then the splits aren't
necessary.
Freeze all the nodes | (ns cortex.nn.traverse-test
(:require [clojure.test :refer :all]
[clojure.data :as data]
[clojure.core.matrix :as m]
[cortex.graph :as graph]
[cortex.loss.core :as loss]
[cortex.nn.traverse :as traverse]
[cortex.nn.layers :as layers]
[cortex.nn.network :as network]
[cortex.verify.nn.data :refer [CORN-DATA CORN-LABELS]]))
;; Minimal MNIST MLP description used by the basic traversal tests.
(def mnist-basic
  [(layers/input 28 28 1)
   (layers/linear 200)
   (layers/relu)
   (layers/linear 10)
   (layers/softmax)])
;; MNIST description exercising assorted "toys": dropout variants, l1/l2
;; regularizers, batch-normalization, and a center loss on the :feature layer.
(def mnist-description-with-toys
  [(layers/input 28 28 1 :id :data)
   (layers/multiplicative-dropout 0.1)
   (layers/convolutional 5 0 1 20 :weights {:l1-regularization 0.001})
   (layers/max-pooling 2 0 2)
   (layers/relu)
   (layers/dropout 0.75)
   (layers/convolutional 5 0 1 50 :l2-regularization 0.01)
   (layers/max-pooling 2 0 2)
   (layers/relu)
   (layers/dropout 0.75)
   (layers/batch-normalization)
   ;;NOTE(review): extraction garbling had dropped the code on the next line,
   ;;leaving only its trailing comment text. Restored from the big-description
   ;;expectations (:linear-1 must emit width 500 between batch-normalization
   ;;and the :feature relu) — confirm against upstream.
   (layers/linear 500) ;;If you use this description put that at 1000
   (layers/relu :id :feature :center-loss {:labels {:stream :output}
                                           :label-indexes {:stream :output}
                                           :label-inverse-counts {:stream :output}
                                           :lambda 0.05
                                           :alpha 0.9})
   (layers/dropout 0.5)
   (layers/linear 10)
   (layers/softmax :id :output)])
(defn minimal-diff
  "Returns [things-only-in-lhs things-only-in-rhs] from clojure.data/diff,
  discarding the third (things-in-both) element. [nil nil] means lhs and rhs
  agree everywhere."
  [lhs rhs]
  (let [[only-lhs only-rhs _common] (data/diff lhs rhs)]
    [only-lhs only-rhs]))
(defn build-big-description
  "Builds the full mnist-description-with-toys network used by the tests below."
  []
  (network/linear-network mnist-description-with-toys))
;; End-to-end traversal checks for the "toys" network: parameter counts,
;; training/inference forward passes, buffer maps, loss functions, and
;; shared-buffer pool assignment.
;; Fixes: (1) two comment lines had lost their ";;" prefixes (bare prose
;; inside the deftest body — a syntax error) — restored; (2) the
;; gradient-loss-function assertion passed the expected set as the `is`
;; expression (always truthy) with the call as the failure *message*,
;; asserting nothing — rewritten as (is (= expected actual)) to match the
;; sibling tests below.
(deftest big-description
  (let [network (build-big-description)
        training-traversal (traverse/training-traversal network)
        inference-traversal (traverse/inference-traversal network)]
    ;;Adding in the parameters required for the center loss centers . 10 * 500 = 5000
    ;;extra parameters to a network with 434280 parameters
    (is (= 439280 (graph/parameter-count (get network :compute-graph))))
    (is (= 439280 (->> (get-in network [:compute-graph :buffers])
                       (map (comp m/ecount :buffer second))
                       (reduce +))))
    (is (= :node-argument (-> (network/network->graph network)
                              (graph/get-node :l1-regularization-1)
                              (graph/get-node-argument :output)
                              (get :type))))
    (is (= [nil nil]
           (minimal-diff
            [{:id :dropout-1, :incoming [{:stream :data}], :outgoing [{:id :dropout-1}]}
             {:id :convolutional-1,
              :incoming [{:id :dropout-1}],
              :outgoing [{:id :convolutional-1}]}
             {:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
             {:id :relu-1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :relu-1}]}
             {:id :dropout-2, :incoming [{:id :relu-1}], :outgoing [{:id :dropout-2}]}
             {:id :convolutional-2, :incoming [{:id :dropout-2}], :outgoing [{:id :convolutional-2}]}
             {:id :max-pooling-2, :incoming [{:id :convolutional-2}], :outgoing [{:id :max-pooling-2}]}
             {:id :relu-2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :relu-2}]}
             {:id :dropout-3, :incoming [{:id :relu-2}], :outgoing [{:id :dropout-3}]}
             {:id :batch-normalization-1, :incoming [{:id :dropout-3}], :outgoing [{:id :batch-normalization-1}]}
             {:id :linear-1, :incoming [{:id :batch-normalization-1}], :outgoing [{:id :linear-1}]}
             {:id :feature, :incoming [{:id :linear-1}], :outgoing [{:id :feature}]}
             {:id :dropout-4, :incoming [{:id :feature}], :outgoing [{:id :dropout-4}]}
             {:id :linear-2, :incoming [{:id :dropout-4}], :outgoing [{:id :linear-2}]}
             {:id :output, :incoming [{:id :linear-2}], :outgoing [{:id :output}]}]
            (get training-traversal :forward))))
    (is (= [nil nil]
           (minimal-diff
            {{:id :batch-normalization-1} {:dimension {:channels 50, :height 4, :width 4}},
             {:id :convolutional-1} {:dimension {:channels 20, :height 24, :width 24}},
             {:id :convolutional-2} {:dimension {:channels 50, :height 8, :width 8}},
             {:id :dropout-1} {:dimension {:channels 1, :height 28, :width 28}},
             {:id :dropout-2} {:dimension {:channels 20, :height 12, :width 12}},
             {:id :dropout-3} {:dimension {:channels 50, :height 4, :width 4}},
             {:id :dropout-4} {:dimension {:channels 1, :height 1, :width 500}},
             {:id :feature} {:dimension {:channels 1, :height 1, :width 500}},
             {:id :linear-1} {:dimension {:channels 1, :height 1, :width 500}},
             {:id :linear-2} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :max-pooling-1} {:dimension {:channels 20, :height 12, :width 12}},
             {:id :max-pooling-2} {:dimension {:channels 50, :height 4, :width 4}},
             {:id :output} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :relu-1} {:dimension {:channels 20, :height 12, :width 12}},
             {:id :relu-2} {:dimension {:channels 50, :height 4, :width 4}},
             {:stream :data} {:dimension {:channels 1, :height 28, :width 28}}
             {:stream :output} {:dimension {:channels 1, :height 1, :width 10}}
             {:stream {:stream :output,
                       :augmentation :cortex.loss.center/labels->inverse-counts}} {:dimension {}},
             {:stream
              {:stream :output,
               :augmentation :cortex.loss.center/labels->indexes}} {:dimension {}}}
            (get training-traversal :buffers))))
    ;;Using set below to make the output order independent. Loss terms are added so the definition
    ;;of the loss function is independent of order.
    (is (= [nil nil]
           (minimal-diff
            (set [{:type :softmax-loss,
                   :output {:type :node-output, :node-id :output},
                   :labels {:type :stream, :stream :output},
                   :id :softmax-loss-1}
                  {:type :center-loss,
                   :alpha 0.9,
                   :labels {:stream :output},
                   :label-indexes {:stream :output},
                   :label-inverse-counts {:stream :output},
                   :lambda 0.05,
                   :output {:type :node-output, :node-id :feature},
                   :id :center-loss-1,
                   :centers {:buffer-id :center-loss-1-centers-1}}
                  {:type :l2-regularization,
                   :lambda 0.01,
                   :output {:type :node-output, :node-id :convolutional-2},
                   :id :l2-regularization-1}
                  {:type :l1-regularization,
                   :lambda 0.001,
                   :output
                   {:type :node-argument, :node-id :convolutional-1, :argument :weights},
                   :id :l1-regularization-1}])
            (network/loss-function network))))
    (is (= #{{:type :softmax-loss,
              :output {:type :node-output, :node-id :output},
              :labels {:type :stream, :stream :output},
              :id :softmax-loss-1}
             {:type :center-loss,
              :alpha 0.9,
              :labels {:stream :output},
              :label-indexes {:stream :output},
              :label-inverse-counts {:stream :output},
              :lambda 0.05,
              :output {:type :node-output, :node-id :feature},
              :id :center-loss-1,
              :centers {:buffer-id :center-loss-1-centers-1}}
             {:type :l2-regularization,
              :lambda 0.01,
              :output {:type :node-output, :node-id :convolutional-2},
              :id :l2-regularization-1}
             {:type :l1-regularization,
              :lambda 0.001,
              :output
              {:type :node-argument, :node-id :convolutional-1, :argument :weights},
              :id :l1-regularization-1}}
           (traverse/gradient-loss-function network training-traversal)))
    ;; Inference forward pass: all dropout layers are elided.
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
             {:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
             {:id :relu-1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :relu-1}]}
             {:id :convolutional-2, :incoming [{:id :relu-1}], :outgoing [{:id :convolutional-2}]}
             {:id :max-pooling-2, :incoming [{:id :convolutional-2}], :outgoing [{:id :max-pooling-2}]}
             {:id :relu-2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :relu-2}]}
             {:id :batch-normalization-1, :incoming [{:id :relu-2}], :outgoing [{:id :batch-normalization-1}]}
             {:id :linear-1, :incoming [{:id :batch-normalization-1}], :outgoing [{:id :linear-1}]}
             {:id :feature, :incoming [{:id :linear-1}], :outgoing [{:id :feature}]}
             {:id :linear-2, :incoming [{:id :feature}], :outgoing [{:id :linear-2}]}
             {:id :output, :incoming [{:id :linear-2}], :outgoing [{:id :output}]}]
            (get inference-traversal :forward))))
    (is (= [nil nil]
           (minimal-diff
            {{:id :batch-normalization-1} {:dimension {:channels 50, :height 4, :width 4}},
             {:id :convolutional-1} {:dimension {:channels 20, :height 24, :width 24}},
             {:id :convolutional-2} {:dimension {:channels 50, :height 8, :width 8}},
             {:id :feature} {:dimension {:channels 1, :height 1, :width 500}},
             {:id :linear-1} {:dimension {:channels 1, :height 1, :width 500}},
             {:id :linear-2} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :max-pooling-1} {:dimension {:channels 20, :height 12, :width 12}},
             {:id :max-pooling-2} {:dimension {:channels 50, :height 4, :width 4}},
             {:id :output} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :relu-1} {:dimension {:channels 20, :height 12, :width 12}},
             {:id :relu-2} {:dimension {:channels 50, :height 4, :width 4}},
             {:stream :data} {:dimension {:channels 1, :height 28, :width 28}}}
            (get inference-traversal :buffers))))
    ;; Training buffer pools: pool index -> shared allocation members + size.
    (is (= [nil nil]
           (minimal-diff
            {0 {:buf-list #{{{:id :convolutional-1} :buffer} {{:stream :data} :buffer}}, :max-size 11520},
             1 {:buf-list #{{{:id :dropout-1} :buffer}}, :max-size 784},
             2 {:buf-list #{{{:id :max-pooling-1} :buffer}}, :max-size 2880},
             3 {:buf-list #{{{:id :relu-1} :buffer}}, :max-size 2880},
             4 {:buf-list #{{{:id :dropout-1} :gradient} {{:id :dropout-2} :buffer} {{:id :max-pooling-1} :gradient}}, :max-size 2880},
             5 {:buf-list #{{{:id :convolutional-2} :buffer}}, :max-size 3200},
             6 {:buf-list #{{{:id :max-pooling-2} :buffer}}, :max-size 800},
             7 {:buf-list #{{{:id :convolutional-1} :gradient} {{:id :convolutional-2} :gradient} {{:id :relu-1} :gradient} {{:id :relu-2} :buffer}}, :max-size 11520},
             8 {:buf-list #{{{:id :dropout-3} :buffer}}, :max-size 800},
             9 {:buf-list #{{{:id :batch-normalization-1} :buffer}}, :max-size 800},
             10 {:buf-list #{{{:id :linear-1} :buffer}}, :max-size 500},
             11 {:buf-list #{{{:id :feature} :buffer}}, :max-size 500},
             12 {:buf-list #{{{:id :dropout-4} :buffer}}, :max-size 500},
             13 {:buf-list #{{{:id :batch-normalization-1} :gradient} {{:id :feature} :gradient} {{:id :linear-2} :buffer} {{:id :relu-2} :gradient}}, :max-size 800},
             14 {:buf-list #{{{:id :output} :buffer}}, :max-size 10},
             15 {:buf-list #{{{:id :dropout-2} :gradient}
                             {{:id :dropout-3} :gradient}
                             {{:id :dropout-4} :gradient}
                             {{:id :linear-1} :gradient}
                             {{:id :max-pooling-2} :gradient}
                             {{:id :output} :gradient}},
                 :max-size 2880},
             16 {:buf-list #{{{:id :linear-2} :gradient}}, :max-size 10}}
            (:pools (traverse/generate-traversal-buffer-pools training-traversal)))))
    ;; Inference needs only two ping-pong pools.
    (is (= [nil nil]
           (minimal-diff
            {0 {:buf-list #{{{:id :convolutional-2} :buffer}
                            {{:id :linear-1} :buffer}
                            {{:id :linear-2} :buffer}
                            {{:id :max-pooling-1} :buffer}
                            {{:id :relu-2} :buffer}
                            {{:stream :data} :buffer}},
                :max-size 3200},
             1 {:buf-list #{{{:id :batch-normalization-1} :buffer}
                            {{:id :convolutional-1} :buffer}
                            {{:id :feature} :buffer}
                            {{:id :max-pooling-2} :buffer}
                            {{:id :output} :buffer}
                            {{:id :relu-1} :buffer}},
                :max-size 11520}}
            (:pools (traverse/generate-traversal-buffer-pools inference-traversal)))))))
;; Setting :learning-attenuation 0 on the first 9 layers should truncate the
;; backward pass after :batch-normalization-1 and drop the regularization
;; terms attached to the attenuated layers from the gradient loss function.
(deftest non-trainable-zero-attenuation
  (let [num-non-trainable 9
        src-desc (flatten mnist-description-with-toys)
        non-trainable-layers (take num-non-trainable src-desc)
        trainable-layers (drop num-non-trainable src-desc)
        new-desc (concat (map (fn [layer] (assoc layer :learning-attenuation 0))
                              non-trainable-layers)
                         trainable-layers)
        network (network/linear-network new-desc)
        traversal (traverse/training-traversal network)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :output, :incoming [{:id :output}], :outgoing [{:id :linear-2}]}
             {:id :linear-2, :incoming [{:id :linear-2}], :outgoing [{:id :dropout-4}]}
             {:id :dropout-4, :incoming [{:id :dropout-4}], :outgoing [{:id :feature}]}
             {:id :feature, :incoming [{:id :feature}], :outgoing [{:id :linear-1}]}
             {:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :batch-normalization-1}]}
             {:id :batch-normalization-1,
              :incoming [{:id :batch-normalization-1}],
              :outgoing [{:id :dropout-3}]}]
            (get traversal :backward))))
    ;; Only the losses on still-trainable layers survive.
    (is (= #{{:type :softmax-loss,
              :output {:type :node-output, :node-id :output},
              :labels {:type :stream, :stream :output},
              :id :softmax-loss-1}
             {:type :center-loss,
              :alpha 0.9,
              :labels {:stream :output},
              :label-indexes {:stream :output},
              :label-inverse-counts {:stream :output},
              :lambda 0.05,
              :output {:type :node-output, :node-id :feature},
              :id :center-loss-1,
              :centers {:buffer-id :center-loss-1-centers-1}}}
           (traverse/gradient-loss-function network traversal)))))
;; Same expectation as non-trainable-zero-attenuation but using the
;; :non-trainable? flag instead of zero learning attenuation — both must
;; produce the identical truncated backward pass and loss set.
(deftest non-trainable-node-non-trainable
  (let [num-non-trainable 9
        src-desc (flatten mnist-description-with-toys)
        non-trainable-layers (take num-non-trainable src-desc)
        trainable-layers (drop num-non-trainable src-desc)
        new-desc (concat (map (fn [layer] (assoc layer :non-trainable? true)) non-trainable-layers)
                         trainable-layers)
        network (network/linear-network new-desc)
        traversal (traverse/training-traversal network)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :output, :incoming [{:id :output}], :outgoing [{:id :linear-2}]}
             {:id :linear-2, :incoming [{:id :linear-2}], :outgoing [{:id :dropout-4}]}
             {:id :dropout-4, :incoming [{:id :dropout-4}], :outgoing [{:id :feature}]}
             {:id :feature, :incoming [{:id :feature}], :outgoing [{:id :linear-1}]}
             {:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :batch-normalization-1}]}
             {:id :batch-normalization-1,
              :incoming [{:id :batch-normalization-1}],
              :outgoing [{:id :dropout-3}]}]
            (get traversal :backward))))
    (is (= #{{:type :softmax-loss,
              :output {:type :node-output, :node-id :output},
              :labels {:type :stream, :stream :output},
              :id :softmax-loss-1}
             {:type :center-loss,
              :alpha 0.9,
              :labels {:stream :output},
              :label-indexes {:stream :output},
              :label-inverse-counts {:stream :output},
              :lambda 0.05,
              :id :center-loss-1,
              :output {:type :node-output, :node-id :feature},
              :centers {:buffer-id :center-loss-1-centers-1}}}
           (traverse/gradient-loss-function network traversal)))))
;; Fix: a comment inside the let-bindings had lost its ";;" prefix, leaving
;; bare prose in the binding vector (a syntax error). Restored (first half
;; recovered from the extracted-comments record).
(deftest appending-layers-to-network
  (testing
    "Ensures that a network built by piecing together a built-network and set of layers is
    effectively equal to building a network with a complete description"
    (let [layer-split 9
          src-desc (flatten mnist-description-with-toys)
          bottom-layers (take layer-split src-desc)
          bottom-network (network/linear-network bottom-layers)
          ;;Added io binding and traversals to make sure that when
          ;;the network is modified and rebuilt , these 2 steps are also rebuilt correctly
          top-layers (drop layer-split src-desc)
          top-network (network/assoc-layers-to-network bottom-network top-layers)
          traversal-after-stacking (traverse/training-traversal top-network)
          original-network (network/linear-network mnist-description-with-toys)
          original-traversal (traverse/training-traversal original-network)
          inference-traversal-top (traverse/inference-traversal top-network)
          inference-traversal-original (traverse/inference-traversal original-network)
          training-traversal-top (traverse/training-traversal top-network)
          training-traversal-original (traverse/training-traversal original-network)
          ;; Maps buffer-id -> element count so buffer stores can be compared.
          compute-graph->buffer-id-size-fn #(reduce (fn [m [id {:keys [buffer]}]]
                                                      (assoc m id (m/ecount buffer))) {} %)]
      (is (= [nil nil]
             (minimal-diff
              (get original-traversal :backward)
              (get traversal-after-stacking :backward))))
      (is (= [nil nil]
             (minimal-diff
              (get original-traversal :forward)
              (get traversal-after-stacking :forward))))
      (is (= [nil nil]
             (minimal-diff
              (get inference-traversal-top :buffers)
              (get inference-traversal-original :buffers))))
      (is (= [nil nil]
             (minimal-diff
              (get training-traversal-top :buffers)
              (get training-traversal-original :buffers))))
      (is (nil? (:verification-failures top-network)))
      (is (= (graph/parameter-count (network/network->graph top-network))
             (graph/parameter-count (network/network->graph original-network))))
      (is (= (compute-graph->buffer-id-size-fn (get-in top-network [:compute-graph :buffers]))
             (compute-graph->buffer-id-size-fn (get-in original-network [:compute-graph :buffers])))))))
;; Chopping at :dropout-4 must remove that node and everything downstream
;; (nodes, edges, and the downstream linear layer's parameter buffers).
(deftest remove-layers-from-network
  (let [mnist-net (network/linear-network mnist-description-with-toys)
        chopped-net (network/dissoc-layers-from-network mnist-net :dropout-4)]
    (is (= #{:output :linear-2 :dropout-4 :softmax-loss-1}
           (clojure.set/difference
            (set (keys (get-in mnist-net [:compute-graph :nodes])))
            (set (keys (get-in chopped-net [:compute-graph :nodes]))))))
    (is (= #{[:feature :dropout-4] [:dropout-4 :linear-2] [:linear-2 :output]
             [:output :softmax-loss-1]}
           (clojure.set/difference
            (set (get-in mnist-net [:compute-graph :edges]))
            (set (get-in chopped-net [:compute-graph :edges])))))
    (is (= #{:linear-2-bias-1 :linear-2-weights-1}
           (clojure.set/difference
            (set (keys (get-in mnist-net [:compute-graph :buffers])))
            (set (keys (get-in chopped-net [:compute-graph :buffers]))))))))
;; Training exposes extra output nodes (loss targets such as :feature and
;; :convolutional-2) that inference does not.
(deftest inference-after-train
  (let [network (build-big-description)]
    (is (= #{:output}
           (network/output-node-ids network :inference)))
    (is (= #{:output :feature :convolutional-2}
           (network/output-node-ids network :training)))))
;; Concatenate with two inputs (:left stream, :right linear branch):
;; checks backward order, buffer dimensions (width 1000 = 500 + 500), and
;; the inference forward pass.
(deftest concatenate-traversal-1
  (let [network (-> (network/linear-network [(layers/input 10 10 10)
                                             (layers/linear 500 :id :right)
                                             (layers/input 500 1 1 :parents [] :id :left)
                                             (layers/concatenate :parents [:left :right]
                                                                 :id :concat)
                                             (layers/linear 10)]))
        train-traversal (traverse/training-traversal network)
        inference-traversal (traverse/inference-traversal network)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :concat}]}
             {:id :concat, :incoming [{:id :concat}], :outgoing [{:stream :left} {:id :right}]}
             {:id :right, :incoming [{:id :right}], :outgoing [{:stream :input-1}]}]
            (get train-traversal :backward))))
    (is (= [nil nil]
           (minimal-diff
            {{:id :concat} {:dimension {:channels 1, :height 1, :width 1000}},
             {:id :linear-1} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :right} {:dimension {:channels 1, :height 1, :width 500}},
             {:stream :input-1} {:dimension {:channels 10, :height 10, :width 10}},
             {:stream :left} {:dimension {:channels 1, :height 1, :width 500}}
             {:stream :linear-1} {:dimension {:channels 1, :height 1, :width 10}}}
            (get train-traversal :buffers))))
    (is (= [nil nil]
           (minimal-diff
            [{:id :right, :incoming [{:stream :input-1}], :outgoing [{:id :right}]}
             {:id :concat, :incoming [{:stream :left} {:id :right}], :outgoing [{:id :concat}]}
             {:id :linear-1, :incoming [{:id :concat}], :outgoing [{:id :linear-1}]}]
            (get inference-traversal :forward))))))
;; Mirror of concatenate-traversal-1 with the concatenate :parents order
;; reversed ([:right :left]); verifies the traversal does not depend on the
;; order in which parent layers are declared.
;; Fix: the inline comment below had lost its ";;" prefix during extraction,
;; leaving bare prose inside the layer vector (a syntax error) — restored.
(deftest concatenate-traversal-2
  (let [train-traversal (-> (network/linear-network [(layers/input 10 10 10)
                                                     (layers/linear 500 :id :right)
                                                     (layers/input 500 1 1 :parents [] :id :left)
                                                     ;;Switch the left and right nodes. Attempting to
                                                     ;;ensure we don't have some hidden dependency upon
                                                     ;;order of layer declaration.
                                                     (layers/concatenate :parents [:right :left]
                                                                         :id :concat)
                                                     (layers/linear 10)])
                            traverse/training-traversal)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :linear-1, :incoming [{:id :linear-1}], :outgoing [{:id :concat}]}
             {:id :concat, :incoming [{:id :concat}], :outgoing [{:id :right} {:stream :left}]}
             {:id :right, :incoming [{:id :right}], :outgoing [{:stream :input-1}]}]
            (get train-traversal :backward))))
    (is (= [nil nil]
           (minimal-diff
            {{:id :concat} {:dimension {:channels 1, :height 1, :width 1000}},
             {:id :linear-1} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :right} {:dimension {:channels 1, :height 1, :width 500}},
             {:stream :input-1} {:dimension {:channels 10, :height 10, :width 10}},
             {:stream :left} {:dimension {:channels 1, :height 1, :width 500}}
             {:stream :linear-1} {:dimension {:channels 1, :height 1, :width 10}}}
            (get train-traversal :buffers))))))
;; Verifies split-layer traversal: an explicit split (:split) followed by an
;; auto-named split, checking that generated buffer ids do not collide.
(deftest split-traversal
  (let [train-traversal (-> (network/linear-network [(layers/input 50)
                                                     (layers/split :id :split)
                                                     ;;Check for buffer id collision
                                                     (layers/split)
                                                     (layers/linear 10 :id :double-split)
                                                     (layers/linear 20
                                                                    :parents [:split]
                                                                    :id :single-split)])
                            traverse/training-traversal)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :split, :incoming [{:stream :input-1}], :outgoing [{:id :split-1} {:id :split-2}]}
             {:id :split-1, :incoming [{:id :split-1}], :outgoing [{:id :split-1-1}]}
             {:id :double-split, :incoming [{:id :split-1-1}], :outgoing [{:id :double-split}]}
             {:id :single-split, :incoming [{:id :split-2}], :outgoing [{:id :single-split}]}]
            (get train-traversal :forward))))
    (is (= [nil nil]
           (minimal-diff
            {{:id :double-split} {:dimension {:channels 1, :height 1, :width 10}},
             {:id :single-split} {:dimension {:channels 1, :height 1, :width 20}},
             {:id :split-1} {:dimension {:channels 1, :height 1, :id :split-1, :width 50}},
             {:id :split-1-1} {:dimension {:channels 1, :height 1, :id :double-split, :width 50}},
             {:id :split-2} {:dimension {:channels 1, :height 1, :id :single-split, :width 50}},
             {:stream :input-1} {:dimension {:channels 1, :height 1, :width 50}}
             {:stream :single-split} {:dimension {:channels 1, :height 1, :width 20}},
             {:stream :double-split} {:dimension {:channels 1, :height 1, :width 10}}}
            (get train-traversal :buffers))))))
;; Convolution/pooling description used by the input-resize tests;
;; :chop-here marks the transfer-learning chop point, :label the output.
(def resizable-net
  [(layers/input 28 28 1 :id :data)
   (layers/convolutional 5 2 1 20)
   (layers/max-pooling 2 0 2)
   (layers/dropout 0.9)
   (layers/convolutional 3 1 1 50)
   (layers/max-pooling 2 0 2)
   (layers/convolutional 3 1 1 100)
   (layers/relu)
   (layers/convolutional 7 0 7 400)
   (layers/dropout 0.7)
   (layers/relu)
   (layers/convolutional 1 0 1 10 :id :chop-here)
   (layers/softmax :id :label)])
;; Resizes resizable-net's input to 118x118x1, chops at :chop-here, freezes
;; all remaining nodes (:non-trainable? true), and appends a yolo2 linear
;; head with id :label.
(defn mnist-yolo-network []
  (let [network (network/resize-input (network/linear-network resizable-net) 118 118 1)
        chopped-net (network/dissoc-layers-from-network network :chop-here)
        ;; Freeze every surviving node so only the appended head trains.
        nodes (get-in chopped-net [:compute-graph :nodes])
        new-node-params (mapv (fn [params] (assoc params :non-trainable? true)) (vals nodes))
        frozen-nodes (zipmap (keys nodes) new-node-params)
        frozen-net (assoc-in chopped-net [:compute-graph :nodes] frozen-nodes)
        layers-to-add (map first [(layers/linear 480
                                                 :id :label
                                                 :yolo2 {:grid-x 2
                                                         :grid-y 2
                                                         :anchors [[1 1] [2 2]]
                                                         :labels {:type :stream
                                                                  :stream :label}})])
        modified-net (network/assoc-layers-to-network frozen-net layers-to-add)]
    modified-net))
;; Frozen nodes are tagged :pass :inference in the training traversal;
;; only the appended :label head runs a full training pass.
(deftest freeze-network
  (let [test-net (mnist-yolo-network)
        traversal (traverse/training-traversal test-net)]
    (is (= [nil nil]
           (minimal-diff
            [{:id :convolutional-1 :incoming [{:stream :data}] :outgoing [{:id :convolutional-1}] :pass :inference}
             {:id :max-pooling-1 :incoming [{:id :convolutional-1}] :outgoing [{:id :max-pooling-1}] :pass :inference}
             {:id :convolutional-2 :incoming [{:id :max-pooling-1}] :outgoing [{:id :convolutional-2}] :pass :inference}
             {:id :max-pooling-2 :incoming [{:id :convolutional-2}] :outgoing [{:id :max-pooling-2}] :pass :inference}
             {:id :convolutional-3 :incoming [{:id :max-pooling-2}] :outgoing [{:id :convolutional-3}] :pass :inference}
             {:id :relu-1 :incoming [{:id :convolutional-3}] :outgoing [{:id :relu-1}] :pass :inference}
             {:id :convolutional-4 :incoming [{:id :relu-1}] :outgoing [{:id :convolutional-4}] :pass :inference}
             {:id :relu-2 :incoming [{:id :convolutional-4}] :outgoing [{:id :relu-2}] :pass :inference}
             {:id :label :incoming [{:id :relu-2}] :outgoing [{:id :label}]}]
            (get traversal :forward))))))
(def resnet-like-net
[(layers/input 28 28 1 :id :data)
(layers/convolutional 3 0 1 20)
(layers/max-pooling 2 0 2)
(layers/split :id :s1)
(layers/convolutional 3 1 1 20)
(layers/relu :id :r1)
(layers/join :parents [:r1 :s1])
(layers/max-pooling 2 0 2)
(layers/split :id :s2)
(layers/convolutional 3 1 1 20)
(layers/relu :id :r2)
(layers/join :parents [:r2 :s2])
(layers/max-pooling 2 0 2)
(layers/split :id :s3)
(layers/convolutional 3 1 1 20)
(layers/relu :id :r3)
(layers/join :parents [:r3 :s3])
(layers/linear 10 :id :chop-here)
(layers/softmax)])
(deftest basic-resnet-traverse
(let [test-net (network/linear-network resnet-like-net)
infer-traversal (traverse/inference-traversal test-net)
train-traversal (traverse/training-traversal test-net)]
(is (= [nil nil]
(minimal-diff
[{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
{:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
{:id :convolutional-2, :incoming [{:id :max-pooling-1}], :outgoing [{:id :convolutional-2}]}
{:id :r1, :incoming [{:id :convolutional-2}], :outgoing [{:id :r1}]}
{:id :join-1, :incoming [{:id :r1} {:id :max-pooling-1}], :outgoing [{:id :join-1}]}
{:id :max-pooling-2, :incoming [{:id :join-1}], :outgoing [{:id :max-pooling-2}]}
{:id :convolutional-3, :incoming [{:id :max-pooling-2}], :outgoing [{:id :convolutional-3}]}
{:id :r2, :incoming [{:id :convolutional-3}], :outgoing [{:id :r2}]}
{:id :join-2, :incoming [{:id :r2} {:id :max-pooling-2}], :outgoing [{:id :join-2}]}
{:id :max-pooling-3, :incoming [{:id :join-2}], :outgoing [{:id :max-pooling-3}]}
{:id :convolutional-4, :incoming [{:id :max-pooling-3}], :outgoing [{:id :convolutional-4}]}
{:id :r3, :incoming [{:id :convolutional-4}], :outgoing [{:id :r3}]}
{:id :join-3, :incoming [{:id :r3} {:id :max-pooling-3}], :outgoing [{:id :join-3}]}
{:id :chop-here, :incoming [{:id :join-3}], :outgoing [{:id :chop-here}]}
{:id :softmax-1, :incoming [{:id :chop-here}], :outgoing [{:id :softmax-1}]}]
(get infer-traversal :forward))))
(is (= [nil nil]
(minimal-diff
[{:id :convolutional-1, :incoming [{:stream :data}], :outgoing [{:id :convolutional-1}]}
{:id :max-pooling-1, :incoming [{:id :convolutional-1}], :outgoing [{:id :max-pooling-1}]}
{:id :s1, :incoming [{:id :max-pooling-1}], :outgoing [{:id :s1-1} {:id :s1-2}]}
{:id :convolutional-2, :incoming [{:id :s1-1}], :outgoing [{:id :convolutional-2}]}
{:id :r1, :incoming [{:id :convolutional-2}], :outgoing [{:id :r1}]}
{:id :join-1, :incoming [{:id :r1} {:id :s1-2}], :outgoing [{:id :join-1}]}
{:id :max-pooling-2, :incoming [{:id :join-1}], :outgoing [{:id :max-pooling-2}]}
{:id :s2, :incoming [{:id :max-pooling-2}], :outgoing [{:id :s2-1} {:id :s2-2}]}
{:id :convolutional-3, :incoming [{:id :s2-1}], :outgoing [{:id :convolutional-3}]}
{:id :r2, :incoming [{:id :convolutional-3}], :outgoing [{:id :r2}]}
{:id :join-2, :incoming [{:id :r2} {:id :s2-2}], :outgoing [{:id :join-2}]}
{:id :max-pooling-3, :incoming [{:id :join-2}], :outgoing [{:id :max-pooling-3}]}
{:id :s3, :incoming [{:id :max-pooling-3}], :outgoing [{:id :s3-1} {:id :s3-2}]}
{:id :convolutional-4, :incoming [{:id :s3-1}], :outgoing [{:id :convolutional-4}]}
{:id :r3, :incoming [{:id :convolutional-4}], :outgoing [{:id :r3}]}
{:id :join-3, :incoming [{:id :r3} {:id :s3-2}], :outgoing [{:id :join-3}]}
{:id :chop-here, :incoming [{:id :join-3}], :outgoing [{:id :chop-here}]}
{:id :softmax-1, :incoming [{:id :chop-here}], :outgoing [{:id :softmax-1}]}]
(get train-traversal :forward))))))
(defn resnet-retrain-net
[]
(let [test-net (network/linear-network resnet-like-net)
chopped-net (network/dissoc-layers-from-network test-net :chop-here)
nodes (get-in chopped-net [:compute-graph :nodes])
new-node-params (mapv (fn [params] (assoc params :non-trainable? true)) (vals nodes))
frozen-nodes (zipmap (keys nodes) new-node-params)
frozen-net (assoc-in chopped-net [:compute-graph :nodes] frozen-nodes)
layers-to-add (map first [(layers/linear 50)
(layers/softmax)])]
(network/assoc-layers-to-network frozen-net layers-to-add)))
(deftest resnet-retrain-traverse
(let [test-net (resnet-retrain-net)
train-traversal (traverse/training-traversal test-net)]
(is (= [nil nil]
(minimal-diff
[{:id :convolutional-1 :incoming [{:stream :data}]
:outgoing [{:id :convolutional-1}] :pass :inference}
{:id :max-pooling-1 :incoming [{:id :convolutional-1}]
:outgoing [{:id :max-pooling-1}] :pass :inference}
{:id :convolutional-2 :incoming [{:id :max-pooling-1}]
:outgoing [{:id :convolutional-2}] :pass :inference}
{:id :r1 :incoming [{:id :convolutional-2}] :outgoing [{:id :r1}] :pass :inference}
{:id :join-1 :incoming [{:id :r1} {:id :max-pooling-1}]
:outgoing [{:id :join-1}] :pass :inference}
{:id :max-pooling-2 :incoming [{:id :join-1}]
:outgoing [{:id :max-pooling-2}] :pass :inference}
{:id :convolutional-3 :incoming [{:id :max-pooling-2}]
:outgoing [{:id :convolutional-3}] :pass :inference}
{:id :r2 :incoming [{:id :convolutional-3}] :outgoing [{:id :r2}] :pass :inference}
{:id :join-2 :incoming [{:id :r2} {:id :max-pooling-2}]
:outgoing [{:id :join-2}] :pass :inference}
{:id :max-pooling-3 :incoming [{:id :join-2}]
:outgoing [{:id :max-pooling-3}] :pass :inference}
{:id :convolutional-4 :incoming [{:id :max-pooling-3}]
:outgoing [{:id :convolutional-4}] :pass :inference}
{:id :r3 :incoming [{:id :convolutional-4}]
:outgoing [{:id :r3}] :pass :inference}
{:id :join-3 :incoming [{:id :r3} {:id :max-pooling-3}]
:outgoing [{:id :join-3}] :pass :inference}
{:id :linear-1 :incoming [{:id :join-3}]
:outgoing [{:id :linear-1}]}
{:id :softmax-1 :incoming [{:id :linear-1}]
:outgoing [{:id :softmax-1}]}]
(get train-traversal :forward))))
(is (= [nil nil]
(minimal-diff
{0 {:buf-list #{{{:id :linear-1} :buffer} {{:id :max-pooling-1} :buffer}
{{:id :max-pooling-2} :buffer} {{:id :max-pooling-3} :buffer}
{{:stream :data} :buffer}}
:max-size 3380}
1 {:buf-list #{{{:id :convolutional-1} :buffer}
{{:id :convolutional-2} :buffer}
{{:id :convolutional-3} :buffer}
{{:id :convolutional-4} :buffer}
{{:id :join-1} :buffer}
{{:id :join-2} :buffer}
{{:id :join-3} :buffer}}
:max-size 13520}
2 {:buf-list #{{{:id :r1} :buffer} {{:id :r2} :buffer} {{:id :r3} :buffer} {{:id :softmax-1} :buffer}} :max-size 3380}
3 {:buf-list #{{{:id :join-3} :gradient} {{:id :softmax-1} :gradient}} :max-size 320}
4 {:buf-list #{{{:id :linear-1} :gradient}} :max-size 50}}
(:pools (traverse/generate-traversal-buffer-pools train-traversal)))))))
|
c4668fb58742a4c097b59a5a5c87593f0e327d94c8a7d8d439956feb6bc08198 | aristidb/aws | S3.hs | module Aws.S3
(
module Aws.S3.Commands
, module Aws.S3.Core
)
where
import Aws.S3.Commands
import Aws.S3.Core
| null | https://raw.githubusercontent.com/aristidb/aws/a99113ed7768f9758346052c0d8939b66c6efa87/Aws/S3.hs | haskell | module Aws.S3
(
module Aws.S3.Commands
, module Aws.S3.Core
)
where
import Aws.S3.Commands
import Aws.S3.Core
| |
19495b29700e0b885200f5f35df20ee48089a0a4b063e97f775e475b2d9c4f99 | montyly/gueb | ir.ml | open Absenvgenerique
open Program_piqi
module type IR = functor (Absenv_v : AbsEnvGenerique) ->
sig
type ir_stmt
val print_stmt : ir_stmt -> string
val print_type : ir_stmt -> string
val parse_func_protobuf : Program_piqi.function_-> Gueb_type.basic_block list * Gueb_type.edge list * int * int * (ir_stmt*int*int) list * (int list * int list)
: Program_piqi.function_- > Gueb_type.basic_block list * Gueb_type.edge list * Gueb_type.addr * int * ( ir_stmt*int*int ) list * ( int list * int list )
val parse_func_protobuf_number_unloop : Program_piqi.function_-> int (* bbs,connection_unfilter,eip, number_unloop,nodes,call_retn) *)
val get_real_addr : int -> int
val get_value_jump : ir_stmt -> Absenv_v.absenv -> int option
val get_first_arg: ir_stmt -> int option
val function_transfer : ir_stmt -> Absenv_v.absenv -> Gueb_type.addr -> string -> int -> Gueb_type.call_stack -> Absenv_v.absenv
val access_heap : ir_stmt -> Absenv_v.absenv -> Absenv_v.he list
val check_uaf : (ir_stmt*Absenv_v.absenv*Gueb_type.addr) -> (ir_stmt*Absenv_v.he list *Gueb_type.addr) option
TODO use with hashmap
end ;;
| null | https://raw.githubusercontent.com/montyly/gueb/45f496a5a1e8e908e562928762ece304c2408c3a/src/ir.ml | ocaml | bbs,connection_unfilter,eip, number_unloop,nodes,call_retn) | open Absenvgenerique
open Program_piqi
module type IR = functor (Absenv_v : AbsEnvGenerique) ->
sig
type ir_stmt
val print_stmt : ir_stmt -> string
val print_type : ir_stmt -> string
val parse_func_protobuf : Program_piqi.function_-> Gueb_type.basic_block list * Gueb_type.edge list * int * int * (ir_stmt*int*int) list * (int list * int list)
: Program_piqi.function_- > Gueb_type.basic_block list * Gueb_type.edge list * Gueb_type.addr * int * ( ir_stmt*int*int ) list * ( int list * int list )
val get_real_addr : int -> int
val get_value_jump : ir_stmt -> Absenv_v.absenv -> int option
val get_first_arg: ir_stmt -> int option
val function_transfer : ir_stmt -> Absenv_v.absenv -> Gueb_type.addr -> string -> int -> Gueb_type.call_stack -> Absenv_v.absenv
val access_heap : ir_stmt -> Absenv_v.absenv -> Absenv_v.he list
val check_uaf : (ir_stmt*Absenv_v.absenv*Gueb_type.addr) -> (ir_stmt*Absenv_v.he list *Gueb_type.addr) option
TODO use with hashmap
end ;;
|
32f4484df1f54daa2578027d41485b09c340f3874a097ffa3da2338204c90797 | esl/MongooseIM | mongoose_graphql_user_subscription.erl | -module(mongoose_graphql_user_subscription).
-behaviour(mongoose_graphql).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
execute(_Ctx, _Obj, <<"stanza">>, _Args) ->
{ok, stanza}.
| null | https://raw.githubusercontent.com/esl/MongooseIM/7c7419889d3babba1a842903fe515c8f61752e7d/src/graphql/user/mongoose_graphql_user_subscription.erl | erlang | -module(mongoose_graphql_user_subscription).
-behaviour(mongoose_graphql).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
execute(_Ctx, _Obj, <<"stanza">>, _Args) ->
{ok, stanza}.
| |
0edb87325cbebc0772f665918d8ec3720ff2a5eb70291fb469811351c01276e5 | ditto/ditto | Syntax.hs | module Ditto.Syntax where
import Data.List
import Data.Maybe
import Data.ByteString.Char8 (ByteString, pack, unpack)
import qualified Data.Map as Map
----------------------------------------------------------------------
snoc :: [a] -> a -> [a]
snoc xs x = xs ++ [x]
reject :: (a -> Bool) -> [a] -> [a]
reject p = filter (not . p)
----------------------------------------------------------------------
data Verbosity = Normal | Verbose
deriving (Show, Read, Eq)
data Icit = Expl | Impl
deriving (Show, Read, Eq)
data Essible = Acc | Inacc
deriving (Show, Read, Eq, Ord)
----------------------------------------------------------------------
data Name = Name Essible ByteString (Maybe Integer)
deriving (Read, Eq, Ord)
instance Show Name where
show (Name e x m) = prefix ++ unpack x ++ suffix
where
prefix = case e of Acc -> ""; Inacc -> "."
suffix = case m of Nothing -> ""; Just n -> "$" ++ show n
bs2n :: Essible -> ByteString -> Name
bs2n e x = Name e x Nothing
s2n :: Essible -> String -> Name
s2n e x = bs2n e (pack x)
uniqName :: Name -> Integer -> Name
uniqName x@(Name e _ _) n = uniqEName e x n
uniqEName :: Essible -> Name -> Integer -> Name
uniqEName e (Name _ x _) n = Name e x (Just n)
isInacc :: Name -> Bool
isInacc (Name Inacc _ _) = True
isInacc _ = False
----------------------------------------------------------------------
newtype PName = PName ByteString
deriving (Read, Eq, Ord)
instance Show PName where
show (PName x) = "#" ++ unpack x
pname2name :: PName -> Name
pname2name (PName x) = Name Acc x Nothing
name2pname :: Name -> Maybe PName
name2pname (Name Acc x Nothing) = Just (PName x)
name2pname _ = Nothing
----------------------------------------------------------------------
newtype GName = GName Integer
deriving (Read, Eq, Ord)
instance Show GName where
show (GName n) = "!" ++ show n
----------------------------------------------------------------------
data MName = MName MKind Integer
deriving (Read, Eq, Ord)
instance Show MName where
show (MName k n) = "?" ++ prefix k ++ show n
where
prefix :: MKind -> String
prefix MInfer = ""
prefix (MHole Nothing) = ""
prefix (MHole (Just nm)) = unpack nm ++ "-"
data MKind = MInfer | MHole (Maybe ByteString)
deriving (Show, Read, Eq, Ord)
----------------------------------------------------------------------
type Prog = [MStmt]
type MStmt = Either Stmt [Stmt]
data Stmt =
SDef PName Exp Exp
| SData PName Exp SCons
| SDefn PName Exp SClauses
deriving (Show, Read, Eq)
type SCons = [(PName, Exp)]
type SClauses = [SClause]
type SClause = (Pats, RHS)
data Sig =
GDef PName Exp
| GData PName Exp
| GDefn PName Exp Icits
deriving (Show, Read, Eq)
data Bod =
BDef PName Exp
| BData PName SCons
| BDefn PName SClauses
deriving (Show, Read, Eq)
data Exp =
EType | EPi Icit Exp Bind | ELam Icit Exp Bind
| EForm PName Args | ECon PName PName Args
| ERed PName Args | EMeta MName Args
| EVar Name | EGuard GName
| EApp Icit Exp Exp | EInfer MKind
deriving (Show, Read, Eq)
data Bind = Bind Name Exp
deriving (Show, Read, Eq)
type Icits = [Icit]
type Args = [Arg]
type Arg = (Icit, Exp)
type Env = [Crumb]
type Tel = [(Icit, Name, Exp)]
type Ren = [(Name, Name)]
type Sub = [(Name, Exp)]
type PSub = [(Name, Pat)]
type Acts = [(Tel, Act)]
type CtxErr = ([Name], Prog, Acts, Tel, Err)
type Flex = Either MName GName
type Holes = [Hole]
type Hole = (MName, Meta)
type Forms = Map.Map PName Tel
type Conss = Map.Map PName Cons
type Cons = [(PName, Con)]
data Con = Con Tel Args
deriving (Show, Read, Eq)
type Reds = Map.Map PName Red
data Red = Red Tel Exp
deriving (Show, Read, Eq)
type Clausess = Map.Map PName Clauses
type Clauses = [Clause]
data Clause = Clause Tel Pats RHS
deriving (Show, Read, Eq)
type Metas = Map.Map MName Meta
data Meta = Meta Acts Tel Exp
deriving (Show, Read, Eq)
type Sols = Map.Map MName Exp
type Defs = Map.Map Name Ann
type Guards = Map.Map GName Ann
data Ann = Ann { val, typ :: Exp }
deriving (Show, Read)
type MProb = Maybe Prob
type Probs = Map.Map GName Prob
data Prob =
Prob1 Acts Tel Exp Exp
| ProbN Prob Acts Tel Args Args
deriving (Show, Read, Eq)
type Pats = [(Icit, Pat)]
data RHS = MapsTo Exp | Caseless Name | Split Name
deriving (Show, Read, Eq)
data Crumb = CData PName | CDefn PName
deriving (Show, Read, Eq)
data Pat = PVar Name | PInacc (Maybe Exp) | PCon PName Pats
deriving (Show, Read, Eq)
data Act =
ACheck Exp Exp
| AConv Exp Exp
| ACover PName Pats
| ADef PName
| AData PName
| AClause Clause
| ACon PName
| ADefn PName
deriving (Show, Read, Eq)
data Err =
RGen String
| RConv Exp Exp
| RScope Name
| RCaseless Name
| RUnsolved [Prob] Holes
| RReach PName SClauses
| RSplit Clauses
| RAtom Exp
deriving (Show, Read, Eq)
----------------------------------------------------------------------
toSig :: Stmt -> Sig
toSig (SDef x _ _A) = GDef x _A
toSig (SData x _A _) = GData x _A
toSig (SDefn x _A cs) = GDefn x _A (coverIcits cs)
toBod :: Stmt -> Bod
toBod (SDef x a _) = BDef x a
toBod (SData x _ cs) = BData x cs
toBod (SDefn x _ cs) = BDefn x cs
----------------------------------------------------------------------
names :: Tel -> [Name]
names = map $ \(_,x,_) -> x
coverIcits :: SClauses -> Icits
coverIcits [] = []
coverIcits ((ps,_):_) = map fst ps
lookupTel :: Name -> Tel -> Maybe Exp
lookupTel x = lookup x . map (\(_,x,a) -> (x, a))
varArgs :: Tel -> Args
varArgs = map $ \(i,x,_) -> (i, EVar x)
pvarPats :: Tel -> Pats
pvarPats = map (\(i, x, _) -> (i, PVar x))
pis :: Tel -> Exp -> Exp
pis = flip $ foldr $ \ (i, x, _A) _B -> EPi i _A (Bind x _B)
ipis :: Tel -> Exp -> Exp
ipis as = pis (map (\(_,x,a) -> (Impl,x,a)) as)
paramCons :: Tel -> SCons -> SCons
paramCons _As = map (\(x, _A) -> (x, ipis _As _A))
lams :: Tel -> Exp -> Exp
lams = flip $ foldr $ \ (i, x , _A) _B -> ELam i _A (Bind x _B)
apps :: Exp -> Args -> Exp
apps = foldl $ \ f (i, a) -> EApp i f a
hole :: Exp
hole = EInfer (MHole Nothing)
----------------------------------------------------------------------
formType :: Tel -> Exp
formType _Is = pis _Is EType
conType :: Tel -> PName -> Args -> Exp
conType _As _X _Is = pis _As (EForm _X _Is)
----------------------------------------------------------------------
viewSpine :: Exp -> (Exp, Args)
viewSpine (EApp i f a) = (g, snoc as (i, a))
where (g, as) = viewSpine f
viewSpine x = (x, [])
----------------------------------------------------------------------
fv :: Exp -> [Name]
fv (EVar x) = [x]
fv EType = []
fv (EInfer _) = []
fv (EGuard _) = []
fv (EForm _ is) = fvs is
fv (ECon _ _ as) = fvs as
fv (ERed _ as) = fvs as
fv (EMeta _ as) = fvs as
fv (EPi _ _A _B) = fv _A ++ fvBind _B
fv (ELam _ _A b) = fv _A ++ fvBind b
fv (EApp _ a b) = fv a ++ fv b
fvs :: Args -> [Name]
fvs as = concatMap fv (map snd as)
fvBind :: Bind -> [Name]
fvBind (Bind n b) = n `delete` nub (fv b)
fvTel :: Tel -> [Name]
fvTel [] = []
fvTel ((_, _X, _A):_As) = fv _A ++ (_X `delete` nub (fvTel _As))
fvRHS :: RHS -> [Name]
fvRHS (MapsTo a) = fv a
fvRHS (Caseless x) = [x]
fvRHS (Split x) = [x]
fvPats :: Pats -> [Name]
fvPats = concatMap (\(i,p) -> fvPat p)
fvPat :: Pat -> [Name]
fvPat (PVar x) = [x]
fvPat (PInacc _) = []
fvPat (PCon _ ps) = fvPats ps
----------------------------------------------------------------------
mv :: Exp -> [Flex]
mv (EVar _) = []
mv EType = []
mv (EInfer _) = []
mv (EGuard x) = [Right x]
mv (EForm _ is) = mvs is
mv (ECon _ _ as) = mvs as
mv (ERed _ as) = mvs as
mv (EMeta x as) = Left x : mvs as
mv (EPi _ _A _B) = mv _A ++ mvBind _B
mv (ELam _ _A b) = mv _A ++ mvBind b
mv (EApp _ a b) = mv a ++ mv b
mvs :: Args -> [Flex]
mvs as = concatMap mv (map snd as)
mvBind :: Bind -> [Flex]
mvBind (Bind _ b) = mv b
----------------------------------------------------------------------
isHole :: MName -> Bool
isHole (MName (MHole _) _) = True
isHole (MName _ _) = False
----------------------------------------------------------------------
| null | https://raw.githubusercontent.com/ditto/ditto/f0d35271950216308f3f9bbcf701b176ece5ccd9/src/Ditto/Syntax.hs | haskell | --------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | module Ditto.Syntax where
import Data.List
import Data.Maybe
import Data.ByteString.Char8 (ByteString, pack, unpack)
import qualified Data.Map as Map
snoc :: [a] -> a -> [a]
snoc xs x = xs ++ [x]
reject :: (a -> Bool) -> [a] -> [a]
reject p = filter (not . p)
data Verbosity = Normal | Verbose
deriving (Show, Read, Eq)
data Icit = Expl | Impl
deriving (Show, Read, Eq)
data Essible = Acc | Inacc
deriving (Show, Read, Eq, Ord)
data Name = Name Essible ByteString (Maybe Integer)
deriving (Read, Eq, Ord)
instance Show Name where
show (Name e x m) = prefix ++ unpack x ++ suffix
where
prefix = case e of Acc -> ""; Inacc -> "."
suffix = case m of Nothing -> ""; Just n -> "$" ++ show n
bs2n :: Essible -> ByteString -> Name
bs2n e x = Name e x Nothing
s2n :: Essible -> String -> Name
s2n e x = bs2n e (pack x)
uniqName :: Name -> Integer -> Name
uniqName x@(Name e _ _) n = uniqEName e x n
uniqEName :: Essible -> Name -> Integer -> Name
uniqEName e (Name _ x _) n = Name e x (Just n)
isInacc :: Name -> Bool
isInacc (Name Inacc _ _) = True
isInacc _ = False
newtype PName = PName ByteString
deriving (Read, Eq, Ord)
instance Show PName where
show (PName x) = "#" ++ unpack x
pname2name :: PName -> Name
pname2name (PName x) = Name Acc x Nothing
name2pname :: Name -> Maybe PName
name2pname (Name Acc x Nothing) = Just (PName x)
name2pname _ = Nothing
newtype GName = GName Integer
deriving (Read, Eq, Ord)
instance Show GName where
show (GName n) = "!" ++ show n
data MName = MName MKind Integer
deriving (Read, Eq, Ord)
instance Show MName where
show (MName k n) = "?" ++ prefix k ++ show n
where
prefix :: MKind -> String
prefix MInfer = ""
prefix (MHole Nothing) = ""
prefix (MHole (Just nm)) = unpack nm ++ "-"
data MKind = MInfer | MHole (Maybe ByteString)
deriving (Show, Read, Eq, Ord)
type Prog = [MStmt]
type MStmt = Either Stmt [Stmt]
data Stmt =
SDef PName Exp Exp
| SData PName Exp SCons
| SDefn PName Exp SClauses
deriving (Show, Read, Eq)
type SCons = [(PName, Exp)]
type SClauses = [SClause]
type SClause = (Pats, RHS)
data Sig =
GDef PName Exp
| GData PName Exp
| GDefn PName Exp Icits
deriving (Show, Read, Eq)
data Bod =
BDef PName Exp
| BData PName SCons
| BDefn PName SClauses
deriving (Show, Read, Eq)
data Exp =
EType | EPi Icit Exp Bind | ELam Icit Exp Bind
| EForm PName Args | ECon PName PName Args
| ERed PName Args | EMeta MName Args
| EVar Name | EGuard GName
| EApp Icit Exp Exp | EInfer MKind
deriving (Show, Read, Eq)
data Bind = Bind Name Exp
deriving (Show, Read, Eq)
type Icits = [Icit]
type Args = [Arg]
type Arg = (Icit, Exp)
type Env = [Crumb]
type Tel = [(Icit, Name, Exp)]
type Ren = [(Name, Name)]
type Sub = [(Name, Exp)]
type PSub = [(Name, Pat)]
type Acts = [(Tel, Act)]
type CtxErr = ([Name], Prog, Acts, Tel, Err)
type Flex = Either MName GName
type Holes = [Hole]
type Hole = (MName, Meta)
type Forms = Map.Map PName Tel
type Conss = Map.Map PName Cons
type Cons = [(PName, Con)]
data Con = Con Tel Args
deriving (Show, Read, Eq)
type Reds = Map.Map PName Red
data Red = Red Tel Exp
deriving (Show, Read, Eq)
type Clausess = Map.Map PName Clauses
type Clauses = [Clause]
data Clause = Clause Tel Pats RHS
deriving (Show, Read, Eq)
type Metas = Map.Map MName Meta
data Meta = Meta Acts Tel Exp
deriving (Show, Read, Eq)
type Sols = Map.Map MName Exp
type Defs = Map.Map Name Ann
type Guards = Map.Map GName Ann
data Ann = Ann { val, typ :: Exp }
deriving (Show, Read)
type MProb = Maybe Prob
type Probs = Map.Map GName Prob
data Prob =
Prob1 Acts Tel Exp Exp
| ProbN Prob Acts Tel Args Args
deriving (Show, Read, Eq)
type Pats = [(Icit, Pat)]
data RHS = MapsTo Exp | Caseless Name | Split Name
deriving (Show, Read, Eq)
data Crumb = CData PName | CDefn PName
deriving (Show, Read, Eq)
data Pat = PVar Name | PInacc (Maybe Exp) | PCon PName Pats
deriving (Show, Read, Eq)
data Act =
ACheck Exp Exp
| AConv Exp Exp
| ACover PName Pats
| ADef PName
| AData PName
| AClause Clause
| ACon PName
| ADefn PName
deriving (Show, Read, Eq)
data Err =
RGen String
| RConv Exp Exp
| RScope Name
| RCaseless Name
| RUnsolved [Prob] Holes
| RReach PName SClauses
| RSplit Clauses
| RAtom Exp
deriving (Show, Read, Eq)
toSig :: Stmt -> Sig
toSig (SDef x _ _A) = GDef x _A
toSig (SData x _A _) = GData x _A
toSig (SDefn x _A cs) = GDefn x _A (coverIcits cs)
toBod :: Stmt -> Bod
toBod (SDef x a _) = BDef x a
toBod (SData x _ cs) = BData x cs
toBod (SDefn x _ cs) = BDefn x cs
names :: Tel -> [Name]
names = map $ \(_,x,_) -> x
coverIcits :: SClauses -> Icits
coverIcits [] = []
coverIcits ((ps,_):_) = map fst ps
lookupTel :: Name -> Tel -> Maybe Exp
lookupTel x = lookup x . map (\(_,x,a) -> (x, a))
varArgs :: Tel -> Args
varArgs = map $ \(i,x,_) -> (i, EVar x)
pvarPats :: Tel -> Pats
pvarPats = map (\(i, x, _) -> (i, PVar x))
pis :: Tel -> Exp -> Exp
pis = flip $ foldr $ \ (i, x, _A) _B -> EPi i _A (Bind x _B)
ipis :: Tel -> Exp -> Exp
ipis as = pis (map (\(_,x,a) -> (Impl,x,a)) as)
paramCons :: Tel -> SCons -> SCons
paramCons _As = map (\(x, _A) -> (x, ipis _As _A))
lams :: Tel -> Exp -> Exp
lams = flip $ foldr $ \ (i, x , _A) _B -> ELam i _A (Bind x _B)
apps :: Exp -> Args -> Exp
apps = foldl $ \ f (i, a) -> EApp i f a
hole :: Exp
hole = EInfer (MHole Nothing)
formType :: Tel -> Exp
formType _Is = pis _Is EType
conType :: Tel -> PName -> Args -> Exp
conType _As _X _Is = pis _As (EForm _X _Is)
viewSpine :: Exp -> (Exp, Args)
viewSpine (EApp i f a) = (g, snoc as (i, a))
where (g, as) = viewSpine f
viewSpine x = (x, [])
fv :: Exp -> [Name]
fv (EVar x) = [x]
fv EType = []
fv (EInfer _) = []
fv (EGuard _) = []
fv (EForm _ is) = fvs is
fv (ECon _ _ as) = fvs as
fv (ERed _ as) = fvs as
fv (EMeta _ as) = fvs as
fv (EPi _ _A _B) = fv _A ++ fvBind _B
fv (ELam _ _A b) = fv _A ++ fvBind b
fv (EApp _ a b) = fv a ++ fv b
fvs :: Args -> [Name]
fvs as = concatMap fv (map snd as)
fvBind :: Bind -> [Name]
fvBind (Bind n b) = n `delete` nub (fv b)
fvTel :: Tel -> [Name]
fvTel [] = []
fvTel ((_, _X, _A):_As) = fv _A ++ (_X `delete` nub (fvTel _As))
fvRHS :: RHS -> [Name]
fvRHS (MapsTo a) = fv a
fvRHS (Caseless x) = [x]
fvRHS (Split x) = [x]
fvPats :: Pats -> [Name]
fvPats = concatMap (\(i,p) -> fvPat p)
fvPat :: Pat -> [Name]
fvPat (PVar x) = [x]
fvPat (PInacc _) = []
fvPat (PCon _ ps) = fvPats ps
mv :: Exp -> [Flex]
mv (EVar _) = []
mv EType = []
mv (EInfer _) = []
mv (EGuard x) = [Right x]
mv (EForm _ is) = mvs is
mv (ECon _ _ as) = mvs as
mv (ERed _ as) = mvs as
mv (EMeta x as) = Left x : mvs as
mv (EPi _ _A _B) = mv _A ++ mvBind _B
mv (ELam _ _A b) = mv _A ++ mvBind b
mv (EApp _ a b) = mv a ++ mv b
mvs :: Args -> [Flex]
mvs as = concatMap mv (map snd as)
mvBind :: Bind -> [Flex]
mvBind (Bind _ b) = mv b
isHole :: MName -> Bool
isHole (MName (MHole _) _) = True
isHole (MName _ _) = False
|
8fa2dfc56c30f7c5adbfd018c6838763033593af223651b3c53f29225fe19b65 | GaloisInc/daedalus | Free.hs | # LANGUAGE RankNTypes , GADTs , BlockArguments , NamedFieldPuns #
-- -----------------------------------------------------------------------------
-- Bound, free, and free in a function position variables
--
module Daedalus.Type.Free where
import Control.Monad.State
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Graph.SCC(stronglyConnComp)
import qualified Data.List.NonEmpty as NE
import Data.Parameterized.Some
import Daedalus.Rec(sccToRec)
import Daedalus.Type.AST
import Daedalus.Type.Traverse
-- -- | Oorder a bunch of declarations topologically
topoOrder :: [TCDecl a] -> [Rec (TCDecl a)]
topoOrder = map sccToRec . stronglyConnComp . map node
where
node d = (d, tcDeclName d, getFree d)
getFree TCDecl { tcDeclDef } =
[ n | n@Name { nameScopedIdent = ModScope {} } <- Set.toList (tcCalls tcDeclDef) ]
-- TCName because we need the context
class TCFree t where
tcFree :: t -> Set (Some TCName)
class TCBinds t where
tcBinds :: t -> Set (Some TCName)
instance TCBinds t => TCBinds (Maybe t) where
tcBinds = maybe Set.empty tcBinds
instance (TCBinds a, TCBinds b) => TCBinds (a,b) where
tcBinds (a,b) = Set.union (tcBinds a) (tcBinds b)
instance TCBinds (TCName k) where
tcBinds x = Set.singleton (Some x)
instance TCBinds (LoopFlav a k) where
tcBinds lf =
case lf of
Fold x _ col -> tcBinds (x,col)
LoopMap col -> tcBinds col
LoopMany _ x _ -> tcBinds x
instance TCBinds (LoopCollection a) where
tcBinds col = tcBinds (lcKName col, lcElName col)
forgetFree :: forall k. TCName k -> Set (Some TCName) -> Set (Some TCName)
forgetFree v = Set.delete (Some v)
instance TCFree a => TCFree [a] where
tcFree = Set.unions . map tcFree
instance TCFree a => TCFree (Maybe a) where
tcFree = maybe Set.empty tcFree
instance (TCFree a, TCFree b) => TCFree (a,b) where
tcFree (a,b) = Set.union (tcFree a) (tcFree b)
instance TCFree a => TCFree (ManyBounds a) where
tcFree b =
case b of
Exactly e -> tcFree e
Between x y -> tcFree (x,y)
instance TCFree (LoopCollection a) where
tcFree col = tcFree (lcCol col)
instance TCFree (LoopFlav a k) where
tcFree lf =
case lf of
Fold _ s col -> tcFree (s,col)
LoopMap col -> tcFree col
LoopMany _ _ s -> tcFree s
instance TCFree (Loop a k) where
tcFree lp =
Set.unions [ tcFree (loopFlav lp)
, tcFree (loopBody lp) `Set.difference` tcBinds (loopFlav lp)
]
instance TCFree (TCF a k) where
tcFree texpr =
case texpr of
TCVar x -> Set.singleton (Some x)
TCDo (Just x) e1 e2 ->
tcFree e1 `Set.union` (forgetFree x (tcFree e2))
TCLet x e1 e2 -> tcFree e1 `Set.union` forgetFree x (tcFree e2)
TCCall f _ts as | isLocalName (tcName f) ->
Set.singleton (Some f) `Set.union` tcFree as
TCFor lp -> tcFree lp
TCCase e pats mdef ->
Set.unions (tcFree e : tcFree mdef : map doAlt (NE.toList pats))
where
doAlt (TCAlt ps rhs) = foldr forgetFree (tcFree rhs) (patBinds (head ps))
e -> foldMapTCF tcFree e
-- XXX: Why are we doing this complicated traverals thing here??
instance TCFree (TC a k) where
tcFree = tcFree . texprValue
instance TCFree (Arg a) where
tcFree (GrammarArg a) = tcFree a
tcFree (ValArg a) = tcFree a
tcFree (ClassArg a) = tcFree a
-- Get the *top level* calls
tcCalls :: TCDeclDef a k -> Set Name
tcCalls def =
case def of
ExternDecl _ -> Set.empty
Defined d -> flip execState Set.empty (go d)
where
go :: forall a k'. TC a k' -> State (Set Name) (TC a k')
go (TC m) = TC <$> traverse go' m
go' :: forall a k'. TCF a k' -> State (Set Name) (TCF a k')
go' texpr =
case texpr of
TCCall f ts as ->
do unless (isLocalName (tcName f)) (modify (Set.insert (tcName f)))
TCCall f ts <$> traverse (traverseArg go) as
x -> traverseTCF go x
| null | https://raw.githubusercontent.com/GaloisInc/daedalus/9749914338687cf7be73a3f550ed2d356edffc4d/src/Daedalus/Type/Free.hs | haskell | -----------------------------------------------------------------------------
Bound, free, and free in a function position variables
-- | Oorder a bunch of declarations topologically
TCName because we need the context
XXX: Why are we doing this complicated traverals thing here??
Get the *top level* calls | # LANGUAGE RankNTypes , GADTs , BlockArguments , NamedFieldPuns #
module Daedalus.Type.Free where
import Control.Monad.State
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Graph.SCC(stronglyConnComp)
import qualified Data.List.NonEmpty as NE
import Data.Parameterized.Some
import Daedalus.Rec(sccToRec)
import Daedalus.Type.AST
import Daedalus.Type.Traverse
topoOrder :: [TCDecl a] -> [Rec (TCDecl a)]
topoOrder = map sccToRec . stronglyConnComp . map node
where
node d = (d, tcDeclName d, getFree d)
getFree TCDecl { tcDeclDef } =
[ n | n@Name { nameScopedIdent = ModScope {} } <- Set.toList (tcCalls tcDeclDef) ]
class TCFree t where
tcFree :: t -> Set (Some TCName)
class TCBinds t where
tcBinds :: t -> Set (Some TCName)
instance TCBinds t => TCBinds (Maybe t) where
tcBinds = maybe Set.empty tcBinds
instance (TCBinds a, TCBinds b) => TCBinds (a,b) where
tcBinds (a,b) = Set.union (tcBinds a) (tcBinds b)
instance TCBinds (TCName k) where
tcBinds x = Set.singleton (Some x)
instance TCBinds (LoopFlav a k) where
tcBinds lf =
case lf of
Fold x _ col -> tcBinds (x,col)
LoopMap col -> tcBinds col
LoopMany _ x _ -> tcBinds x
instance TCBinds (LoopCollection a) where
tcBinds col = tcBinds (lcKName col, lcElName col)
forgetFree :: forall k. TCName k -> Set (Some TCName) -> Set (Some TCName)
forgetFree v = Set.delete (Some v)
instance TCFree a => TCFree [a] where
tcFree = Set.unions . map tcFree
instance TCFree a => TCFree (Maybe a) where
tcFree = maybe Set.empty tcFree
instance (TCFree a, TCFree b) => TCFree (a,b) where
tcFree (a,b) = Set.union (tcFree a) (tcFree b)
instance TCFree a => TCFree (ManyBounds a) where
tcFree b =
case b of
Exactly e -> tcFree e
Between x y -> tcFree (x,y)
instance TCFree (LoopCollection a) where
tcFree col = tcFree (lcCol col)
instance TCFree (LoopFlav a k) where
tcFree lf =
case lf of
Fold _ s col -> tcFree (s,col)
LoopMap col -> tcFree col
LoopMany _ _ s -> tcFree s
instance TCFree (Loop a k) where
tcFree lp =
Set.unions [ tcFree (loopFlav lp)
, tcFree (loopBody lp) `Set.difference` tcBinds (loopFlav lp)
]
instance TCFree (TCF a k) where
tcFree texpr =
case texpr of
TCVar x -> Set.singleton (Some x)
TCDo (Just x) e1 e2 ->
tcFree e1 `Set.union` (forgetFree x (tcFree e2))
TCLet x e1 e2 -> tcFree e1 `Set.union` forgetFree x (tcFree e2)
TCCall f _ts as | isLocalName (tcName f) ->
Set.singleton (Some f) `Set.union` tcFree as
TCFor lp -> tcFree lp
TCCase e pats mdef ->
Set.unions (tcFree e : tcFree mdef : map doAlt (NE.toList pats))
where
doAlt (TCAlt ps rhs) = foldr forgetFree (tcFree rhs) (patBinds (head ps))
e -> foldMapTCF tcFree e
instance TCFree (TC a k) where
tcFree = tcFree . texprValue
instance TCFree (Arg a) where
tcFree (GrammarArg a) = tcFree a
tcFree (ValArg a) = tcFree a
tcFree (ClassArg a) = tcFree a
tcCalls :: TCDeclDef a k -> Set Name
tcCalls def =
case def of
ExternDecl _ -> Set.empty
Defined d -> flip execState Set.empty (go d)
where
go :: forall a k'. TC a k' -> State (Set Name) (TC a k')
go (TC m) = TC <$> traverse go' m
go' :: forall a k'. TCF a k' -> State (Set Name) (TCF a k')
go' texpr =
case texpr of
TCCall f ts as ->
do unless (isLocalName (tcName f)) (modify (Set.insert (tcName f)))
TCCall f ts <$> traverse (traverseArg go) as
x -> traverseTCF go x
|
74b48dfe518cc1ce12e699234150df7a2566d578924287f50b128a6b3d328343 | quoll/asami | block_file.clj | (ns ^{:doc "A mapped file implementation of the Block abstraction"
:author "Paula Gearon"}
asami.durable.block.file.block-file
(:require [clojure.java.io :as io]
[asami.durable.common :refer [Transaction Closeable Forceable rewind! commit! close]]
[asami.durable.block.block-api :refer [CountedBlocks BlockManager copy-over! copy-block! allocate-block! get-id get-block-count]]
[asami.durable.block.bufferblock :refer [create-block]]
[asami.durable.block.file.voodoo :as voodoo]
[asami.cache :refer [lookup hit miss lru-cache-factory]])
(:import [java.io RandomAccessFile File]
[java.nio ByteBuffer IntBuffer LongBuffer MappedByteBuffer]
[java.nio.channels FileChannel FileChannel$MapMode]
[java.lang.ref SoftReference]))
;; (set! *warn-on-reflection* true)
(def region-size (* 8 1024 1024))
(def cache-size 1024)
(def retries 3)
(def ^:const null 0)
;; Each mapping is called a region, and will contain multiple blocks.
;; Blocks are expected to evenly divide into a region, though slack
;; space at the end of a region is permissible. The slack space will
;; be (mod region-size block-size).
;; - nr-blocks is the total number of blocks in a file
;; - block-size is the number of bytes in a block
;; - nr-mapped-regions is a cached value for the count of mapped-byte-buffers.
;; - mapped-byte-buffers is a seq of all regions.
;; - stride is the size of a region
;; - file is the File being mapped
;; - raf is the RandomAccessFile for the file
- fc is the FileChannel of the raf
(defrecord BlockFile [nr-blocks
block-size nr-mapped-regions
mapped-byte-buffers stride
file raf fc])
(declare set-nr-blocks!)
(defn open-block-file
"Opens a file for storing blocks. Returns a structure with the block file
and the RandomAccessFile that the block file uses. The file will need to be
closed when block files based on this initial block file are no longer needed.
When the init-nr-blocks is not nil, then it holds the recorded number of blocks
in the file."
[file block-size init-nr-blocks]
(let [file (io/file file)
raf (RandomAccessFile. file "rw")
^FileChannel fc (.getChannel raf)
nr-blocks (or init-nr-blocks (long (/ (.size fc) block-size)))
slack (mod region-size block-size)
stride (if (zero? slack) region-size (+ region-size (- block-size slack)))]
(set-nr-blocks! (->BlockFile 0 block-size 0 [] stride file raf fc) nr-blocks)))
(defn- system-cleanup
"Prompt the system to clean up outstanding objects, thereby releasing unique resources
for re-use. This is required for MappedByteBuffers as the Java NIO cannot release the
resources explicitly without putting a guard on every access (thereby compromising the
speed advantages of memory mapping) or allowing continuing access to memory that is
no longer accessible. Therefore, the resources must be released implicitly (by setting
all references null) and then calling this code to prompt the system to clean the
resources up. Depending on the host OS, this method may need to be called several times.
Linux typically only requires 1 or 2 invocations, while Windows regularly needs more than
2 and can require >6"
[]
(System/gc)
(try (Thread/sleep 100) (catch InterruptedException _))
(System/runFinalization))
(defn- retry-loop
"Retries a thunk, using a countdown and a cleanup thunk."
[action cleanup retries]
(loop [r retries]
(let [[response ex] (try [(action) nil] (catch Exception e [nil e]))]
(or response
(if (zero? r)
(throw ex)
(do
(cleanup)
(recur (dec r))))))))
(defn- file-size
"Gets the size of a block-file. Returns a size."
[{fc :fc}]
(.size ^FileChannel fc))
(defn- set-length!
"Sets the length of a block-file.
Returns the open block-file."
[{raf :raf :as block-file} ^long len]
(.setLength ^RandomAccessFile raf len)
block-file)
(defn- map-buffer
"Maps a buffer in a block-file. Returns a new block-file."
[{:keys [fc stride] :as block-file} region-nr]
(retry-loop
(fn []
(let [mbb (.map ^FileChannel fc FileChannel$MapMode/READ_WRITE (* region-nr stride) stride)]
(-> block-file
(update-in [:mapped-byte-buffers] conj mbb)
(assoc :nr-mapped-regions (inc region-nr)))))
system-cleanup
retries))
(defn map-file!
"Expands a block-file to one that is mapped to the required number of regions.
Returns a new block-file with the required mappings."
[{:keys [nr-mapped-regions stride mapped-byte-buffers] :as block-file} regions]
(let [mapped-size (if (> nr-mapped-regions 0) (+ (* (dec nr-mapped-regions) stride) stride) 0)
current-file-size (file-size block-file)
new-file-size (+ (* (dec regions) stride) stride)
_ (when (< current-file-size mapped-size)
(throw (ex-info (str "File has shrunk: " (:file block-file)) {:file-size current-file-size
:expected-size mapped-size})))
block-file (if (> current-file-size new-file-size)
(set-length! block-file new-file-size)
block-file)]
(loop [bf block-file region-nr nr-mapped-regions]
(if (>= region-nr regions)
bf
(recur (map-buffer bf region-nr) (inc region-nr))))))
(defn set-nr-blocks!
"Updates the number of blocks mapped in a block file. Returns the new block-file."
[{:keys [nr-blocks block-size nr-mapped-regions stride] :as block-file} new-nr]
(if (= new-nr nr-blocks)
block-file
(let [block-file (assoc block-file :nr-blocks new-nr)]
(if (< new-nr nr-blocks)
block-file
(let [regions (if (<= new-nr 0) 0 (inc (/ (* (dec new-nr) block-size) stride)))]
(if (> regions nr-mapped-regions)
(map-file! block-file regions)
block-file))))))
(defn get-nr-blocks
"Returns the number of blocks"
[{:keys [nr-blocks]}]
nr-blocks)
(defn force-file
"Ensures all cached data is written to disk. This returns synchronously after all data is written."
[{:keys [mapped-byte-buffers] :as block-file}]
(doseq [^MappedByteBuffer b mapped-byte-buffers] (.force b))
block-file)
(defn block-for
"Returns the byte buffer that references the given block."
[{:keys [nr-blocks block-size stride mapped-byte-buffers] :as block-file} block-id]
(when (< block-id 0) (throw (ex-info "Bad block ID" {:id block-id})))
(when (>= block-id nr-blocks)
(throw (ex-info "Block ID out of range" {:id block-id :max-id (dec nr-blocks)})))
(let [file-offset (* block-id block-size)
region-nr (int (/ file-offset stride))
offset (mod file-offset stride)]
(create-block block-id block-size offset (nth mapped-byte-buffers region-nr))))
(defn copy-block
"Allocates a new block with a copy of the original block."
[{:keys [mapped-byte-buffers block-size stride] :as block-file} {:keys [byte-offset ro] :as block} new-block-id]
(let [new-file-offset (* new-block-id block-size)
new-region-nr (int (/ new-file-offset stride))
new-byte-offset (mod new-file-offset stride)
^ByteBuffer new-buffer (nth mapped-byte-buffers new-region-nr)]
(.limit ^ByteBuffer ro (int (+ byte-offset block-size)))
(.position ^ByteBuffer ro (int byte-offset))
(.position new-buffer (int new-byte-offset))
(.put new-buffer ^ByteBuffer ro)
(create-block new-block-id block-size new-byte-offset new-buffer)))
(defn unmap
"Throw away mappings. This is dangerous, as it invalidates all instances.
Only to be used when closing the file for good."
[{:keys [mapped-byte-buffers block-size nr-blocks raf] :as block-file}]
(set-length! block-file (* block-size nr-blocks))
(voodoo/release mapped-byte-buffers)
(.close ^RandomAccessFile raf))
(defn clear!
[{:keys [block-size stride mapped-byte-buffers file raf fc] :as block-file}]
(voodoo/release mapped-byte-buffers)
(set-length! block-file 0)
(->BlockFile 0 block-size 0 [] stride file raf fc))
(def LN2 (Math/log 2))
(defn log2 [x] (max 0 (/ (Math/log x) LN2)))
(defn pow2
"Raise 2 to the power of x, with a floor value of 1."
[x]
(if (<= x 0) 1 (bit-shift-left 1 x)))
(def power-increment
"Defines how many bits behind the region magnitude to increment the number of regions by.
4 bits behind means that it starts at incrementing by 1, until size 32. Then 2 until 64.
Then 4 until 128, and so on."
4)
(defn next-size-increment
"Determine the next number of blocks that the file should move up to.
The size increment of the file increases as the size of the file increases"
[{:keys [nr-blocks block-size stride] :as block-file}]
(let [blocks-per-region (long (/ stride block-size))
full-regions (long (/ nr-blocks blocks-per-region))
new-regions (pow2 (- (long (log2 full-regions)) power-increment))]
(* blocks-per-region (+ full-regions new-regions))))
(defrecord ManagedBlockFile [state]
BlockManager
(allocate-block! [this]
(let [{block-id :next-id} (vswap! state update :next-id inc)]
(when (>= block-id (:nr-blocks (:block-file @state)))
(vswap! state update :block-file #(set-nr-blocks! % (next-size-increment %))))
(block-for (:block-file @state) block-id)))
(copy-block! [this block]
(let [new-block (allocate-block! this)]
(copy-over! new-block block 0)))
;; this operation is a no-op
(write-block [this block] this)
(get-block [this id]
(let [s (deref state)]
(if (and (= null id) (= (:next-id s) -1)) ;; asking for the null block on an empty file
(allocate-block! this)
(let [^SoftReference block-ref (lookup (:block-cache s) id)]
(if-let [block (and block-ref
(if-let [b (.get block-ref)]
(do
(vswap! state update :block-cache hit id)
b)))]
block
(let [block (block-for (:block-file s) id)]
(vswap! state update :block-cache miss id (SoftReference. block))
block))))))
(get-block-size [this]
(:block-size (:block-file @state)))
(copy-to-tx [this block]
(if (<= (get-id block) (:commit-point @state))
(copy-block! this block)
block))
CountedBlocks
(get-block-count [this]
(get-nr-blocks (:block-file @state)))
Transaction
(rewind! [this]
(vswap! state #(assoc % :next-id (:commit-point %)))
this)
(commit! [this]
(vswap! state #(assoc % :commit-point (:next-id %)))
(force-file (:block-file @state))
this)
Forceable
(force! [this]
(force-file (:block-file @state)))
Closeable
(close [this]
(let [{:keys [block-file next-id]} @state]
(force-file block-file)
(unmap (assoc block-file :nr-blocks (inc next-id)))))
(delete! [this]
(let [{{file :file} :block-file} @state]
(.delete ^File file))))
(defn create-managed-block-file
[filename block-size nr-blocks]
(let [block-file (open-block-file filename block-size nr-blocks)
next-id (dec (:nr-blocks block-file))]
(when (and nr-blocks (= next-id nr-blocks))
(throw (ex-info "Inconsistent reopening of block file" {:set-blocks nr-blocks :file-blocks (:nr-blocks block-file)})))
(->ManagedBlockFile (volatile! {:block-file block-file
:next-id next-id
:commit-point next-id
:block-cache (lru-cache-factory {} :threshold cache-size)}))))
| null | https://raw.githubusercontent.com/quoll/asami/e2d3b9354ecc2da2f5ba9d139e0ad0e8a95262c0/src/asami/durable/block/file/block_file.clj | clojure | (set! *warn-on-reflection* true)
Each mapping is called a region, and will contain multiple blocks.
Blocks are expected to evenly divide into a region, though slack
space at the end of a region is permissible. The slack space will
be (mod region-size block-size).
- nr-blocks is the total number of blocks in a file
- block-size is the number of bytes in a block
- nr-mapped-regions is a cached value for the count of mapped-byte-buffers.
- mapped-byte-buffers is a seq of all regions.
- stride is the size of a region
- file is the File being mapped
- raf is the RandomAccessFile for the file
this operation is a no-op
asking for the null block on an empty file | (ns ^{:doc "A mapped file implementation of the Block abstraction"
:author "Paula Gearon"}
asami.durable.block.file.block-file
(:require [clojure.java.io :as io]
[asami.durable.common :refer [Transaction Closeable Forceable rewind! commit! close]]
[asami.durable.block.block-api :refer [CountedBlocks BlockManager copy-over! copy-block! allocate-block! get-id get-block-count]]
[asami.durable.block.bufferblock :refer [create-block]]
[asami.durable.block.file.voodoo :as voodoo]
[asami.cache :refer [lookup hit miss lru-cache-factory]])
(:import [java.io RandomAccessFile File]
[java.nio ByteBuffer IntBuffer LongBuffer MappedByteBuffer]
[java.nio.channels FileChannel FileChannel$MapMode]
[java.lang.ref SoftReference]))
(def region-size (* 8 1024 1024))
(def cache-size 1024)
(def retries 3)
(def ^:const null 0)
- fc is the FileChannel of the raf
(defrecord BlockFile [nr-blocks
block-size nr-mapped-regions
mapped-byte-buffers stride
file raf fc])
(declare set-nr-blocks!)
(defn open-block-file
"Opens a file for storing blocks. Returns a structure with the block file
and the RandomAccessFile that the block file uses. The file will need to be
closed when block files based on this initial block file are no longer needed.
When the init-nr-blocks is not nil, then it holds the recorded number of blocks
in the file."
[file block-size init-nr-blocks]
(let [file (io/file file)
raf (RandomAccessFile. file "rw")
^FileChannel fc (.getChannel raf)
nr-blocks (or init-nr-blocks (long (/ (.size fc) block-size)))
slack (mod region-size block-size)
stride (if (zero? slack) region-size (+ region-size (- block-size slack)))]
(set-nr-blocks! (->BlockFile 0 block-size 0 [] stride file raf fc) nr-blocks)))
(defn- system-cleanup
"Prompt the system to clean up outstanding objects, thereby releasing unique resources
for re-use. This is required for MappedByteBuffers as the Java NIO cannot release the
resources explicitly without putting a guard on every access (thereby compromising the
speed advantages of memory mapping) or allowing continuing access to memory that is
no longer accessible. Therefore, the resources must be released implicitly (by setting
all references null) and then calling this code to prompt the system to clean the
resources up. Depending on the host OS, this method may need to be called several times.
Linux typically only requires 1 or 2 invocations, while Windows regularly needs more than
2 and can require >6"
[]
(System/gc)
(try (Thread/sleep 100) (catch InterruptedException _))
(System/runFinalization))
(defn- retry-loop
"Retries a thunk, using a countdown and a cleanup thunk."
[action cleanup retries]
(loop [r retries]
(let [[response ex] (try [(action) nil] (catch Exception e [nil e]))]
(or response
(if (zero? r)
(throw ex)
(do
(cleanup)
(recur (dec r))))))))
(defn- file-size
"Gets the size of a block-file. Returns a size."
[{fc :fc}]
(.size ^FileChannel fc))
(defn- set-length!
"Sets the length of a block-file.
Returns the open block-file."
[{raf :raf :as block-file} ^long len]
(.setLength ^RandomAccessFile raf len)
block-file)
(defn- map-buffer
"Maps a buffer in a block-file. Returns a new block-file."
[{:keys [fc stride] :as block-file} region-nr]
(retry-loop
(fn []
(let [mbb (.map ^FileChannel fc FileChannel$MapMode/READ_WRITE (* region-nr stride) stride)]
(-> block-file
(update-in [:mapped-byte-buffers] conj mbb)
(assoc :nr-mapped-regions (inc region-nr)))))
system-cleanup
retries))
(defn map-file!
"Expands a block-file to one that is mapped to the required number of regions.
Returns a new block-file with the required mappings."
[{:keys [nr-mapped-regions stride mapped-byte-buffers] :as block-file} regions]
(let [mapped-size (if (> nr-mapped-regions 0) (+ (* (dec nr-mapped-regions) stride) stride) 0)
current-file-size (file-size block-file)
new-file-size (+ (* (dec regions) stride) stride)
_ (when (< current-file-size mapped-size)
(throw (ex-info (str "File has shrunk: " (:file block-file)) {:file-size current-file-size
:expected-size mapped-size})))
block-file (if (> current-file-size new-file-size)
(set-length! block-file new-file-size)
block-file)]
(loop [bf block-file region-nr nr-mapped-regions]
(if (>= region-nr regions)
bf
(recur (map-buffer bf region-nr) (inc region-nr))))))
(defn set-nr-blocks!
"Updates the number of blocks mapped in a block file. Returns the new block-file."
[{:keys [nr-blocks block-size nr-mapped-regions stride] :as block-file} new-nr]
(if (= new-nr nr-blocks)
block-file
(let [block-file (assoc block-file :nr-blocks new-nr)]
(if (< new-nr nr-blocks)
block-file
(let [regions (if (<= new-nr 0) 0 (inc (/ (* (dec new-nr) block-size) stride)))]
(if (> regions nr-mapped-regions)
(map-file! block-file regions)
block-file))))))
(defn get-nr-blocks
"Returns the number of blocks"
[{:keys [nr-blocks]}]
nr-blocks)
(defn force-file
"Ensures all cached data is written to disk. This returns synchronously after all data is written."
[{:keys [mapped-byte-buffers] :as block-file}]
(doseq [^MappedByteBuffer b mapped-byte-buffers] (.force b))
block-file)
(defn block-for
"Returns the byte buffer that references the given block."
[{:keys [nr-blocks block-size stride mapped-byte-buffers] :as block-file} block-id]
(when (< block-id 0) (throw (ex-info "Bad block ID" {:id block-id})))
(when (>= block-id nr-blocks)
(throw (ex-info "Block ID out of range" {:id block-id :max-id (dec nr-blocks)})))
(let [file-offset (* block-id block-size)
region-nr (int (/ file-offset stride))
offset (mod file-offset stride)]
(create-block block-id block-size offset (nth mapped-byte-buffers region-nr))))
(defn copy-block
"Allocates a new block with a copy of the original block."
[{:keys [mapped-byte-buffers block-size stride] :as block-file} {:keys [byte-offset ro] :as block} new-block-id]
(let [new-file-offset (* new-block-id block-size)
new-region-nr (int (/ new-file-offset stride))
new-byte-offset (mod new-file-offset stride)
^ByteBuffer new-buffer (nth mapped-byte-buffers new-region-nr)]
(.limit ^ByteBuffer ro (int (+ byte-offset block-size)))
(.position ^ByteBuffer ro (int byte-offset))
(.position new-buffer (int new-byte-offset))
(.put new-buffer ^ByteBuffer ro)
(create-block new-block-id block-size new-byte-offset new-buffer)))
(defn unmap
"Throw away mappings. This is dangerous, as it invalidates all instances.
Only to be used when closing the file for good."
[{:keys [mapped-byte-buffers block-size nr-blocks raf] :as block-file}]
(set-length! block-file (* block-size nr-blocks))
(voodoo/release mapped-byte-buffers)
(.close ^RandomAccessFile raf))
(defn clear!
[{:keys [block-size stride mapped-byte-buffers file raf fc] :as block-file}]
(voodoo/release mapped-byte-buffers)
(set-length! block-file 0)
(->BlockFile 0 block-size 0 [] stride file raf fc))
(def LN2 (Math/log 2))
(defn log2 [x] (max 0 (/ (Math/log x) LN2)))
(defn pow2
"Raise 2 to the power of x, with a floor value of 1."
[x]
(if (<= x 0) 1 (bit-shift-left 1 x)))
(def power-increment
"Defines how many bits behind the region magnitude to increment the number of regions by.
4 bits behind means that it starts at incrementing by 1, until size 32. Then 2 until 64.
Then 4 until 128, and so on."
4)
(defn next-size-increment
"Determine the next number of blocks that the file should move up to.
The size increment of the file increases as the size of the file increases"
[{:keys [nr-blocks block-size stride] :as block-file}]
(let [blocks-per-region (long (/ stride block-size))
full-regions (long (/ nr-blocks blocks-per-region))
new-regions (pow2 (- (long (log2 full-regions)) power-increment))]
(* blocks-per-region (+ full-regions new-regions))))
(defrecord ManagedBlockFile [state]
BlockManager
(allocate-block! [this]
(let [{block-id :next-id} (vswap! state update :next-id inc)]
(when (>= block-id (:nr-blocks (:block-file @state)))
(vswap! state update :block-file #(set-nr-blocks! % (next-size-increment %))))
(block-for (:block-file @state) block-id)))
(copy-block! [this block]
(let [new-block (allocate-block! this)]
(copy-over! new-block block 0)))
(write-block [this block] this)
(get-block [this id]
(let [s (deref state)]
(allocate-block! this)
(let [^SoftReference block-ref (lookup (:block-cache s) id)]
(if-let [block (and block-ref
(if-let [b (.get block-ref)]
(do
(vswap! state update :block-cache hit id)
b)))]
block
(let [block (block-for (:block-file s) id)]
(vswap! state update :block-cache miss id (SoftReference. block))
block))))))
(get-block-size [this]
(:block-size (:block-file @state)))
(copy-to-tx [this block]
(if (<= (get-id block) (:commit-point @state))
(copy-block! this block)
block))
CountedBlocks
(get-block-count [this]
(get-nr-blocks (:block-file @state)))
Transaction
(rewind! [this]
(vswap! state #(assoc % :next-id (:commit-point %)))
this)
(commit! [this]
(vswap! state #(assoc % :commit-point (:next-id %)))
(force-file (:block-file @state))
this)
Forceable
(force! [this]
(force-file (:block-file @state)))
Closeable
(close [this]
(let [{:keys [block-file next-id]} @state]
(force-file block-file)
(unmap (assoc block-file :nr-blocks (inc next-id)))))
(delete! [this]
(let [{{file :file} :block-file} @state]
(.delete ^File file))))
(defn create-managed-block-file
[filename block-size nr-blocks]
(let [block-file (open-block-file filename block-size nr-blocks)
next-id (dec (:nr-blocks block-file))]
(when (and nr-blocks (= next-id nr-blocks))
(throw (ex-info "Inconsistent reopening of block file" {:set-blocks nr-blocks :file-blocks (:nr-blocks block-file)})))
(->ManagedBlockFile (volatile! {:block-file block-file
:next-id next-id
:commit-point next-id
:block-cache (lru-cache-factory {} :threshold cache-size)}))))
|
feffdde29b63e13372b63d3863fc6a5edd12e35fc9a42f63782e9aa3dd0d82a3 | lpeterse/haskell-ssh | Name.hs | module Network.SSH.Name where
import qualified Data.ByteString as BS
import Data.String
newtype Name = Name BS.ByteString
deriving (Eq, Ord, Show, IsString)
class HasName a where
name :: a -> Name
instance HasName Name where
name = id
instance HasName () where
name = const (Name "()") | null | https://raw.githubusercontent.com/lpeterse/haskell-ssh/d1a614b6bf30c4932ee5a66efcae6e71680b4819/src/hssh-internal/Network/SSH/Name.hs | haskell | module Network.SSH.Name where
import qualified Data.ByteString as BS
import Data.String
newtype Name = Name BS.ByteString
deriving (Eq, Ord, Show, IsString)
class HasName a where
name :: a -> Name
instance HasName Name where
name = id
instance HasName () where
name = const (Name "()") | |
89865f3efa0bd45b968650de8b548a999a8de962b39741afdfc4bb87e6a1c2fa | clj-kondo/clj-kondo | a_test.clj | (ns simple-test
(:require [clojure.test :refer [deftest is]]))
(deftest foo
(is (= 1 1)))
| null | https://raw.githubusercontent.com/clj-kondo/clj-kondo/35207f8b92f5a813483505b21af856bce4623c43/corpus/simple_test/a_test.clj | clojure | (ns simple-test
(:require [clojure.test :refer [deftest is]]))
(deftest foo
(is (= 1 1)))
| |
701983c245f9594f3174e747318b8d037a4cd9b615fb93028cba14cd496669c0 | ghcjs/jsaddle-dom | SpeechSynthesisEvent.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.SpeechSynthesisEvent
(getCharIndex, getElapsedTime, getName, SpeechSynthesisEvent(..),
gTypeSpeechSynthesisEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/SpeechSynthesisEvent.charIndex Mozilla SpeechSynthesisEvent.charIndex documentation >
getCharIndex :: (MonadDOM m) => SpeechSynthesisEvent -> m Word
getCharIndex self
= liftDOM (round <$> ((self ^. js "charIndex") >>= valToNumber))
| < -US/docs/Web/API/SpeechSynthesisEvent.elapsedTime Mozilla SpeechSynthesisEvent.elapsedTime documentation >
getElapsedTime :: (MonadDOM m) => SpeechSynthesisEvent -> m Float
getElapsedTime self
= liftDOM
(realToFrac <$> ((self ^. js "elapsedTime") >>= valToNumber))
| < -US/docs/Web/API/SpeechSynthesisEvent.name Mozilla SpeechSynthesisEvent.name documentation >
getName ::
(MonadDOM m, FromJSString result) =>
SpeechSynthesisEvent -> m result
getName self = liftDOM ((self ^. js "name") >>= fromJSValUnchecked)
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/SpeechSynthesisEvent.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.SpeechSynthesisEvent
(getCharIndex, getElapsedTime, getName, SpeechSynthesisEvent(..),
gTypeSpeechSynthesisEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/SpeechSynthesisEvent.charIndex Mozilla SpeechSynthesisEvent.charIndex documentation >
getCharIndex :: (MonadDOM m) => SpeechSynthesisEvent -> m Word
getCharIndex self
= liftDOM (round <$> ((self ^. js "charIndex") >>= valToNumber))
| < -US/docs/Web/API/SpeechSynthesisEvent.elapsedTime Mozilla SpeechSynthesisEvent.elapsedTime documentation >
getElapsedTime :: (MonadDOM m) => SpeechSynthesisEvent -> m Float
getElapsedTime self
= liftDOM
(realToFrac <$> ((self ^. js "elapsedTime") >>= valToNumber))
| < -US/docs/Web/API/SpeechSynthesisEvent.name Mozilla SpeechSynthesisEvent.name documentation >
getName ::
(MonadDOM m, FromJSString result) =>
SpeechSynthesisEvent -> m result
getName self = liftDOM ((self ^. js "name") >>= fromJSValUnchecked)
|
5c1dd7370646e40b9bc3ed783b17585f2ec0c8e47ada4f34d10128984d536511 | chaoxu/mgccl-haskell | aspc.hs | import Math.Combinatorics.Exact.Binomial
main :: IO ()
main = do c <- getLine
print $ (\[x,y]->binsum x y) (map read (words c) ::[Integer])
where binsum n m = foldl1 (\x y -> (x+y) `rem` 1000000) [n `choose` k|k<-[m..n]]
| null | https://raw.githubusercontent.com/chaoxu/mgccl-haskell/bb03e39ae43f410bd2a673ac2b438929ab8ef7a1/rosalind/aspc.hs | haskell | import Math.Combinatorics.Exact.Binomial
main :: IO ()
main = do c <- getLine
print $ (\[x,y]->binsum x y) (map read (words c) ::[Integer])
where binsum n m = foldl1 (\x y -> (x+y) `rem` 1000000) [n `choose` k|k<-[m..n]]
| |
65e926208695a8af1c355c2f73ef03b7fb3c42dd002c4a2786ed1919082b03d8 | flipstone/haskell-for-beginners | 1_making_change.hs | -- Implement a program to make change. Your
-- program should accept a list of numbers
on stdin . The first number indicates the
-- amount of money to change, the rest
-- indicate the denominations of coin to
-- make change into.
--
-- The program should print out a list of
-- coins in the denominations given that totals
-- up to the amount of money being changed.
-- If there is money left over (i.e. the money
-- cannot be divided evenly into the denominations
-- given), the program should print the list of
-- coins and a message indicating how much
-- was left unchanged.
--
import Data.List (sort, foldl', intercalate)
main = interact makeChange
data Coin = Coin Int deriving (Show, Eq, Ord)
data Unchanged = Unchanged Int deriving Show
data Change = Change Unchanged [Coin] deriving Show
readInt :: String -> Int
readInt = read
makeChange :: String -> String
makeChange s =
let (total:denominations) = map readInt (words s)
unchanged = Unchanged total
coins = reverse $ sort $ map Coin denominations
initial = Change unchanged []
in showResult $ foldl' giveCoins initial coins
giveCoins :: Change -> Coin -> Change
giveCoins orig@(Change (Unchanged total) coins) coin@(Coin value)
| value > total = orig
| otherwise = let count = total `div` value
given = count * value
in Change (Unchanged (total - given))
(coins ++ replicate count coin)
value :: Coin -> Int
value (Coin v) = v
showResult :: Change -> String
showResult (Change (Unchanged total) coins) =
(intercalate "\n" $ map (show . value) coins) ++ "\n" ++
if total > 0
then "And " ++ show total ++ " unchanged!" ++ "\n"
else ""
| null | https://raw.githubusercontent.com/flipstone/haskell-for-beginners/e586a1f3ef08f21d5181171fe7a7b27057391f0b/answers/chapter_10/1_making_change.hs | haskell | Implement a program to make change. Your
program should accept a list of numbers
amount of money to change, the rest
indicate the denominations of coin to
make change into.
The program should print out a list of
coins in the denominations given that totals
up to the amount of money being changed.
If there is money left over (i.e. the money
cannot be divided evenly into the denominations
given), the program should print the list of
coins and a message indicating how much
was left unchanged.
| on stdin . The first number indicates the
import Data.List (sort, foldl', intercalate)
main = interact makeChange
data Coin = Coin Int deriving (Show, Eq, Ord)
data Unchanged = Unchanged Int deriving Show
data Change = Change Unchanged [Coin] deriving Show
readInt :: String -> Int
readInt = read
makeChange :: String -> String
makeChange s =
let (total:denominations) = map readInt (words s)
unchanged = Unchanged total
coins = reverse $ sort $ map Coin denominations
initial = Change unchanged []
in showResult $ foldl' giveCoins initial coins
giveCoins :: Change -> Coin -> Change
giveCoins orig@(Change (Unchanged total) coins) coin@(Coin value)
| value > total = orig
| otherwise = let count = total `div` value
given = count * value
in Change (Unchanged (total - given))
(coins ++ replicate count coin)
value :: Coin -> Int
value (Coin v) = v
showResult :: Change -> String
showResult (Change (Unchanged total) coins) =
(intercalate "\n" $ map (show . value) coins) ++ "\n" ++
if total > 0
then "And " ++ show total ++ " unchanged!" ++ "\n"
else ""
|
05873d6e6167d854552b254d9310d54dd8bc5e09344b93368a994a1cadc03bda | metaocaml/ber-metaocaml | pr8769.ml | TEST
modules = " nocrypto.mli fortuna.ml rng.ml "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
module = " nocrypto.mli "
* * ocamlc.byte
flags = " -for - pack Nocrypto "
module = " fortuna.ml "
* * ocamlc.byte
flags = " -for - pack Nocrypto "
module = " rng.ml "
* * ocamlc.byte
program = " nocrypto.cmo "
flags = " -pack "
all_modules = " fortuna.cmo rng.cmo "
* setup - ocamlopt.byte - build - env
* * ocamlopt.byte
module = " nocrypto.mli "
* * ocamlopt.byte
flags = " -for - pack Nocrypto "
module = " fortuna.ml "
* * ocamlopt.byte
flags = " -for - pack Nocrypto "
module = " rng.ml "
* * ocamlopt.byte
program = " nocrypto.cmx "
flags = " -pack "
all_modules = " fortuna.cmx rng.cmx "
modules = "nocrypto.mli fortuna.ml rng.ml"
* setup-ocamlc.byte-build-env
** ocamlc.byte
module = "nocrypto.mli"
** ocamlc.byte
flags = "-for-pack Nocrypto"
module = "fortuna.ml"
** ocamlc.byte
flags = "-for-pack Nocrypto"
module = "rng.ml"
** ocamlc.byte
program = "nocrypto.cmo"
flags = "-pack"
all_modules = "fortuna.cmo rng.cmo"
* setup-ocamlopt.byte-build-env
** ocamlopt.byte
module = "nocrypto.mli"
** ocamlopt.byte
flags = "-for-pack Nocrypto"
module = "fortuna.ml"
** ocamlopt.byte
flags = "-for-pack Nocrypto"
module = "rng.ml"
** ocamlopt.byte
program = "nocrypto.cmx"
flags = "-pack"
all_modules = "fortuna.cmx rng.cmx"
*)
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/regression/pr8769/pr8769.ml | ocaml | TEST
modules = " nocrypto.mli fortuna.ml rng.ml "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
module = " nocrypto.mli "
* * ocamlc.byte
flags = " -for - pack Nocrypto "
module = " fortuna.ml "
* * ocamlc.byte
flags = " -for - pack Nocrypto "
module = " rng.ml "
* * ocamlc.byte
program = " nocrypto.cmo "
flags = " -pack "
all_modules = " fortuna.cmo rng.cmo "
* setup - ocamlopt.byte - build - env
* * ocamlopt.byte
module = " nocrypto.mli "
* * ocamlopt.byte
flags = " -for - pack Nocrypto "
module = " fortuna.ml "
* * ocamlopt.byte
flags = " -for - pack Nocrypto "
module = " rng.ml "
* * ocamlopt.byte
program = " nocrypto.cmx "
flags = " -pack "
all_modules = " fortuna.cmx rng.cmx "
modules = "nocrypto.mli fortuna.ml rng.ml"
* setup-ocamlc.byte-build-env
** ocamlc.byte
module = "nocrypto.mli"
** ocamlc.byte
flags = "-for-pack Nocrypto"
module = "fortuna.ml"
** ocamlc.byte
flags = "-for-pack Nocrypto"
module = "rng.ml"
** ocamlc.byte
program = "nocrypto.cmo"
flags = "-pack"
all_modules = "fortuna.cmo rng.cmo"
* setup-ocamlopt.byte-build-env
** ocamlopt.byte
module = "nocrypto.mli"
** ocamlopt.byte
flags = "-for-pack Nocrypto"
module = "fortuna.ml"
** ocamlopt.byte
flags = "-for-pack Nocrypto"
module = "rng.ml"
** ocamlopt.byte
program = "nocrypto.cmx"
flags = "-pack"
all_modules = "fortuna.cmx rng.cmx"
*)
| |
7f4dcbb92f69776fe446710c52c9ff85851b5c0b223a060906679708272e3f85 | sneeuwballen/zipperposition | Proof.ml |
This file is free software , part of Zipperposition . See file " license " for more details .
(** {1 Manipulate proofs} *)
module Loc = ParseLocation
module T = TypedSTerm
module F = T.Form
module UA = UntypedAST
module Fmt = CCFormat
type term = TypedSTerm.t
type form = TypedSTerm.t
type inst_subst = (term, term) Var.Subst.t
type 'a sequence = ('a -> unit) -> unit
let section = Util.Section.make "proof"
type rule = string
type tag = Builtin.Tag.t
type attrs = UntypedAST.attrs
type info = UntypedAST.attr
type infos = info list
type kind =
| Intro of source * role
| Inference of rule * tag list
| Simplification of rule * tag list
| Esa of rule
| Trivial (** trivial, or trivial within theories *)
| Define of ID.t * source (** definition *)
| By_def of ID.t (** following from the def of ID *)
and source = {
src_id: int;
src_view: source_view;
}
and source_view =
| From_file of from_file * UntypedAST.attrs
| Internal of attrs
and role =
| R_assert
| R_goal
| R_def
| R_decl
| R_lemma
(* a statement in a file *)
and from_file = {
file : string;
name : string option;
loc: ParseLocation.t option;
}
type flavor =
[ `Pure_bool
| `Absurd_lits
| `Proof_of_false
| `Vanilla
| `Def
]
* for the result of a proof step
type 'a result_tc = {
res_id: int; (* unique ID of the class *)
res_of_exn: exn -> 'a option;
res_to_exn: 'a -> exn;
res_compare: 'a -> 'a -> int;
res_is_stmt: bool;
res_is_dead_cl: unit -> bool;
res_pp_in: Output_format.t -> 'a CCFormat.printer;
res_to_form: ctx:Term.Conv.ctx -> 'a -> TypedSTerm.Form.t;
res_to_form_subst: ctx:Term.Conv.ctx -> Subst.Projection.t -> 'a -> form * inst_subst;
res_name:('a -> string) option;
res_flavor: 'a -> flavor;
}
(** existential type for result of an inference *)
type result = Res : 'a result_tc * exn -> result
(** A proof step, without the conclusion *)
type step = {
id: int; (* unique ID *)
kind: kind;
dist_to_goal: int option; (* distance to goal *)
proof_depth: int;
parents: parent list;
infos: UntypedAST.attr list; (* additional info *)
}
and parent =
| P_of of proof
| P_subst of proof * Subst.Projection.t
(** Proof Step with its conclusion *)
and proof = {
step: step;
result : result;
}
type t = proof
module Tag = Builtin.Tag
module Rule = struct
type t = rule
let pp out r = Format.fprintf out "'%s'" r
let name r = r
let mk name = name
let mkf fmt = CCFormat.ksprintf ~f:mk fmt
end
module Src = struct
type t = source
let file x = x.file
let name x = x.name
let loc x = x.loc
let equal a b = a.src_id = b.src_id
let hash a = a.src_id
let view a = a.src_view
let mk_ =
let n = ref 0 in
fun src_view -> {src_view; src_id=CCRef.get_then_incr n}
let mk_name_ =
let n = ref 0 in
fun () -> Printf.sprintf "zf_stmt_%d" (CCRef.get_then_incr n)
let from_file ?loc ?name ?(attrs=[]) file : t =
(* NOTE: we always give a unique name if not present *)
let name = match name with Some _ -> name | None -> Some(mk_name_()) in
mk_ (From_file ({ name; loc; file; }, attrs))
let internal attrs = mk_ (Internal attrs)
let pp_from_file out x =
let pp_name out = function
| None -> ()
| Some n -> Format.fprintf out "at %s " n
in
Format.fprintf out "@[<2>%ain@ `%s`@,%a@]"
pp_name x.name x.file ParseLocation.pp_opt x.loc
let pp_role out = function
| R_decl -> CCFormat.string out "decl"
| R_assert -> CCFormat.string out "assert"
| R_goal -> CCFormat.string out "goal"
| R_def -> CCFormat.string out "def"
| R_lemma -> CCFormat.string out "lemma"
let pp_tstp out src = match view src with
| Internal _ -> ()
| From_file (src,_) ->
let file = src.file in
begin match src.name with
| None -> Format.fprintf out "file('%s')" file
| Some name -> Format.fprintf out "file(@['%s',@ '%s'@])" file name
end
let pp out src = match view src with
| Internal _ -> ()
| From_file (src,attrs) ->
let file = src.file in
begin match src.name with
| None -> Format.fprintf out "'%s'%a" file UA.pp_attrs attrs
| Some name -> Format.fprintf out "'%s' in '%s'%a" name file UA.pp_attrs attrs
end
let to_attrs src : UntypedAST.attrs =
let open UntypedAST.A in
begin match view src with
| Internal attrs -> str "internal" :: attrs
| From_file (f,attrs) ->
begin match f.name with
| None -> app "file" [quoted f.file] :: attrs
| Some n -> app "file" [quoted f.file; quoted n] :: attrs
end
end
end
module Parent = struct
type t = parent
let from p: t = P_of p
let from_subst_proj p (subst:Subst.Projection.t) : t =
if Subst.Projection.is_empty subst
then P_of p
else P_subst (p,subst)
let from_subst renaming (p,sc_p) subst: t =
let subst = Subst.Projection.make renaming (subst,sc_p) in
from_subst_proj p subst
let proof = function
| P_of p -> p
| P_subst (p,_) -> p
let subst = function
| P_of _ -> None
| P_subst (_,s) -> Some s
end
let pp_tag = Tag.pp
let pp_tags out = function
| [] -> ()
| l -> Fmt.fprintf out "@ [@[%a@]]" (Util.pp_list ~sep:"," pp_tag) l
module Kind = struct
type t = kind
let pp_parent_ out = function
| `Name s -> Format.fprintf out "%s" s
| `Theory s -> Format.fprintf out "theory(%s)" s
let pp out k = match k with
| Intro (src,R_goal) -> Format.fprintf out "goal %a" Src.pp src
| Intro (src,R_lemma) -> Format.fprintf out "lemma %a" Src.pp src
| Intro (src,R_assert) -> Src.pp out src
| Intro (src, (R_def | R_decl)) -> Src.pp out src
| Inference (rule,tags) ->
Format.fprintf out "inf %a%a" Rule.pp rule pp_tags tags
| Simplification (rule,tags) ->
Format.fprintf out "simp %a%a" Rule.pp rule pp_tags tags
| Esa rule ->
Format.fprintf out "esa %a" Rule.pp rule
| Trivial -> CCFormat.string out "trivial"
| By_def id -> Format.fprintf out "by_def(%a)" ID.pp id
| Define (id,src) -> Format.fprintf out "define(@[%a@ %a@])" ID.pp id Src.pp src
let pp_tstp out (k,parents) =
let pp_parents = Util.pp_list pp_parent_ in
let pp_step status out (rule,parents) = match parents with
| [] ->
Format.fprintf out "inference(@[%a,@ [status(%s)]@])" Rule.pp rule status
| _::_ ->
Format.fprintf out "inference(@[%a,@ [status(%s)],@ [@[%a@]]@])"
Rule.pp rule status pp_parents parents
in
begin match k with
| Intro (src,(R_assert|R_goal|R_def|R_decl)) -> Src.pp_tstp out src
| Inference (rule,_)
| Simplification (rule,_) -> pp_step "thm" out (rule,parents)
| Esa rule -> pp_step "esa" out (rule,parents)
| Intro (_,R_lemma) -> Format.fprintf out "lemma"
| Trivial -> assert(parents=[]); Format.fprintf out "trivial([status(thm)])"
| By_def _ -> Format.fprintf out "by_def([status(thm)])"
| Define _ -> Format.fprintf out "define([status(thm)])"
end
end
module Result = struct
type t = result
type 'a tc = 'a result_tc
let res_to_int_ = function (Res ({res_id; _}, _)) -> res_id
type flavor =
[ `Pure_bool
| `Absurd_lits
| `Proof_of_false
| `Vanilla
| `Def
]
let compare a b = match a, b with
| Res (r1,x1), Res (r2,x2) ->
if r1.res_id <> r2.res_id
then CCInt.compare r1.res_id r2.res_id
else match r1.res_of_exn x1, r1.res_of_exn x2 with
| Some y1, Some y2 -> r1.res_compare y1 y2
| _ -> assert false (* same ID?? *)
let equal a b = compare a b = 0
let to_form ?(ctx=Term.Conv.create()) (Res (r,x)) = match r.res_of_exn x with
| None -> assert false
| Some x -> r.res_to_form ~ctx x
let to_form_subst ?(ctx=Term.Conv.create()) subst (Res (r,x)) = match r.res_of_exn x with
| None -> assert false
| Some x -> r.res_to_form_subst ~ctx subst x
let pp_in o out (Res (r,x)) = match r.res_of_exn x with
| None -> assert false
| Some x -> r.res_pp_in o out x
let pp = pp_in Output_format.normal
let flavor (Res (r,x)) = match r.res_of_exn x with
| None -> assert false
| Some x -> r.res_flavor x
let name (Res (r,x)) = match r.res_of_exn x with
| None -> assert false
| Some x ->
begin match r.res_name with
| None -> None
| Some f -> Some (f x)
end
let n_ = ref 0
let make_tc (type a)
~of_exn ~to_exn ~compare
~to_form
?(to_form_subst=fun ~ctx:_ _ _ -> assert false)
~pp_in
?name
?(is_stmt=false)
?(is_dead_cl= fun () -> false)
?(flavor=fun _ -> `Vanilla)
() : a result_tc
=
let id = CCRef.incr_then_get n_ in
{ res_id=id;
res_of_exn=of_exn;
res_to_exn=to_exn;
res_compare=compare;
res_is_stmt=is_stmt;
res_is_dead_cl=is_dead_cl;
res_pp_in=pp_in;
res_to_form=to_form;
res_to_form_subst=to_form_subst;
res_name=name;
res_flavor=flavor;
}
let make tc x : t = Res (tc, tc.res_to_exn x)
exception E_form of form
type inst_subst = (term,term) Var.Subst.t
let form_tc : form result_tc =
make_tc
~of_exn:(function
| E_form f -> Some f | _ -> None)
~to_exn:(fun f -> E_form f)
~to_form:(fun ~ctx:_ t -> t)
~compare:T.compare
~pp_in:TypedSTerm.pp_in
~flavor:(fun f -> if T.equal f F.false_ then `Proof_of_false else `Vanilla)
()
let of_form = make form_tc
let is_stmt (Res (r,_)) = r.res_is_stmt
let is_dead_cl (Res (r,_)) = r.res_is_dead_cl
end
let pp_parent out = function
| P_of p -> Result.pp out p.result
| P_subst (p,subst) ->
Format.fprintf out "(@[instantiate `%a`@ :subst %a@])"
Result.pp p.result Subst.Projection.pp subst
module Step = struct
type t = step
let equal p1 p2 = p1.id=p2.id
let compare p1 p2 = CCInt.compare p1.id p2.id
let hash p = p.id
let kind p = p.kind
let parents p = p.parents
let infos p = p.infos
let src p = match p.kind with
| Intro (src,_) | Define (_,src) -> Some src
| Trivial | By_def _ | Esa _ | Inference _ | Simplification _
-> None
let to_attrs p = match src p with
| None -> []
| Some src -> Src.to_attrs src
let rule p = match p.kind with
| Intro _
| Trivial
| By_def _
| Define _ -> None
| Esa rule
| Simplification (rule,_)
| Inference (rule,_)
-> Some rule
let is_assert p = match p.kind with Intro (_,R_assert) -> true | _ -> false
let is_assert_like p = match p.kind with Intro (_,(R_assert|R_def|R_decl)) -> true | _ -> false
let is_goal p = match p.kind with Intro (_,(R_goal|R_lemma)) -> true | _ -> false
let is_trivial p = match p.kind with Trivial -> true | _ -> false
let is_by_def p = match p.kind with By_def _ -> true | _ -> false
let is_inference p = match p.kind with Inference _ -> true | _ -> false
let is_simpl ?(name=None) p =
match p.kind with
| Simplification(rule,_) ->
begin match name with
| None -> true
| Some n -> String.equal (Rule.name rule) n
end
| _ -> false
let distance_to_goal p = p.dist_to_goal
let parent_proof_depth parents =
List.map (fun par -> (Parent.proof par).step.proof_depth) parents
|> CCList.fold_left (fun acc depth -> max acc depth) 0
let count_rules ~name p =
let rec aux p =
let init =
CCOpt.get_or ~default: 0 (CCOpt.map (fun r ->
if CCString.equal (Rule.name r) name then 1 else 0)
(rule p)) in
List.fold_left (fun acc par ->
acc + aux ((Parent.proof par).step)
) init (p.parents) in
aux p
let get_id_ =
let n = ref 0 in
fun () -> CCRef.incr_then_get n
let trivial = {id=get_id_(); parents=[]; kind=Trivial; dist_to_goal=None; proof_depth=0; infos=[]; }
let by_def id = {id=get_id_(); parents=[]; kind=By_def id; dist_to_goal=None; proof_depth=0; infos=[]; }
let intro src r =
let dist_to_goal = match r with
| R_goal | R_lemma -> Some 0 | _ -> None
in
{id=get_id_(); parents=[]; proof_depth=0; kind=Intro(src,r); dist_to_goal; infos=[]}
let define id src parents =
{id=get_id_(); parents; kind=Define (id,src); dist_to_goal=None; proof_depth=parent_proof_depth parents; infos=[]; }
let define_internal id parents = define id (Src.internal []) parents
let lemma src =
{id=get_id_(); parents=[]; kind=Intro(src,R_lemma); dist_to_goal=Some 0; proof_depth=0; infos=[]; }
let combine_dist o p = match o, (Parent.proof p).step.dist_to_goal with
| None, None -> None
| (Some _ as res), None
| None, (Some _ as res) -> res
| Some x, Some y -> Some (min x y)
let inferences_performed p = p.proof_depth
let rec has_ho_step p = match p.kind with
| Simplification(_,tags)
| Inference(_,tags) ->
List.mem Tag.T_ho tags ||
List.exists has_ho_step (List.map (fun par -> (Parent.proof par).step) p.parents)
| _ -> false
let step_ ?(infos=[]) kind parents =
(* distance to goal (0 if a goal itself) *)
let dist_to_goal = match kind with
| Intro (_,(R_goal | R_lemma)) -> Some 0
| _ ->
let d = match parents with
| [] -> None
| [p] -> (Parent.proof p).step.dist_to_goal
| [p1;p2] -> combine_dist (Parent.proof p1).step.dist_to_goal p2
| p::l -> List.fold_left combine_dist (Parent.proof p).step.dist_to_goal l
in
match kind with
| Inference _ -> CCOpt.map succ d
| _ -> d
in
let inc =
match kind with
| Inference (_,tag_list) when not (List.mem Tag.T_dont_increase_depth tag_list) -> 1
| _ -> 0 in
{ id=get_id_(); kind; parents; dist_to_goal;
proof_depth=parent_proof_depth parents + inc; infos; }
let intro src r = step_ (Intro(src,r)) []
let assert_ src = intro src R_assert
let assert' ?loc ~file ~name () =
let src = Src.from_file ?loc ~name file in
assert_ src
let goal src = intro src R_goal
let goal' ?loc ~file ~name () =
let src = Src.from_file ?loc ~name file in
goal src
let[@inline] dedup_tags (tgs:tag list) : tag list =
CCList.sort_uniq ~cmp:Builtin.Tag.compare tgs
let tags p = match p.kind with
| Simplification(_,tags)
| Inference(_,tags) -> dedup_tags tags
| _ -> []
let inference ?infos ?(tags=[]) ~rule parents =
let tags = dedup_tags tags in
step_ ?infos (Inference (rule,tags)) parents
let simp ?infos ?(tags=[]) ~rule parents =
let tags = dedup_tags tags in
step_ ?infos (Simplification (rule,tags)) parents
let esa ?infos ~rule parents =
step_ ?infos (Esa rule) parents
let pp_infos out = function
| [] -> ()
| l ->
Format.fprintf out "@ %a" (Util.pp_list ~sep:" " UntypedAST.pp_attr) l
let pp_parents out = function
| [] -> ()
| l ->
Format.fprintf out "@ with @[<hv>%a@]"
(Util.pp_list Result.pp)
(List.map (fun p -> (Parent.proof p).result) @@ l)
let pp out step = match kind step with
| Intro (_,(R_assert|R_goal|R_def|R_decl)) ->
Format.fprintf out "@[<hv2>%a@]%a" Kind.pp (kind step) pp_infos step.infos
| Intro (_,R_lemma) -> Format.fprintf out "@[<2>lemma%a@]" pp_infos step.infos
| Trivial -> Format.fprintf out "@[<2>trivial%a@]" pp_infos step.infos
| By_def id -> Format.fprintf out "@[<2>by_def %a%a@]" ID.pp id pp_infos step.infos
| Define (id,src) ->
Format.fprintf out "@[<2>define %a@ %a%a%a@]"
ID.pp id Src.pp src pp_parents (parents step) pp_infos step.infos
| Inference _
| Simplification _
| Esa _ ->
Format.fprintf out "@[<hv2>%a%a%a@]"
Kind.pp (kind step) pp_parents (parents step) pp_infos step.infos
end
module S = struct
type t = proof
let result p = p.result
let step p = p.step
let compare a b =
let (<?>) = CCOrd.(<?>) in
compare a.step b.step <?> (Result.compare, a.result, b.result)
let equal a b =
Step.equal a.step b.step && Result.equal a.result b.result
let hash a = Step.hash a.step
let compare_by_result a b = Result.compare a.result b.result
module Tbl = CCHashtbl.Make(struct
type t = proof
let equal = equal
let hash = hash
end)
let has_absurd_lits p = Result.flavor (result p) = `Absurd_lits
let is_proof_of_false p = Result.flavor (result p) = `Proof_of_false
let is_pure_bool p = Result.flavor (result p) = `Pure_bool
let is_def p = Result.flavor (result p) = `Def
let mk step res = {step; result=res}
let mk_f step res = mk step (Result.of_form res)
let mk_f_trivial = mk_f Step.trivial
let mk_f_by_def id f = mk_f (Step.by_def id) f
let mk_f_inference ~rule f parents =
let step = Step.inference ~rule parents in
mk_f step f
let mk_f_simp ~rule f parents =
let step = Step.simp ~rule parents in
mk_f step f
let mk_f_esa ~rule f parents =
let step = Step.esa ~rule parents in
mk_f step f
let adapt p r = { p with result=r; }
let adapt_f p f = adapt p (Result.of_form f)
let name_gen_ = ref 0
(* retrieve the name, or create a new one on the fly *)
let name ~namespace (p:t) : string =
(* look if the result is a named thing from the input, otherwise
generate a fresh one from the namespace *)
begin match Result.name (result p) with
| Some s -> s
| None ->
try Tbl.find namespace p
with Not_found ->
let s = Printf.sprintf "'%d'" (Tbl.length namespace) in
Tbl.add namespace p s;
s
end
* { 2 Conversion to a graph of proofs }
(** Get a graph of the proof *)
let as_graph : (t, rule * Subst.Projection.t option * infos) CCGraph.t =
CCGraph.make
(fun p ->
let st = step p in
let rule = match Step.rule st with
| None -> ""
| Some rule -> rule
in
st
|> Step.parents
|> Iter.of_list
|> Iter.map
(fun p' -> (rule,Parent.subst p',Step.infos st), Parent.proof p'))
* { 2 IO }
let pp_result_of out proof = Result.pp out @@ result proof
let pp_notrec out p =
Format.fprintf out "@[%a by %a@]"
pp_result_of p Kind.pp (Step.kind @@ step p)
let pp_notrec1 out p =
Format.fprintf out "@[<hv>%a by %a@ from [@[<v>%a@]]@]"
pp_result_of p Kind.pp (Step.kind @@ step p)
(Util.pp_list pp_parent) p.step.parents
let traverse_bfs ~traversed proof k =
layered BFS
let current, next = ref [proof], ref [] in
while !current <> [] do
(* exhaust the current layer of proofs to explore *)
List.iter (fun proof ->
if Tbl.mem traversed proof then ()
else (
Tbl.add traversed proof ();
traverse premises first
List.iter
(fun proof' -> next := Parent.proof proof' :: !next)
(Step.parents @@ step proof);
(* yield proof *)
k proof
))
!current;
(* explore next layer *)
current := !next;
next := [];
done
let traverse_dfs ~traversed proof k =
let rec aux proof =
if Tbl.mem traversed proof then ()
else (
Tbl.add traversed proof ();
traverse premises first
List.iter
(fun p' -> aux (Parent.proof p'))
(Step.parents @@ step proof);
(* yield proof *)
k proof
)
in
aux proof
let traverse ?(traversed=Tbl.create 16) ~order proof k =
match order with
| `BFS -> traverse_bfs ~traversed proof k
| `DFS -> traverse_dfs ~traversed proof k
let pp_normal out proof =
let sep = "by" in
Format.fprintf out "@[<v>";
let pp_bullet out = Format.fprintf out "@<1>@{<Green>*@}" in
traverse ~order:`DFS proof
(fun p ->
Format.fprintf out "@[<hv2>%t @[%a@] %s@ %a@]@,"
pp_bullet Result.pp (result p) sep Step.pp (step p));
Format.fprintf out "@]"
let pp_tstp out proof =
let module F = TypedSTerm in
let ctx = Type.Conv.create () in
let conv_ty ty =
Type.Conv.of_simple_term_exn ctx ty
in
let namespace = Tbl.create 8 in
let already_defined = ref ID.Set.empty in
let tydecl_out out hd ty =
if not (ID.Set.mem hd !already_defined) then (
Format.fprintf out "thf(@[@[%a@], type, @[%a@]: @[%a@]@]).@."
Util.pp_str_tstp (ID.name hd ^ "_type") Util.pp_str_tstp (ID.name hd) (Type.TPTP.pp_ho ~depth:0) (conv_ty ty);
already_defined := ID.Set.add hd !already_defined;
)
in
let declare_combinators () =
let decls =
[(Builtin.SComb, "s_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B > C) > (A > B) > A > C)");
(Builtin.CComb, "c_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B > C) > B > A > C)");
(Builtin.BComb, "b_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B) > (C > A) > C > B)");
(Builtin.KComb, "k_comb", "!>[A:$tType, B:$tType]: (B > A > B)");
(Builtin.IComb, "i_comb", "!>[A:$tType]: (A > A)")]
in
List.iter (fun (comb, name, decl) ->
Format.fprintf out "thf(@[@[%a@], type, @[%s@]: @[%s@]@]).@."
Util.pp_str_tstp (name ^ "_type") (Builtin.TPTP.to_string comb) decl;
) decls
in
Format.fprintf out " @[<v > " ;
let constants = ref F.Set.empty in
let has_comb = ref false in
let types = ref ID.Set.empty in
traverse ~order:`DFS proof (
fun p ->
let f = Result.to_form (result p) in
constants :=
F.Seq.subterms f
|> CCFun.tap (fun subterms ->
Iter.iter (fun st -> match F.view st with
| F.AppBuiltin(hd, args) ->
has_comb := Builtin.is_combinator hd || !has_comb
| _ -> ()) subterms
)
|> Iter.filter (F.is_const)
|> F.Set.of_iter
|> F.Set.union !constants;
F.Seq.subterms f
|> Iter.filter_map (F.ty)
|> Iter.iter (fun t ->
match F.Ty.view t with
| F.Ty.Ty_app(hd, args) when not @@ ID.Set.mem hd (!types) ->
let ty = F.Ty.(==>) (CCList.replicate (List.length args) F.Ty.tType) F.Ty.tType in
tydecl_out out hd ty
| _ -> ()
)
);
F.Set.iter (fun cst ->
match F.as_id_app cst with
| Some (hd, ty, []) -> tydecl_out out hd ty
| _ -> assert false
) !constants;
if !has_comb then declare_combinators ();
traverse ~order:`DFS proof
(fun p ->
let p_name = name ~namespace p in
let parents =
List.map (fun p -> `Name (name ~namespace @@ Parent.proof p))
(Step.parents @@ step p)
in
TODO
let pp_infos out = function
| [] -> ()
| l ->
Format.fprintf out ",@ [@[<hv>%a@]]"
(Util.pp_list ~sep:", " UntypedAST.pp_attr_tstp) l
in
let infos = p.step |> Step.infos in
if Result.is_stmt (result p) then (
Format.fprintf out "%a@," (Result.pp_in Output_format.tptp) (result p)
) else (
Format.fprintf out "thf(@[%s, %s,@ (@[%a@]),@ @[%a@]%a@]).@,"
p_name role (Result.pp_in Output_format.tptp) (result p)
Kind.pp_tstp (Step.kind @@ step p,parents) pp_infos infos
));
Format.fprintf out "@]";
()
let pp_zf out proof =
let module UA = UntypedAST.A in
Format.fprintf out "@[<v>";
let namespace = Tbl.create 8 in
traverse ~order:`DFS proof
(fun p ->
let p_name = name ~namespace p in
let parents =
List.map (fun p -> name ~namespace @@ Parent.proof p)
(Step.parents @@ step p)
in
let mk_status r = UA.app "status" [UA.quoted r] in
let info_name =
UA.(app "name" [str p_name])
and info_from =
if parents=[] then []
else (
[UA.(app "from" [list (List.map str parents)])]
)
and info_rule = match Step.rule (step p) with
| Some r -> [UA.(app "rule" [quoted r])]
| None -> []
and info_status = match Step.kind (step p) with
| Inference _ | Simplification _ -> [mk_status "inference"]
| Esa _ -> [mk_status "equisatisfiable"]
| Intro (src,R_lemma) -> mk_status "lemma" :: Src.to_attrs src
| Intro (src,R_goal) -> mk_status "goal" :: Src.to_attrs src
| Intro (src,R_assert) -> mk_status "assert" :: Src.to_attrs src
| Intro (src,R_def) -> mk_status "def" :: Src.to_attrs src
| Intro (src,R_decl) -> mk_status "decl" :: Src.to_attrs src
| Trivial -> [mk_status "trivial"]
| By_def _ | Define _ -> []
in
let pp_infos = UntypedAST.pp_attrs_zf in
let infos =
info_name :: info_from @ info_rule @ info_status @ (Step.infos p.step)
in
if Result.is_stmt (result p) then (
Format.fprintf out "%a@," (Result.pp_in Output_format.zf) (result p)
) else (
Format.fprintf out "@[<2>assert%a@ %a@].@,"
pp_infos infos (Result.pp_in Output_format.zf) (result p)
));
Format.fprintf out "@]";
()
(** Prints the proof according to the given input switch *)
let pp_in o out proof = match o with
| Output_format.O_none -> Util.debug ~section 1 "proof printing disabled"
| Output_format.O_tptp -> pp_tstp out proof
| Output_format.O_normal -> pp_normal out proof
| Output_format.O_zf -> pp_zf out proof
let _pp_list_str = Util.pp_list CCFormat.string
let _to_str_escape fmt =
Util.ksprintf_noc ~f:Util.escape_dot fmt
let pp_dot_seq ~name out seq =
CCGraph.Dot.pp_all
~tbl:(CCGraph.mk_table ~eq:equal ~hash:hash 64)
~eq:equal
~name
~graph:as_graph
~attrs_v:(fun p ->
let label = _to_str_escape "@[<2>%a@]@." pp_result_of p in
let attrs = [`Label label; `Style "filled"] in
let shape = `Shape "box" in
if is_proof_of_false p then [`Color "red"; `Label "[]"; `Shape "box"; `Style "filled"]
else if is_pure_bool p then `Color "cyan3" :: shape :: attrs
else if has_absurd_lits p then `Color "orange" :: shape :: attrs
else if is_def p then `Color "navajowhite" :: shape :: attrs
else if Step.is_goal @@ step p then `Color "green" :: shape :: attrs
else if Step.is_trivial @@ step p then `Color "cyan" :: shape :: attrs
else if Step.is_by_def @@ step p then `Color "navajowhite" :: shape :: attrs
else if Step.is_assert_like @@ step p then `Color "yellow" :: shape :: attrs
else shape :: attrs
)
~attrs_e:(fun (r,s,infos) ->
let pp_subst out s =
if not (Subst.is_empty @@ Subst.Projection.subst s) then (
Format.fprintf out "@,%a" Subst.Projection.pp s
)
in
let label =
if s=None && infos=[] then Rule.name r
else (
_to_str_escape "@[<v>%s%a%a@]@."
(Rule.name r) (CCFormat.some pp_subst) s Step.pp_infos infos
)
in
[`Label label; `Other ("dir", "back")])
out
seq;
Format.pp_print_newline out ();
()
let pp_dot ~name out proof = pp_dot_seq ~name out (Iter.singleton proof)
let pp_dot_seq_file ?(name="proof") filename seq =
(* print graph on file *)
Util.debugf ~section 1 "print proof graph to@ `%s`" (fun k->k filename);
CCIO.with_out filename
(fun oc ->
let out = Format.formatter_of_out_channel oc in
Format.fprintf out "%a@." (pp_dot_seq ~name) seq)
let pp_dot_file ?name filename proof =
pp_dot_seq_file ?name filename (Iter.singleton proof)
end
| null | https://raw.githubusercontent.com/sneeuwballen/zipperposition/7f1455fbe2e7509907f927649c288141b1a3a247/src/core/Proof.ml | ocaml | * {1 Manipulate proofs}
* trivial, or trivial within theories
* definition
* following from the def of ID
a statement in a file
unique ID of the class
* existential type for result of an inference
* A proof step, without the conclusion
unique ID
distance to goal
additional info
* Proof Step with its conclusion
NOTE: we always give a unique name if not present
same ID??
distance to goal (0 if a goal itself)
retrieve the name, or create a new one on the fly
look if the result is a named thing from the input, otherwise
generate a fresh one from the namespace
* Get a graph of the proof
exhaust the current layer of proofs to explore
yield proof
explore next layer
yield proof
* Prints the proof according to the given input switch
print graph on file |
This file is free software , part of Zipperposition . See file " license " for more details .
module Loc = ParseLocation
module T = TypedSTerm
module F = T.Form
module UA = UntypedAST
module Fmt = CCFormat
type term = TypedSTerm.t
type form = TypedSTerm.t
type inst_subst = (term, term) Var.Subst.t
type 'a sequence = ('a -> unit) -> unit
let section = Util.Section.make "proof"
type rule = string
type tag = Builtin.Tag.t
type attrs = UntypedAST.attrs
type info = UntypedAST.attr
type infos = info list
type kind =
| Intro of source * role
| Inference of rule * tag list
| Simplification of rule * tag list
| Esa of rule
and source = {
src_id: int;
src_view: source_view;
}
and source_view =
| From_file of from_file * UntypedAST.attrs
| Internal of attrs
and role =
| R_assert
| R_goal
| R_def
| R_decl
| R_lemma
and from_file = {
file : string;
name : string option;
loc: ParseLocation.t option;
}
type flavor =
[ `Pure_bool
| `Absurd_lits
| `Proof_of_false
| `Vanilla
| `Def
]
* for the result of a proof step
type 'a result_tc = {
res_of_exn: exn -> 'a option;
res_to_exn: 'a -> exn;
res_compare: 'a -> 'a -> int;
res_is_stmt: bool;
res_is_dead_cl: unit -> bool;
res_pp_in: Output_format.t -> 'a CCFormat.printer;
res_to_form: ctx:Term.Conv.ctx -> 'a -> TypedSTerm.Form.t;
res_to_form_subst: ctx:Term.Conv.ctx -> Subst.Projection.t -> 'a -> form * inst_subst;
res_name:('a -> string) option;
res_flavor: 'a -> flavor;
}
type result = Res : 'a result_tc * exn -> result
type step = {
kind: kind;
proof_depth: int;
parents: parent list;
}
and parent =
| P_of of proof
| P_subst of proof * Subst.Projection.t
and proof = {
step: step;
result : result;
}
type t = proof
module Tag = Builtin.Tag
module Rule = struct
type t = rule
let pp out r = Format.fprintf out "'%s'" r
let name r = r
let mk name = name
let mkf fmt = CCFormat.ksprintf ~f:mk fmt
end
(** Statement sources: where a statement comes from (a file or an internal
    construction), carrying a unique id for fast equality/hashing. *)
module Src = struct
  type t = source

  (* Accessors on the [{name; loc; file}] payload of a [From_file] source
     (see [from_file] below) — presumably the record these are applied to;
     TODO confirm against the type declarations above this chunk. *)
  let file x = x.file
  let name x = x.name
  let loc x = x.loc

  (* Identity is the unique [src_id]. *)
  let equal a b = a.src_id = b.src_id
  let hash a = a.src_id
  let view a = a.src_view

  (* Allocate a source with the next fresh id (stateful counter). *)
  let mk_ =
    let n = ref 0 in
    fun src_view -> {src_view; src_id=CCRef.get_then_incr n}

  (* Fresh default names for unnamed statements ("zf_stmt_<n>"). *)
  let mk_name_ =
    let n = ref 0 in
    fun () -> Printf.sprintf "zf_stmt_%d" (CCRef.get_then_incr n)

  (* File-originated source; an unnamed statement gets a generated name. *)
  let from_file ?loc ?name ?(attrs=[]) file : t =
    let name = match name with Some _ -> name | None -> Some(mk_name_()) in
    mk_ (From_file ({ name; loc; file; }, attrs))

  let internal attrs = mk_ (Internal attrs)

  let pp_from_file out x =
    let pp_name out = function
      | None -> ()
      | Some n -> Format.fprintf out "at %s " n
    in
    Format.fprintf out "@[<2>%ain@ `%s`@,%a@]"
      pp_name x.name x.file ParseLocation.pp_opt x.loc

  let pp_role out = function
    | R_decl -> CCFormat.string out "decl"
    | R_assert -> CCFormat.string out "assert"
    | R_goal -> CCFormat.string out "goal"
    | R_def -> CCFormat.string out "def"
    | R_lemma -> CCFormat.string out "lemma"

  (* TSTP rendering; internal sources print nothing. *)
  let pp_tstp out src = match view src with
    | Internal _ -> ()
    | From_file (src,_) ->
      let file = src.file in
      begin match src.name with
        | None -> Format.fprintf out "file('%s')" file
        | Some name -> Format.fprintf out "file(@['%s',@ '%s'@])" file name
      end

  (* Human-readable rendering; internal sources print nothing. *)
  let pp out src = match view src with
    | Internal _ -> ()
    | From_file (src,attrs) ->
      let file = src.file in
      begin match src.name with
        | None -> Format.fprintf out "'%s'%a" file UA.pp_attrs attrs
        | Some name -> Format.fprintf out "'%s' in '%s'%a" name file UA.pp_attrs attrs
      end

  (* Re-export the source as untyped-AST attributes. *)
  let to_attrs src : UntypedAST.attrs =
    let open UntypedAST.A in
    begin match view src with
      | Internal attrs -> str "internal" :: attrs
      | From_file (f,attrs) ->
        begin match f.name with
          | None -> app "file" [quoted f.file] :: attrs
          | Some n -> app "file" [quoted f.file; quoted n] :: attrs
        end
    end
end
(** Parents of a proof step: either a plain proof, or a proof
    instantiated through a substitution projection. *)
module Parent = struct
  type t = parent

  let from p : t = P_of p

  (* Only keep the projection when it actually binds something. *)
  let from_subst_proj p (proj : Subst.Projection.t) : t =
    if Subst.Projection.is_empty proj then P_of p else P_subst (p, proj)

  let from_subst renaming (p, sc_p) subst : t =
    from_subst_proj p (Subst.Projection.make renaming (subst, sc_p))

  let proof : t -> proof = function
    | P_of p | P_subst (p, _) -> p

  let subst : t -> Subst.Projection.t option = function
    | P_of _ -> None
    | P_subst (_, proj) -> Some proj
end
let pp_tag = Tag.pp

(* Print a non-empty tag list as " [t1,t2,...]"; the empty list prints
   nothing at all (not even the break hint). *)
let pp_tags out = function
  | [] -> ()
  | l -> Fmt.fprintf out "@ [@[%a@]]" (Util.pp_list ~sep:"," pp_tag) l
(** Printers for step kinds. *)
module Kind = struct
  type t = kind

  (* Parent reference in TSTP output: a plain name or a theory. *)
  let pp_parent_ out = function
    | `Name s -> Format.fprintf out "%s" s
    | `Theory s -> Format.fprintf out "theory(%s)" s

  (* Human-readable rendering of a kind. *)
  let pp out k = match k with
    | Intro (src,R_goal) -> Format.fprintf out "goal %a" Src.pp src
    | Intro (src,R_lemma) -> Format.fprintf out "lemma %a" Src.pp src
    | Intro (src,R_assert) -> Src.pp out src
    | Intro (src, (R_def | R_decl)) -> Src.pp out src
    | Inference (rule,tags) ->
      Format.fprintf out "inf %a%a" Rule.pp rule pp_tags tags
    | Simplification (rule,tags) ->
      Format.fprintf out "simp %a%a" Rule.pp rule pp_tags tags
    | Esa rule ->
      Format.fprintf out "esa %a" Rule.pp rule
    | Trivial -> CCFormat.string out "trivial"
    | By_def id -> Format.fprintf out "by_def(%a)" ID.pp id
    | Define (id,src) -> Format.fprintf out "define(@[%a@ %a@])" ID.pp id Src.pp src

  (* TSTP "inference(...)" record for a kind together with its parents.
     [status] is the TSTP SZS status string ("thm" or "esa"). *)
  let pp_tstp out (k,parents) =
    let pp_parents = Util.pp_list pp_parent_ in
    let pp_step status out (rule,parents) = match parents with
      | [] ->
        Format.fprintf out "inference(@[%a,@ [status(%s)]@])" Rule.pp rule status
      | _::_ ->
        Format.fprintf out "inference(@[%a,@ [status(%s)],@ [@[%a@]]@])"
          Rule.pp rule status pp_parents parents
    in
    begin match k with
      | Intro (src,(R_assert|R_goal|R_def|R_decl)) -> Src.pp_tstp out src
      | Inference (rule,_)
      | Simplification (rule,_) -> pp_step "thm" out (rule,parents)
      | Esa rule -> pp_step "esa" out (rule,parents)
      | Intro (_,R_lemma) -> Format.fprintf out "lemma"
      | Trivial -> assert(parents=[]); Format.fprintf out "trivial([status(thm)])"
      | By_def _ -> Format.fprintf out "by_def([status(thm)])"
      | Define _ -> Format.fprintf out "define([status(thm)])"
    end
end
(** Results of proof steps, packed existentially with their typeclass. *)
module Result = struct
  type t = result
  type 'a tc = 'a result_tc

  (* Typeclass id of a packed result. *)
  let res_to_int_ = function (Res ({res_id; _}, _)) -> res_id

  type flavor =
    [ `Pure_bool
    | `Absurd_lits
    | `Proof_of_false
    | `Vanilla
    | `Def
    ]

  (* Order by typeclass id first; within the same typeclass, unpack both
     payloads and defer to the typeclass' own comparison.
     FIX(review): the inner match was non-exhaustive — equal [res_id]s
     guarantee the same exn constructor, so any other case is unreachable
     and is now an explicit [assert false]. *)
  let compare a b = match a, b with
    | Res (r1,x1), Res (r2,x2) ->
      if r1.res_id <> r2.res_id
      then CCInt.compare r1.res_id r2.res_id
      else begin match r1.res_of_exn x1, r1.res_of_exn x2 with
        | Some y1, Some y2 -> r1.res_compare y1 y2
        | _ -> assert false (* same [res_id] implies both payloads unpack *)
      end

  let equal a b = compare a b = 0

  (* Convert the packed result into a formula. *)
  let to_form ?(ctx=Term.Conv.create()) (Res (r,x)) = match r.res_of_exn x with
    | None -> assert false
    | Some x -> r.res_to_form ~ctx x

  let to_form_subst ?(ctx=Term.Conv.create()) subst (Res (r,x)) = match r.res_of_exn x with
    | None -> assert false
    | Some x -> r.res_to_form_subst ~ctx subst x

  let pp_in o out (Res (r,x)) = match r.res_of_exn x with
    | None -> assert false
    | Some x -> r.res_pp_in o out x

  let pp = pp_in Output_format.normal

  let flavor (Res (r,x)) = match r.res_of_exn x with
    | None -> assert false
    | Some x -> r.res_flavor x

  (* Optional user-facing name of the result. *)
  let name (Res (r,x)) = match r.res_of_exn x with
    | None -> assert false
    | Some x ->
      begin match r.res_name with
        | None -> None
        | Some f -> Some (f x)
      end

  (* Counter for fresh typeclass ids. *)
  let n_ = ref 0

  (* Build a result typeclass from the required operations; omitted
     optional operations get conservative defaults. *)
  let make_tc (type a)
      ~of_exn ~to_exn ~compare
      ~to_form
      ?(to_form_subst=fun ~ctx:_ _ _ -> assert false)
      ~pp_in
      ?name
      ?(is_stmt=false)
      ?(is_dead_cl= fun () -> false)
      ?(flavor=fun _ -> `Vanilla)
      () : a result_tc
    =
    let id = CCRef.incr_then_get n_ in
    { res_id=id;
      res_of_exn=of_exn;
      res_to_exn=to_exn;
      res_compare=compare;
      res_is_stmt=is_stmt;
      res_is_dead_cl=is_dead_cl;
      res_pp_in=pp_in;
      res_to_form=to_form;
      res_to_form_subst=to_form_subst;
      res_name=name;
      res_flavor=flavor;
    }

  let make tc x : t = Res (tc, tc.res_to_exn x)

  exception E_form of form

  type inst_subst = (term,term) Var.Subst.t

  (* Built-in typeclass for plain formulas. *)
  let form_tc : form result_tc =
    make_tc
      ~of_exn:(function
          | E_form f -> Some f | _ -> None)
      ~to_exn:(fun f -> E_form f)
      ~to_form:(fun ~ctx:_ t -> t)
      ~compare:T.compare
      ~pp_in:TypedSTerm.pp_in
      ~flavor:(fun f -> if T.equal f F.false_ then `Proof_of_false else `Vanilla)
      ()

  let of_form = make form_tc

  let is_stmt (Res (r,_)) = r.res_is_stmt
  let is_dead_cl (Res (r,_)) = r.res_is_dead_cl
end
(* Print a parent: just its result, plus the substitution when present. *)
let pp_parent out parent = match parent with
  | P_of p -> Result.pp out p.result
  | P_subst (p, proj) ->
    Format.fprintf out "(@[instantiate `%a`@ :subst %a@])"
      Result.pp p.result Subst.Projection.pp proj
(** Proof steps: constructors, classification predicates, and printing. *)
module Step = struct
  type t = step

  (* Steps are identified by their unique [id]. *)
  let equal p1 p2 = p1.id=p2.id
  let compare p1 p2 = CCInt.compare p1.id p2.id
  let hash p = p.id

  let kind p = p.kind
  let parents p = p.parents
  let infos p = p.infos

  (* Statement source, when the step kind carries one. *)
  let src p = match p.kind with
    | Intro (src,_) | Define (_,src) -> Some src
    | Trivial | By_def _ | Esa _ | Inference _ | Simplification _
      -> None

  let to_attrs p = match src p with
    | None -> []
    | Some src -> Src.to_attrs src

  (* Inference/simplification/esa rule, if any. *)
  let rule p = match p.kind with
    | Intro _
    | Trivial
    | By_def _
    | Define _ -> None
    | Esa rule
    | Simplification (rule,_)
    | Inference (rule,_)
      -> Some rule

  let is_assert p = match p.kind with Intro (_,R_assert) -> true | _ -> false
  let is_assert_like p = match p.kind with Intro (_,(R_assert|R_def|R_decl)) -> true | _ -> false
  let is_goal p = match p.kind with Intro (_,(R_goal|R_lemma)) -> true | _ -> false
  let is_trivial p = match p.kind with Trivial -> true | _ -> false
  let is_by_def p = match p.kind with By_def _ -> true | _ -> false
  let is_inference p = match p.kind with Inference _ -> true | _ -> false

  (* True for simplification steps; when [~name:(Some n)] is given, only
     for the simplification rule named [n]. *)
  let is_simpl ?(name=None) p =
    match p.kind with
    | Simplification(rule,_) ->
      begin match name with
        | None -> true
        | Some n -> String.equal (Rule.name rule) n
      end
    | _ -> false

  let distance_to_goal p = p.dist_to_goal

  (* Maximum proof depth among the parents (0 when there are none). *)
  let parent_proof_depth parents =
    List.map (fun par -> (Parent.proof par).step.proof_depth) parents
    |> CCList.fold_left (fun acc depth -> max acc depth) 0

  (* Count occurrences of rule [name] in the DAG rooted at [p].
     NOTE(review): sub-proofs shared through several parents are
     revisited, so they are counted once per path — confirm intended. *)
  let count_rules ~name p =
    let rec aux p =
      let init =
        CCOpt.get_or ~default: 0 (CCOpt.map (fun r ->
            if CCString.equal (Rule.name r) name then 1 else 0)
            (rule p)) in
      List.fold_left (fun acc par ->
          acc + aux ((Parent.proof par).step)
        ) init (p.parents) in
    aux p

  (* Fresh unique step ids (stateful counter). *)
  let get_id_ =
    let n = ref 0 in
    fun () -> CCRef.incr_then_get n

  let trivial = {id=get_id_(); parents=[]; kind=Trivial; dist_to_goal=None; proof_depth=0; infos=[]; }
  let by_def id = {id=get_id_(); parents=[]; kind=By_def id; dist_to_goal=None; proof_depth=0; infos=[]; }

  (* Goal-ish intro steps start at distance 0 from the goal. *)
  let intro src r =
    let dist_to_goal = match r with
      | R_goal | R_lemma -> Some 0 | _ -> None
    in
    {id=get_id_(); parents=[]; proof_depth=0; kind=Intro(src,r); dist_to_goal; infos=[]}

  let define id src parents =
    {id=get_id_(); parents; kind=Define (id,src); dist_to_goal=None; proof_depth=parent_proof_depth parents; infos=[]; }
  let define_internal id parents = define id (Src.internal []) parents

  let lemma src =
    {id=get_id_(); parents=[]; kind=Intro(src,R_lemma); dist_to_goal=Some 0; proof_depth=0; infos=[]; }

  (* Combine a known distance with a parent's distance, keeping the
     minimum when both are known. *)
  let combine_dist o p = match o, (Parent.proof p).step.dist_to_goal with
    | None, None -> None
    | (Some _ as res), None
    | None, (Some _ as res) -> res
    | Some x, Some y -> Some (min x y)

  let inferences_performed p = p.proof_depth

  (* Whether this step or some ancestor step carries the HO tag. *)
  let rec has_ho_step p = match p.kind with
    | Simplification(_,tags)
    | Inference(_,tags) ->
      List.mem Tag.T_ho tags ||
      List.exists has_ho_step (List.map (fun par -> (Parent.proof par).step) p.parents)
    | _ -> false

  (* Generic constructor: derives distance-to-goal and proof depth from
     [kind] and [parents].  Only genuine inferences (unless tagged with
     [T_dont_increase_depth]) deepen the proof. *)
  let step_ ?(infos=[]) kind parents =
    let dist_to_goal = match kind with
      | Intro (_,(R_goal | R_lemma)) -> Some 0
      | _ ->
        let d = match parents with
          | [] -> None
          | [p] -> (Parent.proof p).step.dist_to_goal
          | [p1;p2] -> combine_dist (Parent.proof p1).step.dist_to_goal p2
          | p::l -> List.fold_left combine_dist (Parent.proof p).step.dist_to_goal l
        in
        match kind with
        | Inference _ -> CCOpt.map succ d
        | _ -> d
    in
    let inc =
      match kind with
      | Inference (_,tag_list) when not (List.mem Tag.T_dont_increase_depth tag_list) -> 1
      | _ -> 0 in
    { id=get_id_(); kind; parents; dist_to_goal;
      proof_depth=parent_proof_depth parents + inc; infos; }

  (* NOTE(review): shadows the parentless [intro] defined above; both
     build the same step shape. *)
  let intro src r = step_ (Intro(src,r)) []
  let assert_ src = intro src R_assert
  let assert' ?loc ~file ~name () =
    let src = Src.from_file ?loc ~name file in
    assert_ src
  let goal src = intro src R_goal
  let goal' ?loc ~file ~name () =
    let src = Src.from_file ?loc ~name file in
    goal src

  (* Sort tags and drop duplicates. *)
  let[@inline] dedup_tags (tgs:tag list) : tag list =
    CCList.sort_uniq ~cmp:Builtin.Tag.compare tgs

  let tags p = match p.kind with
    | Simplification(_,tags)
    | Inference(_,tags) -> dedup_tags tags
    | _ -> []

  let inference ?infos ?(tags=[]) ~rule parents =
    let tags = dedup_tags tags in
    step_ ?infos (Inference (rule,tags)) parents

  let simp ?infos ?(tags=[]) ~rule parents =
    let tags = dedup_tags tags in
    step_ ?infos (Simplification (rule,tags)) parents

  let esa ?infos ~rule parents =
    step_ ?infos (Esa rule) parents

  (* Print attributes, if any, preceded by a break hint. *)
  let pp_infos out = function
    | [] -> ()
    | l ->
      Format.fprintf out "@ %a" (Util.pp_list ~sep:" " UntypedAST.pp_attr) l

  (* Print parent results, if any, as " with r1, r2, ...". *)
  let pp_parents out = function
    | [] -> ()
    | l ->
      Format.fprintf out "@ with @[<hv>%a@]"
        (Util.pp_list Result.pp)
        (List.map (fun p -> (Parent.proof p).result) @@ l)

  let pp out step = match kind step with
    | Intro (_,(R_assert|R_goal|R_def|R_decl)) ->
      Format.fprintf out "@[<hv2>%a@]%a" Kind.pp (kind step) pp_infos step.infos
    | Intro (_,R_lemma) -> Format.fprintf out "@[<2>lemma%a@]" pp_infos step.infos
    | Trivial -> Format.fprintf out "@[<2>trivial%a@]" pp_infos step.infos
    | By_def id -> Format.fprintf out "@[<2>by_def %a%a@]" ID.pp id pp_infos step.infos
    | Define (id,src) ->
      Format.fprintf out "@[<2>define %a@ %a%a%a@]"
        ID.pp id Src.pp src pp_parents (parents step) pp_infos step.infos
    | Inference _
    | Simplification _
    | Esa _ ->
      Format.fprintf out "@[<hv2>%a%a%a@]"
        Kind.pp (kind step) pp_parents (parents step) pp_infos step.infos
end
(** Whole proofs (step + result): equality, traversal, and printers
    (plain text, TSTP, ZF, DOT).
    FIX(review): several comment delimiters were stripped from this module,
    leaving bare tokens that are syntax errors ("{2 ...}" section headers,
    "layered BFS", "traverse premises first", "TODO"); they are restored as
    comments below.  The "TODO" also swallowed the [role] binding that
    [pp_tstp] uses — restored as ["plain"].  The opening v-box format
    string in [pp_tstp] was garbled (" @[<v > ") and is fixed. *)
module S = struct
  type t = proof

  let result p = p.result
  let step p = p.step

  (* NOTE(review): this uses the polymorphic [compare] on steps, then the
     result comparison as a tie-break. *)
  let compare a b =
    let (<?>) = CCOrd.(<?>) in
    compare a.step b.step <?> (Result.compare, a.result, b.result)

  let equal a b =
    Step.equal a.step b.step && Result.equal a.result b.result

  let hash a = Step.hash a.step

  let compare_by_result a b = Result.compare a.result b.result

  module Tbl = CCHashtbl.Make(struct
      type t = proof
      let equal = equal
      let hash = hash
    end)

  let has_absurd_lits p = Result.flavor (result p) = `Absurd_lits
  let is_proof_of_false p = Result.flavor (result p) = `Proof_of_false
  let is_pure_bool p = Result.flavor (result p) = `Pure_bool
  let is_def p = Result.flavor (result p) = `Def

  let mk step res = {step; result=res}
  let mk_f step res = mk step (Result.of_form res)
  let mk_f_trivial = mk_f Step.trivial
  let mk_f_by_def id f = mk_f (Step.by_def id) f
  let mk_f_inference ~rule f parents =
    let step = Step.inference ~rule parents in
    mk_f step f
  let mk_f_simp ~rule f parents =
    let step = Step.simp ~rule parents in
    mk_f step f
  let mk_f_esa ~rule f parents =
    let step = Step.esa ~rule parents in
    mk_f step f

  (* Keep the step, replace the result. *)
  let adapt p r = { p with result=r; }
  let adapt_f p f = adapt p (Result.of_form f)

  (* NOTE(review): unused in this module; kept for interface stability. *)
  let name_gen_ = ref 0

  (* Name of a proof within [namespace]: the result's own name when it has
     one, otherwise a fresh quoted number cached in the table. *)
  let name ~namespace (p:t) : string =
    begin match Result.name (result p) with
      | Some s -> s
      | None ->
        try Tbl.find namespace p
        with Not_found ->
          let s = Printf.sprintf "'%d'" (Tbl.length namespace) in
          Tbl.add namespace p s;
          s
    end

  (** {2 Conversion to a graph of proofs} *)

  (* Edges go from a proof to its parents, labelled with the rule name,
     the optional substitution, and the step's attributes. *)
  let as_graph : (t, rule * Subst.Projection.t option * infos) CCGraph.t =
    CCGraph.make
      (fun p ->
         let st = step p in
         let rule = match Step.rule st with
           | None -> ""
           | Some rule -> rule
         in
         st
         |> Step.parents
         |> Iter.of_list
         |> Iter.map
           (fun p' -> (rule,Parent.subst p',Step.infos st), Parent.proof p'))

  (** {2 IO} *)

  let pp_result_of out proof = Result.pp out @@ result proof

  let pp_notrec out p =
    Format.fprintf out "@[%a by %a@]"
      pp_result_of p Kind.pp (Step.kind @@ step p)

  let pp_notrec1 out p =
    Format.fprintf out "@[<hv>%a by %a@ from [@[<v>%a@]]@]"
      pp_result_of p Kind.pp (Step.kind @@ step p)
      (Util.pp_list pp_parent) p.step.parents

  let traverse_bfs ~traversed proof k =
    (* layered BFS *)
    let current, next = ref [proof], ref [] in
    while !current <> [] do
      List.iter (fun proof ->
          if Tbl.mem traversed proof then ()
          else (
            Tbl.add traversed proof ();
            (* traverse premises first *)
            List.iter
              (fun proof' -> next := Parent.proof proof' :: !next)
              (Step.parents @@ step proof);
            k proof
          ))
        !current;
      current := !next;
      next := [];
    done

  let traverse_dfs ~traversed proof k =
    let rec aux proof =
      if Tbl.mem traversed proof then ()
      else (
        Tbl.add traversed proof ();
        (* traverse premises first *)
        List.iter
          (fun p' -> aux (Parent.proof p'))
          (Step.parents @@ step proof);
        k proof
      )
    in
    aux proof

  let traverse ?(traversed=Tbl.create 16) ~order proof k =
    match order with
    | `BFS -> traverse_bfs ~traversed proof k
    | `DFS -> traverse_dfs ~traversed proof k

  let pp_normal out proof =
    let sep = "by" in
    Format.fprintf out "@[<v>";
    let pp_bullet out = Format.fprintf out "@<1>@{<Green>*@}" in
    traverse ~order:`DFS proof
      (fun p ->
         Format.fprintf out "@[<hv2>%t @[%a@] %s@ %a@]@,"
           pp_bullet Result.pp (result p) sep Step.pp (step p));
    Format.fprintf out "@]"

  let pp_tstp out proof =
    let module F = TypedSTerm in
    let ctx = Type.Conv.create () in
    let conv_ty ty =
      Type.Conv.of_simple_term_exn ctx ty
    in
    let namespace = Tbl.create 8 in
    let already_defined = ref ID.Set.empty in
    (* Emit a type declaration for [hd] once. *)
    let tydecl_out out hd ty =
      if not (ID.Set.mem hd !already_defined) then (
        Format.fprintf out "thf(@[@[%a@], type, @[%a@]: @[%a@]@]).@."
          Util.pp_str_tstp (ID.name hd ^ "_type") Util.pp_str_tstp (ID.name hd) (Type.TPTP.pp_ho ~depth:0) (conv_ty ty);
        already_defined := ID.Set.add hd !already_defined;
      )
    in
    let declare_combinators () =
      let decls =
        [(Builtin.SComb, "s_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B > C) > (A > B) > A > C)");
         (Builtin.CComb, "c_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B > C) > B > A > C)");
         (Builtin.BComb, "b_comb", "!>[A:$tType, B:$tType, C:$tType]: ((A > B) > (C > A) > C > B)");
         (Builtin.KComb, "k_comb", "!>[A:$tType, B:$tType]: (B > A > B)");
         (Builtin.IComb, "i_comb", "!>[A:$tType]: (A > A)")]
      in
      List.iter (fun (comb, name, decl) ->
          Format.fprintf out "thf(@[@[%a@], type, @[%s@]: @[%s@]@]).@."
            Util.pp_str_tstp (name ^ "_type") (Builtin.TPTP.to_string comb) decl;
        ) decls
    in
    Format.fprintf out "@[<v>";
    let constants = ref F.Set.empty in
    let has_comb = ref false in
    (* NOTE(review): [types] is read but never extended; deduplication is
       effectively done by [already_defined] inside [tydecl_out]. *)
    let types = ref ID.Set.empty in
    (* First pass: collect constants, detect combinators, declare type
       constructors. *)
    traverse ~order:`DFS proof (
      fun p ->
        let f = Result.to_form (result p) in
        constants :=
          F.Seq.subterms f
          |> CCFun.tap (fun subterms ->
              Iter.iter (fun st -> match F.view st with
                  | F.AppBuiltin(hd, _) ->
                    has_comb := Builtin.is_combinator hd || !has_comb
                  | _ -> ()) subterms
            )
          |> Iter.filter (F.is_const)
          |> F.Set.of_iter
          |> F.Set.union !constants;
        F.Seq.subterms f
        |> Iter.filter_map (F.ty)
        |> Iter.iter (fun t ->
            match F.Ty.view t with
            | F.Ty.Ty_app(hd, args) when not @@ ID.Set.mem hd (!types) ->
              let ty = F.Ty.(==>) (CCList.replicate (List.length args) F.Ty.tType) F.Ty.tType in
              tydecl_out out hd ty
            | _ -> ()
          )
    );
    F.Set.iter (fun cst ->
        match F.as_id_app cst with
        | Some (hd, ty, []) -> tydecl_out out hd ty
        | _ -> assert false
      ) !constants;
    if !has_comb then declare_combinators ();
    (* Second pass: print the proof steps themselves. *)
    traverse ~order:`DFS proof
      (fun p ->
         let p_name = name ~namespace p in
         let parents =
           List.map (fun p -> `Name (name ~namespace @@ Parent.proof p))
             (Step.parents @@ step p)
         in
         let role = "plain" in (* TODO: derive the proper TSTP role *)
         let pp_infos out = function
           | [] -> ()
           | l ->
             Format.fprintf out ",@ [@[<hv>%a@]]"
               (Util.pp_list ~sep:", " UntypedAST.pp_attr_tstp) l
         in
         let infos = p.step |> Step.infos in
         if Result.is_stmt (result p) then (
           Format.fprintf out "%a@," (Result.pp_in Output_format.tptp) (result p)
         ) else (
           Format.fprintf out "thf(@[%s, %s,@ (@[%a@]),@ @[%a@]%a@]).@,"
             p_name role (Result.pp_in Output_format.tptp) (result p)
             Kind.pp_tstp (Step.kind @@ step p,parents) pp_infos infos
         ));
    Format.fprintf out "@]";
    ()

  let pp_zf out proof =
    let module UA = UntypedAST.A in
    Format.fprintf out "@[<v>";
    let namespace = Tbl.create 8 in
    traverse ~order:`DFS proof
      (fun p ->
         let p_name = name ~namespace p in
         let parents =
           List.map (fun p -> name ~namespace @@ Parent.proof p)
             (Step.parents @@ step p)
         in
         let mk_status r = UA.app "status" [UA.quoted r] in
         let info_name =
           UA.(app "name" [str p_name])
         and info_from =
           if parents=[] then []
           else (
             [UA.(app "from" [list (List.map str parents)])]
           )
         and info_rule = match Step.rule (step p) with
           | Some r -> [UA.(app "rule" [quoted r])]
           | None -> []
         and info_status = match Step.kind (step p) with
           | Inference _ | Simplification _ -> [mk_status "inference"]
           | Esa _ -> [mk_status "equisatisfiable"]
           | Intro (src,R_lemma) -> mk_status "lemma" :: Src.to_attrs src
           | Intro (src,R_goal) -> mk_status "goal" :: Src.to_attrs src
           | Intro (src,R_assert) -> mk_status "assert" :: Src.to_attrs src
           | Intro (src,R_def) -> mk_status "def" :: Src.to_attrs src
           | Intro (src,R_decl) -> mk_status "decl" :: Src.to_attrs src
           | Trivial -> [mk_status "trivial"]
           | By_def _ | Define _ -> []
         in
         let pp_infos = UntypedAST.pp_attrs_zf in
         let infos =
           info_name :: info_from @ info_rule @ info_status @ (Step.infos p.step)
         in
         if Result.is_stmt (result p) then (
           Format.fprintf out "%a@," (Result.pp_in Output_format.zf) (result p)
         ) else (
           Format.fprintf out "@[<2>assert%a@ %a@].@,"
             pp_infos infos (Result.pp_in Output_format.zf) (result p)
         ));
    Format.fprintf out "@]";
    ()

  (* Dispatch on the requested output format. *)
  let pp_in o out proof = match o with
    | Output_format.O_none -> Util.debug ~section 1 "proof printing disabled"
    | Output_format.O_tptp -> pp_tstp out proof
    | Output_format.O_normal -> pp_normal out proof
    | Output_format.O_zf -> pp_zf out proof

  let _pp_list_str = Util.pp_list CCFormat.string

  (* Sprintf-like, escaping the result for DOT labels. *)
  let _to_str_escape fmt =
    Util.ksprintf_noc ~f:Util.escape_dot fmt

  (* DOT rendering of a set of proofs; node colors encode the step class. *)
  let pp_dot_seq ~name out seq =
    CCGraph.Dot.pp_all
      ~tbl:(CCGraph.mk_table ~eq:equal ~hash:hash 64)
      ~eq:equal
      ~name
      ~graph:as_graph
      ~attrs_v:(fun p ->
          let label = _to_str_escape "@[<2>%a@]@." pp_result_of p in
          let attrs = [`Label label; `Style "filled"] in
          let shape = `Shape "box" in
          if is_proof_of_false p then [`Color "red"; `Label "[]"; `Shape "box"; `Style "filled"]
          else if is_pure_bool p then `Color "cyan3" :: shape :: attrs
          else if has_absurd_lits p then `Color "orange" :: shape :: attrs
          else if is_def p then `Color "navajowhite" :: shape :: attrs
          else if Step.is_goal @@ step p then `Color "green" :: shape :: attrs
          else if Step.is_trivial @@ step p then `Color "cyan" :: shape :: attrs
          else if Step.is_by_def @@ step p then `Color "navajowhite" :: shape :: attrs
          else if Step.is_assert_like @@ step p then `Color "yellow" :: shape :: attrs
          else shape :: attrs
        )
      ~attrs_e:(fun (r,s,infos) ->
          let pp_subst out s =
            if not (Subst.is_empty @@ Subst.Projection.subst s) then (
              Format.fprintf out "@,%a" Subst.Projection.pp s
            )
          in
          let label =
            if s=None && infos=[] then Rule.name r
            else (
              _to_str_escape "@[<v>%s%a%a@]@."
                (Rule.name r) (CCFormat.some pp_subst) s Step.pp_infos infos
            )
          in
          [`Label label; `Other ("dir", "back")])
      out
      seq;
    Format.pp_print_newline out ();
    ()

  let pp_dot ~name out proof = pp_dot_seq ~name out (Iter.singleton proof)

  (* Render a set of proofs into a DOT file. *)
  let pp_dot_seq_file ?(name="proof") filename seq =
    Util.debugf ~section 1 "print proof graph to@ `%s`" (fun k->k filename);
    CCIO.with_out filename
      (fun oc ->
         let out = Format.formatter_of_out_channel oc in
         Format.fprintf out "%a@." (pp_dot_seq ~name) seq)

  let pp_dot_file ?name filename proof =
    pp_dot_seq_file ?name filename (Iter.singleton proof)
end
|
5b17b91414905183770c173263c706019f8056c5e2e998fc070df8f46a0564e4 | clojure-interop/java-jdk | SpinnerUI.clj | (ns javax.swing.plaf.SpinnerUI
"Pluggable look and feel interface for JSpinner"
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf SpinnerUI]))
(defn ->spinner-ui
"Constructor."
(^SpinnerUI []
(new SpinnerUI )))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/plaf/SpinnerUI.clj | clojure | (ns javax.swing.plaf.SpinnerUI
"Pluggable look and feel interface for JSpinner"
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf SpinnerUI]))
(defn ->spinner-ui
"Constructor."
(^SpinnerUI []
(new SpinnerUI )))
| |
764325a00b8b477c8817dfbc07a60ab70e9b62c8ee6ac05b5550d3256504bd00 | mejgun/haskell-tdlib | GetSavedOrderInfo.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Query.GetSavedOrderInfo where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
-- |
Returns saved order information . Returns a 404 error if there is no saved order information
data GetSavedOrderInfo = GetSavedOrderInfo
{
}
deriving (Eq)
instance Show GetSavedOrderInfo where
show GetSavedOrderInfo =
"GetSavedOrderInfo"
++ U.cc
[]
instance T.ToJSON GetSavedOrderInfo where
toJSON GetSavedOrderInfo =
A.object
[ "@type" A..= T.String "getSavedOrderInfo"
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/81516bd04c25c7371d4a9a5c972499791111c407/src/TD/Query/GetSavedOrderInfo.hs | haskell | # LANGUAGE OverloadedStrings #
|
| |
module TD.Query.GetSavedOrderInfo where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
Returns saved order information . Returns a 404 error if there is no saved order information
data GetSavedOrderInfo = GetSavedOrderInfo
{
}
deriving (Eq)
instance Show GetSavedOrderInfo where
show GetSavedOrderInfo =
"GetSavedOrderInfo"
++ U.cc
[]
instance T.ToJSON GetSavedOrderInfo where
toJSON GetSavedOrderInfo =
A.object
[ "@type" A..= T.String "getSavedOrderInfo"
]
|
d61cb891ffa653c7aecec95060c710093af0bb5390ade84c7d7b7998eb8dacbb | lispcord/lispcord | package.lisp |
(defpackage :lispcord.constants
(:use #:cl #:lispcord.util)
(:export #:+os+
#:+lib+
#:+base-url+
#:+api-suffix+
#:+gw-rate-limit+
#:+gw-rate-limit-connection+
#:+gw-rate-limit-game-status+))
(defpackage :lispcord.ratelimits
(:use #:cl #:lispcord.util #:lispcord.constants)
(:export #:rl-parse
#:rl-buffer))
(defpackage :lispcord.core
(:use #:cl
#:lispcord.util
#:lispcord.ratelimits
#:lispcord.constants)
(:export #:bot
#:%make-bot
#:*client*
#:bot-token
#:bot-user
#:bot-version
#:bot-seq
#:bot-session-id
#:bot-afk-since
#:bot-event-handlers
#:bot-conn
#:bot-running
#:bot-heartbeat-ack
#:bot-heartbeat-thread
#:bot-auth-as-bot
#:bot-auth
#:bot-url
#:base-url
#:api-suffix
#:discord-req
#:get-rq
#:post-rq))
(defpackage :lispcord.pipes
(:use #:cl #:lispcord.util #:lispcord.core)
(:export #:make-event-table
#:add-event-handler
#:dispatch-event))
(defpackage :lispcord.gateway
(:use #:cl
#:alexandria
#:lispcord.util
#:lispcord.pipes
#:lispcord.core
#:lispcord.constants)
(:import-from #:lispcord.classes
#:from-json
#:%to-json
#:cache
#:getcache-id
#:decache-id)
(:export #:connect
#:disconnect))
(defpackage :lispcord.http
(:use #:cl
#:alexandria
#:jonathan
#:lispcord.constants
#:lispcord.util
#:lispcord.core)
(:import-from #:lispcord.classes
#:cache
#:getcache-id
#:decache-id
#:from-json
#:botp)
(:export #:botp
#:create
#:edit
#:erase
#:from-id
#:get-messages
#:erase-reaction
#:erase-messages
#:erase-overwrite
#:start-typing
#:get-pinned
#:pin
#:unpin
#:get-emojis
#:erase-emoji
#:get-channels
#:get-members
#:move-member
#:set-nick
#:erase-role
#:get-bans
#:ban
#:unban
#:get-roles
#:leave
#:create-dms))
(defpackage :lispcord
(:use #:cl
#:lispcord.util
#:lispcord.constants
#:lispcord.gateway
#:lispcord.http
#:lispcord.core
#:lispcord.pipes)
(:import-from #:lispcord.classes
#:getcache-id
#:botp)
(:export #:*client*
#:connect
#:disconnect
#:defbot
#:make-bot
#:botp
#:make-prefix
#:commandp
#:sanitize-content
#:remove-substring
#:remove-mention
#:mention
#:demention
#:reply
#:me
#:add-event-handler
#:create
#:edit
#:erase
#:from-id
#:get-messages
#:erase-reaction
#:erase-messages
#:erase-overwrite
#:start-typing
#:get-pinned
#:pin
#:unpin
#:get-emojis
#:erase-emoji
#:get-channels
#:get-members
#:move-member
#:set-nick
#:erase-role
#:get-bans
#:ban
#:unban
#:get-roles
#:leave))
| null | https://raw.githubusercontent.com/lispcord/lispcord/448190cc503a0d7e59bdc0ffddb2e9dba0a706af/src/package.lisp | lisp |
(defpackage :lispcord.constants
(:use #:cl #:lispcord.util)
(:export #:+os+
#:+lib+
#:+base-url+
#:+api-suffix+
#:+gw-rate-limit+
#:+gw-rate-limit-connection+
#:+gw-rate-limit-game-status+))
(defpackage :lispcord.ratelimits
(:use #:cl #:lispcord.util #:lispcord.constants)
(:export #:rl-parse
#:rl-buffer))
(defpackage :lispcord.core
(:use #:cl
#:lispcord.util
#:lispcord.ratelimits
#:lispcord.constants)
(:export #:bot
#:%make-bot
#:*client*
#:bot-token
#:bot-user
#:bot-version
#:bot-seq
#:bot-session-id
#:bot-afk-since
#:bot-event-handlers
#:bot-conn
#:bot-running
#:bot-heartbeat-ack
#:bot-heartbeat-thread
#:bot-auth-as-bot
#:bot-auth
#:bot-url
#:base-url
#:api-suffix
#:discord-req
#:get-rq
#:post-rq))
(defpackage :lispcord.pipes
(:use #:cl #:lispcord.util #:lispcord.core)
(:export #:make-event-table
#:add-event-handler
#:dispatch-event))
(defpackage :lispcord.gateway
(:use #:cl
#:alexandria
#:lispcord.util
#:lispcord.pipes
#:lispcord.core
#:lispcord.constants)
(:import-from #:lispcord.classes
#:from-json
#:%to-json
#:cache
#:getcache-id
#:decache-id)
(:export #:connect
#:disconnect))
(defpackage :lispcord.http
(:use #:cl
#:alexandria
#:jonathan
#:lispcord.constants
#:lispcord.util
#:lispcord.core)
(:import-from #:lispcord.classes
#:cache
#:getcache-id
#:decache-id
#:from-json
#:botp)
(:export #:botp
#:create
#:edit
#:erase
#:from-id
#:get-messages
#:erase-reaction
#:erase-messages
#:erase-overwrite
#:start-typing
#:get-pinned
#:pin
#:unpin
#:get-emojis
#:erase-emoji
#:get-channels
#:get-members
#:move-member
#:set-nick
#:erase-role
#:get-bans
#:ban
#:unban
#:get-roles
#:leave
#:create-dms))
(defpackage :lispcord
(:use #:cl
#:lispcord.util
#:lispcord.constants
#:lispcord.gateway
#:lispcord.http
#:lispcord.core
#:lispcord.pipes)
(:import-from #:lispcord.classes
#:getcache-id
#:botp)
(:export #:*client*
#:connect
#:disconnect
#:defbot
#:make-bot
#:botp
#:make-prefix
#:commandp
#:sanitize-content
#:remove-substring
#:remove-mention
#:mention
#:demention
#:reply
#:me
#:add-event-handler
#:create
#:edit
#:erase
#:from-id
#:get-messages
#:erase-reaction
#:erase-messages
#:erase-overwrite
#:start-typing
#:get-pinned
#:pin
#:unpin
#:get-emojis
#:erase-emoji
#:get-channels
#:get-members
#:move-member
#:set-nick
#:erase-role
#:get-bans
#:ban
#:unban
#:get-roles
#:leave))
| |
4dedeebcfecd3b6350890d4e5d02cd074631f637dd1679773cedd3f2a7980289 | coingaming/lnd-client | Chainnotifier.hs | This file was auto - generated from chainrpc / chainnotifier.proto by the proto - lens - protoc program .
# LANGUAGE ScopedTypeVariables , DataKinds , TypeFamilies , UndecidableInstances , GeneralizedNewtypeDeriving , MultiParamTypeClasses , FlexibleContexts , FlexibleInstances , PatternSynonyms , MagicHash , NoImplicitPrelude , BangPatterns , TypeApplications , OverloadedStrings , DerivingStrategies , DeriveGeneric #
{-# OPTIONS_GHC -Wno-unused-imports#-}
{-# OPTIONS_GHC -Wno-duplicate-exports#-}
# OPTIONS_GHC -Wno - dodgy - exports #
module Proto.Chainrpc.Chainnotifier (
ChainNotifier(..), BlockEpoch(), ConfDetails(), ConfEvent(),
ConfEvent'Event(..), _ConfEvent'Conf, _ConfEvent'Reorg,
ConfRequest(), Outpoint(), Reorg(), SpendDetails(), SpendEvent(),
SpendEvent'Event(..), _SpendEvent'Spend, _SpendEvent'Reorg,
SpendRequest()
) where
import qualified Data.ProtoLens.Runtime.Control.DeepSeq as Control.DeepSeq
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Prism as Data.ProtoLens.Prism
import qualified Text.PrettyPrint.GenericPretty.Instance
import qualified GHC.Generics
import qualified Text.PrettyPrint.GenericPretty
import qualified Data.ProtoLens.Runtime.Prelude as Prelude
import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int
import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid
import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word
import qualified Data.ProtoLens.Runtime.Data.ProtoLens as Data.ProtoLens
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes as Data.ProtoLens.Encoding.Bytes
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing as Data.ProtoLens.Encoding.Growing
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe as Data.ProtoLens.Encoding.Parser.Unsafe
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire as Data.ProtoLens.Encoding.Wire
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field as Data.ProtoLens.Field
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum as Data.ProtoLens.Message.Enum
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types as Data.ProtoLens.Service.Types
import qualified Data.ProtoLens.Runtime.Lens.Family2 as Lens.Family2
import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked as Lens.Family2.Unchecked
import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text
import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map
import qualified Data.ProtoLens.Runtime.Data.ByteString as Data.ByteString
import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 as Data.ByteString.Char8
import qualified Data.ProtoLens.Runtime.Data.Text.Encoding as Data.Text.Encoding
import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector
import qualified Data.ProtoLens.Runtime.Data.Vector.Generic as Data.Vector.Generic
import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed as Data.Vector.Unboxed
import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.hash' @:: Lens' BlockEpoch Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.height' @:: Lens' BlockEpoch Data.Word.Word32@ -}
-- proto-lens-generated code for the proto message 'chainrpc.BlockEpoch':
-- two scalar fields, 'hash' (bytes, field 1) and 'height' (uint32, field 2),
-- plus a set of preserved unknown wire fields.
-- NOTE(review): machine-generated; change the .proto and regenerate rather
-- than editing by hand.
data BlockEpoch
  = BlockEpoch'_constructor {_BlockEpoch'hash :: !Data.ByteString.ByteString,
                             _BlockEpoch'height :: !Data.Word.Word32,
                             _BlockEpoch'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders as '{...}' around the short message text produced by
-- showMessageShort.
instance Prelude.Show BlockEpoch where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out BlockEpoch
-- Lens-style accessors consumed by the companion *_Fields module.
instance Data.ProtoLens.Field.HasField BlockEpoch "hash" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _BlockEpoch'hash (\ x__ y__ -> x__ {_BlockEpoch'hash = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField BlockEpoch "height" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _BlockEpoch'height (\ x__ y__ -> x__ {_BlockEpoch'height = y__}))
        Prelude.id
instance Data.ProtoLens.Message BlockEpoch where
  messageName _ = Data.Text.pack "chainrpc.BlockEpoch"
  -- Binary-encoded DescriptorProto for this message; the control-character
  -- escapes (\DC2, \EOT, ...) are the serialized protobuf descriptor bytes.
  packedMessageDescriptor _
    = "\n\
      \\n\
      \BlockEpoch\DC2\DC2\n\
      \\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\SYN\n\
      \\ACKheight\CAN\STX \SOH(\rR\ACKheight"
  packedFileDescriptor _ = packedFileDescriptor
  -- Field tag -> descriptor map: tag 1 = bytes 'hash', tag 2 = uint32 'height'.
  fieldsByTag
    = let
        hash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"hash")) ::
              Data.ProtoLens.FieldDescriptor BlockEpoch
        height__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "height"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"height")) ::
              Data.ProtoLens.FieldDescriptor BlockEpoch
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, hash__field_descriptor),
           (Data.ProtoLens.Tag 2, height__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _BlockEpoch'_unknownFields
        (\ x__ y__ -> x__ {_BlockEpoch'_unknownFields = y__})
  defMessage
    = BlockEpoch'_constructor
        {_BlockEpoch'hash = Data.ProtoLens.fieldDefault,
         _BlockEpoch'height = Data.ProtoLens.fieldDefault,
         _BlockEpoch'_unknownFields = []}
  -- Wire-format parse loop: wire tag 10 (field 1, length-delimited) reads
  -- 'hash'; wire tag 16 (field 2, varint) reads 'height'; anything else is
  -- accumulated (reversed at the end) into the unknown-field set.
  parseMessage
    = let
        loop ::
          BlockEpoch -> Data.ProtoLens.Encoding.Bytes.Parser BlockEpoch
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "hash"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"hash") y x)
                        16
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "height"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"height") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "BlockEpoch"
  -- Serialization: fields equal to their default are omitted (proto3-style),
  -- then any preserved unknown fields are appended.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"hash") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"height") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 16)
                         ((Prelude..)
                            Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                (Data.ProtoLens.Encoding.Wire.buildFieldSet
                   (Lens.Family2.view Data.ProtoLens.unknownFields _x)))
-- Deep evaluation of every field for NFData.
instance Control.DeepSeq.NFData BlockEpoch where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_BlockEpoch'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_BlockEpoch'hash x__)
                (Control.DeepSeq.deepseq (_BlockEpoch'height x__) ()))
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.rawTx' @:: Lens' ConfDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.blockHash' @:: Lens' ConfDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.blockHeight' @:: Lens' ConfDetails Data.Word.Word32@
     * 'Proto.Chainrpc.Chainnotifier_Fields.txIndex' @:: Lens' ConfDetails Data.Word.Word32@ -}
-- proto-lens-generated code for the proto message 'chainrpc.ConfDetails':
-- raw_tx (bytes, field 1), block_hash (bytes, field 2),
-- block_height (uint32, field 3), tx_index (uint32, field 4).
-- NOTE(review): machine-generated; regenerate from the .proto instead of
-- editing by hand.
data ConfDetails
  = ConfDetails'_constructor {_ConfDetails'rawTx :: !Data.ByteString.ByteString,
                              _ConfDetails'blockHash :: !Data.ByteString.ByteString,
                              _ConfDetails'blockHeight :: !Data.Word.Word32,
                              _ConfDetails'txIndex :: !Data.Word.Word32,
                              _ConfDetails'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders as '{...}' around showMessageShort output.
instance Prelude.Show ConfDetails where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfDetails
-- Lens-style accessors consumed by the companion *_Fields module.
instance Data.ProtoLens.Field.HasField ConfDetails "rawTx" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfDetails'rawTx (\ x__ y__ -> x__ {_ConfDetails'rawTx = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "blockHash" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfDetails'blockHash
           (\ x__ y__ -> x__ {_ConfDetails'blockHash = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "blockHeight" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfDetails'blockHeight
           (\ x__ y__ -> x__ {_ConfDetails'blockHeight = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "txIndex" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfDetails'txIndex
           (\ x__ y__ -> x__ {_ConfDetails'txIndex = y__}))
        Prelude.id
instance Data.ProtoLens.Message ConfDetails where
  messageName _ = Data.Text.pack "chainrpc.ConfDetails"
  -- Binary-encoded DescriptorProto for this message (protobuf descriptor
  -- bytes expressed as Haskell control-character escapes).
  packedMessageDescriptor _
    = "\n\
      \\vConfDetails\DC2\NAK\n\
      \\ACKraw_tx\CAN\SOH \SOH(\fR\ENQrawTx\DC2\GS\n\
      \\n\
      \block_hash\CAN\STX \SOH(\fR\tblockHash\DC2!\n\
      \\fblock_height\CAN\ETX \SOH(\rR\vblockHeight\DC2\EM\n\
      \\btx_index\CAN\EOT \SOH(\rR\atxIndex"
  packedFileDescriptor _ = packedFileDescriptor
  -- Field tag -> descriptor map for the four scalar fields.
  fieldsByTag
    = let
        rawTx__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "raw_tx"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"rawTx")) ::
              Data.ProtoLens.FieldDescriptor ConfDetails
        blockHash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "block_hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"blockHash")) ::
              Data.ProtoLens.FieldDescriptor ConfDetails
        blockHeight__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "block_height"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"blockHeight")) ::
              Data.ProtoLens.FieldDescriptor ConfDetails
        txIndex__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "tx_index"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"txIndex")) ::
              Data.ProtoLens.FieldDescriptor ConfDetails
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, rawTx__field_descriptor),
           (Data.ProtoLens.Tag 2, blockHash__field_descriptor),
           (Data.ProtoLens.Tag 3, blockHeight__field_descriptor),
           (Data.ProtoLens.Tag 4, txIndex__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _ConfDetails'_unknownFields
        (\ x__ y__ -> x__ {_ConfDetails'_unknownFields = y__})
  defMessage
    = ConfDetails'_constructor
        {_ConfDetails'rawTx = Data.ProtoLens.fieldDefault,
         _ConfDetails'blockHash = Data.ProtoLens.fieldDefault,
         _ConfDetails'blockHeight = Data.ProtoLens.fieldDefault,
         _ConfDetails'txIndex = Data.ProtoLens.fieldDefault,
         _ConfDetails'_unknownFields = []}
  -- Wire-format parse loop: tags 10/18 are length-delimited bytes fields
  -- (raw_tx, block_hash); tags 24/32 are varints (block_height, tx_index);
  -- any other tag is preserved in the unknown-field set.
  parseMessage
    = let
        loop ::
          ConfDetails -> Data.ProtoLens.Encoding.Bytes.Parser ConfDetails
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "raw_tx"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"rawTx") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "block_hash"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"blockHash") y x)
                        24
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "block_height"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"blockHeight") y x)
                        32
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "tx_index"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"txIndex") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "ConfDetails"
  -- Serialization: default-valued fields are omitted (proto3-style), then
  -- preserved unknown fields are appended.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let
                _v = Lens.Family2.view (Data.ProtoLens.Field.field @"rawTx") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"blockHash") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v
                        = Lens.Family2.view (Data.ProtoLens.Field.field @"blockHeight") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 24)
                            ((Prelude..)
                               Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                   ((Data.Monoid.<>)
                      (let
                         _v = Lens.Family2.view (Data.ProtoLens.Field.field @"txIndex") _x
                       in
                         if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                             Data.Monoid.mempty
                         else
                             (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt 32)
                               ((Prelude..)
                                  Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                      (Data.ProtoLens.Encoding.Wire.buildFieldSet
                         (Lens.Family2.view Data.ProtoLens.unknownFields _x)))))
-- Deep evaluation of every field for NFData.
instance Control.DeepSeq.NFData ConfDetails where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_ConfDetails'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_ConfDetails'rawTx x__)
                (Control.DeepSeq.deepseq
                   (_ConfDetails'blockHash x__)
                   (Control.DeepSeq.deepseq
                      (_ConfDetails'blockHeight x__)
                      (Control.DeepSeq.deepseq (_ConfDetails'txIndex x__) ()))))
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'event' @:: Lens' ConfEvent (Prelude.Maybe ConfEvent'Event)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'conf' @:: Lens' ConfEvent (Prelude.Maybe ConfDetails)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.conf' @:: Lens' ConfEvent ConfDetails@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'reorg' @:: Lens' ConfEvent (Prelude.Maybe Reorg)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.reorg' @:: Lens' ConfEvent Reorg@ -}
-- proto-lens-generated code for the proto message 'chainrpc.ConfEvent',
-- which contains a single oneof named 'event' with two alternatives:
-- 'conf' (ConfDetails, field 1) and 'reorg' (Reorg, field 2).
-- NOTE(review): machine-generated; regenerate from the .proto instead of
-- editing by hand.
data ConfEvent
  = ConfEvent'_constructor {_ConfEvent'event :: !(Prelude.Maybe ConfEvent'Event),
                            _ConfEvent'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders as '{...}' around showMessageShort output.
instance Prelude.Show ConfEvent where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfEvent
-- Sum type representing the 'event' oneof: exactly one alternative is set.
data ConfEvent'Event
  = ConfEvent'Conf !ConfDetails | ConfEvent'Reorg !Reorg
  deriving stock (Prelude.Show,
                  Prelude.Eq,
                  Prelude.Ord,
                  GHC.Generics.Generic)
instance Text.PrettyPrint.GenericPretty.Out ConfEvent'Event
-- Accessor for the whole oneof (Nothing = unset).
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'event" (Prelude.Maybe ConfEvent'Event) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        Prelude.id
-- Accessor for the 'conf' alternative as Maybe (Nothing when the oneof is
-- unset or holds 'reorg'); setting it replaces the whole oneof.
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'conf" (Prelude.Maybe ConfDetails) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (ConfEvent'Conf x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap ConfEvent'Conf y__))
-- Non-Maybe 'conf' accessor: reading yields defMessage when the alternative
-- is not set (via maybeLens).
instance Data.ProtoLens.Field.HasField ConfEvent "conf" ConfDetails where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (ConfEvent'Conf x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap ConfEvent'Conf y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
-- Same pair of accessors for the 'reorg' alternative.
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'reorg" (Prelude.Maybe Reorg) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (ConfEvent'Reorg x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap ConfEvent'Reorg y__))
instance Data.ProtoLens.Field.HasField ConfEvent "reorg" Reorg where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (ConfEvent'Reorg x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap ConfEvent'Reorg y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
instance Data.ProtoLens.Message ConfEvent where
  messageName _ = Data.Text.pack "chainrpc.ConfEvent"
  -- Binary-encoded DescriptorProto for this message, including the 'event'
  -- oneof declaration.
  packedMessageDescriptor _
    = "\n\
      \\tConfEvent\DC2+\n\
      \\EOTconf\CAN\SOH \SOH(\v2\NAK.chainrpc.ConfDetailsH\NULR\EOTconf\DC2'\n\
      \\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
      \\ENQevent"
  packedFileDescriptor _ = packedFileDescriptor
  -- Field tag -> descriptor map for the two oneof alternatives.
  fieldsByTag
    = let
        conf__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "conf"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor ConfDetails)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'conf")) ::
              Data.ProtoLens.FieldDescriptor ConfEvent
        reorg__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "reorg"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Reorg)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'reorg")) ::
              Data.ProtoLens.FieldDescriptor ConfEvent
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, conf__field_descriptor),
           (Data.ProtoLens.Tag 2, reorg__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _ConfEvent'_unknownFields
        (\ x__ y__ -> x__ {_ConfEvent'_unknownFields = y__})
  defMessage
    = ConfEvent'_constructor
        {_ConfEvent'event = Prelude.Nothing,
         _ConfEvent'_unknownFields = []}
  -- Wire-format parse loop: tags 10/18 are length-delimited embedded
  -- messages parsed with 'isolate' (conf, reorg); a later occurrence of
  -- either alternative overwrites the oneof.
  parseMessage
    = let
        loop :: ConfEvent -> Data.ProtoLens.Encoding.Bytes.Parser ConfEvent
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "conf"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"conf") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "reorg"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"reorg") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "ConfEvent"
  -- Serialization: emit whichever oneof alternative is set (nothing when
  -- unset), then the preserved unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'event") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just (ConfEvent'Conf v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v)
                (Prelude.Just (ConfEvent'Reorg v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v))
             (Data.ProtoLens.Encoding.Wire.buildFieldSet
                (Lens.Family2.view Data.ProtoLens.unknownFields _x))
-- Deep evaluation for NFData (message and oneof payloads).
instance Control.DeepSeq.NFData ConfEvent where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_ConfEvent'_unknownFields x__)
             (Control.DeepSeq.deepseq (_ConfEvent'event x__) ())
instance Control.DeepSeq.NFData ConfEvent'Event where
  rnf (ConfEvent'Conf x__) = Control.DeepSeq.rnf x__
  rnf (ConfEvent'Reorg x__) = Control.DeepSeq.rnf x__
-- Prisms onto the oneof alternatives, for use with lens-style traversals.
_ConfEvent'Conf ::
  Data.ProtoLens.Prism.Prism' ConfEvent'Event ConfDetails
_ConfEvent'Conf
  = Data.ProtoLens.Prism.prism'
      ConfEvent'Conf
      (\ p__
         -> case p__ of
              (ConfEvent'Conf p__val) -> Prelude.Just p__val
              _otherwise -> Prelude.Nothing)
_ConfEvent'Reorg ::
  Data.ProtoLens.Prism.Prism' ConfEvent'Event Reorg
_ConfEvent'Reorg
  = Data.ProtoLens.Prism.prism'
      ConfEvent'Reorg
      (\ p__
         -> case p__ of
              (ConfEvent'Reorg p__val) -> Prelude.Just p__val
              _otherwise -> Prelude.Nothing)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.txid' @:: Lens' ConfRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.script' @:: Lens' ConfRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.numConfs' @:: Lens' ConfRequest Data.Word.Word32@
     * 'Proto.Chainrpc.Chainnotifier_Fields.heightHint' @:: Lens' ConfRequest Data.Word.Word32@ -}
-- proto-lens-generated code for the proto message 'chainrpc.ConfRequest':
-- txid (bytes, field 1), script (bytes, field 2),
-- num_confs (uint32, field 3), height_hint (uint32, field 4).
-- NOTE(review): machine-generated; regenerate from the .proto instead of
-- editing by hand.
data ConfRequest
  = ConfRequest'_constructor {_ConfRequest'txid :: !Data.ByteString.ByteString,
                              _ConfRequest'script :: !Data.ByteString.ByteString,
                              _ConfRequest'numConfs :: !Data.Word.Word32,
                              _ConfRequest'heightHint :: !Data.Word.Word32,
                              _ConfRequest'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders as '{...}' around showMessageShort output.
instance Prelude.Show ConfRequest where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfRequest
-- Lens-style accessors consumed by the companion *_Fields module.
instance Data.ProtoLens.Field.HasField ConfRequest "txid" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'txid (\ x__ y__ -> x__ {_ConfRequest'txid = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "script" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'script (\ x__ y__ -> x__ {_ConfRequest'script = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "numConfs" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'numConfs
           (\ x__ y__ -> x__ {_ConfRequest'numConfs = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "heightHint" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'heightHint
           (\ x__ y__ -> x__ {_ConfRequest'heightHint = y__}))
        Prelude.id
instance Data.ProtoLens.Message ConfRequest where
  messageName _ = Data.Text.pack "chainrpc.ConfRequest"
  -- Binary-encoded DescriptorProto for this message (protobuf descriptor
  -- bytes expressed as Haskell control-character escapes).
  packedMessageDescriptor _
    = "\n\
      \\vConfRequest\DC2\DC2\n\
      \\EOTtxid\CAN\SOH \SOH(\fR\EOTtxid\DC2\SYN\n\
      \\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\ESC\n\
      \\tnum_confs\CAN\ETX \SOH(\rR\bnumConfs\DC2\US\n\
      \\vheight_hint\CAN\EOT \SOH(\rR\n\
      \heightHint"
  packedFileDescriptor _ = packedFileDescriptor
  -- Field tag -> descriptor map for the four scalar fields.
  fieldsByTag
    = let
        txid__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "txid"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"txid")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        script__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "script"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"script")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        numConfs__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "num_confs"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"numConfs")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        heightHint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "height_hint"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"heightHint")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, txid__field_descriptor),
           (Data.ProtoLens.Tag 2, script__field_descriptor),
           (Data.ProtoLens.Tag 3, numConfs__field_descriptor),
           (Data.ProtoLens.Tag 4, heightHint__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _ConfRequest'_unknownFields
        (\ x__ y__ -> x__ {_ConfRequest'_unknownFields = y__})
  defMessage
    = ConfRequest'_constructor
        {_ConfRequest'txid = Data.ProtoLens.fieldDefault,
         _ConfRequest'script = Data.ProtoLens.fieldDefault,
         _ConfRequest'numConfs = Data.ProtoLens.fieldDefault,
         _ConfRequest'heightHint = Data.ProtoLens.fieldDefault,
         _ConfRequest'_unknownFields = []}
  -- Wire-format parse loop: tags 10/18 are length-delimited bytes fields
  -- (txid, script); tags 24/32 are varints (num_confs, height_hint); any
  -- other tag is preserved in the unknown-field set.
  parseMessage
    = let
        loop ::
          ConfRequest -> Data.ProtoLens.Encoding.Bytes.Parser ConfRequest
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "txid"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"txid") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "script"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"script") y x)
                        24
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "num_confs"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"numConfs") y x)
                        32
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "height_hint"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"heightHint") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "ConfRequest"
  -- Serialization: default-valued fields are omitted (proto3-style), then
  -- preserved unknown fields are appended.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"txid") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"script") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v = Lens.Family2.view (Data.ProtoLens.Field.field @"numConfs") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 24)
                            ((Prelude..)
                               Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                   ((Data.Monoid.<>)
                      (let
                         _v
                           = Lens.Family2.view (Data.ProtoLens.Field.field @"heightHint") _x
                       in
                         if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                             Data.Monoid.mempty
                         else
                             (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt 32)
                               ((Prelude..)
                                  Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                      (Data.ProtoLens.Encoding.Wire.buildFieldSet
                         (Lens.Family2.view Data.ProtoLens.unknownFields _x)))))
-- Deep evaluation of every field for NFData.
instance Control.DeepSeq.NFData ConfRequest where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_ConfRequest'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_ConfRequest'txid x__)
                (Control.DeepSeq.deepseq
                   (_ConfRequest'script x__)
                   (Control.DeepSeq.deepseq
                      (_ConfRequest'numConfs x__)
                      (Control.DeepSeq.deepseq (_ConfRequest'heightHint x__) ()))))
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.hash' @:: Lens' Outpoint Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.index' @:: Lens' Outpoint Data.Word.Word32@ -}
data Outpoint
= Outpoint'_constructor {_Outpoint'hash :: !Data.ByteString.ByteString,
_Outpoint'index :: !Data.Word.Word32,
_Outpoint'_unknownFields :: !Data.ProtoLens.FieldSet}
deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
instance Prelude.Show Outpoint where
showsPrec _ __x __s
= Prelude.showChar
'{'
(Prelude.showString
(Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out Outpoint
instance Data.ProtoLens.Field.HasField Outpoint "hash" Data.ByteString.ByteString where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_Outpoint'hash (\ x__ y__ -> x__ {_Outpoint'hash = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField Outpoint "index" Data.Word.Word32 where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_Outpoint'index (\ x__ y__ -> x__ {_Outpoint'index = y__}))
Prelude.id
-- | Wire (de)serialisation for the @chainrpc.Outpoint@ message:
-- field 1 (@hash@, bytes) and field 2 (@index@, uint32).
instance Data.ProtoLens.Message Outpoint where
  messageName _ = Data.Text.pack "chainrpc.Outpoint"
  packedMessageDescriptor _
    = "\n\
      \\bOutpoint\DC2\DC2\n\
      \\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\DC4\n\
      \\ENQindex\CAN\STX \SOH(\rR\ENQindex"
  packedFileDescriptor _ = packedFileDescriptor
  -- Map protobuf tags 1 and 2 to their field descriptors.
  fieldsByTag
    = let
        hash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"hash")) ::
              Data.ProtoLens.FieldDescriptor Outpoint
        index__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "index"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"index")) ::
              Data.ProtoLens.FieldDescriptor Outpoint
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, hash__field_descriptor),
           (Data.ProtoLens.Tag 2, index__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _Outpoint'_unknownFields
        (\ x__ y__ -> x__ {_Outpoint'_unknownFields = y__})
  defMessage
    = Outpoint'_constructor
        {_Outpoint'hash = Data.ProtoLens.fieldDefault,
         _Outpoint'index = Data.ProtoLens.fieldDefault,
         _Outpoint'_unknownFields = []}
  -- Tag-dispatch parse loop; wire tag 10 = field 1 (length-delimited),
  -- 16 = field 2 (varint); unrecognised tags go to unknownFields.
  parseMessage
    = let
        loop :: Outpoint -> Data.ProtoLens.Encoding.Bytes.Parser Outpoint
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
                 else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "hash"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"hash") y x)
                        16
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "index"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"index") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "Outpoint"
  -- Serialise only non-default fields (proto3), then any unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"hash") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"index") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 16)
                         ((Prelude..)
                            Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                (Data.ProtoLens.Encoding.Wire.buildFieldSet
                   (Lens.Family2.view Data.ProtoLens.unknownFields _x)))
-- | Fully force an 'Outpoint': unknown fields first, then hash, then index.
instance Control.DeepSeq.NFData Outpoint where
  rnf msg
    = Control.DeepSeq.rnf (_Outpoint'_unknownFields msg)
        `Prelude.seq`
          Control.DeepSeq.rnf (_Outpoint'hash msg)
            `Prelude.seq` Control.DeepSeq.rnf (_Outpoint'index msg)
{- | Fields : none — @chainrpc.Reorg@ declares no fields. -}
-- | Empty message type; carries only unknown/extension wire fields.
-- Presumably signals a chain reorganisation to subscribers — TODO confirm
-- against the proto definition.
data Reorg
  = Reorg'_constructor {_Reorg'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- | Render as @{...}@ using the proto-lens short message form.
instance Prelude.Show Reorg where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
-- Generic pretty-printing support (derived via GHC.Generics).
instance Text.PrettyPrint.GenericPretty.Out Reorg
-- | Wire (de)serialisation for the empty @chainrpc.Reorg@ message.
instance Data.ProtoLens.Message Reorg where
  messageName _ = Data.Text.pack "chainrpc.Reorg"
  packedMessageDescriptor _
    = "\n\
      \\ENQReorg"
  packedFileDescriptor _ = packedFileDescriptor
  -- No declared fields, so the tag map is empty.
  fieldsByTag = let in Data.Map.fromList []
  unknownFields
    = Lens.Family2.Unchecked.lens
        _Reorg'_unknownFields
        (\ x__ y__ -> x__ {_Reorg'_unknownFields = y__})
  defMessage = Reorg'_constructor {_Reorg'_unknownFields = []}
  -- Every encountered tag is unknown here; all are retained in unknownFields.
  parseMessage
    = let
        loop :: Reorg -> Data.ProtoLens.Encoding.Bytes.Parser Reorg
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
                 else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of {
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x) }
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "Reorg"
  -- Output is solely whatever unknown fields were previously parsed.
  buildMessage
    = \ _x
        -> Data.ProtoLens.Encoding.Wire.buildFieldSet
             (Lens.Family2.view Data.ProtoLens.unknownFields _x)
-- | Force a 'Reorg' by evaluating its only component, the unknown-field set.
instance Control.DeepSeq.NFData Reorg where
  rnf msg = Control.DeepSeq.rnf (_Reorg'_unknownFields msg)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingOutpoint' @:: Lens' SpendDetails Outpoint@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'spendingOutpoint' @:: Lens' SpendDetails (Prelude.Maybe Outpoint)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.rawSpendingTx' @:: Lens' SpendDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingTxHash' @:: Lens' SpendDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingInputIndex' @:: Lens' SpendDetails Data.Word.Word32@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingHeight' @:: Lens' SpendDetails Data.Word.Word32@ -}
-- | Information about a spend: the outpoint being spent, the raw spending
-- transaction with its hash, the spending input index, and the block height.
data SpendDetails
  = SpendDetails'_constructor {_SpendDetails'spendingOutpoint :: !(Prelude.Maybe Outpoint),
                               _SpendDetails'rawSpendingTx :: !Data.ByteString.ByteString,
                               _SpendDetails'spendingTxHash :: !Data.ByteString.ByteString,
                               _SpendDetails'spendingInputIndex :: !Data.Word.Word32,
                               _SpendDetails'spendingHeight :: !Data.Word.Word32,
                               _SpendDetails'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- | Render as @{...}@ using the proto-lens short message form.
instance Prelude.Show SpendDetails where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
-- Generic pretty-printing support (derived via GHC.Generics).
instance Text.PrettyPrint.GenericPretty.Out SpendDetails
-- | Lens onto @spending_outpoint@; views 'Prelude.Nothing' as 'defMessage'.
instance Data.ProtoLens.Field.HasField SpendDetails "spendingOutpoint" Outpoint where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingOutpoint
           (\ x__ y__ -> x__ {_SpendDetails'spendingOutpoint = y__}))
        (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage)
-- | Lens onto @spending_outpoint@ preserving presence ('Prelude.Maybe').
instance Data.ProtoLens.Field.HasField SpendDetails "maybe'spendingOutpoint" (Prelude.Maybe Outpoint) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingOutpoint
           (\ x__ y__ -> x__ {_SpendDetails'spendingOutpoint = y__}))
        Prelude.id
-- | Lens onto @raw_spending_tx@ (bytes).
instance Data.ProtoLens.Field.HasField SpendDetails "rawSpendingTx" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'rawSpendingTx
           (\ x__ y__ -> x__ {_SpendDetails'rawSpendingTx = y__}))
        Prelude.id
-- | Lens onto @spending_tx_hash@ (bytes).
instance Data.ProtoLens.Field.HasField SpendDetails "spendingTxHash" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingTxHash
           (\ x__ y__ -> x__ {_SpendDetails'spendingTxHash = y__}))
        Prelude.id
-- | Lens onto @spending_input_index@ (uint32).
instance Data.ProtoLens.Field.HasField SpendDetails "spendingInputIndex" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingInputIndex
           (\ x__ y__ -> x__ {_SpendDetails'spendingInputIndex = y__}))
        Prelude.id
-- | Lens onto @spending_height@ (uint32).
instance Data.ProtoLens.Field.HasField SpendDetails "spendingHeight" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingHeight
           (\ x__ y__ -> x__ {_SpendDetails'spendingHeight = y__}))
        Prelude.id
-- | Wire (de)serialisation for @chainrpc.SpendDetails@ (fields 1-5).
instance Data.ProtoLens.Message SpendDetails where
  messageName _ = Data.Text.pack "chainrpc.SpendDetails"
  packedMessageDescriptor _
    = "\n\
      \\fSpendDetails\DC2?\n\
      \\DC1spending_outpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\DLEspendingOutpoint\DC2&\n\
      \\SIraw_spending_tx\CAN\STX \SOH(\fR\rrawSpendingTx\DC2(\n\
      \\DLEspending_tx_hash\CAN\ETX \SOH(\fR\SOspendingTxHash\DC20\n\
      \\DC4spending_input_index\CAN\EOT \SOH(\rR\DC2spendingInputIndex\DC2'\n\
      \\SIspending_height\CAN\ENQ \SOH(\rR\SOspendingHeight"
  packedFileDescriptor _ = packedFileDescriptor
  -- Map protobuf tags 1-5 to their field descriptors.
  fieldsByTag
    = let
        spendingOutpoint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_outpoint"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Outpoint)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'spendingOutpoint")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        rawSpendingTx__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "raw_spending_tx"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"rawSpendingTx")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingTxHash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_tx_hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingTxHash")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingInputIndex__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_input_index"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingInputIndex")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingHeight__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_height"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingHeight")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, spendingOutpoint__field_descriptor),
           (Data.ProtoLens.Tag 2, rawSpendingTx__field_descriptor),
           (Data.ProtoLens.Tag 3, spendingTxHash__field_descriptor),
           (Data.ProtoLens.Tag 4, spendingInputIndex__field_descriptor),
           (Data.ProtoLens.Tag 5, spendingHeight__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _SpendDetails'_unknownFields
        (\ x__ y__ -> x__ {_SpendDetails'_unknownFields = y__})
  defMessage
    = SpendDetails'_constructor
        {_SpendDetails'spendingOutpoint = Prelude.Nothing,
         _SpendDetails'rawSpendingTx = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingTxHash = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingInputIndex = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingHeight = Data.ProtoLens.fieldDefault,
         _SpendDetails'_unknownFields = []}
  -- Tag-dispatch parse loop: 10 = embedded Outpoint, 18/26 = bytes,
  -- 32/40 = varints; unrecognised tags go to unknownFields.
  parseMessage
    = let
        loop ::
          SpendDetails -> Data.ProtoLens.Encoding.Bytes.Parser SpendDetails
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
                 else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "spending_outpoint"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingOutpoint") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "raw_spending_tx"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"rawSpendingTx") y x)
                        26
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "spending_tx_hash"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingTxHash") y x)
                        32
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "spending_input_index"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingInputIndex") y x)
                        40
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "spending_height"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingHeight") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "SpendDetails"
  -- Serialise: submessage only when present, scalars only when non-default,
  -- then any unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view
                    (Data.ProtoLens.Field.field @"maybe'spendingOutpoint") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just _v)
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage _v))
             ((Data.Monoid.<>)
                (let
                   _v
                     = Lens.Family2.view
                         (Data.ProtoLens.Field.field @"rawSpendingTx") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v
                        = Lens.Family2.view
                            (Data.ProtoLens.Field.field @"spendingTxHash") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 26)
                            ((\ bs
                                -> (Data.Monoid.<>)
                                     (Data.ProtoLens.Encoding.Bytes.putVarInt
                                        (Prelude.fromIntegral (Data.ByteString.length bs)))
                                     (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                               _v))
                   ((Data.Monoid.<>)
                      (let
                         _v
                           = Lens.Family2.view
                               (Data.ProtoLens.Field.field @"spendingInputIndex") _x
                       in
                         if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                             Data.Monoid.mempty
                         else
                             (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt 32)
                               ((Prelude..)
                                  Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                      ((Data.Monoid.<>)
                         (let
                            _v
                              = Lens.Family2.view
                                  (Data.ProtoLens.Field.field @"spendingHeight") _x
                          in
                            if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                                Data.Monoid.mempty
                            else
                                (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt 40)
                                  ((Prelude..)
                                     Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral
                                     _v))
                         (Data.ProtoLens.Encoding.Wire.buildFieldSet
                            (Lens.Family2.view Data.ProtoLens.unknownFields _x))))))
-- | Fully force a 'SpendDetails' value, component by component, in the
-- same order as the generated nesting: unknown fields, then fields 1-5.
instance Control.DeepSeq.NFData SpendDetails where
  rnf msg
    = Control.DeepSeq.rnf (_SpendDetails'_unknownFields msg)
        `Prelude.seq`
          Control.DeepSeq.rnf (_SpendDetails'spendingOutpoint msg)
            `Prelude.seq`
              Control.DeepSeq.rnf (_SpendDetails'rawSpendingTx msg)
                `Prelude.seq`
                  Control.DeepSeq.rnf (_SpendDetails'spendingTxHash msg)
                    `Prelude.seq`
                      Control.DeepSeq.rnf (_SpendDetails'spendingInputIndex msg)
                        `Prelude.seq`
                          Control.DeepSeq.rnf (_SpendDetails'spendingHeight msg)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'event' @:: Lens' SpendEvent (Prelude.Maybe SpendEvent'Event)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'spend' @:: Lens' SpendEvent (Prelude.Maybe SpendDetails)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spend' @:: Lens' SpendEvent SpendDetails@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'reorg' @:: Lens' SpendEvent (Prelude.Maybe Reorg)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.reorg' @:: Lens' SpendEvent Reorg@ -}
-- | A spend notification: wraps the @event@ oneof (spend details or reorg).
data SpendEvent
  = SpendEvent'_constructor {_SpendEvent'event :: !(Prelude.Maybe SpendEvent'Event),
                             _SpendEvent'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- | Render as @{...}@ using the proto-lens short message form.
instance Prelude.Show SpendEvent where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out SpendEvent
-- | The @event@ oneof: either spend details (field 1) or a reorg (field 2).
data SpendEvent'Event
  = SpendEvent'Spend !SpendDetails | SpendEvent'Reorg !Reorg
  deriving stock (Prelude.Show,
                  Prelude.Eq,
                  Prelude.Ord,
                  GHC.Generics.Generic)
instance Text.PrettyPrint.GenericPretty.Out SpendEvent'Event
-- | Lens onto the raw @event@ oneof value.
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'event" (Prelude.Maybe SpendEvent'Event) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
        Prelude.id
-- | Lens onto the @spend@ branch; yields 'Prelude.Nothing' when the oneof
-- is unset or holds the other branch.
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'spend" (Prelude.Maybe SpendDetails) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (SpendEvent'Spend x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap SpendEvent'Spend y__))
-- | Lens onto @spend@; views an absent branch as 'defMessage'.
instance Data.ProtoLens.Field.HasField SpendEvent "spend" SpendDetails where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (SpendEvent'Spend x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap SpendEvent'Spend y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
-- | Lens onto the @reorg@ branch; 'Prelude.Nothing' when not selected.
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'reorg" (Prelude.Maybe Reorg) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (SpendEvent'Reorg x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap SpendEvent'Reorg y__))
-- | Lens onto @reorg@; views an absent branch as 'defMessage'.
instance Data.ProtoLens.Field.HasField SpendEvent "reorg" Reorg where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (SpendEvent'Reorg x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap SpendEvent'Reorg y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
-- | Wire (de)serialisation for @chainrpc.SpendEvent@; fields 1 and 2
-- form the @event@ oneof.
instance Data.ProtoLens.Message SpendEvent where
  messageName _ = Data.Text.pack "chainrpc.SpendEvent"
  packedMessageDescriptor _
    = "\n\
      \\n\
      \SpendEvent\DC2.\n\
      \\ENQspend\CAN\SOH \SOH(\v2\SYN.chainrpc.SpendDetailsH\NULR\ENQspend\DC2'\n\
      \\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
      \\ENQevent"
  packedFileDescriptor _ = packedFileDescriptor
  -- Map protobuf tags 1 and 2 (the oneof branches) to descriptors.
  fieldsByTag
    = let
        spend__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spend"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor SpendDetails)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'spend")) ::
              Data.ProtoLens.FieldDescriptor SpendEvent
        reorg__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "reorg"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Reorg)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'reorg")) ::
              Data.ProtoLens.FieldDescriptor SpendEvent
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, spend__field_descriptor),
           (Data.ProtoLens.Tag 2, reorg__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _SpendEvent'_unknownFields
        (\ x__ y__ -> x__ {_SpendEvent'_unknownFields = y__})
  defMessage
    = SpendEvent'_constructor
        {_SpendEvent'event = Prelude.Nothing,
         _SpendEvent'_unknownFields = []}
  -- Tag-dispatch parse loop; a later branch overwrites an earlier one,
  -- matching proto oneof semantics.
  parseMessage
    = let
        loop ::
          SpendEvent -> Data.ProtoLens.Encoding.Bytes.Parser SpendEvent
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
                 else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "spend"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"spend") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "reorg"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"reorg") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "SpendEvent"
  -- Serialise whichever oneof branch is set (if any), then unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'event") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just (SpendEvent'Spend v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v)
                (Prelude.Just (SpendEvent'Reorg v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v))
             (Data.ProtoLens.Encoding.Wire.buildFieldSet
                (Lens.Family2.view Data.ProtoLens.unknownFields _x))
-- | Fully force a 'SpendEvent': unknown fields, then the oneof payload.
instance Control.DeepSeq.NFData SpendEvent where
  rnf msg
    = Control.DeepSeq.rnf (_SpendEvent'_unknownFields msg)
        `Prelude.seq` Control.DeepSeq.rnf (_SpendEvent'event msg)
-- | Force whichever oneof branch is present.
instance Control.DeepSeq.NFData SpendEvent'Event where
  rnf (SpendEvent'Spend details) = Control.DeepSeq.rnf details
  rnf (SpendEvent'Reorg reorg) = Control.DeepSeq.rnf reorg
-- | Prism focusing the 'SpendEvent'Spend' branch of the event oneof.
_SpendEvent'Spend ::
  Data.ProtoLens.Prism.Prism' SpendEvent'Event SpendDetails
_SpendEvent'Spend
  = Data.ProtoLens.Prism.prism'
      SpendEvent'Spend
      (\ ev
         -> case ev of
              SpendEvent'Spend details -> Prelude.Just details
              SpendEvent'Reorg _ -> Prelude.Nothing)
-- | Prism focusing the 'SpendEvent'Reorg' branch of the event oneof.
_SpendEvent'Reorg ::
  Data.ProtoLens.Prism.Prism' SpendEvent'Event Reorg
_SpendEvent'Reorg
  = Data.ProtoLens.Prism.prism'
      SpendEvent'Reorg
      (\ ev
         -> case ev of
              SpendEvent'Reorg reorg -> Prelude.Just reorg
              SpendEvent'Spend _ -> Prelude.Nothing)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.outpoint' @:: Lens' SpendRequest Outpoint@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'outpoint' @:: Lens' SpendRequest (Prelude.Maybe Outpoint)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.script' @:: Lens' SpendRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.heightHint' @:: Lens' SpendRequest Data.Word.Word32@ -}
-- | Request to be notified of a spend: target outpoint, script, and a
-- starting height hint for the scan.
data SpendRequest
  = SpendRequest'_constructor {_SpendRequest'outpoint :: !(Prelude.Maybe Outpoint),
                               _SpendRequest'script :: !Data.ByteString.ByteString,
                               _SpendRequest'heightHint :: !Data.Word.Word32,
                               _SpendRequest'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- | Render as @{...}@ using the proto-lens short message form.
instance Prelude.Show SpendRequest where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
-- Generic pretty-printing support (derived via GHC.Generics).
instance Text.PrettyPrint.GenericPretty.Out SpendRequest
-- | Lens onto @outpoint@; views 'Prelude.Nothing' as 'defMessage'.
instance Data.ProtoLens.Field.HasField SpendRequest "outpoint" Outpoint where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendRequest'outpoint
           (\ x__ y__ -> x__ {_SpendRequest'outpoint = y__}))
        (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage)
-- | Lens onto @outpoint@ preserving presence ('Prelude.Maybe').
instance Data.ProtoLens.Field.HasField SpendRequest "maybe'outpoint" (Prelude.Maybe Outpoint) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendRequest'outpoint
           (\ x__ y__ -> x__ {_SpendRequest'outpoint = y__}))
        Prelude.id
-- | Lens onto @script@ (bytes).
instance Data.ProtoLens.Field.HasField SpendRequest "script" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendRequest'script
           (\ x__ y__ -> x__ {_SpendRequest'script = y__}))
        Prelude.id
-- | Lens onto @height_hint@ (uint32).
instance Data.ProtoLens.Field.HasField SpendRequest "heightHint" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendRequest'heightHint
           (\ x__ y__ -> x__ {_SpendRequest'heightHint = y__}))
        Prelude.id
-- | Wire (de)serialisation for @chainrpc.SpendRequest@ (fields 1-3).
instance Data.ProtoLens.Message SpendRequest where
  messageName _ = Data.Text.pack "chainrpc.SpendRequest"
  packedMessageDescriptor _
    = "\n\
      \\fSpendRequest\DC2.\n\
      \\boutpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\boutpoint\DC2\SYN\n\
      \\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\US\n\
      \\vheight_hint\CAN\ETX \SOH(\rR\n\
      \heightHint"
  packedFileDescriptor _ = packedFileDescriptor
  -- Map protobuf tags 1-3 to their field descriptors.
  fieldsByTag
    = let
        outpoint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "outpoint"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Outpoint)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'outpoint")) ::
              Data.ProtoLens.FieldDescriptor SpendRequest
        script__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "script"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"script")) ::
              Data.ProtoLens.FieldDescriptor SpendRequest
        heightHint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "height_hint"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"heightHint")) ::
              Data.ProtoLens.FieldDescriptor SpendRequest
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, outpoint__field_descriptor),
           (Data.ProtoLens.Tag 2, script__field_descriptor),
           (Data.ProtoLens.Tag 3, heightHint__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _SpendRequest'_unknownFields
        (\ x__ y__ -> x__ {_SpendRequest'_unknownFields = y__})
  defMessage
    = SpendRequest'_constructor
        {_SpendRequest'outpoint = Prelude.Nothing,
         _SpendRequest'script = Data.ProtoLens.fieldDefault,
         _SpendRequest'heightHint = Data.ProtoLens.fieldDefault,
         _SpendRequest'_unknownFields = []}
  -- Tag-dispatch parse loop: 10 = embedded Outpoint, 18 = bytes,
  -- 24 = varint; unrecognised tags go to unknownFields.
  parseMessage
    = let
        loop ::
          SpendRequest -> Data.ProtoLens.Encoding.Bytes.Parser SpendRequest
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
                 else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "outpoint"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"outpoint") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "script"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"script") y x)
                        24
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "height_hint"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"heightHint") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "SpendRequest"
  -- Serialise: submessage only when present, scalars only when non-default,
  -- then any unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'outpoint") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just _v)
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"script") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v
                        = Lens.Family2.view (Data.ProtoLens.Field.field @"heightHint") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 24)
                            ((Prelude..)
                               Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                   (Data.ProtoLens.Encoding.Wire.buildFieldSet
                      (Lens.Family2.view Data.ProtoLens.unknownFields _x))))
-- | Fully force a 'SpendRequest': unknown fields, then fields 1-3.
instance Control.DeepSeq.NFData SpendRequest where
  rnf msg
    = Control.DeepSeq.rnf (_SpendRequest'_unknownFields msg)
        `Prelude.seq`
          Control.DeepSeq.rnf (_SpendRequest'outpoint msg)
            `Prelude.seq`
              Control.DeepSeq.rnf (_SpendRequest'script msg)
                `Prelude.seq` Control.DeepSeq.rnf (_SpendRequest'heightHint msg)
-- | Phantom type naming the @chainrpc.ChainNotifier@ gRPC service.
data ChainNotifier = ChainNotifier {}
-- | Service metadata: package, method list, and packed descriptor.
instance Data.ProtoLens.Service.Types.Service ChainNotifier where
  type ServiceName ChainNotifier = "ChainNotifier"
  type ServicePackage ChainNotifier = "chainrpc"
  type ServiceMethods ChainNotifier = '["registerBlockEpochNtfn",
                                        "registerConfirmationsNtfn",
                                        "registerSpendNtfn"]
  packedServiceDescriptor _
    = "\n\
      \\rChainNotifier\DC2I\n\
      \\EMRegisterConfirmationsNtfn\DC2\NAK.chainrpc.ConfRequest\SUB\DC3.chainrpc.ConfEvent0\SOH\DC2C\n\
      \\DC1RegisterSpendNtfn\DC2\SYN.chainrpc.SpendRequest\SUB\DC4.chainrpc.SpendEvent0\SOH\DC2F\n\
      \\SYNRegisterBlockEpochNtfn\DC2\DC4.chainrpc.BlockEpoch\SUB\DC4.chainrpc.BlockEpoch0\SOH"
-- | RegisterConfirmationsNtfn : ConfRequest -> server-streamed ConfEvent.
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerConfirmationsNtfn" where
  type MethodName ChainNotifier "registerConfirmationsNtfn" = "RegisterConfirmationsNtfn"
  type MethodInput ChainNotifier "registerConfirmationsNtfn" = ConfRequest
  type MethodOutput ChainNotifier "registerConfirmationsNtfn" = ConfEvent
  type MethodStreamingType ChainNotifier "registerConfirmationsNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
-- | RegisterSpendNtfn : SpendRequest -> server-streamed SpendEvent.
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerSpendNtfn" where
  type MethodName ChainNotifier "registerSpendNtfn" = "RegisterSpendNtfn"
  type MethodInput ChainNotifier "registerSpendNtfn" = SpendRequest
  type MethodOutput ChainNotifier "registerSpendNtfn" = SpendEvent
  type MethodStreamingType ChainNotifier "registerSpendNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
-- | RegisterBlockEpochNtfn : BlockEpoch -> server-streamed BlockEpoch.
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerBlockEpochNtfn" where
  type MethodName ChainNotifier "registerBlockEpochNtfn" = "RegisterBlockEpochNtfn"
  type MethodInput ChainNotifier "registerBlockEpochNtfn" = BlockEpoch
  type MethodOutput ChainNotifier "registerBlockEpochNtfn" = BlockEpoch
  type MethodStreamingType ChainNotifier "registerBlockEpochNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
packedFileDescriptor :: Data.ByteString.ByteString
packedFileDescriptor
= "\n\
\\FSchainrpc/chainnotifier.proto\DC2\bchainrpc\"w\n\
\\vConfRequest\DC2\DC2\n\
\\EOTtxid\CAN\SOH \SOH(\fR\EOTtxid\DC2\SYN\n\
\\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\ESC\n\
\\tnum_confs\CAN\ETX \SOH(\rR\bnumConfs\DC2\US\n\
\\vheight_hint\CAN\EOT \SOH(\rR\n\
\heightHint\"\129\SOH\n\
\\vConfDetails\DC2\NAK\n\
\\ACKraw_tx\CAN\SOH \SOH(\fR\ENQrawTx\DC2\GS\n\
\\n\
\block_hash\CAN\STX \SOH(\fR\tblockHash\DC2!\n\
\\fblock_height\CAN\ETX \SOH(\rR\vblockHeight\DC2\EM\n\
\\btx_index\CAN\EOT \SOH(\rR\atxIndex\"\a\n\
\\ENQReorg\"j\n\
\\tConfEvent\DC2+\n\
\\EOTconf\CAN\SOH \SOH(\v2\NAK.chainrpc.ConfDetailsH\NULR\EOTconf\DC2'\n\
\\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
\\ENQevent\"4\n\
\\bOutpoint\DC2\DC2\n\
\\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\DC4\n\
\\ENQindex\CAN\STX \SOH(\rR\ENQindex\"w\n\
\\fSpendRequest\DC2.\n\
\\boutpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\boutpoint\DC2\SYN\n\
\\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\US\n\
\\vheight_hint\CAN\ETX \SOH(\rR\n\
\heightHint\"\252\SOH\n\
\\fSpendDetails\DC2?\n\
\\DC1spending_outpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\DLEspendingOutpoint\DC2&\n\
\\SIraw_spending_tx\CAN\STX \SOH(\fR\rrawSpendingTx\DC2(\n\
\\DLEspending_tx_hash\CAN\ETX \SOH(\fR\SOspendingTxHash\DC20\n\
\\DC4spending_input_index\CAN\EOT \SOH(\rR\DC2spendingInputIndex\DC2'\n\
\\SIspending_height\CAN\ENQ \SOH(\rR\SOspendingHeight\"n\n\
\\n\
\SpendEvent\DC2.\n\
\\ENQspend\CAN\SOH \SOH(\v2\SYN.chainrpc.SpendDetailsH\NULR\ENQspend\DC2'\n\
\\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
\\ENQevent\"8\n\
\\n\
\BlockEpoch\DC2\DC2\n\
\\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\SYN\n\
\\ACKheight\CAN\STX \SOH(\rR\ACKheight2\231\SOH\n\
\\rChainNotifier\DC2I\n\
\\EMRegisterConfirmationsNtfn\DC2\NAK.chainrpc.ConfRequest\SUB\DC3.chainrpc.ConfEvent0\SOH\DC2C\n\
\\DC1RegisterSpendNtfn\DC2\SYN.chainrpc.SpendRequest\SUB\DC4.chainrpc.SpendEvent0\SOH\DC2F\n\
\\SYNRegisterBlockEpochNtfn\DC2\DC4.chainrpc.BlockEpoch\SUB\DC4.chainrpc.BlockEpoch0\SOHB0Z.github.com/lightningnetwork/lnd/lnrpc/chainrpcJ\171-\n\
\\a\DC2\ENQ\NUL\NUL\181\SOH\SOH\n\
\\b\n\
\\SOH\f\DC2\ETX\NUL\NUL\DC2\n\
\\b\n\
\\SOH\STX\DC2\ETX\STX\NUL\DC1\n\
\\b\n\
\\SOH\b\DC2\ETX\EOT\NULE\n\
\\t\n\
\\STX\b\v\DC2\ETX\EOT\NULE\n\
\\145\SOH\n\
\\STX\ACK\NUL\DC2\EOT\b\NUL*\SOH\SUB\132\SOH ChainNotifier is a service that can be used to get information about the\n\
\ chain backend by registering notifiers for chain events.\n\
\\n\
\\n\
\\n\
\\ETX\ACK\NUL\SOH\DC2\ETX\b\b\NAK\n\
\\250\STX\n\
\\EOT\ACK\NUL\STX\NUL\DC2\ETX\DC2\EOTK\SUB\236\STX\n\
\RegisterConfirmationsNtfn is a synchronous response-streaming RPC that\n\
\registers an intent for a client to be notified once a confirmation request\n\
\has reached its required number of confirmations on-chain.\n\
\\n\
\A client can specify whether the confirmation request should be for a\n\
\particular transaction by its hash or for an output script by specifying a\n\
\zero hash.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\SOH\DC2\ETX\DC2\b!\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\STX\DC2\ETX\DC2#.\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\ACK\DC2\ETX\DC29?\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\ETX\DC2\ETX\DC2@I\n\
\\224\STX\n\
\\EOT\ACK\NUL\STX\SOH\DC2\ETX\FS\EOTE\SUB\210\STX\n\
\RegisterSpendNtfn is a synchronous response-streaming RPC that registers an\n\
\intent for a client to be notification once a spend request has been spent\n\
\by a transaction that has confirmed on-chain.\n\
\\n\
\A client can specify whether the spend request should be for a particular\n\
\outpoint or for an output script by specifying a zero outpoint.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\SOH\DC2\ETX\FS\b\EM\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\STX\DC2\ETX\FS\ESC'\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\ACK\DC2\ETX\FS28\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\ETX\DC2\ETX\FS9C\n\
\\185\EOT\n\
\\EOT\ACK\NUL\STX\STX\DC2\ETX)\EOTH\SUB\171\EOT\n\
\RegisterBlockEpochNtfn is a synchronous response-streaming RPC that\n\
\registers an intent for a client to be notified of blocks in the chain. The\n\
\stream will return a hash and height tuple of a block for each new/stale\n\
\block in the chain. It is the client's responsibility to determine whether\n\
\the tuple returned is for a new or stale block in the chain.\n\
\\n\
\A client can also request a historical backlog of blocks from a particular\n\
\point. This allows clients to be idempotent by ensuring that they do not\n\
\missing processing a single block within the chain.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\SOH\DC2\ETX)\b\RS\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\STX\DC2\ETX) *\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\ACK\DC2\ETX)5;\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\ETX\DC2\ETX)<F\n\
\\n\
\\n\
\\STX\EOT\NUL\DC2\EOT,\NULH\SOH\n\
\\n\
\\n\
\\ETX\EOT\NUL\SOH\DC2\ETX,\b\DC3\n\
\\205\SOH\n\
\\EOT\EOT\NUL\STX\NUL\DC2\ETX2\EOT\DC3\SUB\191\SOH\n\
\The transaction hash for which we should request a confirmation notification\n\
\for. If set to a hash of all zeros, then the confirmation notification will\n\
\be requested for the script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\ENQ\DC2\ETX2\EOT\t\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\SOH\DC2\ETX2\n\
\\SO\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\ETX\DC2\ETX2\DC1\DC2\n\
\\136\STX\n\
\\EOT\EOT\NUL\STX\SOH\DC2\ETX:\EOT\NAK\SUB\250\SOH\n\
\An output script within a transaction with the hash above which will be used\n\
\by light clients to match block filters. If the transaction hash is set to a\n\
\hash of all zeros, then a confirmation notification will be requested for\n\
\this script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\ENQ\DC2\ETX:\EOT\t\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\SOH\DC2\ETX:\n\
\\DLE\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\ETX\DC2\ETX:\DC3\DC4\n\
\\142\SOH\n\
\\EOT\EOT\NUL\STX\STX\DC2\ETX@\EOT\EM\SUB\128\SOH\n\
\The number of desired confirmations the transaction/output script should\n\
\reach before dispatching a confirmation notification.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\ENQ\DC2\ETX@\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\SOH\DC2\ETX@\v\DC4\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\ETX\DC2\ETX@\ETB\CAN\n\
\\216\SOH\n\
\\EOT\EOT\NUL\STX\ETX\DC2\ETXG\EOT\ESC\SUB\202\SOH\n\
\The earliest height in the chain for which the transaction/output script\n\
\could have been included in a block. This should in most cases be set to the\n\
\broadcast height of the transaction/output script.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\ENQ\DC2\ETXG\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\SOH\DC2\ETXG\v\SYN\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\ETX\DC2\ETXG\EM\SUB\n\
\\n\
\\n\
\\STX\EOT\SOH\DC2\EOTJ\NULW\SOH\n\
\\n\
\\n\
\\ETX\EOT\SOH\SOH\DC2\ETXJ\b\DC3\n\
\:\n\
\\EOT\EOT\SOH\STX\NUL\DC2\ETXL\EOT\NAK\SUB- The raw bytes of the confirmed transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\ENQ\DC2\ETXL\EOT\t\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\SOH\DC2\ETXL\n\
\\DLE\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\ETX\DC2\ETXL\DC3\DC4\n\
\X\n\
\\EOT\EOT\SOH\STX\SOH\DC2\ETXO\EOT\EM\SUBK The hash of the block in which the confirmed transaction was included in.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\ENQ\DC2\ETXO\EOT\t\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\SOH\DC2\ETXO\n\
\\DC4\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\ETX\DC2\ETXO\ETB\CAN\n\
\[\n\
\\EOT\EOT\SOH\STX\STX\DC2\ETXS\EOT\FS\SUBN The height of the block in which the confirmed transaction was included\n\
\ in.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\ENQ\DC2\ETXS\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\SOH\DC2\ETXS\v\ETB\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\ETX\DC2\ETXS\SUB\ESC\n\
\M\n\
\\EOT\EOT\SOH\STX\ETX\DC2\ETXV\EOT\CAN\SUB@ The index of the confirmed transaction within the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\ENQ\DC2\ETXV\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\SOH\DC2\ETXV\v\DC3\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\ETX\DC2\ETXV\SYN\ETB\n\
\L\n\
\\STX\EOT\STX\DC2\EOTY\NUL[\SOH\"@ TODO(wilmer): need to know how the client will use this first.\n\
\\n\
\\n\
\\n\
\\ETX\EOT\STX\SOH\DC2\ETXY\b\r\n\
\\n\
\\n\
\\STX\EOT\ETX\DC2\EOT]\NULk\SOH\n\
\\n\
\\n\
\\ETX\EOT\ETX\SOH\DC2\ETX]\b\DC1\n\
\\f\n\
\\EOT\EOT\ETX\b\NUL\DC2\EOT^\EOTj\ENQ\n\
\\f\n\
\\ENQ\EOT\ETX\b\NUL\SOH\DC2\ETX^\n\
\\SI\n\
\b\n\
\\EOT\EOT\ETX\STX\NUL\DC2\ETXc\b\GS\SUBU\n\
\An event that includes the confirmation details of the request\n\
\(txid/ouput script).\n\
\\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\ACK\DC2\ETXc\b\DC3\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\SOH\DC2\ETXc\DC4\CAN\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\ETX\DC2\ETXc\ESC\FS\n\
\]\n\
\\EOT\EOT\ETX\STX\SOH\DC2\ETXi\b\CAN\SUBP\n\
\An event send when the transaction of the request is reorged out of the\n\
\chain.\n\
\\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\ACK\DC2\ETXi\b\r\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\SOH\DC2\ETXi\SO\DC3\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\ETX\DC2\ETXi\SYN\ETB\n\
\\n\
\\n\
\\STX\EOT\EOT\DC2\EOTm\NULs\SOH\n\
\\n\
\\n\
\\ETX\EOT\EOT\SOH\DC2\ETXm\b\DLE\n\
\+\n\
\\EOT\EOT\EOT\STX\NUL\DC2\ETXo\EOT\DC3\SUB\RS The hash of the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\ENQ\DC2\ETXo\EOT\t\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\SOH\DC2\ETXo\n\
\\SO\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\ETX\DC2\ETXo\DC1\DC2\n\
\>\n\
\\EOT\EOT\EOT\STX\SOH\DC2\ETXr\EOT\NAK\SUB1 The index of the output within the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\ENQ\DC2\ETXr\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\SOH\DC2\ETXr\v\DLE\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\ETX\DC2\ETXr\DC3\DC4\n\
\\v\n\
\\STX\EOT\ENQ\DC2\ENQu\NUL\140\SOH\SOH\n\
\\n\
\\n\
\\ETX\EOT\ENQ\SOH\DC2\ETXu\b\DC4\n\
\\179\SOH\n\
\\EOT\EOT\ENQ\STX\NUL\DC2\ETX{\EOT\SUB\SUB\165\SOH\n\
\The outpoint for which we should request a spend notification for. If set to\n\
\a zero outpoint, then the spend notification will be requested for the\n\
\script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\ACK\DC2\ETX{\EOT\f\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\SOH\DC2\ETX{\r\NAK\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\ETX\DC2\ETX{\CAN\EM\n\
\\229\SOH\n\
\\EOT\EOT\ENQ\STX\SOH\DC2\EOT\130\SOH\EOT\NAK\SUB\214\SOH\n\
\The output script for the outpoint above. This will be used by light clients\n\
\to match block filters. If the outpoint is set to a zero outpoint, then a\n\
\spend notification will be requested for this script instead.\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\ENQ\DC2\EOT\130\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\SOH\DC2\EOT\130\SOH\n\
\\DLE\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\ETX\DC2\EOT\130\SOH\DC3\DC4\n\
\\197\SOH\n\
\\EOT\EOT\ENQ\STX\STX\DC2\EOT\137\SOH\EOT\ESC\SUB\182\SOH\n\
\The earliest height in the chain for which the outpoint/output script could\n\
\have been spent. This should in most cases be set to the broadcast height of\n\
\the outpoint/output script.\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\ENQ\DC2\EOT\137\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\SOH\DC2\EOT\137\SOH\v\SYN\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\ETX\DC2\EOT\137\SOH\EM\SUB\n\
\\f\n\
\\STX\EOT\ACK\DC2\ACK\142\SOH\NUL\157\SOH\SOH\n\
\\v\n\
\\ETX\EOT\ACK\SOH\DC2\EOT\142\SOH\b\DC4\n\
\,\n\
\\EOT\EOT\ACK\STX\NUL\DC2\EOT\144\SOH\EOT#\SUB\RS The outpoint was that spent.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\ACK\DC2\EOT\144\SOH\EOT\f\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\SOH\DC2\EOT\144\SOH\r\RS\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\ETX\DC2\EOT\144\SOH!\"\n\
\:\n\
\\EOT\EOT\ACK\STX\SOH\DC2\EOT\147\SOH\EOT\RS\SUB, The raw bytes of the spending transaction.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\ENQ\DC2\EOT\147\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\SOH\DC2\EOT\147\SOH\n\
\\EM\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\ETX\DC2\EOT\147\SOH\FS\GS\n\
\5\n\
\\EOT\EOT\ACK\STX\STX\DC2\EOT\150\SOH\EOT\US\SUB' The hash of the spending transaction.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\ENQ\DC2\EOT\150\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\SOH\DC2\EOT\150\SOH\n\
\\SUB\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\ETX\DC2\EOT\150\SOH\GS\RS\n\
\W\n\
\\EOT\EOT\ACK\STX\ETX\DC2\EOT\153\SOH\EOT$\SUBI The input of the spending transaction that fulfilled the spend request.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\ENQ\DC2\EOT\153\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\SOH\DC2\EOT\153\SOH\v\US\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\ETX\DC2\EOT\153\SOH\"#\n\
\U\n\
\\EOT\EOT\ACK\STX\EOT\DC2\EOT\156\SOH\EOT\US\SUBG The height at which the spending transaction was included in a block.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\ENQ\DC2\EOT\156\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\SOH\DC2\EOT\156\SOH\v\SUB\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\ETX\DC2\EOT\156\SOH\GS\RS\n\
\\f\n\
\\STX\EOT\a\DC2\ACK\159\SOH\NUL\173\SOH\SOH\n\
\\v\n\
\\ETX\EOT\a\SOH\DC2\EOT\159\SOH\b\DC2\n\
\\SO\n\
\\EOT\EOT\a\b\NUL\DC2\ACK\160\SOH\EOT\172\SOH\ENQ\n\
\\r\n\
\\ENQ\EOT\a\b\NUL\SOH\DC2\EOT\160\SOH\n\
\\SI\n\
\w\n\
\\EOT\EOT\a\STX\NUL\DC2\EOT\165\SOH\b\US\SUBi\n\
\An event that includes the details of the spending transaction of the\n\
\request (outpoint/output script).\n\
\\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\ACK\DC2\EOT\165\SOH\b\DC4\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\SOH\DC2\EOT\165\SOH\NAK\SUB\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\ETX\DC2\EOT\165\SOH\GS\RS\n\
\h\n\
\\EOT\EOT\a\STX\SOH\DC2\EOT\171\SOH\b\CAN\SUBZ\n\
\An event sent when the spending transaction of the request was\n\
\reorged out of the chain.\n\
\\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\ACK\DC2\EOT\171\SOH\b\r\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\SOH\DC2\EOT\171\SOH\SO\DC3\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\ETX\DC2\EOT\171\SOH\SYN\ETB\n\
\\f\n\
\\STX\EOT\b\DC2\ACK\175\SOH\NUL\181\SOH\SOH\n\
\\v\n\
\\ETX\EOT\b\SOH\DC2\EOT\175\SOH\b\DC2\n\
\&\n\
\\EOT\EOT\b\STX\NUL\DC2\EOT\177\SOH\EOT\DC3\SUB\CAN The hash of the block.\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\ENQ\DC2\EOT\177\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\SOH\DC2\EOT\177\SOH\n\
\\SO\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\ETX\DC2\EOT\177\SOH\DC1\DC2\n\
\(\n\
\\EOT\EOT\b\STX\SOH\DC2\EOT\180\SOH\EOT\SYN\SUB\SUB The height of the block.\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\ENQ\DC2\EOT\180\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\SOH\DC2\EOT\180\SOH\v\DC1\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\ETX\DC2\EOT\180\SOH\DC4\NAKb\ACKproto3" | null | https://raw.githubusercontent.com/coingaming/lnd-client/98974c514cd82253dbd6111bafbbb2bbff6bffe2/src/Proto/Chainrpc/Chainnotifier.hs | haskell | # OPTIONS_GHC -Wno-unused-imports#
# OPTIONS_GHC -Wno-duplicate-exports#
| Fields :
| This file was auto - generated from chainrpc / chainnotifier.proto by the proto - lens - protoc program .
# LANGUAGE ScopedTypeVariables , DataKinds , TypeFamilies , UndecidableInstances , GeneralizedNewtypeDeriving , MultiParamTypeClasses , FlexibleContexts , FlexibleInstances , PatternSynonyms , MagicHash , NoImplicitPrelude , BangPatterns , TypeApplications , OverloadedStrings , DerivingStrategies , DeriveGeneric #
# OPTIONS_GHC -Wno - dodgy - exports #
module Proto.Chainrpc.Chainnotifier (
ChainNotifier(..), BlockEpoch(), ConfDetails(), ConfEvent(),
ConfEvent'Event(..), _ConfEvent'Conf, _ConfEvent'Reorg,
ConfRequest(), Outpoint(), Reorg(), SpendDetails(), SpendEvent(),
SpendEvent'Event(..), _SpendEvent'Spend, _SpendEvent'Reorg,
SpendRequest()
) where
import qualified Data.ProtoLens.Runtime.Control.DeepSeq as Control.DeepSeq
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Prism as Data.ProtoLens.Prism
import qualified Text.PrettyPrint.GenericPretty.Instance
import qualified GHC.Generics
import qualified Text.PrettyPrint.GenericPretty
import qualified Data.ProtoLens.Runtime.Prelude as Prelude
import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int
import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid
import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word
import qualified Data.ProtoLens.Runtime.Data.ProtoLens as Data.ProtoLens
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes as Data.ProtoLens.Encoding.Bytes
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing as Data.ProtoLens.Encoding.Growing
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe as Data.ProtoLens.Encoding.Parser.Unsafe
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire as Data.ProtoLens.Encoding.Wire
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field as Data.ProtoLens.Field
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum as Data.ProtoLens.Message.Enum
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types as Data.ProtoLens.Service.Types
import qualified Data.ProtoLens.Runtime.Lens.Family2 as Lens.Family2
import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked as Lens.Family2.Unchecked
import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text
import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map
import qualified Data.ProtoLens.Runtime.Data.ByteString as Data.ByteString
import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 as Data.ByteString.Char8
import qualified Data.ProtoLens.Runtime.Data.Text.Encoding as Data.Text.Encoding
import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector
import qualified Data.ProtoLens.Runtime.Data.Vector.Generic as Data.Vector.Generic
import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed as Data.Vector.Unboxed
import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read
| Fields :
* ' Proto.Chainrpc.Chainnotifier_Fields.hash ' @ : : Lens ' BlockEpoch Data . ByteString . ByteString@
* ' Proto.Chainrpc.Chainnotifier_Fields.height ' @ : : Lens ' BlockEpoch Data . Word . Word32@
* 'Proto.Chainrpc.Chainnotifier_Fields.hash' @:: Lens' BlockEpoch Data.ByteString.ByteString@
* 'Proto.Chainrpc.Chainnotifier_Fields.height' @:: Lens' BlockEpoch Data.Word.Word32@ -}
data BlockEpoch
= BlockEpoch'_constructor {_BlockEpoch'hash :: !Data.ByteString.ByteString,
_BlockEpoch'height :: !Data.Word.Word32,
_BlockEpoch'_unknownFields :: !Data.ProtoLens.FieldSet}
deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
instance Prelude.Show BlockEpoch where
showsPrec _ __x __s
= Prelude.showChar
'{'
(Prelude.showString
(Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out BlockEpoch
instance Data.ProtoLens.Field.HasField BlockEpoch "hash" Data.ByteString.ByteString where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_BlockEpoch'hash (\ x__ y__ -> x__ {_BlockEpoch'hash = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField BlockEpoch "height" Data.Word.Word32 where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_BlockEpoch'height (\ x__ y__ -> x__ {_BlockEpoch'height = y__}))
Prelude.id
instance Data.ProtoLens.Message BlockEpoch where
messageName _ = Data.Text.pack "chainrpc.BlockEpoch"
packedMessageDescriptor _
= "\n\
\\n\
\BlockEpoch\DC2\DC2\n\
\\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\SYN\n\
\\ACKheight\CAN\STX \SOH(\rR\ACKheight"
packedFileDescriptor _ = packedFileDescriptor
fieldsByTag
= let
hash__field_descriptor
= Data.ProtoLens.FieldDescriptor
"hash"
(Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"hash")) ::
Data.ProtoLens.FieldDescriptor BlockEpoch
height__field_descriptor
= Data.ProtoLens.FieldDescriptor
"height"
(Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"height")) ::
Data.ProtoLens.FieldDescriptor BlockEpoch
in
Data.Map.fromList
[(Data.ProtoLens.Tag 1, hash__field_descriptor),
(Data.ProtoLens.Tag 2, height__field_descriptor)]
unknownFields
= Lens.Family2.Unchecked.lens
_BlockEpoch'_unknownFields
(\ x__ y__ -> x__ {_BlockEpoch'_unknownFields = y__})
defMessage
= BlockEpoch'_constructor
{_BlockEpoch'hash = Data.ProtoLens.fieldDefault,
_BlockEpoch'height = Data.ProtoLens.fieldDefault,
_BlockEpoch'_unknownFields = []}
parseMessage
= let
loop ::
BlockEpoch -> Data.ProtoLens.Encoding.Bytes.Parser BlockEpoch
loop x
= do end <- Data.ProtoLens.Encoding.Bytes.atEnd
if end then
do (let missing = []
in
if Prelude.null missing then
Prelude.return ()
else
Prelude.fail
((Prelude.++)
"Missing required fields: "
(Prelude.show (missing :: [Prelude.String]))))
Prelude.return
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
else
do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
case tag of
10
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.getBytes
(Prelude.fromIntegral len))
"hash"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"hash") y x)
16
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(Prelude.fmap
Prelude.fromIntegral
Data.ProtoLens.Encoding.Bytes.getVarInt)
"height"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"height") y x)
wire
-> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
wire
loop
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
in
(Data.ProtoLens.Encoding.Bytes.<?>)
(do loop Data.ProtoLens.defMessage) "BlockEpoch"
buildMessage
= \ _x
-> (Data.Monoid.<>)
(let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"hash") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 10)
((\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
_v))
((Data.Monoid.<>)
(let
_v = Lens.Family2.view (Data.ProtoLens.Field.field @"height") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 16)
((Prelude..)
Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
(Data.ProtoLens.Encoding.Wire.buildFieldSet
(Lens.Family2.view Data.ProtoLens.unknownFields _x)))
instance Control.DeepSeq.NFData BlockEpoch where
rnf
= \ x__
-> Control.DeepSeq.deepseq
(_BlockEpoch'_unknownFields x__)
(Control.DeepSeq.deepseq
(_BlockEpoch'hash x__)
(Control.DeepSeq.deepseq (_BlockEpoch'height x__) ()))
| Fields :
* ' Proto . Chainrpc . Chainnotifier_Fields.rawTx ' @ : : Lens ' ConfDetails Data . ByteString . ByteString@
* ' Proto . Chainrpc . Chainnotifier_Fields.blockHash ' @ : : Lens ' ConfDetails Data . ByteString . ByteString@
* ' Proto . Chainrpc . Chainnotifier_Fields.blockHeight ' @ : : Lens ' ConfDetails Data . Word . Word32@
* ' Proto . Chainrpc . Chainnotifier_Fields.txIndex ' @ : : Lens ' ConfDetails Data . Word . Word32@
* 'Proto.Chainrpc.Chainnotifier_Fields.rawTx' @:: Lens' ConfDetails Data.ByteString.ByteString@
* 'Proto.Chainrpc.Chainnotifier_Fields.blockHash' @:: Lens' ConfDetails Data.ByteString.ByteString@
* 'Proto.Chainrpc.Chainnotifier_Fields.blockHeight' @:: Lens' ConfDetails Data.Word.Word32@
* 'Proto.Chainrpc.Chainnotifier_Fields.txIndex' @:: Lens' ConfDetails Data.Word.Word32@ -}
data ConfDetails
= ConfDetails'_constructor {_ConfDetails'rawTx :: !Data.ByteString.ByteString,
_ConfDetails'blockHash :: !Data.ByteString.ByteString,
_ConfDetails'blockHeight :: !Data.Word.Word32,
_ConfDetails'txIndex :: !Data.Word.Word32,
_ConfDetails'_unknownFields :: !Data.ProtoLens.FieldSet}
deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
instance Prelude.Show ConfDetails where
showsPrec _ __x __s
= Prelude.showChar
'{'
(Prelude.showString
(Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfDetails
instance Data.ProtoLens.Field.HasField ConfDetails "rawTx" Data.ByteString.ByteString where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_ConfDetails'rawTx (\ x__ y__ -> x__ {_ConfDetails'rawTx = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "blockHash" Data.ByteString.ByteString where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_ConfDetails'blockHash
(\ x__ y__ -> x__ {_ConfDetails'blockHash = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "blockHeight" Data.Word.Word32 where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_ConfDetails'blockHeight
(\ x__ y__ -> x__ {_ConfDetails'blockHeight = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField ConfDetails "txIndex" Data.Word.Word32 where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_ConfDetails'txIndex
(\ x__ y__ -> x__ {_ConfDetails'txIndex = y__}))
Prelude.id
instance Data.ProtoLens.Message ConfDetails where
messageName _ = Data.Text.pack "chainrpc.ConfDetails"
packedMessageDescriptor _
= "\n\
\\vConfDetails\DC2\NAK\n\
\\ACKraw_tx\CAN\SOH \SOH(\fR\ENQrawTx\DC2\GS\n\
\\n\
\block_hash\CAN\STX \SOH(\fR\tblockHash\DC2!\n\
\\fblock_height\CAN\ETX \SOH(\rR\vblockHeight\DC2\EM\n\
\\btx_index\CAN\EOT \SOH(\rR\atxIndex"
packedFileDescriptor _ = packedFileDescriptor
fieldsByTag
= let
rawTx__field_descriptor
= Data.ProtoLens.FieldDescriptor
"raw_tx"
(Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"rawTx")) ::
Data.ProtoLens.FieldDescriptor ConfDetails
blockHash__field_descriptor
= Data.ProtoLens.FieldDescriptor
"block_hash"
(Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional
(Data.ProtoLens.Field.field @"blockHash")) ::
Data.ProtoLens.FieldDescriptor ConfDetails
blockHeight__field_descriptor
= Data.ProtoLens.FieldDescriptor
"block_height"
(Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional
(Data.ProtoLens.Field.field @"blockHeight")) ::
Data.ProtoLens.FieldDescriptor ConfDetails
txIndex__field_descriptor
= Data.ProtoLens.FieldDescriptor
"tx_index"
(Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"txIndex")) ::
Data.ProtoLens.FieldDescriptor ConfDetails
in
Data.Map.fromList
[(Data.ProtoLens.Tag 1, rawTx__field_descriptor),
(Data.ProtoLens.Tag 2, blockHash__field_descriptor),
(Data.ProtoLens.Tag 3, blockHeight__field_descriptor),
(Data.ProtoLens.Tag 4, txIndex__field_descriptor)]
unknownFields
= Lens.Family2.Unchecked.lens
_ConfDetails'_unknownFields
(\ x__ y__ -> x__ {_ConfDetails'_unknownFields = y__})
defMessage
= ConfDetails'_constructor
{_ConfDetails'rawTx = Data.ProtoLens.fieldDefault,
_ConfDetails'blockHash = Data.ProtoLens.fieldDefault,
_ConfDetails'blockHeight = Data.ProtoLens.fieldDefault,
_ConfDetails'txIndex = Data.ProtoLens.fieldDefault,
_ConfDetails'_unknownFields = []}
parseMessage
= let
loop ::
ConfDetails -> Data.ProtoLens.Encoding.Bytes.Parser ConfDetails
loop x
= do end <- Data.ProtoLens.Encoding.Bytes.atEnd
if end then
do (let missing = []
in
if Prelude.null missing then
Prelude.return ()
else
Prelude.fail
((Prelude.++)
"Missing required fields: "
(Prelude.show (missing :: [Prelude.String]))))
Prelude.return
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
else
do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
case tag of
10
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.getBytes
(Prelude.fromIntegral len))
"raw_tx"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"rawTx") y x)
18
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.getBytes
(Prelude.fromIntegral len))
"block_hash"
loop
(Lens.Family2.set (Data.ProtoLens.Field.field @"blockHash") y x)
24
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(Prelude.fmap
Prelude.fromIntegral
Data.ProtoLens.Encoding.Bytes.getVarInt)
"block_height"
loop
(Lens.Family2.set (Data.ProtoLens.Field.field @"blockHeight") y x)
32
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(Prelude.fmap
Prelude.fromIntegral
Data.ProtoLens.Encoding.Bytes.getVarInt)
"tx_index"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"txIndex") y x)
wire
-> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
wire
loop
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
in
(Data.ProtoLens.Encoding.Bytes.<?>)
(do loop Data.ProtoLens.defMessage) "ConfDetails"
buildMessage
= \ _x
-> (Data.Monoid.<>)
(let
_v = Lens.Family2.view (Data.ProtoLens.Field.field @"rawTx") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 10)
((\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
_v))
((Data.Monoid.<>)
(let
_v = Lens.Family2.view (Data.ProtoLens.Field.field @"blockHash") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 18)
((\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
_v))
((Data.Monoid.<>)
(let
_v
= Lens.Family2.view (Data.ProtoLens.Field.field @"blockHeight") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 24)
((Prelude..)
Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
((Data.Monoid.<>)
(let
_v = Lens.Family2.view (Data.ProtoLens.Field.field @"txIndex") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 32)
((Prelude..)
Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
(Data.ProtoLens.Encoding.Wire.buildFieldSet
(Lens.Family2.view Data.ProtoLens.unknownFields _x)))))
instance Control.DeepSeq.NFData ConfDetails where
rnf
= \ x__
-> Control.DeepSeq.deepseq
(_ConfDetails'_unknownFields x__)
(Control.DeepSeq.deepseq
(_ConfDetails'rawTx x__)
(Control.DeepSeq.deepseq
(_ConfDetails'blockHash x__)
(Control.DeepSeq.deepseq
(_ConfDetails'blockHeight x__)
(Control.DeepSeq.deepseq (_ConfDetails'txIndex x__) ()))))
| Fields :
* ' Proto . Chainrpc . Chainnotifier_Fields.maybe'event ' @ : : Lens ' ConfEvent ( Prelude . Maybe ConfEvent'Event)@
* ' Proto . Chainrpc . Chainnotifier_Fields.maybe'conf ' @ : : Lens ' ConfEvent ( Prelude . Maybe ConfDetails)@
* ' Proto.Chainrpc.Chainnotifier_Fields.conf ' @ : : Lens ' ConfEvent ConfDetails@
* ' Proto . Chainrpc . ' @ : : Lens ' ConfEvent ( Prelude . Maybe Reorg)@
* ' Proto.Chainrpc.Chainnotifier_Fields.reorg ' @ : : Lens ' ConfEvent Reorg@
* 'Proto.Chainrpc.Chainnotifier_Fields.maybe'event' @:: Lens' ConfEvent (Prelude.Maybe ConfEvent'Event)@
* 'Proto.Chainrpc.Chainnotifier_Fields.maybe'conf' @:: Lens' ConfEvent (Prelude.Maybe ConfDetails)@
* 'Proto.Chainrpc.Chainnotifier_Fields.conf' @:: Lens' ConfEvent ConfDetails@
* 'Proto.Chainrpc.Chainnotifier_Fields.maybe'reorg' @:: Lens' ConfEvent (Prelude.Maybe Reorg)@
* 'Proto.Chainrpc.Chainnotifier_Fields.reorg' @:: Lens' ConfEvent Reorg@ -}
-- ConfEvent: Haskell representation of the @chainrpc.ConfEvent@ protobuf
-- message.  The single record field wraps a protobuf @oneof event@ holding
-- either a 'ConfDetails' (wire field 1) or a 'Reorg' (wire field 2); see
-- 'fieldsByTag' below for the tag mapping.
-- NOTE(review): this appears to be proto-lens generated code — edit the
-- source .proto and regenerate instead of changing these definitions by hand.
data ConfEvent
  = ConfEvent'_constructor {_ConfEvent'event :: !(Prelude.Maybe ConfEvent'Event),
                            _ConfEvent'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders the message via the short proto-lens text form, wrapped in braces.
instance Prelude.Show ConfEvent where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfEvent
-- Sum type backing the @oneof event@ field: exactly one variant may be set.
data ConfEvent'Event
  = ConfEvent'Conf !ConfDetails | ConfEvent'Reorg !Reorg
  deriving stock (Prelude.Show,
                  Prelude.Eq,
                  Prelude.Ord,
                  GHC.Generics.Generic)
instance Text.PrettyPrint.GenericPretty.Out ConfEvent'Event
-- Lens onto the raw oneof value (Nothing when no variant is set).
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'event" (Prelude.Maybe ConfEvent'Event) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        Prelude.id
-- Lens onto the @conf@ variant; yields Nothing when the oneof holds @reorg@
-- (or nothing).  Writing Just replaces whatever variant was set.
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'conf" (Prelude.Maybe ConfDetails) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (ConfEvent'Conf x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap ConfEvent'Conf y__))
-- Total lens onto @conf@: reading an unset variant yields 'defMessage'.
instance Data.ProtoLens.Field.HasField ConfEvent "conf" ConfDetails where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (ConfEvent'Conf x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap ConfEvent'Conf y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
-- Lens onto the @reorg@ variant; Nothing when the oneof holds @conf@.
instance Data.ProtoLens.Field.HasField ConfEvent "maybe'reorg" (Prelude.Maybe Reorg) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        (Lens.Family2.Unchecked.lens
           (\ x__
              -> case x__ of
                   (Prelude.Just (ConfEvent'Reorg x__val)) -> Prelude.Just x__val
                   _otherwise -> Prelude.Nothing)
           (\ _ y__ -> Prelude.fmap ConfEvent'Reorg y__))
-- Total lens onto @reorg@: reading an unset variant yields 'defMessage'.
instance Data.ProtoLens.Field.HasField ConfEvent "reorg" Reorg where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfEvent'event (\ x__ y__ -> x__ {_ConfEvent'event = y__}))
        ((Prelude..)
           (Lens.Family2.Unchecked.lens
              (\ x__
                 -> case x__ of
                      (Prelude.Just (ConfEvent'Reorg x__val)) -> Prelude.Just x__val
                      _otherwise -> Prelude.Nothing)
              (\ _ y__ -> Prelude.fmap ConfEvent'Reorg y__))
           (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
-- Wire (de)serialization for chainrpc.ConfEvent.  The escaped string below is
-- the message's serialized FileDescriptorProto fragment — it must stay
-- byte-exact or reflection-based tooling will see a different schema.
instance Data.ProtoLens.Message ConfEvent where
  messageName _ = Data.Text.pack "chainrpc.ConfEvent"
  packedMessageDescriptor _
    = "\n\
      \\tConfEvent\DC2+\n\
      \\EOTconf\CAN\SOH \SOH(\v2\NAK.chainrpc.ConfDetailsH\NULR\EOTconf\DC2'\n\
      \\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
      \\ENQevent"
  packedFileDescriptor _ = packedFileDescriptor
  fieldsByTag
    = let
        conf__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "conf"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor ConfDetails)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'conf")) ::
              Data.ProtoLens.FieldDescriptor ConfEvent
        reorg__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "reorg"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Reorg)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'reorg")) ::
              Data.ProtoLens.FieldDescriptor ConfEvent
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, conf__field_descriptor),
           (Data.ProtoLens.Tag 2, reorg__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _ConfEvent'_unknownFields
        (\ x__ y__ -> x__ {_ConfEvent'_unknownFields = y__})
  defMessage
    = ConfEvent'_constructor
        {_ConfEvent'event = Prelude.Nothing,
         _ConfEvent'_unknownFields = []}
  -- Parser loop: dispatches on the varint key (10 = field 1 length-delimited,
  -- 18 = field 2 length-delimited); unknown keys are kept as unknown fields.
  parseMessage
    = let
        loop :: ConfEvent -> Data.ProtoLens.Encoding.Bytes.Parser ConfEvent
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "conf"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"conf") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "reorg"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"reorg") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "ConfEvent"
  -- Serializer: emits the set oneof variant (if any) as a length-delimited
  -- submessage, then any preserved unknown fields.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'event") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just (ConfEvent'Conf v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v)
                (Prelude.Just (ConfEvent'Reorg v))
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage v))
             (Data.ProtoLens.Encoding.Wire.buildFieldSet
                (Lens.Family2.view Data.ProtoLens.unknownFields _x))
instance Control.DeepSeq.NFData ConfEvent where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_ConfEvent'_unknownFields x__)
             (Control.DeepSeq.deepseq (_ConfEvent'event x__) ())
instance Control.DeepSeq.NFData ConfEvent'Event where
  rnf (ConfEvent'Conf x__) = Control.DeepSeq.rnf x__
  rnf (ConfEvent'Reorg x__) = Control.DeepSeq.rnf x__
-- Prism for the @conf@ oneof variant.
_ConfEvent'Conf ::
  Data.ProtoLens.Prism.Prism' ConfEvent'Event ConfDetails
_ConfEvent'Conf
  = Data.ProtoLens.Prism.prism'
      ConfEvent'Conf
      (\ p__
         -> case p__ of
              (ConfEvent'Conf p__val) -> Prelude.Just p__val
              _otherwise -> Prelude.Nothing)
-- Prism for the @reorg@ oneof variant.
_ConfEvent'Reorg ::
  Data.ProtoLens.Prism.Prism' ConfEvent'Event Reorg
_ConfEvent'Reorg
  = Data.ProtoLens.Prism.prism'
      ConfEvent'Reorg
      (\ p__
         -> case p__ of
              (ConfEvent'Reorg p__val) -> Prelude.Just p__val
              _otherwise -> Prelude.Nothing)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.txid' @:: Lens' ConfRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.script' @:: Lens' ConfRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.numConfs' @:: Lens' ConfRequest Data.Word.Word32@
     * 'Proto.Chainrpc.Chainnotifier_Fields.heightHint' @:: Lens' ConfRequest Data.Word.Word32@ -}
-- ConfRequest: Haskell representation of the @chainrpc.ConfRequest@ protobuf
-- message with four scalar fields: txid (bytes, field 1), script (bytes,
-- field 2), numConfs (uint32, field 3) and heightHint (uint32, field 4).
-- NOTE(review): proto-lens generated code — regenerate from the .proto
-- rather than editing by hand.
data ConfRequest
  = ConfRequest'_constructor {_ConfRequest'txid :: !Data.ByteString.ByteString,
                              _ConfRequest'script :: !Data.ByteString.ByteString,
                              _ConfRequest'numConfs :: !Data.Word.Word32,
                              _ConfRequest'heightHint :: !Data.Word.Word32,
                              _ConfRequest'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders the message via the short proto-lens text form, wrapped in braces.
instance Prelude.Show ConfRequest where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out ConfRequest
-- Plain record-field lenses for each scalar field.
instance Data.ProtoLens.Field.HasField ConfRequest "txid" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'txid (\ x__ y__ -> x__ {_ConfRequest'txid = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "script" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'script (\ x__ y__ -> x__ {_ConfRequest'script = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "numConfs" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'numConfs
           (\ x__ y__ -> x__ {_ConfRequest'numConfs = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField ConfRequest "heightHint" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _ConfRequest'heightHint
           (\ x__ y__ -> x__ {_ConfRequest'heightHint = y__}))
        Prelude.id
-- Wire (de)serialization for chainrpc.ConfRequest.  The escaped descriptor
-- string must stay byte-exact to match the .proto schema.
instance Data.ProtoLens.Message ConfRequest where
  messageName _ = Data.Text.pack "chainrpc.ConfRequest"
  packedMessageDescriptor _
    = "\n\
      \\vConfRequest\DC2\DC2\n\
      \\EOTtxid\CAN\SOH \SOH(\fR\EOTtxid\DC2\SYN\n\
      \\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\ESC\n\
      \\tnum_confs\CAN\ETX \SOH(\rR\bnumConfs\DC2\US\n\
      \\vheight_hint\CAN\EOT \SOH(\rR\n\
      \heightHint"
  packedFileDescriptor _ = packedFileDescriptor
  fieldsByTag
    = let
        txid__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "txid"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"txid")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        script__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "script"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"script")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        numConfs__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "num_confs"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"numConfs")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
        heightHint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "height_hint"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"heightHint")) ::
              Data.ProtoLens.FieldDescriptor ConfRequest
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, txid__field_descriptor),
           (Data.ProtoLens.Tag 2, script__field_descriptor),
           (Data.ProtoLens.Tag 3, numConfs__field_descriptor),
           (Data.ProtoLens.Tag 4, heightHint__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _ConfRequest'_unknownFields
        (\ x__ y__ -> x__ {_ConfRequest'_unknownFields = y__})
  defMessage
    = ConfRequest'_constructor
        {_ConfRequest'txid = Data.ProtoLens.fieldDefault,
         _ConfRequest'script = Data.ProtoLens.fieldDefault,
         _ConfRequest'numConfs = Data.ProtoLens.fieldDefault,
         _ConfRequest'heightHint = Data.ProtoLens.fieldDefault,
         _ConfRequest'_unknownFields = []}
  -- Parser loop: keys 10/18 are length-delimited bytes (txid/script),
  -- 24/32 are varints (num_confs/height_hint); others become unknown fields.
  parseMessage
    = let
        loop ::
          ConfRequest -> Data.ProtoLens.Encoding.Bytes.Parser ConfRequest
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "txid"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"txid") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "script"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"script") y x)
                        24
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "num_confs"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"numConfs") y x)
                        32
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "height_hint"
                                loop
                                  (Lens.Family2.set (Data.ProtoLens.Field.field @"heightHint") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "ConfRequest"
  -- Serializer: proto3 semantics — fields equal to their default are omitted.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"txid") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"script") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v = Lens.Family2.view (Data.ProtoLens.Field.field @"numConfs") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 24)
                            ((Prelude..)
                               Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                   ((Data.Monoid.<>)
                      (let
                         _v
                           = Lens.Family2.view (Data.ProtoLens.Field.field @"heightHint") _x
                       in
                         if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                             Data.Monoid.mempty
                         else
                             (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt 32)
                               ((Prelude..)
                                  Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                      (Data.ProtoLens.Encoding.Wire.buildFieldSet
                         (Lens.Family2.view Data.ProtoLens.unknownFields _x)))))
instance Control.DeepSeq.NFData ConfRequest where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_ConfRequest'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_ConfRequest'txid x__)
                (Control.DeepSeq.deepseq
                   (_ConfRequest'script x__)
                   (Control.DeepSeq.deepseq
                      (_ConfRequest'numConfs x__)
                      (Control.DeepSeq.deepseq (_ConfRequest'heightHint x__) ()))))
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.hash' @:: Lens' Outpoint Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.index' @:: Lens' Outpoint Data.Word.Word32@ -}
-- Outpoint: Haskell representation of the @chainrpc.Outpoint@ protobuf
-- message: hash (bytes, field 1) and index (uint32, field 2).
-- NOTE(review): proto-lens generated code — regenerate from the .proto
-- rather than editing by hand.
data Outpoint
  = Outpoint'_constructor {_Outpoint'hash :: !Data.ByteString.ByteString,
                           _Outpoint'index :: !Data.Word.Word32,
                           _Outpoint'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders the message via the short proto-lens text form, wrapped in braces.
instance Prelude.Show Outpoint where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out Outpoint
-- Plain record-field lenses.
instance Data.ProtoLens.Field.HasField Outpoint "hash" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _Outpoint'hash (\ x__ y__ -> x__ {_Outpoint'hash = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField Outpoint "index" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _Outpoint'index (\ x__ y__ -> x__ {_Outpoint'index = y__}))
        Prelude.id
-- Wire (de)serialization for chainrpc.Outpoint.
instance Data.ProtoLens.Message Outpoint where
  messageName _ = Data.Text.pack "chainrpc.Outpoint"
  packedMessageDescriptor _
    = "\n\
      \\bOutpoint\DC2\DC2\n\
      \\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\DC4\n\
      \\ENQindex\CAN\STX \SOH(\rR\ENQindex"
  packedFileDescriptor _ = packedFileDescriptor
  fieldsByTag
    = let
        hash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"hash")) ::
              Data.ProtoLens.FieldDescriptor Outpoint
        index__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "index"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"index")) ::
              Data.ProtoLens.FieldDescriptor Outpoint
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, hash__field_descriptor),
           (Data.ProtoLens.Tag 2, index__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _Outpoint'_unknownFields
        (\ x__ y__ -> x__ {_Outpoint'_unknownFields = y__})
  defMessage
    = Outpoint'_constructor
        {_Outpoint'hash = Data.ProtoLens.fieldDefault,
         _Outpoint'index = Data.ProtoLens.fieldDefault,
         _Outpoint'_unknownFields = []}
  -- Parser loop: key 10 is length-delimited bytes (hash), 16 is a varint
  -- (index); anything else is preserved as an unknown field.
  parseMessage
    = let
        loop :: Outpoint -> Data.ProtoLens.Encoding.Bytes.Parser Outpoint
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "hash"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"hash") y x)
                        16
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "index"
                                loop (Lens.Family2.set (Data.ProtoLens.Field.field @"index") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "Outpoint"
  -- Serializer: proto3 semantics — default-valued fields are omitted.
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"hash") _x
              in
                if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                    Data.Monoid.mempty
                else
                    (Data.Monoid.<>)
                      (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                      ((\ bs
                          -> (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt
                                  (Prelude.fromIntegral (Data.ByteString.length bs)))
                               (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                         _v))
             ((Data.Monoid.<>)
                (let
                   _v = Lens.Family2.view (Data.ProtoLens.Field.field @"index") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 16)
                         ((Prelude..)
                            Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                (Data.ProtoLens.Encoding.Wire.buildFieldSet
                   (Lens.Family2.view Data.ProtoLens.unknownFields _x)))
instance Control.DeepSeq.NFData Outpoint where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_Outpoint'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_Outpoint'hash x__)
                (Control.DeepSeq.deepseq (_Outpoint'index x__) ()))
-- Reorg: Haskell representation of the @chainrpc.Reorg@ protobuf message.
-- The message declares no fields; only unknown fields are carried.
-- NOTE(review): proto-lens generated code — regenerate from the .proto
-- rather than editing by hand.
data Reorg
  = Reorg'_constructor {_Reorg'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders the message via the short proto-lens text form, wrapped in braces.
instance Prelude.Show Reorg where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out Reorg
-- Wire (de)serialization: with no declared fields, parsing collects every
-- tagged value into unknownFields and building just re-emits them.
instance Data.ProtoLens.Message Reorg where
  messageName _ = Data.Text.pack "chainrpc.Reorg"
  packedMessageDescriptor _
    = "\n\
      \\ENQReorg"
  packedFileDescriptor _ = packedFileDescriptor
  fieldsByTag = let in Data.Map.fromList []
  unknownFields
    = Lens.Family2.Unchecked.lens
        _Reorg'_unknownFields
        (\ x__ y__ -> x__ {_Reorg'_unknownFields = y__})
  defMessage = Reorg'_constructor {_Reorg'_unknownFields = []}
  parseMessage
    = let
        loop :: Reorg -> Data.ProtoLens.Encoding.Bytes.Parser Reorg
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of {
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x) }
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "Reorg"
  buildMessage
    = \ _x
        -> Data.ProtoLens.Encoding.Wire.buildFieldSet
             (Lens.Family2.view Data.ProtoLens.unknownFields _x)
instance Control.DeepSeq.NFData Reorg where
  rnf
    = \ x__ -> Control.DeepSeq.deepseq (_Reorg'_unknownFields x__) ()
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingOutpoint' @:: Lens' SpendDetails Outpoint@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'spendingOutpoint' @:: Lens' SpendDetails (Prelude.Maybe Outpoint)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.rawSpendingTx' @:: Lens' SpendDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingTxHash' @:: Lens' SpendDetails Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingInputIndex' @:: Lens' SpendDetails Data.Word.Word32@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spendingHeight' @:: Lens' SpendDetails Data.Word.Word32@ -}
-- SpendDetails: Haskell representation of the @chainrpc.SpendDetails@
-- protobuf message: spendingOutpoint (message Outpoint, field 1, optional),
-- rawSpendingTx (bytes, field 2), spendingTxHash (bytes, field 3),
-- spendingInputIndex (uint32, field 4) and spendingHeight (uint32, field 5).
-- NOTE(review): proto-lens generated code — regenerate from the .proto
-- rather than editing by hand.
data SpendDetails
  = SpendDetails'_constructor {_SpendDetails'spendingOutpoint :: !(Prelude.Maybe Outpoint),
                               _SpendDetails'rawSpendingTx :: !Data.ByteString.ByteString,
                               _SpendDetails'spendingTxHash :: !Data.ByteString.ByteString,
                               _SpendDetails'spendingInputIndex :: !Data.Word.Word32,
                               _SpendDetails'spendingHeight :: !Data.Word.Word32,
                               _SpendDetails'_unknownFields :: !Data.ProtoLens.FieldSet}
  deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
-- Renders the message via the short proto-lens text form, wrapped in braces.
instance Prelude.Show SpendDetails where
  showsPrec _ __x __s
    = Prelude.showChar
        '{'
        (Prelude.showString
           (Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out SpendDetails
-- Total lens onto spendingOutpoint: reading an unset field yields defMessage.
instance Data.ProtoLens.Field.HasField SpendDetails "spendingOutpoint" Outpoint where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingOutpoint
           (\ x__ y__ -> x__ {_SpendDetails'spendingOutpoint = y__}))
        (Data.ProtoLens.maybeLens Data.ProtoLens.defMessage)
-- Lens onto the raw Maybe, distinguishing unset from set-to-default.
instance Data.ProtoLens.Field.HasField SpendDetails "maybe'spendingOutpoint" (Prelude.Maybe Outpoint) where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingOutpoint
           (\ x__ y__ -> x__ {_SpendDetails'spendingOutpoint = y__}))
        Prelude.id
-- Plain record-field lenses for the scalar fields.
instance Data.ProtoLens.Field.HasField SpendDetails "rawSpendingTx" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'rawSpendingTx
           (\ x__ y__ -> x__ {_SpendDetails'rawSpendingTx = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField SpendDetails "spendingTxHash" Data.ByteString.ByteString where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingTxHash
           (\ x__ y__ -> x__ {_SpendDetails'spendingTxHash = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField SpendDetails "spendingInputIndex" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingInputIndex
           (\ x__ y__ -> x__ {_SpendDetails'spendingInputIndex = y__}))
        Prelude.id
instance Data.ProtoLens.Field.HasField SpendDetails "spendingHeight" Data.Word.Word32 where
  fieldOf _
    = (Prelude..)
        (Lens.Family2.Unchecked.lens
           _SpendDetails'spendingHeight
           (\ x__ y__ -> x__ {_SpendDetails'spendingHeight = y__}))
        Prelude.id
-- Wire (de)serialization for chainrpc.SpendDetails.  The escaped descriptor
-- string must stay byte-exact to match the .proto schema.
instance Data.ProtoLens.Message SpendDetails where
  messageName _ = Data.Text.pack "chainrpc.SpendDetails"
  packedMessageDescriptor _
    = "\n\
      \\fSpendDetails\DC2?\n\
      \\DC1spending_outpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\DLEspendingOutpoint\DC2&\n\
      \\SIraw_spending_tx\CAN\STX \SOH(\fR\rrawSpendingTx\DC2(\n\
      \\DLEspending_tx_hash\CAN\ETX \SOH(\fR\SOspendingTxHash\DC20\n\
      \\DC4spending_input_index\CAN\EOT \SOH(\rR\DC2spendingInputIndex\DC2'\n\
      \\SIspending_height\CAN\ENQ \SOH(\rR\SOspendingHeight"
  packedFileDescriptor _ = packedFileDescriptor
  fieldsByTag
    = let
        spendingOutpoint__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_outpoint"
              (Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
                 Data.ProtoLens.FieldTypeDescriptor Outpoint)
              (Data.ProtoLens.OptionalField
                 (Data.ProtoLens.Field.field @"maybe'spendingOutpoint")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        rawSpendingTx__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "raw_spending_tx"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"rawSpendingTx")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingTxHash__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_tx_hash"
              (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
                 Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingTxHash")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingInputIndex__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_input_index"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingInputIndex")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
        spendingHeight__field_descriptor
          = Data.ProtoLens.FieldDescriptor
              "spending_height"
              (Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
                 Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
              (Data.ProtoLens.PlainField
                 Data.ProtoLens.Optional
                 (Data.ProtoLens.Field.field @"spendingHeight")) ::
              Data.ProtoLens.FieldDescriptor SpendDetails
      in
        Data.Map.fromList
          [(Data.ProtoLens.Tag 1, spendingOutpoint__field_descriptor),
           (Data.ProtoLens.Tag 2, rawSpendingTx__field_descriptor),
           (Data.ProtoLens.Tag 3, spendingTxHash__field_descriptor),
           (Data.ProtoLens.Tag 4, spendingInputIndex__field_descriptor),
           (Data.ProtoLens.Tag 5, spendingHeight__field_descriptor)]
  unknownFields
    = Lens.Family2.Unchecked.lens
        _SpendDetails'_unknownFields
        (\ x__ y__ -> x__ {_SpendDetails'_unknownFields = y__})
  defMessage
    = SpendDetails'_constructor
        {_SpendDetails'spendingOutpoint = Prelude.Nothing,
         _SpendDetails'rawSpendingTx = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingTxHash = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingInputIndex = Data.ProtoLens.fieldDefault,
         _SpendDetails'spendingHeight = Data.ProtoLens.fieldDefault,
         _SpendDetails'_unknownFields = []}
  -- Parser loop: key 10 is an embedded Outpoint, 18/26 length-delimited
  -- bytes, 32/40 varints; other keys are preserved as unknown fields.
  parseMessage
    = let
        loop ::
          SpendDetails -> Data.ProtoLens.Encoding.Bytes.Parser SpendDetails
        loop x
          = do end <- Data.ProtoLens.Encoding.Bytes.atEnd
               if end then
                   do (let missing = []
                       in
                         if Prelude.null missing then
                             Prelude.return ()
                         else
                             Prelude.fail
                               ((Prelude.++)
                                  "Missing required fields: "
                                  (Prelude.show (missing :: [Prelude.String]))))
                      Prelude.return
                        (Lens.Family2.over
                           Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
               else
                   do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
                      case tag of
                        10
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.isolate
                                             (Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
                                       "spending_outpoint"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingOutpoint") y x)
                        18
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "raw_spending_tx"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"rawSpendingTx") y x)
                        26
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
                                           Data.ProtoLens.Encoding.Bytes.getBytes
                                             (Prelude.fromIntegral len))
                                       "spending_tx_hash"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingTxHash") y x)
                        32
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "spending_input_index"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingInputIndex") y x)
                        40
                          -> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
                                       (Prelude.fmap
                                          Prelude.fromIntegral
                                          Data.ProtoLens.Encoding.Bytes.getVarInt)
                                       "spending_height"
                                loop
                                  (Lens.Family2.set
                                     (Data.ProtoLens.Field.field @"spendingHeight") y x)
                        wire
                          -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
                                        wire
                                loop
                                  (Lens.Family2.over
                                     Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
      in
        (Data.ProtoLens.Encoding.Bytes.<?>)
          (do loop Data.ProtoLens.defMessage) "SpendDetails"
  -- Serializer: the optional submessage is emitted only when present;
  -- scalar fields follow proto3 semantics (defaults omitted).
  buildMessage
    = \ _x
        -> (Data.Monoid.<>)
             (case
                  Lens.Family2.view
                    (Data.ProtoLens.Field.field @"maybe'spendingOutpoint") _x
              of
                Prelude.Nothing -> Data.Monoid.mempty
                (Prelude.Just _v)
                  -> (Data.Monoid.<>)
                       (Data.ProtoLens.Encoding.Bytes.putVarInt 10)
                       ((Prelude..)
                          (\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                          Data.ProtoLens.encodeMessage _v))
             ((Data.Monoid.<>)
                (let
                   _v
                     = Lens.Family2.view
                         (Data.ProtoLens.Field.field @"rawSpendingTx") _x
                 in
                   if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                       Data.Monoid.mempty
                   else
                       (Data.Monoid.<>)
                         (Data.ProtoLens.Encoding.Bytes.putVarInt 18)
                         ((\ bs
                             -> (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt
                                     (Prelude.fromIntegral (Data.ByteString.length bs)))
                                  (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                            _v))
                ((Data.Monoid.<>)
                   (let
                      _v
                        = Lens.Family2.view
                            (Data.ProtoLens.Field.field @"spendingTxHash") _x
                    in
                      if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                          Data.Monoid.mempty
                      else
                          (Data.Monoid.<>)
                            (Data.ProtoLens.Encoding.Bytes.putVarInt 26)
                            ((\ bs
                                -> (Data.Monoid.<>)
                                     (Data.ProtoLens.Encoding.Bytes.putVarInt
                                        (Prelude.fromIntegral (Data.ByteString.length bs)))
                                     (Data.ProtoLens.Encoding.Bytes.putBytes bs))
                               _v))
                   ((Data.Monoid.<>)
                      (let
                         _v
                           = Lens.Family2.view
                               (Data.ProtoLens.Field.field @"spendingInputIndex") _x
                       in
                         if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                             Data.Monoid.mempty
                         else
                             (Data.Monoid.<>)
                               (Data.ProtoLens.Encoding.Bytes.putVarInt 32)
                               ((Prelude..)
                                  Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
                      ((Data.Monoid.<>)
                         (let
                            _v
                              = Lens.Family2.view
                                  (Data.ProtoLens.Field.field @"spendingHeight") _x
                          in
                            if (Prelude.==) _v Data.ProtoLens.fieldDefault then
                                Data.Monoid.mempty
                            else
                                (Data.Monoid.<>)
                                  (Data.ProtoLens.Encoding.Bytes.putVarInt 40)
                                  ((Prelude..)
                                     Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral
                                     _v))
                         (Data.ProtoLens.Encoding.Wire.buildFieldSet
                            (Lens.Family2.view Data.ProtoLens.unknownFields _x))))))
instance Control.DeepSeq.NFData SpendDetails where
  rnf
    = \ x__
        -> Control.DeepSeq.deepseq
             (_SpendDetails'_unknownFields x__)
             (Control.DeepSeq.deepseq
                (_SpendDetails'spendingOutpoint x__)
                (Control.DeepSeq.deepseq
                   (_SpendDetails'rawSpendingTx x__)
                   (Control.DeepSeq.deepseq
                      (_SpendDetails'spendingTxHash x__)
                      (Control.DeepSeq.deepseq
                         (_SpendDetails'spendingInputIndex x__)
                         (Control.DeepSeq.deepseq (_SpendDetails'spendingHeight x__) ())))))
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'event' @:: Lens' SpendEvent (Prelude.Maybe SpendEvent'Event)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'spend' @:: Lens' SpendEvent (Prelude.Maybe SpendDetails)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.spend' @:: Lens' SpendEvent SpendDetails@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'reorg' @:: Lens' SpendEvent (Prelude.Maybe Reorg)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.reorg' @:: Lens' SpendEvent Reorg@ -}
data SpendEvent
= SpendEvent'_constructor {_SpendEvent'event :: !(Prelude.Maybe SpendEvent'Event),
_SpendEvent'_unknownFields :: !Data.ProtoLens.FieldSet}
deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
instance Prelude.Show SpendEvent where
showsPrec _ __x __s
= Prelude.showChar
'{'
(Prelude.showString
(Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out SpendEvent
data SpendEvent'Event
= SpendEvent'Spend !SpendDetails | SpendEvent'Reorg !Reorg
deriving stock (Prelude.Show,
Prelude.Eq,
Prelude.Ord,
GHC.Generics.Generic)
instance Text.PrettyPrint.GenericPretty.Out SpendEvent'Event
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'event" (Prelude.Maybe SpendEvent'Event) where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'spend" (Prelude.Maybe SpendDetails) where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
(Lens.Family2.Unchecked.lens
(\ x__
-> case x__ of
(Prelude.Just (SpendEvent'Spend x__val)) -> Prelude.Just x__val
_otherwise -> Prelude.Nothing)
(\ _ y__ -> Prelude.fmap SpendEvent'Spend y__))
instance Data.ProtoLens.Field.HasField SpendEvent "spend" SpendDetails where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
((Prelude..)
(Lens.Family2.Unchecked.lens
(\ x__
-> case x__ of
(Prelude.Just (SpendEvent'Spend x__val)) -> Prelude.Just x__val
_otherwise -> Prelude.Nothing)
(\ _ y__ -> Prelude.fmap SpendEvent'Spend y__))
(Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
instance Data.ProtoLens.Field.HasField SpendEvent "maybe'reorg" (Prelude.Maybe Reorg) where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
(Lens.Family2.Unchecked.lens
(\ x__
-> case x__ of
(Prelude.Just (SpendEvent'Reorg x__val)) -> Prelude.Just x__val
_otherwise -> Prelude.Nothing)
(\ _ y__ -> Prelude.fmap SpendEvent'Reorg y__))
instance Data.ProtoLens.Field.HasField SpendEvent "reorg" Reorg where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendEvent'event (\ x__ y__ -> x__ {_SpendEvent'event = y__}))
((Prelude..)
(Lens.Family2.Unchecked.lens
(\ x__
-> case x__ of
(Prelude.Just (SpendEvent'Reorg x__val)) -> Prelude.Just x__val
_otherwise -> Prelude.Nothing)
(\ _ y__ -> Prelude.fmap SpendEvent'Reorg y__))
(Data.ProtoLens.maybeLens Data.ProtoLens.defMessage))
instance Data.ProtoLens.Message SpendEvent where
messageName _ = Data.Text.pack "chainrpc.SpendEvent"
packedMessageDescriptor _
= "\n\
\\n\
\SpendEvent\DC2.\n\
\\ENQspend\CAN\SOH \SOH(\v2\SYN.chainrpc.SpendDetailsH\NULR\ENQspend\DC2'\n\
\\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
\\ENQevent"
packedFileDescriptor _ = packedFileDescriptor
fieldsByTag
= let
spend__field_descriptor
= Data.ProtoLens.FieldDescriptor
"spend"
(Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
Data.ProtoLens.FieldTypeDescriptor SpendDetails)
(Data.ProtoLens.OptionalField
(Data.ProtoLens.Field.field @"maybe'spend")) ::
Data.ProtoLens.FieldDescriptor SpendEvent
reorg__field_descriptor
= Data.ProtoLens.FieldDescriptor
"reorg"
(Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
Data.ProtoLens.FieldTypeDescriptor Reorg)
(Data.ProtoLens.OptionalField
(Data.ProtoLens.Field.field @"maybe'reorg")) ::
Data.ProtoLens.FieldDescriptor SpendEvent
in
Data.Map.fromList
[(Data.ProtoLens.Tag 1, spend__field_descriptor),
(Data.ProtoLens.Tag 2, reorg__field_descriptor)]
unknownFields
= Lens.Family2.Unchecked.lens
_SpendEvent'_unknownFields
(\ x__ y__ -> x__ {_SpendEvent'_unknownFields = y__})
defMessage
= SpendEvent'_constructor
{_SpendEvent'event = Prelude.Nothing,
_SpendEvent'_unknownFields = []}
parseMessage
= let
loop ::
SpendEvent -> Data.ProtoLens.Encoding.Bytes.Parser SpendEvent
loop x
= do end <- Data.ProtoLens.Encoding.Bytes.atEnd
if end then
do (let missing = []
in
if Prelude.null missing then
Prelude.return ()
else
Prelude.fail
((Prelude.++)
"Missing required fields: "
(Prelude.show (missing :: [Prelude.String]))))
Prelude.return
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
else
do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
case tag of
10
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.isolate
(Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
"spend"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"spend") y x)
18
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.isolate
(Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
"reorg"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"reorg") y x)
wire
-> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
wire
loop
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
in
(Data.ProtoLens.Encoding.Bytes.<?>)
(do loop Data.ProtoLens.defMessage) "SpendEvent"
buildMessage
= \ _x
-> (Data.Monoid.<>)
(case
Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'event") _x
of
Prelude.Nothing -> Data.Monoid.mempty
(Prelude.Just (SpendEvent'Spend v))
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 10)
((Prelude..)
(\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
Data.ProtoLens.encodeMessage v)
(Prelude.Just (SpendEvent'Reorg v))
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 18)
((Prelude..)
(\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
Data.ProtoLens.encodeMessage v))
(Data.ProtoLens.Encoding.Wire.buildFieldSet
(Lens.Family2.view Data.ProtoLens.unknownFields _x))
instance Control.DeepSeq.NFData SpendEvent where
rnf
= \ x__
-> Control.DeepSeq.deepseq
(_SpendEvent'_unknownFields x__)
(Control.DeepSeq.deepseq (_SpendEvent'event x__) ())
-- Evaluate whichever oneof alternative is present to normal form.
instance Control.DeepSeq.NFData SpendEvent'Event where
  rnf (SpendEvent'Spend x__) = Control.DeepSeq.rnf x__
  rnf (SpendEvent'Reorg x__) = Control.DeepSeq.rnf x__
_SpendEvent'Spend ::
Data.ProtoLens.Prism.Prism' SpendEvent'Event SpendDetails
_SpendEvent'Spend
= Data.ProtoLens.Prism.prism'
SpendEvent'Spend
(\ p__
-> case p__ of
(SpendEvent'Spend p__val) -> Prelude.Just p__val
_otherwise -> Prelude.Nothing)
_SpendEvent'Reorg ::
Data.ProtoLens.Prism.Prism' SpendEvent'Event Reorg
_SpendEvent'Reorg
= Data.ProtoLens.Prism.prism'
SpendEvent'Reorg
(\ p__
-> case p__ of
(SpendEvent'Reorg p__val) -> Prelude.Just p__val
_otherwise -> Prelude.Nothing)
{- | Fields :

     * 'Proto.Chainrpc.Chainnotifier_Fields.outpoint' @:: Lens' SpendRequest Outpoint@
     * 'Proto.Chainrpc.Chainnotifier_Fields.maybe'outpoint' @:: Lens' SpendRequest (Prelude.Maybe Outpoint)@
     * 'Proto.Chainrpc.Chainnotifier_Fields.script' @:: Lens' SpendRequest Data.ByteString.ByteString@
     * 'Proto.Chainrpc.Chainnotifier_Fields.heightHint' @:: Lens' SpendRequest Data.Word.Word32@ -}
data SpendRequest
= SpendRequest'_constructor {_SpendRequest'outpoint :: !(Prelude.Maybe Outpoint),
_SpendRequest'script :: !Data.ByteString.ByteString,
_SpendRequest'heightHint :: !Data.Word.Word32,
_SpendRequest'_unknownFields :: !Data.ProtoLens.FieldSet}
deriving stock (Prelude.Eq, Prelude.Ord, GHC.Generics.Generic)
instance Prelude.Show SpendRequest where
showsPrec _ __x __s
= Prelude.showChar
'{'
(Prelude.showString
(Data.ProtoLens.showMessageShort __x) (Prelude.showChar '}' __s))
instance Text.PrettyPrint.GenericPretty.Out SpendRequest
instance Data.ProtoLens.Field.HasField SpendRequest "outpoint" Outpoint where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendRequest'outpoint
(\ x__ y__ -> x__ {_SpendRequest'outpoint = y__}))
(Data.ProtoLens.maybeLens Data.ProtoLens.defMessage)
instance Data.ProtoLens.Field.HasField SpendRequest "maybe'outpoint" (Prelude.Maybe Outpoint) where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendRequest'outpoint
(\ x__ y__ -> x__ {_SpendRequest'outpoint = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField SpendRequest "script" Data.ByteString.ByteString where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendRequest'script
(\ x__ y__ -> x__ {_SpendRequest'script = y__}))
Prelude.id
instance Data.ProtoLens.Field.HasField SpendRequest "heightHint" Data.Word.Word32 where
fieldOf _
= (Prelude..)
(Lens.Family2.Unchecked.lens
_SpendRequest'heightHint
(\ x__ y__ -> x__ {_SpendRequest'heightHint = y__}))
Prelude.id
instance Data.ProtoLens.Message SpendRequest where
messageName _ = Data.Text.pack "chainrpc.SpendRequest"
packedMessageDescriptor _
= "\n\
\\fSpendRequest\DC2.\n\
\\boutpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\boutpoint\DC2\SYN\n\
\\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\US\n\
\\vheight_hint\CAN\ETX \SOH(\rR\n\
\heightHint"
packedFileDescriptor _ = packedFileDescriptor
fieldsByTag
= let
outpoint__field_descriptor
= Data.ProtoLens.FieldDescriptor
"outpoint"
(Data.ProtoLens.MessageField Data.ProtoLens.MessageType ::
Data.ProtoLens.FieldTypeDescriptor Outpoint)
(Data.ProtoLens.OptionalField
(Data.ProtoLens.Field.field @"maybe'outpoint")) ::
Data.ProtoLens.FieldDescriptor SpendRequest
script__field_descriptor
= Data.ProtoLens.FieldDescriptor
"script"
(Data.ProtoLens.ScalarField Data.ProtoLens.BytesField ::
Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional (Data.ProtoLens.Field.field @"script")) ::
Data.ProtoLens.FieldDescriptor SpendRequest
heightHint__field_descriptor
= Data.ProtoLens.FieldDescriptor
"height_hint"
(Data.ProtoLens.ScalarField Data.ProtoLens.UInt32Field ::
Data.ProtoLens.FieldTypeDescriptor Data.Word.Word32)
(Data.ProtoLens.PlainField
Data.ProtoLens.Optional
(Data.ProtoLens.Field.field @"heightHint")) ::
Data.ProtoLens.FieldDescriptor SpendRequest
in
Data.Map.fromList
[(Data.ProtoLens.Tag 1, outpoint__field_descriptor),
(Data.ProtoLens.Tag 2, script__field_descriptor),
(Data.ProtoLens.Tag 3, heightHint__field_descriptor)]
unknownFields
= Lens.Family2.Unchecked.lens
_SpendRequest'_unknownFields
(\ x__ y__ -> x__ {_SpendRequest'_unknownFields = y__})
defMessage
= SpendRequest'_constructor
{_SpendRequest'outpoint = Prelude.Nothing,
_SpendRequest'script = Data.ProtoLens.fieldDefault,
_SpendRequest'heightHint = Data.ProtoLens.fieldDefault,
_SpendRequest'_unknownFields = []}
parseMessage
= let
loop ::
SpendRequest -> Data.ProtoLens.Encoding.Bytes.Parser SpendRequest
loop x
= do end <- Data.ProtoLens.Encoding.Bytes.atEnd
if end then
do (let missing = []
in
if Prelude.null missing then
Prelude.return ()
else
Prelude.fail
((Prelude.++)
"Missing required fields: "
(Prelude.show (missing :: [Prelude.String]))))
Prelude.return
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> Prelude.reverse t) x)
else
do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt
case tag of
10
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.isolate
(Prelude.fromIntegral len) Data.ProtoLens.parseMessage)
"outpoint"
loop
(Lens.Family2.set (Data.ProtoLens.Field.field @"outpoint") y x)
18
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(do len <- Data.ProtoLens.Encoding.Bytes.getVarInt
Data.ProtoLens.Encoding.Bytes.getBytes
(Prelude.fromIntegral len))
"script"
loop (Lens.Family2.set (Data.ProtoLens.Field.field @"script") y x)
24
-> do y <- (Data.ProtoLens.Encoding.Bytes.<?>)
(Prelude.fmap
Prelude.fromIntegral
Data.ProtoLens.Encoding.Bytes.getVarInt)
"height_hint"
loop
(Lens.Family2.set (Data.ProtoLens.Field.field @"heightHint") y x)
wire
-> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire
wire
loop
(Lens.Family2.over
Data.ProtoLens.unknownFields (\ !t -> (:) y t) x)
in
(Data.ProtoLens.Encoding.Bytes.<?>)
(do loop Data.ProtoLens.defMessage) "SpendRequest"
buildMessage
= \ _x
-> (Data.Monoid.<>)
(case
Lens.Family2.view (Data.ProtoLens.Field.field @"maybe'outpoint") _x
of
Prelude.Nothing -> Data.Monoid.mempty
(Prelude.Just _v)
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 10)
((Prelude..)
(\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
Data.ProtoLens.encodeMessage _v))
((Data.Monoid.<>)
(let
_v = Lens.Family2.view (Data.ProtoLens.Field.field @"script") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 18)
((\ bs
-> (Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt
(Prelude.fromIntegral (Data.ByteString.length bs)))
(Data.ProtoLens.Encoding.Bytes.putBytes bs))
_v))
((Data.Monoid.<>)
(let
_v
= Lens.Family2.view (Data.ProtoLens.Field.field @"heightHint") _x
in
if (Prelude.==) _v Data.ProtoLens.fieldDefault then
Data.Monoid.mempty
else
(Data.Monoid.<>)
(Data.ProtoLens.Encoding.Bytes.putVarInt 24)
((Prelude..)
Data.ProtoLens.Encoding.Bytes.putVarInt Prelude.fromIntegral _v))
(Data.ProtoLens.Encoding.Wire.buildFieldSet
(Lens.Family2.view Data.ProtoLens.unknownFields _x))))
instance Control.DeepSeq.NFData SpendRequest where
rnf
= \ x__
-> Control.DeepSeq.deepseq
(_SpendRequest'_unknownFields x__)
(Control.DeepSeq.deepseq
(_SpendRequest'outpoint x__)
(Control.DeepSeq.deepseq
(_SpendRequest'script x__)
(Control.DeepSeq.deepseq (_SpendRequest'heightHint x__) ())))
data ChainNotifier = ChainNotifier {}
instance Data.ProtoLens.Service.Types.Service ChainNotifier where
type ServiceName ChainNotifier = "ChainNotifier"
type ServicePackage ChainNotifier = "chainrpc"
type ServiceMethods ChainNotifier = '["registerBlockEpochNtfn",
"registerConfirmationsNtfn",
"registerSpendNtfn"]
packedServiceDescriptor _
= "\n\
\\rChainNotifier\DC2I\n\
\\EMRegisterConfirmationsNtfn\DC2\NAK.chainrpc.ConfRequest\SUB\DC3.chainrpc.ConfEvent0\SOH\DC2C\n\
\\DC1RegisterSpendNtfn\DC2\SYN.chainrpc.SpendRequest\SUB\DC4.chainrpc.SpendEvent0\SOH\DC2F\n\
\\SYNRegisterBlockEpochNtfn\DC2\DC4.chainrpc.BlockEpoch\SUB\DC4.chainrpc.BlockEpoch0\SOH"
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerConfirmationsNtfn" where
type MethodName ChainNotifier "registerConfirmationsNtfn" = "RegisterConfirmationsNtfn"
type MethodInput ChainNotifier "registerConfirmationsNtfn" = ConfRequest
type MethodOutput ChainNotifier "registerConfirmationsNtfn" = ConfEvent
type MethodStreamingType ChainNotifier "registerConfirmationsNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerSpendNtfn" where
type MethodName ChainNotifier "registerSpendNtfn" = "RegisterSpendNtfn"
type MethodInput ChainNotifier "registerSpendNtfn" = SpendRequest
type MethodOutput ChainNotifier "registerSpendNtfn" = SpendEvent
type MethodStreamingType ChainNotifier "registerSpendNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
instance Data.ProtoLens.Service.Types.HasMethodImpl ChainNotifier "registerBlockEpochNtfn" where
type MethodName ChainNotifier "registerBlockEpochNtfn" = "RegisterBlockEpochNtfn"
type MethodInput ChainNotifier "registerBlockEpochNtfn" = BlockEpoch
type MethodOutput ChainNotifier "registerBlockEpochNtfn" = BlockEpoch
type MethodStreamingType ChainNotifier "registerBlockEpochNtfn" = 'Data.ProtoLens.Service.Types.ServerStreaming
packedFileDescriptor :: Data.ByteString.ByteString
packedFileDescriptor
= "\n\
\\FSchainrpc/chainnotifier.proto\DC2\bchainrpc\"w\n\
\\vConfRequest\DC2\DC2\n\
\\EOTtxid\CAN\SOH \SOH(\fR\EOTtxid\DC2\SYN\n\
\\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\ESC\n\
\\tnum_confs\CAN\ETX \SOH(\rR\bnumConfs\DC2\US\n\
\\vheight_hint\CAN\EOT \SOH(\rR\n\
\heightHint\"\129\SOH\n\
\\vConfDetails\DC2\NAK\n\
\\ACKraw_tx\CAN\SOH \SOH(\fR\ENQrawTx\DC2\GS\n\
\\n\
\block_hash\CAN\STX \SOH(\fR\tblockHash\DC2!\n\
\\fblock_height\CAN\ETX \SOH(\rR\vblockHeight\DC2\EM\n\
\\btx_index\CAN\EOT \SOH(\rR\atxIndex\"\a\n\
\\ENQReorg\"j\n\
\\tConfEvent\DC2+\n\
\\EOTconf\CAN\SOH \SOH(\v2\NAK.chainrpc.ConfDetailsH\NULR\EOTconf\DC2'\n\
\\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
\\ENQevent\"4\n\
\\bOutpoint\DC2\DC2\n\
\\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\DC4\n\
\\ENQindex\CAN\STX \SOH(\rR\ENQindex\"w\n\
\\fSpendRequest\DC2.\n\
\\boutpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\boutpoint\DC2\SYN\n\
\\ACKscript\CAN\STX \SOH(\fR\ACKscript\DC2\US\n\
\\vheight_hint\CAN\ETX \SOH(\rR\n\
\heightHint\"\252\SOH\n\
\\fSpendDetails\DC2?\n\
\\DC1spending_outpoint\CAN\SOH \SOH(\v2\DC2.chainrpc.OutpointR\DLEspendingOutpoint\DC2&\n\
\\SIraw_spending_tx\CAN\STX \SOH(\fR\rrawSpendingTx\DC2(\n\
\\DLEspending_tx_hash\CAN\ETX \SOH(\fR\SOspendingTxHash\DC20\n\
\\DC4spending_input_index\CAN\EOT \SOH(\rR\DC2spendingInputIndex\DC2'\n\
\\SIspending_height\CAN\ENQ \SOH(\rR\SOspendingHeight\"n\n\
\\n\
\SpendEvent\DC2.\n\
\\ENQspend\CAN\SOH \SOH(\v2\SYN.chainrpc.SpendDetailsH\NULR\ENQspend\DC2'\n\
\\ENQreorg\CAN\STX \SOH(\v2\SI.chainrpc.ReorgH\NULR\ENQreorgB\a\n\
\\ENQevent\"8\n\
\\n\
\BlockEpoch\DC2\DC2\n\
\\EOThash\CAN\SOH \SOH(\fR\EOThash\DC2\SYN\n\
\\ACKheight\CAN\STX \SOH(\rR\ACKheight2\231\SOH\n\
\\rChainNotifier\DC2I\n\
\\EMRegisterConfirmationsNtfn\DC2\NAK.chainrpc.ConfRequest\SUB\DC3.chainrpc.ConfEvent0\SOH\DC2C\n\
\\DC1RegisterSpendNtfn\DC2\SYN.chainrpc.SpendRequest\SUB\DC4.chainrpc.SpendEvent0\SOH\DC2F\n\
\\SYNRegisterBlockEpochNtfn\DC2\DC4.chainrpc.BlockEpoch\SUB\DC4.chainrpc.BlockEpoch0\SOHB0Z.github.com/lightningnetwork/lnd/lnrpc/chainrpcJ\171-\n\
\\a\DC2\ENQ\NUL\NUL\181\SOH\SOH\n\
\\b\n\
\\SOH\f\DC2\ETX\NUL\NUL\DC2\n\
\\b\n\
\\SOH\STX\DC2\ETX\STX\NUL\DC1\n\
\\b\n\
\\SOH\b\DC2\ETX\EOT\NULE\n\
\\t\n\
\\STX\b\v\DC2\ETX\EOT\NULE\n\
\\145\SOH\n\
\\STX\ACK\NUL\DC2\EOT\b\NUL*\SOH\SUB\132\SOH ChainNotifier is a service that can be used to get information about the\n\
\ chain backend by registering notifiers for chain events.\n\
\\n\
\\n\
\\n\
\\ETX\ACK\NUL\SOH\DC2\ETX\b\b\NAK\n\
\\250\STX\n\
\\EOT\ACK\NUL\STX\NUL\DC2\ETX\DC2\EOTK\SUB\236\STX\n\
\RegisterConfirmationsNtfn is a synchronous response-streaming RPC that\n\
\registers an intent for a client to be notified once a confirmation request\n\
\has reached its required number of confirmations on-chain.\n\
\\n\
\A client can specify whether the confirmation request should be for a\n\
\particular transaction by its hash or for an output script by specifying a\n\
\zero hash.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\SOH\DC2\ETX\DC2\b!\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\STX\DC2\ETX\DC2#.\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\ACK\DC2\ETX\DC29?\n\
\\f\n\
\\ENQ\ACK\NUL\STX\NUL\ETX\DC2\ETX\DC2@I\n\
\\224\STX\n\
\\EOT\ACK\NUL\STX\SOH\DC2\ETX\FS\EOTE\SUB\210\STX\n\
\RegisterSpendNtfn is a synchronous response-streaming RPC that registers an\n\
\intent for a client to be notification once a spend request has been spent\n\
\by a transaction that has confirmed on-chain.\n\
\\n\
\A client can specify whether the spend request should be for a particular\n\
\outpoint or for an output script by specifying a zero outpoint.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\SOH\DC2\ETX\FS\b\EM\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\STX\DC2\ETX\FS\ESC'\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\ACK\DC2\ETX\FS28\n\
\\f\n\
\\ENQ\ACK\NUL\STX\SOH\ETX\DC2\ETX\FS9C\n\
\\185\EOT\n\
\\EOT\ACK\NUL\STX\STX\DC2\ETX)\EOTH\SUB\171\EOT\n\
\RegisterBlockEpochNtfn is a synchronous response-streaming RPC that\n\
\registers an intent for a client to be notified of blocks in the chain. The\n\
\stream will return a hash and height tuple of a block for each new/stale\n\
\block in the chain. It is the client's responsibility to determine whether\n\
\the tuple returned is for a new or stale block in the chain.\n\
\\n\
\A client can also request a historical backlog of blocks from a particular\n\
\point. This allows clients to be idempotent by ensuring that they do not\n\
\missing processing a single block within the chain.\n\
\\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\SOH\DC2\ETX)\b\RS\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\STX\DC2\ETX) *\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\ACK\DC2\ETX)5;\n\
\\f\n\
\\ENQ\ACK\NUL\STX\STX\ETX\DC2\ETX)<F\n\
\\n\
\\n\
\\STX\EOT\NUL\DC2\EOT,\NULH\SOH\n\
\\n\
\\n\
\\ETX\EOT\NUL\SOH\DC2\ETX,\b\DC3\n\
\\205\SOH\n\
\\EOT\EOT\NUL\STX\NUL\DC2\ETX2\EOT\DC3\SUB\191\SOH\n\
\The transaction hash for which we should request a confirmation notification\n\
\for. If set to a hash of all zeros, then the confirmation notification will\n\
\be requested for the script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\ENQ\DC2\ETX2\EOT\t\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\SOH\DC2\ETX2\n\
\\SO\n\
\\f\n\
\\ENQ\EOT\NUL\STX\NUL\ETX\DC2\ETX2\DC1\DC2\n\
\\136\STX\n\
\\EOT\EOT\NUL\STX\SOH\DC2\ETX:\EOT\NAK\SUB\250\SOH\n\
\An output script within a transaction with the hash above which will be used\n\
\by light clients to match block filters. If the transaction hash is set to a\n\
\hash of all zeros, then a confirmation notification will be requested for\n\
\this script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\ENQ\DC2\ETX:\EOT\t\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\SOH\DC2\ETX:\n\
\\DLE\n\
\\f\n\
\\ENQ\EOT\NUL\STX\SOH\ETX\DC2\ETX:\DC3\DC4\n\
\\142\SOH\n\
\\EOT\EOT\NUL\STX\STX\DC2\ETX@\EOT\EM\SUB\128\SOH\n\
\The number of desired confirmations the transaction/output script should\n\
\reach before dispatching a confirmation notification.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\ENQ\DC2\ETX@\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\SOH\DC2\ETX@\v\DC4\n\
\\f\n\
\\ENQ\EOT\NUL\STX\STX\ETX\DC2\ETX@\ETB\CAN\n\
\\216\SOH\n\
\\EOT\EOT\NUL\STX\ETX\DC2\ETXG\EOT\ESC\SUB\202\SOH\n\
\The earliest height in the chain for which the transaction/output script\n\
\could have been included in a block. This should in most cases be set to the\n\
\broadcast height of the transaction/output script.\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\ENQ\DC2\ETXG\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\SOH\DC2\ETXG\v\SYN\n\
\\f\n\
\\ENQ\EOT\NUL\STX\ETX\ETX\DC2\ETXG\EM\SUB\n\
\\n\
\\n\
\\STX\EOT\SOH\DC2\EOTJ\NULW\SOH\n\
\\n\
\\n\
\\ETX\EOT\SOH\SOH\DC2\ETXJ\b\DC3\n\
\:\n\
\\EOT\EOT\SOH\STX\NUL\DC2\ETXL\EOT\NAK\SUB- The raw bytes of the confirmed transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\ENQ\DC2\ETXL\EOT\t\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\SOH\DC2\ETXL\n\
\\DLE\n\
\\f\n\
\\ENQ\EOT\SOH\STX\NUL\ETX\DC2\ETXL\DC3\DC4\n\
\X\n\
\\EOT\EOT\SOH\STX\SOH\DC2\ETXO\EOT\EM\SUBK The hash of the block in which the confirmed transaction was included in.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\ENQ\DC2\ETXO\EOT\t\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\SOH\DC2\ETXO\n\
\\DC4\n\
\\f\n\
\\ENQ\EOT\SOH\STX\SOH\ETX\DC2\ETXO\ETB\CAN\n\
\[\n\
\\EOT\EOT\SOH\STX\STX\DC2\ETXS\EOT\FS\SUBN The height of the block in which the confirmed transaction was included\n\
\ in.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\ENQ\DC2\ETXS\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\SOH\DC2\ETXS\v\ETB\n\
\\f\n\
\\ENQ\EOT\SOH\STX\STX\ETX\DC2\ETXS\SUB\ESC\n\
\M\n\
\\EOT\EOT\SOH\STX\ETX\DC2\ETXV\EOT\CAN\SUB@ The index of the confirmed transaction within the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\ENQ\DC2\ETXV\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\SOH\DC2\ETXV\v\DC3\n\
\\f\n\
\\ENQ\EOT\SOH\STX\ETX\ETX\DC2\ETXV\SYN\ETB\n\
\L\n\
\\STX\EOT\STX\DC2\EOTY\NUL[\SOH\"@ TODO(wilmer): need to know how the client will use this first.\n\
\\n\
\\n\
\\n\
\\ETX\EOT\STX\SOH\DC2\ETXY\b\r\n\
\\n\
\\n\
\\STX\EOT\ETX\DC2\EOT]\NULk\SOH\n\
\\n\
\\n\
\\ETX\EOT\ETX\SOH\DC2\ETX]\b\DC1\n\
\\f\n\
\\EOT\EOT\ETX\b\NUL\DC2\EOT^\EOTj\ENQ\n\
\\f\n\
\\ENQ\EOT\ETX\b\NUL\SOH\DC2\ETX^\n\
\\SI\n\
\b\n\
\\EOT\EOT\ETX\STX\NUL\DC2\ETXc\b\GS\SUBU\n\
\An event that includes the confirmation details of the request\n\
\(txid/ouput script).\n\
\\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\ACK\DC2\ETXc\b\DC3\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\SOH\DC2\ETXc\DC4\CAN\n\
\\f\n\
\\ENQ\EOT\ETX\STX\NUL\ETX\DC2\ETXc\ESC\FS\n\
\]\n\
\\EOT\EOT\ETX\STX\SOH\DC2\ETXi\b\CAN\SUBP\n\
\An event send when the transaction of the request is reorged out of the\n\
\chain.\n\
\\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\ACK\DC2\ETXi\b\r\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\SOH\DC2\ETXi\SO\DC3\n\
\\f\n\
\\ENQ\EOT\ETX\STX\SOH\ETX\DC2\ETXi\SYN\ETB\n\
\\n\
\\n\
\\STX\EOT\EOT\DC2\EOTm\NULs\SOH\n\
\\n\
\\n\
\\ETX\EOT\EOT\SOH\DC2\ETXm\b\DLE\n\
\+\n\
\\EOT\EOT\EOT\STX\NUL\DC2\ETXo\EOT\DC3\SUB\RS The hash of the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\ENQ\DC2\ETXo\EOT\t\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\SOH\DC2\ETXo\n\
\\SO\n\
\\f\n\
\\ENQ\EOT\EOT\STX\NUL\ETX\DC2\ETXo\DC1\DC2\n\
\>\n\
\\EOT\EOT\EOT\STX\SOH\DC2\ETXr\EOT\NAK\SUB1 The index of the output within the transaction.\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\ENQ\DC2\ETXr\EOT\n\
\\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\SOH\DC2\ETXr\v\DLE\n\
\\f\n\
\\ENQ\EOT\EOT\STX\SOH\ETX\DC2\ETXr\DC3\DC4\n\
\\v\n\
\\STX\EOT\ENQ\DC2\ENQu\NUL\140\SOH\SOH\n\
\\n\
\\n\
\\ETX\EOT\ENQ\SOH\DC2\ETXu\b\DC4\n\
\\179\SOH\n\
\\EOT\EOT\ENQ\STX\NUL\DC2\ETX{\EOT\SUB\SUB\165\SOH\n\
\The outpoint for which we should request a spend notification for. If set to\n\
\a zero outpoint, then the spend notification will be requested for the\n\
\script instead.\n\
\\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\ACK\DC2\ETX{\EOT\f\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\SOH\DC2\ETX{\r\NAK\n\
\\f\n\
\\ENQ\EOT\ENQ\STX\NUL\ETX\DC2\ETX{\CAN\EM\n\
\\229\SOH\n\
\\EOT\EOT\ENQ\STX\SOH\DC2\EOT\130\SOH\EOT\NAK\SUB\214\SOH\n\
\The output script for the outpoint above. This will be used by light clients\n\
\to match block filters. If the outpoint is set to a zero outpoint, then a\n\
\spend notification will be requested for this script instead.\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\ENQ\DC2\EOT\130\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\SOH\DC2\EOT\130\SOH\n\
\\DLE\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\SOH\ETX\DC2\EOT\130\SOH\DC3\DC4\n\
\\197\SOH\n\
\\EOT\EOT\ENQ\STX\STX\DC2\EOT\137\SOH\EOT\ESC\SUB\182\SOH\n\
\The earliest height in the chain for which the outpoint/output script could\n\
\have been spent. This should in most cases be set to the broadcast height of\n\
\the outpoint/output script.\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\ENQ\DC2\EOT\137\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\SOH\DC2\EOT\137\SOH\v\SYN\n\
\\r\n\
\\ENQ\EOT\ENQ\STX\STX\ETX\DC2\EOT\137\SOH\EM\SUB\n\
\\f\n\
\\STX\EOT\ACK\DC2\ACK\142\SOH\NUL\157\SOH\SOH\n\
\\v\n\
\\ETX\EOT\ACK\SOH\DC2\EOT\142\SOH\b\DC4\n\
\,\n\
\\EOT\EOT\ACK\STX\NUL\DC2\EOT\144\SOH\EOT#\SUB\RS The outpoint was that spent.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\ACK\DC2\EOT\144\SOH\EOT\f\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\SOH\DC2\EOT\144\SOH\r\RS\n\
\\r\n\
\\ENQ\EOT\ACK\STX\NUL\ETX\DC2\EOT\144\SOH!\"\n\
\:\n\
\\EOT\EOT\ACK\STX\SOH\DC2\EOT\147\SOH\EOT\RS\SUB, The raw bytes of the spending transaction.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\ENQ\DC2\EOT\147\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\SOH\DC2\EOT\147\SOH\n\
\\EM\n\
\\r\n\
\\ENQ\EOT\ACK\STX\SOH\ETX\DC2\EOT\147\SOH\FS\GS\n\
\5\n\
\\EOT\EOT\ACK\STX\STX\DC2\EOT\150\SOH\EOT\US\SUB' The hash of the spending transaction.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\ENQ\DC2\EOT\150\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\SOH\DC2\EOT\150\SOH\n\
\\SUB\n\
\\r\n\
\\ENQ\EOT\ACK\STX\STX\ETX\DC2\EOT\150\SOH\GS\RS\n\
\W\n\
\\EOT\EOT\ACK\STX\ETX\DC2\EOT\153\SOH\EOT$\SUBI The input of the spending transaction that fulfilled the spend request.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\ENQ\DC2\EOT\153\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\SOH\DC2\EOT\153\SOH\v\US\n\
\\r\n\
\\ENQ\EOT\ACK\STX\ETX\ETX\DC2\EOT\153\SOH\"#\n\
\U\n\
\\EOT\EOT\ACK\STX\EOT\DC2\EOT\156\SOH\EOT\US\SUBG The height at which the spending transaction was included in a block.\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\ENQ\DC2\EOT\156\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\SOH\DC2\EOT\156\SOH\v\SUB\n\
\\r\n\
\\ENQ\EOT\ACK\STX\EOT\ETX\DC2\EOT\156\SOH\GS\RS\n\
\\f\n\
\\STX\EOT\a\DC2\ACK\159\SOH\NUL\173\SOH\SOH\n\
\\v\n\
\\ETX\EOT\a\SOH\DC2\EOT\159\SOH\b\DC2\n\
\\SO\n\
\\EOT\EOT\a\b\NUL\DC2\ACK\160\SOH\EOT\172\SOH\ENQ\n\
\\r\n\
\\ENQ\EOT\a\b\NUL\SOH\DC2\EOT\160\SOH\n\
\\SI\n\
\w\n\
\\EOT\EOT\a\STX\NUL\DC2\EOT\165\SOH\b\US\SUBi\n\
\An event that includes the details of the spending transaction of the\n\
\request (outpoint/output script).\n\
\\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\ACK\DC2\EOT\165\SOH\b\DC4\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\SOH\DC2\EOT\165\SOH\NAK\SUB\n\
\\r\n\
\\ENQ\EOT\a\STX\NUL\ETX\DC2\EOT\165\SOH\GS\RS\n\
\h\n\
\\EOT\EOT\a\STX\SOH\DC2\EOT\171\SOH\b\CAN\SUBZ\n\
\An event sent when the spending transaction of the request was\n\
\reorged out of the chain.\n\
\\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\ACK\DC2\EOT\171\SOH\b\r\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\SOH\DC2\EOT\171\SOH\SO\DC3\n\
\\r\n\
\\ENQ\EOT\a\STX\SOH\ETX\DC2\EOT\171\SOH\SYN\ETB\n\
\\f\n\
\\STX\EOT\b\DC2\ACK\175\SOH\NUL\181\SOH\SOH\n\
\\v\n\
\\ETX\EOT\b\SOH\DC2\EOT\175\SOH\b\DC2\n\
\&\n\
\\EOT\EOT\b\STX\NUL\DC2\EOT\177\SOH\EOT\DC3\SUB\CAN The hash of the block.\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\ENQ\DC2\EOT\177\SOH\EOT\t\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\SOH\DC2\EOT\177\SOH\n\
\\SO\n\
\\r\n\
\\ENQ\EOT\b\STX\NUL\ETX\DC2\EOT\177\SOH\DC1\DC2\n\
\(\n\
\\EOT\EOT\b\STX\SOH\DC2\EOT\180\SOH\EOT\SYN\SUB\SUB The height of the block.\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\ENQ\DC2\EOT\180\SOH\EOT\n\
\\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\SOH\DC2\EOT\180\SOH\v\DC1\n\
\\r\n\
\\ENQ\EOT\b\STX\SOH\ETX\DC2\EOT\180\SOH\DC4\NAKb\ACKproto3"
{- |
Copyright   : (c) Runtime Verification, 2018-2021
License     : BSD-3-Clause
-}
module Kore.Equation.Registry (
extractEquations,
partitionEquations,
PartitionedEquations (..),
) where
import Control.Error (
hush,
)
import Data.List (
partition,
sortOn,
)
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Kore.Attribute.Axiom (
Assoc (Assoc),
Comm (Comm),
Idem (Idem),
NonExecutable (NonExecutable),
Unit (Unit),
)
import Kore.Attribute.Axiom qualified as Attribute
import Kore.Attribute.Overload
import Kore.Attribute.Symbol (
StepperAttributes,
)
import Kore.Equation.Equation (
Equation (..),
)
import Kore.Equation.Equation qualified as Equation
import Kore.Equation.Sentence qualified as Equation
import Kore.IndexedModule.IndexedModule
import Kore.Internal.TermLike
import Kore.Rewrite.Axiom.Identifier (
AxiomIdentifier,
)
import Kore.Rewrite.Axiom.Identifier qualified as AxiomIdentifier
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
)
import Kore.Syntax.Sentence (
SentenceAxiom (..),
)
import Kore.Verified qualified as Verified
import Prelude.Kore
-- | Create a mapping from symbol identifiers to their defining axioms.
extractEquations ::
    VerifiedModule StepperAttributes ->
    Map AxiomIdentifier [Equation VariableName]
extractEquations verifiedModule =
    foldl' collectFromModule Map.empty (indexedModulesInScope verifiedModule)
  where
    -- Fold every axiom sentence of a single indexed module into the
    -- accumulated map.
    collectFromModule ::
        Map AxiomIdentifier [Equation VariableName] ->
        VerifiedModule StepperAttributes ->
        Map AxiomIdentifier [Equation VariableName]
    collectFromModule acc imod =
        foldl' collectFromSentence acc (indexedModuleAxioms imod)

    -- If the sentence is recognized as an equation, prepend it to the list
    -- of equations already recorded for its identifier.
    collectFromSentence ::
        Map AxiomIdentifier [Equation VariableName] ->
        (Attribute.Axiom Symbol VariableName, Verified.SentenceAxiom) ->
        Map AxiomIdentifier [Equation VariableName]
    collectFromSentence acc sentence =
        case identifyEquation sentence of
            Nothing -> acc
            Just (name, equation) ->
                -- insertWith (++) prepends the singleton, i.e. equation : old.
                Map.insertWith (++) name [equation] acc
{- | Interpret an axiom sentence as an 'Equation', paired with the
'AxiomIdentifier' extracted from its left-hand side. Returns 'Nothing'
when the sentence does not encode an equation.
-}
identifyEquation ::
    ( Attribute.Axiom Symbol VariableName
    , SentenceAxiom (TermLike VariableName)
    ) ->
    Maybe (AxiomIdentifier, Equation VariableName)
identifyEquation axiom =
    case hush (Equation.fromSentenceAxiom axiom) of
        Nothing -> Nothing
        Just equation@Equation{left} ->
            Just (AxiomIdentifier.matchAxiomIdentifier left, equation)
-- | Result of 'partitionEquations': usable equations split by their role.
data PartitionedEquations = PartitionedEquations
    { functionRules :: ![Equation RewritingVariableName]
    -- ^ Function-definition rules, sorted by 'Equation.equationPriority'.
    , simplificationRules :: ![Equation RewritingVariableName]
    -- ^ Rules recognized by 'Equation.isSimplificationRule'.
    }
{- | Filters and partitions a list of 'EqualityRule's into
simplification rules and function rules. The function rules
are also sorted in order of priority.
-}
partitionEquations ::
    [Equation RewritingVariableName] ->
    PartitionedEquations
partitionEquations equations =
    PartitionedEquations{functionRules, simplificationRules}
  where
    -- Drop equations that must not be used for evaluation/simplification.
    usable = filter (not . ignoreEquation) equations
    -- Sort before partitioning so both result lists keep priority order.
    prioritized = sortOn Equation.equationPriority usable
    (simplificationRules, functionRules) =
        partition Equation.isSimplificationRule prioritized
{- | Should we ignore the 'Equation' for evaluation or simplification?

@ignoreEquation@ returns 'True' if the 'EqualityRule' should not be used in
evaluation or simplification, such as if it is an associativity or commutativity
axiom, or if it was marked non-executable.
-}
ignoreEquation :: Equation RewritingVariableName -> Bool
ignoreEquation Equation{attributes}
    | isAssoc = True
    | isComm = True
    -- TODO (thomas.tuegel): Add unification cases for builtin units and enable
    -- extraction of their axioms.
    | isUnit = True
    | isIdem = True
    | isNonExecutable = True
    -- Overloaded-symbol equations are deliberately kept (not ignored); the
    -- explicit guard documents that decision even though it matches 'otherwise'.
    | Just _ <- getOverload = False
    | otherwise = False
  where
    Assoc{isAssoc} = Attribute.assoc attributes
    Comm{isComm} = Attribute.comm attributes
    Unit{isUnit} = Attribute.unit attributes
    Idem{isIdem} = Attribute.idem attributes
    Overload{getOverload} = Attribute.overload attributes
    NonExecutable{isNonExecutable} = Attribute.nonExecutable attributes
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/5b558293cce9d8c5813560a645380863b12b1990/kore/src/Kore/Equation/Registry.hs | haskell | | Create a mapping from symbol identifiers to their defining axioms.
extraction of their axioms. | |
{- |
Copyright   : (c) Runtime Verification, 2018-2021
License     : BSD-3-Clause
-}
module Kore.Equation.Registry (
extractEquations,
partitionEquations,
PartitionedEquations (..),
) where
import Control.Error (
hush,
)
import Data.List (
partition,
sortOn,
)
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Kore.Attribute.Axiom (
Assoc (Assoc),
Comm (Comm),
Idem (Idem),
NonExecutable (NonExecutable),
Unit (Unit),
)
import Kore.Attribute.Axiom qualified as Attribute
import Kore.Attribute.Overload
import Kore.Attribute.Symbol (
StepperAttributes,
)
import Kore.Equation.Equation (
Equation (..),
)
import Kore.Equation.Equation qualified as Equation
import Kore.Equation.Sentence qualified as Equation
import Kore.IndexedModule.IndexedModule
import Kore.Internal.TermLike
import Kore.Rewrite.Axiom.Identifier (
AxiomIdentifier,
)
import Kore.Rewrite.Axiom.Identifier qualified as AxiomIdentifier
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
)
import Kore.Syntax.Sentence (
SentenceAxiom (..),
)
import Kore.Verified qualified as Verified
import Prelude.Kore
-- | Create a mapping from symbol identifiers to their defining axioms.
extractEquations ::
    VerifiedModule StepperAttributes ->
    Map AxiomIdentifier [Equation VariableName]
extractEquations =
    foldl' moduleWorker Map.empty
        . indexedModulesInScope
  where
    -- Accumulate the equations of one indexed module into the map.
    moduleWorker ::
        Map AxiomIdentifier [Equation VariableName] ->
        VerifiedModule StepperAttributes ->
        Map AxiomIdentifier [Equation VariableName]
    moduleWorker axioms imod =
        foldl' sentenceWorker axioms sentences
      where
        sentences = indexedModuleAxioms imod
    -- 'identifyEquation' yields a 'Maybe'; folding over it inserts zero or
    -- one equations, skipping axioms that are not equations.
    sentenceWorker ::
        Map AxiomIdentifier [Equation VariableName] ->
        (Attribute.Axiom Symbol VariableName, Verified.SentenceAxiom) ->
        Map AxiomIdentifier [Equation VariableName]
    sentenceWorker axioms sentence =
        foldl' insertAxiom axioms (identifyEquation sentence)
    -- Prepend the equation onto the list stored under its identifier,
    -- starting a fresh list when the identifier is new.
    insertAxiom ::
        Map AxiomIdentifier [Equation VariableName] ->
        (AxiomIdentifier, Equation VariableName) ->
        Map AxiomIdentifier [Equation VariableName]
    insertAxiom axioms (name, patt) =
        Map.alter (Just . (patt :) . fromMaybe []) name axioms
    -- Recognize an axiom sentence as an equation; 'Nothing' when it is not.
    identifyEquation ::
        ( Attribute.Axiom Symbol VariableName
        , SentenceAxiom (TermLike VariableName)
        ) ->
        Maybe (AxiomIdentifier, Equation VariableName)
    identifyEquation axiom = do
        equation@Equation{left} <- hush $ Equation.fromSentenceAxiom axiom
        let identifier = AxiomIdentifier.matchAxiomIdentifier left
        pure (identifier, equation)
-- | Result of 'partitionEquations': usable equations split by their role.
data PartitionedEquations = PartitionedEquations
    { functionRules :: ![Equation RewritingVariableName]
    -- ^ Function-definition rules, sorted by 'Equation.equationPriority'.
    , simplificationRules :: ![Equation RewritingVariableName]
    -- ^ Rules recognized by 'Equation.isSimplificationRule'.
    }
{- | Filters and partitions a list of 'EqualityRule's into
simplification rules and function rules. The function rules
are also sorted in order of priority.
-}
partitionEquations ::
    [Equation RewritingVariableName] ->
    PartitionedEquations
partitionEquations equations =
    PartitionedEquations
        { functionRules
        , simplificationRules
        }
  where
    -- Drop equations that must not be used for evaluation/simplification.
    equations' =
        equations
            & filter (not . ignoreEquation)
    -- Sort before partitioning so both result lists keep priority order.
    (simplificationRules, functionRules) =
        partition Equation.isSimplificationRule
            . sortOn Equation.equationPriority
            $ equations'
{- | Should we ignore the 'Equation' for evaluation or simplification?

@ignoreEquation@ returns 'True' if the 'EqualityRule' should not be used in
evaluation or simplification, such as if it is an associativity or commutativity
axiom, or if it was marked non-executable.
-}
ignoreEquation :: Equation RewritingVariableName -> Bool
ignoreEquation Equation{attributes}
    | isAssoc = True
    | isComm = True
    -- TODO (thomas.tuegel): Add unification cases for builtin units and enable
    -- extraction of their axioms.
    | isUnit = True
    | isIdem = True
    | isNonExecutable = True
    -- Overloaded-symbol equations are deliberately kept (not ignored); the
    -- explicit guard documents that decision even though it matches 'otherwise'.
    | Just _ <- getOverload = False
    | otherwise = False
  where
    Assoc{isAssoc} = Attribute.assoc attributes
    Comm{isComm} = Attribute.comm attributes
    Unit{isUnit} = Attribute.unit attributes
    Idem{isIdem} = Attribute.idem attributes
    Overload{getOverload} = Attribute.overload attributes
    NonExecutable{isNonExecutable} = Attribute.nonExecutable attributes
|
311d66e06d213ffbc9e475dab9a26090fb10c5ca1569a51a42ff6c73688d0aa6 | DKurilo/hackerrank | solution.hs | # LANGUAGE OverloadedStrings , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import qualified Data.ByteString.Char8 as BSC
import Data.Array
import Debug.Trace
import System.IO
-- | Per-position lookup tables built by 'buildA': one array for the elements
-- to the left of the position, one for the elements to the right. Each array
-- holds running maxima (see 'calc'), so it is non-decreasing — which is what
-- makes the binary search in 'find' valid.
data Point = P (Array Int Int) (Array Int Int) -- left right
-- | Build, for each of the n input positions, a 'Point' holding the lookup
-- arrays computed from the elements before and after that position.
buildM ∷ Int → [Int] → Array Int Point
buildM n as = listArray (0, (n-1)) $ buildA as []
-- | Walk the list once, carrying the reversed prefix in the second argument;
-- at each element build the left table (from the prefix) and the right table
-- (from the remaining suffix).
buildA ∷ [Int] → [Int] → [Point]
buildA [] _ = []
buildA (a:as) bs = P (arr bs) (arr as):buildA as (a:bs)
    where arr ∷ [Int] → Array Int Int
          -- Empty input yields the empty (0,-1) array; 'find' special-cases it.
          arr [] = listArray (0, -1) []
          arr xs = listArray (0, k) $ xs'
              where (k, xs') = calc (-1) xs
          -- Running maximum of (y - a), truncated at the first y below a.
          -- The 'max' makes the emitted sequence non-decreasing.
          calc ∷ Int → [Int] → (Int, [Int])
          calc _ [] = (-1, [])
          calc pd (y:ys)
              | y < a = (-1, [])
              | otherwise = (1 + j, d:ds)
              where d = max pd (y - a)
                    (j, ds) = calc d ys
-- | Answer one query: 1 (for position d itself) plus the number of
-- precomputed right/left entries within margin m (two binary searches).
findL ∷ Array Int Point → Int → Int → Int
findL as d m = 1 + find m right (bounds right) + find m left (bounds left)
    where (P left right) = as ! d
-- | Binary search on a non-decreasing array; appears to count the leading
-- entries that are <= tr (TODO confirm boundary cases against 'buildA').
-- The literal (0, -1) bounds denote the empty array, contributing 0.
find ∷ Int → Array Int Int → (Int, Int) → Int
find _ _ (0, -1) = 0
find tr xs (b, e)
    | (e - b) ≤ 1 ∧ xs ! b > tr ∧ xs ! e > tr = b
    | (e - b) ≤ 1 ∧ xs ! b ≤ tr ∧ xs ! e > tr = b + 1
    | (e - b) ≤ 1 = e + 1
    | xs ! m > tr = find tr xs (b, m)
    | otherwise = find tr xs (m, e)
    where m = (b + e) `div` 2
main ∷ IO()
main = do
    -- Defensive parse: anything unreadable becomes 0.
    let getInt bx = case BSC.readInt bx of
                        Just (x,_) → x
                        _ → 0
    let getInts = map getInt <$> BSC.split ' '
    n ← getInt <$> BSC.getLine
    as ← getInts <$> BSC.getLine
    let ar = listArray (0,n) as
    let ps = buildM n as
    let ma = minimum as
    let mm = maximum as - ma
    q ← getInt <$> BSC.getLine
    ss ← forM [1..q] $ \_ → do
        (d:m:_) ← getInts <$> BSC.getLine
        -- Fast path: from the global minimum with margin covering the whole
        -- spread, every position qualifies; otherwise run the per-query search.
        return ∘ BSC.pack ∘ show $ if ar ! d ≡ ma ∧ m ≥ mm then n else findL ps d m
    BSC.putStrLn ∘ BSC.intercalate "\n" $ ss
| null | https://raw.githubusercontent.com/DKurilo/hackerrank/37063170567b397b25a2b7123bc9c1299d34814a/stocks-prediction/solution.hs | haskell | left right | # LANGUAGE OverloadedStrings , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import qualified Data.ByteString.Char8 as BSC
import Data.Array
import Debug.Trace
import System.IO
-- | Build, for each of the n input positions, a 'Point' holding the lookup
-- arrays computed from the elements before and after that position.
buildM ∷ Int → [Int] → Array Int Point
buildM n as = listArray (0, (n-1)) $ buildA as []
-- | Walk the list once, carrying the reversed prefix in the second argument;
-- at each element build the left table (from the prefix) and the right table
-- (from the remaining suffix).
buildA ∷ [Int] → [Int] → [Point]
buildA [] _ = []
buildA (a:as) bs = P (arr bs) (arr as):buildA as (a:bs)
    where arr ∷ [Int] → Array Int Int
          -- Empty input yields the empty (0,-1) array; 'find' special-cases it.
          arr [] = listArray (0, -1) []
          arr xs = listArray (0, k) $ xs'
              where (k, xs') = calc (-1) xs
          -- Running maximum of (y - a), truncated at the first y below a.
          -- The 'max' makes the emitted sequence non-decreasing.
          calc ∷ Int → [Int] → (Int, [Int])
          calc _ [] = (-1, [])
          calc pd (y:ys)
              | y < a = (-1, [])
              | otherwise = (1 + j, d:ds)
              where d = max pd (y - a)
                    (j, ds) = calc d ys
-- | Answer one query: 1 (for position d itself) plus the number of
-- precomputed right/left entries within margin m (two binary searches).
findL ∷ Array Int Point → Int → Int → Int
findL as d m = 1 + find m right (bounds right) + find m left (bounds left)
    where (P left right) = as ! d
-- | Binary search on a non-decreasing array; appears to count the leading
-- entries that are <= tr (TODO confirm boundary cases against 'buildA').
-- The literal (0, -1) bounds denote the empty array, contributing 0.
find ∷ Int → Array Int Int → (Int, Int) → Int
find _ _ (0, -1) = 0
find tr xs (b, e)
    | (e - b) ≤ 1 ∧ xs ! b > tr ∧ xs ! e > tr = b
    | (e - b) ≤ 1 ∧ xs ! b ≤ tr ∧ xs ! e > tr = b + 1
    | (e - b) ≤ 1 = e + 1
    | xs ! m > tr = find tr xs (b, m)
    | otherwise = find tr xs (m, e)
    where m = (b + e) `div` 2
main ∷ IO()
main = do
    -- Defensive parse: anything unreadable becomes 0.
    let getInt bx = case BSC.readInt bx of
                        Just (x,_) → x
                        _ → 0
    let getInts = map getInt <$> BSC.split ' '
    n ← getInt <$> BSC.getLine
    as ← getInts <$> BSC.getLine
    let ar = listArray (0,n) as
    let ps = buildM n as
    let ma = minimum as
    let mm = maximum as - ma
    q ← getInt <$> BSC.getLine
    ss ← forM [1..q] $ \_ → do
        (d:m:_) ← getInts <$> BSC.getLine
        -- Fast path: from the global minimum with margin covering the whole
        -- spread, every position qualifies; otherwise run the per-query search.
        return ∘ BSC.pack ∘ show $ if ar ! d ≡ ma ∧ m ≥ mm then n else findL ps d m
    BSC.putStrLn ∘ BSC.intercalate "\n" $ ss
|
66ae785e1c524318973f534046ab9aa0e0343ef2d5d6cff9247d1b3723fc100d | aws-beam/aws-erlang | aws_wellarchitected.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
%% @doc Well-Architected Tool
%%
%% This is the Well-Architected Tool API Reference.
%%
%% The WA Tool API provides programmatic access to the Well-Architected Tool
in the Amazon Web Services Management Console . For information about the
%% Well-Architected Tool, see the Well-Architected Tool User Guide.
-module(aws_wellarchitected).
-export([associate_lenses/3,
associate_lenses/4,
create_lens_share/3,
create_lens_share/4,
create_lens_version/3,
create_lens_version/4,
create_milestone/3,
create_milestone/4,
create_workload/2,
create_workload/3,
create_workload_share/3,
create_workload_share/4,
delete_lens/3,
delete_lens/4,
delete_lens_share/4,
delete_lens_share/5,
delete_workload/3,
delete_workload/4,
delete_workload_share/4,
delete_workload_share/5,
disassociate_lenses/3,
disassociate_lenses/4,
export_lens/2,
export_lens/4,
export_lens/5,
get_answer/4,
get_answer/6,
get_answer/7,
get_lens/2,
get_lens/4,
get_lens/5,
get_lens_review/3,
get_lens_review/5,
get_lens_review/6,
get_lens_review_report/3,
get_lens_review_report/5,
get_lens_review_report/6,
get_lens_version_difference/2,
get_lens_version_difference/4,
get_lens_version_difference/5,
get_milestone/3,
get_milestone/5,
get_milestone/6,
get_workload/2,
get_workload/4,
get_workload/5,
import_lens/2,
import_lens/3,
list_answers/3,
list_answers/5,
list_answers/6,
list_check_details/3,
list_check_details/4,
list_check_summaries/3,
list_check_summaries/4,
list_lens_review_improvements/3,
list_lens_review_improvements/5,
list_lens_review_improvements/6,
list_lens_reviews/2,
list_lens_reviews/4,
list_lens_reviews/5,
list_lens_shares/2,
list_lens_shares/4,
list_lens_shares/5,
list_lenses/1,
list_lenses/3,
list_lenses/4,
list_milestones/3,
list_milestones/4,
list_notifications/2,
list_notifications/3,
list_share_invitations/1,
list_share_invitations/3,
list_share_invitations/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
list_workload_shares/2,
list_workload_shares/4,
list_workload_shares/5,
list_workloads/2,
list_workloads/3,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_answer/5,
update_answer/6,
update_global_settings/2,
update_global_settings/3,
update_lens_review/4,
update_lens_review/5,
update_share_invitation/3,
update_share_invitation/4,
update_workload/3,
update_workload/4,
update_workload_share/4,
update_workload_share/5,
upgrade_lens_review/4,
upgrade_lens_review/5]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Associate a lens to a workload.
%%
%% Up to 10 lenses can be associated with a workload in a single API
%% operation. A maximum of 20 lenses can be associated with a workload.
%%
%% Disclaimer
%%
%% By accessing and/or applying custom lenses created by another Amazon Web
%% Services user or account, you acknowledge that custom lenses created by
%% other users and shared with you are Third Party Content as defined in the
%% Amazon Web Services Customer Agreement.
associate_lenses(Client, WorkloadId, Input) ->
    associate_lenses(Client, WorkloadId, Input, []).

%% Generated operation body: PATCH /workloads/{WorkloadId}/associateLenses,
%% issued through the module-local request/8 helper.
associate_lenses(Client, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/associateLenses"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a lens share.
%%
%% The owner of a lens can share it with other Amazon Web Services accounts,
%% IAM users, an organization, and organizational units (OUs) in the same
%% Amazon Web Services Region. Shared access to a lens is not removed until
%% the lens invitation is deleted.
%%
%% Disclaimer
%%
%% By sharing your custom lenses with other Amazon Web Services accounts, you
%% acknowledge that Amazon Web Services will make your custom lenses
%% available to those other accounts. Those other accounts may continue to
%% access and use your shared custom lenses even if you delete the custom
%% lenses from your own Amazon Web Services account or terminate your Amazon
%% Web Services account.
create_lens_share(Client, LensAlias, Input) ->
    create_lens_share(Client, LensAlias, Input, []).

%% Generated operation body: POST /lenses/{LensAlias}/shares,
%% issued through the module-local request/8 helper.
create_lens_share(Client, LensAlias, Input0, Options0) ->
    Method = post,
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a new lens version.
%%
%% A lens can have up to 100 versions.
%%
%% After a lens has been imported, create a new lens version to publish it.
%% The owner of a lens can share the lens with other Amazon Web Services
%% accounts and IAM users in the same Amazon Web Services Region. Only the
%% owner of a lens can delete it.
create_lens_version(Client, LensAlias, Input) ->
    create_lens_version(Client, LensAlias, Input, []).

%% Generated operation body: POST /lenses/{LensAlias}/versions,
%% issued through the module-local request/8 helper.
create_lens_version(Client, LensAlias, Input0, Options0) ->
    Method = post,
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/versions"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a milestone for an existing workload.
create_milestone(Client, WorkloadId, Input) ->
    create_milestone(Client, WorkloadId, Input, []).

%% Generated operation body: POST /workloads/{WorkloadId}/milestones,
%% issued through the module-local request/8 helper.
create_milestone(Client, WorkloadId, Input0, Options0) ->
    Method = post,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/milestones"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a new workload.
%%
%% The owner of a workload can share the workload with other Amazon Web
%% Services accounts, IAM users, an organization, and organizational units
%% (OUs) in the same Amazon Web Services Region. Only the owner of a workload
%% can delete it.
%%
%% For more information, see Defining a Workload in the Well-Architected Tool
%% User Guide.
create_workload(Client, Input) ->
    create_workload(Client, Input, []).

%% Generated operation body: POST /workloads,
%% issued through the module-local request/8 helper.
create_workload(Client, Input0, Options0) ->
    Method = post,
    Path = ["/workloads"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a workload share.
%%
%% The owner of a workload can share it with other Amazon Web Services
%% accounts and IAM users in the same Amazon Web Services Region. Shared
%% access to a workload is not removed until the workload invitation is
%% deleted.
%%
%% For more information, see Sharing a Workload in the Well-Architected Tool
%% User Guide.
create_workload_share(Client, WorkloadId, Input) ->
    create_workload_share(Client, WorkloadId, Input, []).

%% Generated operation body: POST /workloads/{WorkloadId}/shares,
%% issued through the module-local request/8 helper.
create_workload_share(Client, WorkloadId, Input0, Options0) ->
    Method = post,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete an existing lens.
%%
%% Only the owner of a lens can delete it. After the lens is deleted, Amazon
%% Web Services accounts and IAM users that you shared the lens with can
%% continue to use it, but they will no longer be able to apply it to new
%% workloads.
%%
%% Disclaimer
%%
%% By sharing your custom lenses with other Amazon Web Services accounts, you
%% acknowledge that Amazon Web Services will make your custom lenses
%% available to those other accounts. Those other accounts may continue to
%% access and use your shared custom lenses even if you delete the custom
%% lenses from your own Amazon Web Services account or terminate your Amazon
%% Web Services account.
delete_lens(Client, LensAlias, Input) ->
    delete_lens(Client, LensAlias, Input, []).

%% Generated operation body: DELETE /lenses/{LensAlias}; the listed input
%% members are lifted into query-string parameters before the request is sent.
delete_lens(Client, LensAlias, Input0, Options0) ->
    Method = delete,
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    QueryMapping = [
                     {<<"ClientRequestToken">>, <<"ClientRequestToken">>},
                     {<<"LensStatus">>, <<"LensStatus">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete a lens share.
%%
%% After the lens share is deleted, Amazon Web Services accounts, IAM users,
%% organizations, and organizational units (OUs) that you shared the lens
%% with can continue to use it, but they will no longer be able to apply it
%% to new workloads.
%%
%% Disclaimer
%%
%% By sharing your custom lenses with other Amazon Web Services accounts, you
%% acknowledge that Amazon Web Services will make your custom lenses
%% available to those other accounts. Those other accounts may continue to
%% access and use your shared custom lenses even if you delete the custom
%% lenses from your own Amazon Web Services account or terminate your Amazon
%% Web Services account.
delete_lens_share(Client, LensAlias, ShareId, Input) ->
    delete_lens_share(Client, LensAlias, ShareId, Input, []).

%% Generated operation body: DELETE /lenses/{LensAlias}/shares/{ShareId};
%% ClientRequestToken is lifted into a query-string parameter.
delete_lens_share(Client, LensAlias, ShareId, Input0, Options0) ->
    Method = delete,
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares/", aws_util:encode_uri(ShareId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    QueryMapping = [
                     {<<"ClientRequestToken">>, <<"ClientRequestToken">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete an existing workload.
delete_workload(Client, WorkloadId, Input) ->
    delete_workload(Client, WorkloadId, Input, []).

%% Generated operation body: DELETE /workloads/{WorkloadId};
%% ClientRequestToken is lifted into a query-string parameter.
delete_workload(Client, WorkloadId, Input0, Options0) ->
    Method = delete,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    QueryMapping = [
                     {<<"ClientRequestToken">>, <<"ClientRequestToken">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete a workload share.
delete_workload_share(Client, ShareId, WorkloadId, Input) ->
    delete_workload_share(Client, ShareId, WorkloadId, Input, []).

%% Generated operation body: DELETE /workloads/{WorkloadId}/shares/{ShareId};
%% ClientRequestToken is lifted into a query-string parameter.
delete_workload_share(Client, ShareId, WorkloadId, Input0, Options0) ->
    Method = delete,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares/", aws_util:encode_uri(ShareId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    QueryMapping = [
                     {<<"ClientRequestToken">>, <<"ClientRequestToken">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Disassociate a lens from a workload.
%%
%% Up to 10 lenses can be disassociated from a workload in a single API
%% operation.
%%
%% The Amazon Web Services Well-Architected Framework lens
%% (`wellarchitected') cannot be removed from a workload.
disassociate_lenses(Client, WorkloadId, Input) ->
    disassociate_lenses(Client, WorkloadId, Input, []).

%% Generated operation body: PATCH /workloads/{WorkloadId}/disassociateLenses,
%% issued through the module-local request/8 helper.
disassociate_lenses(Client, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/disassociateLenses"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Export an existing lens.
%%
%% Lenses are defined in JSON. For more information, see JSON format
%% specification in the Well-Architected Tool User Guide. Only the owner of a
%% lens can export it.
%%
%% Disclaimer
%%
%% Do not include or gather personal identifiable information (PII) of end
%% users or other identifiable individuals in or via your custom lenses. If
%% your custom lens or those shared with you and used in your account do
%% include or collect PII you are responsible for: ensuring that the included
%% PII is processed in accordance with applicable law, providing adequate
%% privacy notices, and obtaining necessary consents for processing such
%% data.
export_lens(Client, LensAlias)
  when is_map(Client) ->
    export_lens(Client, LensAlias, #{}, #{}).

export_lens(Client, LensAlias, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    export_lens(Client, LensAlias, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /lenses/{LensAlias}/export. HeadersMap is
%% accepted for API symmetry; this operation defines no header parameters.
export_lens(Client, LensAlias, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/export"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"LensVersion">>, maps:get(<<"LensVersion">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get the answer to a specific question in a workload review.
get_answer(Client, LensAlias, QuestionId, WorkloadId)
  when is_map(Client) ->
    get_answer(Client, LensAlias, QuestionId, WorkloadId, #{}, #{}).

get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap, []).

%% Generated GET wrapper for
%% /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers/{QuestionId}.
get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers/", aws_util:encode_uri(QuestionId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get an existing lens.
get_lens(Client, LensAlias)
  when is_map(Client) ->
    get_lens(Client, LensAlias, #{}, #{}).

get_lens(Client, LensAlias, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_lens(Client, LensAlias, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /lenses/{LensAlias}; LensVersion is optional.
get_lens(Client, LensAlias, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"LensVersion">>, maps:get(<<"LensVersion">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get lens review.
get_lens_review(Client, LensAlias, WorkloadId)
  when is_map(Client) ->
    get_lens_review(Client, LensAlias, WorkloadId, #{}, #{}).

get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /workloads/{WorkloadId}/lensReviews/{LensAlias}.
get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get lens review report.
get_lens_review_report(Client, LensAlias, WorkloadId)
  when is_map(Client) ->
    get_lens_review_report(Client, LensAlias, WorkloadId, #{}, #{}).

get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).

%% Generated GET wrapper for
%% /workloads/{WorkloadId}/lensReviews/{LensAlias}/report.
get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/report"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get lens version differences.
get_lens_version_difference(Client, LensAlias)
  when is_map(Client) ->
    get_lens_version_difference(Client, LensAlias, #{}, #{}).

get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /lenses/{LensAlias}/versionDifference; both
%% version parameters are optional.
get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/versionDifference"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"BaseLensVersion">>, maps:get(<<"BaseLensVersion">>, QueryMap, undefined)},
        {<<"TargetLensVersion">>, maps:get(<<"TargetLensVersion">>, QueryMap, undefined)}
      ],
    %% Drop unset (undefined) query parameters before issuing the request.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get a milestone for an existing workload.
get_milestone(Client, MilestoneNumber, WorkloadId)
  when is_map(Client) ->
    get_milestone(Client, MilestoneNumber, WorkloadId, #{}, #{}).

get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /workloads/{WorkloadId}/milestones/{MilestoneNumber};
%% this operation takes no query or header parameters.
get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/milestones/", aws_util:encode_uri(MilestoneNumber), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query_ = [],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Get an existing workload.
get_workload(Client, WorkloadId)
  when is_map(Client) ->
    get_workload(Client, WorkloadId, #{}, #{}).

get_workload(Client, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_workload(Client, WorkloadId, QueryMap, HeadersMap, []).

%% Generated GET wrapper for /workloads/{WorkloadId};
%% this operation takes no query or header parameters.
get_workload(Client, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query_ = [],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Import a new lens.
%%
%% The lens can not be applied to workloads or shared with other Amazon Web
%% Services accounts until it's published with `CreateLensVersion'
%%
%% Lenses are defined in JSON. For more information, see JSON format
%% specification in the Well-Architected Tool User Guide.
%%
%% A custom lens can not exceed 500 KB in size.
%%
%% Disclaimer
%%
%% Do not include or gather personal identifiable information (PII) of end
%% users or other identifiable individuals in or via your custom lenses. If
%% your custom lens or those shared with you and used in your account do
%% include or collect PII you are responsible for: ensuring that the included
%% PII is processed in accordance with applicable law, providing adequate
%% privacy notices, and obtaining necessary consents for processing such
%% data.
import_lens(Client, Input) ->
    import_lens(Client, Input, []).
import_lens(Client, Input0, Options0) ->
    %% PUT /importLens with the JSON-encoded Input as the request body.
    Method = put,
    Path = ["/importLens"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc List of answers.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_answers(Client, LensAlias, WorkloadId)
  when is_map(Client) ->
    list_answers(Client, LensAlias, WorkloadId, #{}, #{}).
list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
        {<<"PillarId">>, maps:get(<<"PillarId">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List of Trusted Advisor check details by account related to the
%% workload.
list_check_details(Client, WorkloadId, Input) ->
    list_check_details(Client, WorkloadId, Input, []).
list_check_details(Client, WorkloadId, Input0, Options0) ->
    %% POST /workloads/{WorkloadId}/checks with Input as the JSON body.
    Method = post,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/checks"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc List of Trusted Advisor checks summarized for all accounts related to
%% the workload.
list_check_summaries(Client, WorkloadId, Input) ->
    list_check_summaries(Client, WorkloadId, Input, []).
list_check_summaries(Client, WorkloadId, Input0, Options0) ->
    %% POST /workloads/{WorkloadId}/checkSummaries with Input as the JSON body.
    Method = post,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/checkSummaries"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc List lens review improvements.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_lens_review_improvements(Client, LensAlias, WorkloadId)
  when is_map(Client) ->
    list_lens_review_improvements(Client, LensAlias, WorkloadId, #{}, #{}).
list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/improvements
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/improvements"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
        {<<"PillarId">>, maps:get(<<"PillarId">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List lens reviews.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_lens_reviews(Client, WorkloadId)
  when is_map(Client) ->
    list_lens_reviews(Client, WorkloadId, #{}, #{}).
list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap, []).
list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /workloads/{WorkloadId}/lensReviews
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the lens shares associated with the lens.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_lens_shares(Client, LensAlias)
  when is_map(Client) ->
    list_lens_shares(Client, LensAlias, #{}, #{}).
list_lens_shares(Client, LensAlias, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_lens_shares(Client, LensAlias, QueryMap, HeadersMap, []).
list_lens_shares(Client, LensAlias, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /lenses/{LensAlias}/shares
    Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
        {<<"SharedWithPrefix">>, maps:get(<<"SharedWithPrefix">>, QueryMap, undefined)},
        {<<"Status">>, maps:get(<<"Status">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the available lenses.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_lenses(Client)
  when is_map(Client) ->
    list_lenses(Client, #{}, #{}).
list_lenses(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_lenses(Client, QueryMap, HeadersMap, []).
list_lenses(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /lenses
    Path = ["/lenses"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"LensName">>, maps:get(<<"LensName">>, QueryMap, undefined)},
        {<<"LensStatus">>, maps:get(<<"LensStatus">>, QueryMap, undefined)},
        {<<"LensType">>, maps:get(<<"LensType">>, QueryMap, undefined)},
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List all milestones for an existing workload.
list_milestones(Client, WorkloadId, Input) ->
    list_milestones(Client, WorkloadId, Input, []).
list_milestones(Client, WorkloadId, Input0, Options0) ->
    %% POST /workloads/{WorkloadId}/milestonesSummaries with Input as JSON body.
    Method = post,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/milestonesSummaries"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc List lens notifications.
list_notifications(Client, Input) ->
    list_notifications(Client, Input, []).
list_notifications(Client, Input0, Options0) ->
    %% POST /notifications with Input as the JSON body.
    Method = post,
    Path = ["/notifications"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc List the workload invitations.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_share_invitations(Client)
  when is_map(Client) ->
    list_share_invitations(Client, #{}, #{}).
list_share_invitations(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_share_invitations(Client, QueryMap, HeadersMap, []).
list_share_invitations(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /shareInvitations
    Path = ["/shareInvitations"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"LensNamePrefix">>, maps:get(<<"LensNamePrefix">>, QueryMap, undefined)},
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
        {<<"ShareResourceType">>, maps:get(<<"ShareResourceType">>, QueryMap, undefined)},
        {<<"WorkloadNamePrefix">>, maps:get(<<"WorkloadNamePrefix">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the tags for a resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
list_tags_for_resource(Client, WorkloadArn)
  when is_map(Client) ->
    list_tags_for_resource(Client, WorkloadArn, #{}, #{}).
list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap, []).
list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /tags/{WorkloadArn}
    Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query_ = [],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the workload shares associated with the workload.
%%
%% Optional query parameters are read from QueryMap; entries left at
%% `undefined' are dropped before the request is sent.
list_workload_shares(Client, WorkloadId)
  when is_map(Client) ->
    list_workload_shares(Client, WorkloadId, #{}, #{}).
list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap, []).
list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% GET /workloads/{WorkloadId}/shares
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
        {<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
        {<<"SharedWithPrefix">>, maps:get(<<"SharedWithPrefix">>, QueryMap, undefined)},
        {<<"Status">>, maps:get(<<"Status">>, QueryMap, undefined)}
      ],
    %% Drop unset parameters so they do not appear in the query string.
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List workloads.
%%
%% Paginated.
list_workloads(Client, Input) ->
    list_workloads(Client, Input, []).
list_workloads(Client, Input0, Options0) ->
    %% POST /workloadsSummaries with Input as the JSON body.
    Method = post,
    Path = ["/workloadsSummaries"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Adds one or more tags to the specified resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
tag_resource(Client, WorkloadArn, Input) ->
    tag_resource(Client, WorkloadArn, Input, []).
tag_resource(Client, WorkloadArn, Input0, Options0) ->
    %% POST /tags/{WorkloadArn} with Input as the JSON body.
    Method = post,
    Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes specified tags from a resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
%%
%% To specify multiple tags, use separate tagKeys parameters, for example:
%%
%% `DELETE /tags/WorkloadArn?tagKeys=key1&tagKeys=key2'
untag_resource(Client, WorkloadArn, Input) ->
    untag_resource(Client, WorkloadArn, Input, []).
untag_resource(Client, WorkloadArn, Input0, Options0) ->
    %% DELETE /tags/{WorkloadArn}
    Method = delete,
    Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    %% `TagKeys' is moved out of the input map and sent as repeated
    %% `tagKeys' query-string parameters instead of in the body.
    QueryMapping = [
                     {<<"tagKeys">>, <<"TagKeys">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update the answer to a specific question in a workload review.
update_answer(Client, LensAlias, QuestionId, WorkloadId, Input) ->
    update_answer(Client, LensAlias, QuestionId, WorkloadId, Input, []).
update_answer(Client, LensAlias, QuestionId, WorkloadId, Input0, Options0) ->
    %% PATCH /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers/{QuestionId}
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers/", aws_util:encode_uri(QuestionId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates whether the Amazon Web Services account is opted into
%% organization sharing features.
update_global_settings(Client, Input) ->
    update_global_settings(Client, Input, []).
update_global_settings(Client, Input0, Options0) ->
    %% PATCH /global-settings with Input as the JSON body.
    Method = patch,
    Path = ["/global-settings"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update lens review.
update_lens_review(Client, LensAlias, WorkloadId, Input) ->
    update_lens_review(Client, LensAlias, WorkloadId, Input, []).
update_lens_review(Client, LensAlias, WorkloadId, Input0, Options0) ->
    %% PATCH /workloads/{WorkloadId}/lensReviews/{LensAlias}
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update a workload or custom lens share invitation.
%%
%% This API operation can be called independently of any resource. Previous
%% documentation implied that a workload ARN must be specified.
update_share_invitation(Client, ShareInvitationId, Input) ->
    update_share_invitation(Client, ShareInvitationId, Input, []).
update_share_invitation(Client, ShareInvitationId, Input0, Options0) ->
    %% PATCH /shareInvitations/{ShareInvitationId}
    Method = patch,
    Path = ["/shareInvitations/", aws_util:encode_uri(ShareInvitationId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update an existing workload.
update_workload(Client, WorkloadId, Input) ->
    update_workload(Client, WorkloadId, Input, []).
update_workload(Client, WorkloadId, Input0, Options0) ->
    %% PATCH /workloads/{WorkloadId}
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update a workload share.
update_workload_share(Client, ShareId, WorkloadId, Input) ->
    update_workload_share(Client, ShareId, WorkloadId, Input, []).
update_workload_share(Client, ShareId, WorkloadId, Input0, Options0) ->
    %% PATCH /workloads/{WorkloadId}/shares/{ShareId}
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares/", aws_util:encode_uri(ShareId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Upgrade lens review.
upgrade_lens_review(Client, LensAlias, WorkloadId, Input) ->
    upgrade_lens_review(Client, LensAlias, WorkloadId, Input, []).
upgrade_lens_review(Client, LensAlias, WorkloadId, Input0, Options0) ->
    %% PUT /workloads/{WorkloadId}/lensReviews/{LensAlias}/upgrade
    Method = put,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/upgrade"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%%====================================================================
%% Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
    {ok, {integer(), list()}} |
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map(),
    Error :: map().
%% Wrap the actual HTTP call in a fun so aws_request:request/2 can apply
%% its retry policy (driven by Options) around do_request/8.
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Build, sign (SigV4) and execute one HTTP request against the
%% wellarchitected endpoint, then normalise the hackney response.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    Client1 = Client#{service => <<"wellarchitected">>},
    Host = build_host(<<"wellarchitected">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% Either send the caller-supplied binary body verbatim, or
    %% JSON-encode the input map (undefined encodes to an empty body).
    Payload =
      case proplists:get_value(send_body_as_binary, Options) of
          true ->
              maps:get(<<"Body">>, Input, <<"">>);
          false ->
              encode_payload(Input)
      end,
    %% Optionally add the SHA-256 content checksum header before signing,
    %% so the signature covers it.
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                            true ->
                                add_checksum_hash_header(AdditionalHeaders1, Payload);
                            false ->
                                AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    MethodBin = aws_request:method_to_binary(Method),
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepend an `X-Amz-CheckSum-SHA256' header carrying the base64-encoded
%% SHA-256 digest of Body to the given header list.
add_checksum_hash_header(Headers, Body) ->
    Digest = crypto:hash(sha256, Body),
    [{<<"X-Amz-CheckSum-SHA256">>, base64:encode(Digest)} | Headers].
%% Bodiless hackney response: any standard 2xx (or the operation-specific
%% success code) is success; anything else is an error.
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
%% Successful response with a body: decode it as JSON unless the caller
%% requested the raw binary (receive_body_as_binary).
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
        %% An empty body on a success status decodes to an empty map.
        {ok, <<>>} when StatusCode =:= 200;
                        StatusCode =:= SuccessStatusCode ->
            {ok, #{}, {StatusCode, ResponseHeaders, Client}};
        {ok, Body} ->
            Result = case DecodeBody of
                         true ->
                             try
                               jsx:decode(Body)
                             catch
                               Error:Reason:Stack ->
                                 erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                             end;
                         false -> #{<<"Body">> => Body}
                     end,
            {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
  %% Retriable error if retries are enabled
  {error, service_unavailable};
%% Any other error status: decode the JSON error document from the body.
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
%% Transport-level failure reported by hackney.
handle_response({error, Reason}, _, _DecodeBody) ->
    {error, Reason}.
%% Compute the request host. A `<<"local">>' region maps to the configured
%% endpoint, or `<<"localhost">>' when none is set; otherwise the host is
%% `EndpointPrefix.Region.Endpoint'.
build_host(EndpointPrefix, Client) ->
    case Client of
        #{region := <<"local">>, endpoint := Endpoint} ->
            Endpoint;
        #{region := <<"local">>} ->
            <<"localhost">>;
        #{region := Region, endpoint := Endpoint} ->
            aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>)
    end.
%% Assemble the full request URL as `Proto://Host:Port<Path>'.
build_url(Host, Path0, Client) ->
    Path = erlang:iolist_to_binary(Path0),
    Origin = [aws_client:proto(Client), <<"://">>, Host, <<":">>, aws_client:port(Client)],
    aws_util:binary_join(Origin ++ [Path], <<"">>).
%% JSON-encode the request payload; `undefined' means an empty body.
-spec encode_payload(undefined | map()) -> binary().
encode_payload(Input) ->
    case Input of
        undefined -> <<>>;
        _ -> jsx:encode(Input)
    end.
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_wellarchitected.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
@doc Well-Architected Tool
This is the Well-Architected Tool API Reference.
The WA Tool API provides programmatic access to the Well-Architected Tool
Well-Architected Tool, see the Well-Architected Tool User Guide.
====================================================================
API
====================================================================
@doc Associate a lens to a workload.
Disclaimer
Services user or account, you acknowledge that custom lenses created by
other users and shared with you are Third Party Content as defined in the
@doc Create a lens share.
the lens invitation is deleted.
Disclaimer
available to those other accounts. Those other accounts may continue to
access and use your shared custom lenses even if you delete the custom
@doc Create a new lens version.
After a lens has been imported, create a new lens version to publish it.
owner of a lens can delete it.
@doc Create a milestone for an existing workload.
@doc Create a new workload.
can delete it.
For more information, see Defining a Workload in the Well-Architected Tool
User Guide.
@doc Create a workload share.
access to a workload is not removed until the workload invitation is
deleted.
User Guide.
@doc Delete an existing lens.
continue to use it, but they will no longer be able to apply it to new
workloads.
Disclaimer
available to those other accounts. Those other accounts may continue to
access and use your shared custom lenses even if you delete the custom
@doc Delete a lens share.
with can continue to use it, but they will no longer be able to apply it
to new workloads.
Disclaimer
available to those other accounts. Those other accounts may continue to
access and use your shared custom lenses even if you delete the custom
@doc Delete an existing workload.
@doc Delete a workload share.
operation.
(`wellarchitected') cannot be removed from a workload.
@doc Export an existing lens.
Lenses are defined in JSON. For more information, see JSON format
specification in the Well-Architected Tool User Guide. Only the owner of a
lens can export it.
Disclaimer
Do not include or gather personal identifiable information (PII) of end
users or other identifiable individuals in or via your custom lenses. If
your custom lens or those shared with you and used in your account do
include or collect PII you are responsible for: ensuring that the included
%% PII is processed in accordance with applicable law, providing adequate
%% privacy notices, and obtaining necessary consents for processing such
%% data.
%% @doc Get the answer to a specific question in a workload review.
%% @doc Get an existing lens.
%% @doc Get lens review.
%% @doc Get lens review report.
%% @doc Get lens version differences.
%% @doc Get a milestone for an existing workload.
%% @doc Get an existing workload.
%% @doc Import a new lens.
%% The lens cannot be applied to workloads or shared with other Amazon Web
%% Services accounts until it's published with `CreateLensVersion'.
%% Lenses are defined in JSON. For more information, see JSON format
%% specification in the Well-Architected Tool User Guide.
%% Disclaimer
%% Do not include or gather personal identifiable information (PII) of end
%% users or other identifiable individuals in or via your custom lenses. If
%% your custom lens or those shared with you and used in your account do
%% include or collect PII you are responsible for: ensuring that the included
%% PII is processed in accordance with applicable law, providing adequate
%% privacy notices, and obtaining necessary consents for processing such
%% data.
%% @doc List of answers.
%% @doc List of Trusted Advisor check details by account related to the
%% workload.
%% @doc List of Trusted Advisor checks summarized for all accounts related to
%% the workload.
%% @doc List lens review improvements.
%% @doc List lens reviews.
%% @doc List the lens shares associated with the lens.
%% @doc List the available lenses.
%% @doc List all milestones for an existing workload.
%% @doc List lens notifications.
%% @doc List the workload invitations.
%% @doc List the tags for a resource.
%% @doc List the workload shares associated with the workload.
%% @doc List workloads.
%% @doc Deletes specified tags from a resource.
%% To specify multiple tags, use separate tagKeys parameters, for example:
%% @doc Update the answer to a specific question in a workload review.
%% @doc Update whether the Amazon Web Services account is opted into
%% organization sharing features.
%% @doc Update lens review.
%% @doc Update a workload or custom lens share invitation.
%% This API operation can be called independently of any resource. Previous
%% documentation implied that a workload ARN must be specified.
%% @doc Update an existing workload.
%% @doc Update a workload share.
%% @doc Upgrade lens review.
%% ====================================================================
%% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
%% ====================================================================
%% See https://github.com/aws-beam/aws-codegen for more details.
%% This module provides the Well-Architected Tool API, which is also available
%% in the Amazon Web Services Management Console. For information about the
%% Well-Architected Tool, see the Well-Architected Tool User Guide.
-module(aws_wellarchitected).
-export([associate_lenses/3,
associate_lenses/4,
create_lens_share/3,
create_lens_share/4,
create_lens_version/3,
create_lens_version/4,
create_milestone/3,
create_milestone/4,
create_workload/2,
create_workload/3,
create_workload_share/3,
create_workload_share/4,
delete_lens/3,
delete_lens/4,
delete_lens_share/4,
delete_lens_share/5,
delete_workload/3,
delete_workload/4,
delete_workload_share/4,
delete_workload_share/5,
disassociate_lenses/3,
disassociate_lenses/4,
export_lens/2,
export_lens/4,
export_lens/5,
get_answer/4,
get_answer/6,
get_answer/7,
get_lens/2,
get_lens/4,
get_lens/5,
get_lens_review/3,
get_lens_review/5,
get_lens_review/6,
get_lens_review_report/3,
get_lens_review_report/5,
get_lens_review_report/6,
get_lens_version_difference/2,
get_lens_version_difference/4,
get_lens_version_difference/5,
get_milestone/3,
get_milestone/5,
get_milestone/6,
get_workload/2,
get_workload/4,
get_workload/5,
import_lens/2,
import_lens/3,
list_answers/3,
list_answers/5,
list_answers/6,
list_check_details/3,
list_check_details/4,
list_check_summaries/3,
list_check_summaries/4,
list_lens_review_improvements/3,
list_lens_review_improvements/5,
list_lens_review_improvements/6,
list_lens_reviews/2,
list_lens_reviews/4,
list_lens_reviews/5,
list_lens_shares/2,
list_lens_shares/4,
list_lens_shares/5,
list_lenses/1,
list_lenses/3,
list_lenses/4,
list_milestones/3,
list_milestones/4,
list_notifications/2,
list_notifications/3,
list_share_invitations/1,
list_share_invitations/3,
list_share_invitations/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
list_workload_shares/2,
list_workload_shares/4,
list_workload_shares/5,
list_workloads/2,
list_workloads/3,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_answer/5,
update_answer/6,
update_global_settings/2,
update_global_settings/3,
update_lens_review/4,
update_lens_review/5,
update_share_invitation/3,
update_share_invitation/4,
update_workload/3,
update_workload/4,
update_workload_share/4,
update_workload_share/5,
upgrade_lens_review/4,
upgrade_lens_review/5]).
-include_lib("hackney/include/hackney_lib.hrl").
%% @doc Associate a lens to a workload.
%%
%% Up to 10 lenses can be associated with a workload in a single API
%% operation. A maximum of 20 lenses can be associated with a workload.
%%
%% By accessing and/or applying custom lenses created by another Amazon Web
%% Services user or account, you acknowledge that such custom lenses are
%% third-party content as defined in the
%% Amazon Web Services Customer Agreement.
%% Issues `PATCH /workloads/{WorkloadId}/associateLenses'.
associate_lenses(Client, WorkloadId, Input) ->
  associate_lenses(Client, WorkloadId, Input, []).

%% Same as associate_lenses/3, with caller-supplied request options
%% prepended to the generated defaults.
associate_lenses(Client, WorkloadId, Input, ExtraOptions) ->
  Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/associateLenses"],
  ReqOptions = [{send_body_as_binary, false},
                {receive_body_as_binary, false},
                {append_sha256_content_hash, false}
                | ExtraOptions],
  %% This operation carries no query parameters and no custom headers.
  request(Client, patch, Path, [], [], Input, ReqOptions, undefined).
%% @doc Create a lens share.
%%
%% The owner of a lens can share it with other Amazon Web Services accounts,
%% IAM users, an organization, and organizational units (OUs) in the same
%% Amazon Web Services Region. Shared access to a lens is not removed until
%% the lens invitation is deleted.
%%
%% By sharing your custom lenses with other Amazon Web Services accounts, you
%% acknowledge that Amazon Web Services will make your custom lenses
%% available to those other accounts, and those accounts may continue to use
%% your shared custom lenses even if you delete the custom
%% lenses from your own Amazon Web Services account or terminate your Amazon
%% Web Services account.
%% Create a lens share: issues `POST /lenses/{LensAlias}/shares'.
create_lens_share(Client, LensAlias, Input) ->
create_lens_share(Client, LensAlias, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
create_lens_share(Client, LensAlias, Input0, Options0) ->
Method = post,
Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a new lens version.
%%
%% A lens can have up to 100 versions.
%%
%% The owner of a lens can share the lens with other Amazon Web Services
%% accounts and IAM users in the same Amazon Web Services Region. Only the
%% owner of a lens can create a new version of it.
%% Create a lens version: issues `POST /lenses/{LensAlias}/versions'.
create_lens_version(Client, LensAlias, Input) ->
create_lens_version(Client, LensAlias, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
create_lens_version(Client, LensAlias, Input0, Options0) ->
Method = post,
Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/versions"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Create a milestone for an existing workload:
%% issues `POST /workloads/{WorkloadId}/milestones'.
create_milestone(Client, WorkloadId, Input) ->
create_milestone(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
create_milestone(Client, WorkloadId, Input0, Options0) ->
Method = post,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/milestones"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a new workload.
%%
%% The owner of a workload can share the workload with other Amazon Web
%% Services accounts, IAM users, an organization, and organizational units
%% (OUs) in the same Amazon Web Services Region. Only the owner of a workload
%% can delete it.
%% Create a new workload: issues `POST /workloads'.
create_workload(Client, Input) ->
create_workload(Client, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
create_workload(Client, Input0, Options0) ->
Method = post,
Path = ["/workloads"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Create a workload share.
%%
%% The owner of a workload can share it with other Amazon Web Services
%% accounts and IAM users in the same Amazon Web Services Region. Shared
%% access to a workload is not removed until the workload invitation is
%% deleted.
%%
%% For more information, see Sharing a Workload in the Well-Architected Tool
%% User Guide.
%% Create a workload share: issues `POST /workloads/{WorkloadId}/shares'.
create_workload_share(Client, WorkloadId, Input) ->
create_workload_share(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
create_workload_share(Client, WorkloadId, Input0, Options0) ->
Method = post,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete an existing lens.
%%
%% Only the owner of a lens can delete it. After the lens is deleted, Amazon
%% Web Services accounts and IAM users that you shared the lens with can
%% continue to use it, but they will no longer be able to apply it to new
%% workloads.
%%
%% By sharing your custom lenses with other Amazon Web Services accounts, you
%% acknowledge that Amazon Web Services will make your custom lenses
%% available to those other accounts, and those accounts may continue to use
%% your shared custom lenses even if you delete the custom
%% lenses from your own Amazon Web Services account or terminate your Amazon
%% Web Services account.
%% Issues `DELETE /lenses/{LensAlias}' with ClientRequestToken and
%% LensStatus lifted out of the input into the query string.
delete_lens(Client, LensAlias, Input) ->
  delete_lens(Client, LensAlias, Input, []).

%% Same as delete_lens/3, with caller-supplied request options prepended
%% to the generated defaults.
delete_lens(Client, LensAlias, Input0, ExtraOptions) ->
  Path = ["/lenses/", aws_util:encode_uri(LensAlias), ""],
  ReqOptions = [{send_body_as_binary, false},
                {receive_body_as_binary, false},
                {append_sha256_content_hash, false}
                | ExtraOptions],
  %% build_headers/2 moves the mapped keys from the input into Query and
  %% returns the remaining input as the request body.
  QueryMapping = [{<<"ClientRequestToken">>, <<"ClientRequestToken">>},
                  {<<"LensStatus">>, <<"LensStatus">>}],
  {Query, Input} = aws_request:build_headers(QueryMapping, Input0),
  request(Client, delete, Path, Query, [], Input, ReqOptions, undefined).
After the lens share is deleted , Amazon Web Services accounts , IAM users ,
organizations , and organizational units ( OUs ) that you shared the lens
By sharing your custom lenses with other Amazon Web Services accounts , you
acknowledge that Amazon Web Services will make your custom lenses
lenses from your own Amazon Web Services account or terminate your Amazon
Web Services account .
%% Delete a lens share: issues `DELETE /lenses/{LensAlias}/shares/{ShareId}'.
delete_lens_share(Client, LensAlias, ShareId, Input) ->
delete_lens_share(Client, LensAlias, ShareId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
delete_lens_share(Client, LensAlias, ShareId, Input0, Options0) ->
Method = delete,
Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares/", aws_util:encode_uri(ShareId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
%% ClientRequestToken is moved from the input into the query string.
QueryMapping = [
{<<"ClientRequestToken">>, <<"ClientRequestToken">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Delete an existing workload: issues `DELETE /workloads/{WorkloadId}'.
delete_workload(Client, WorkloadId, Input) ->
delete_workload(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
delete_workload(Client, WorkloadId, Input0, Options0) ->
Method = delete,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
%% ClientRequestToken is moved from the input into the query string.
QueryMapping = [
{<<"ClientRequestToken">>, <<"ClientRequestToken">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Delete a workload share:
%% issues `DELETE /workloads/{WorkloadId}/shares/{ShareId}'.
delete_workload_share(Client, ShareId, WorkloadId, Input) ->
delete_workload_share(Client, ShareId, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
delete_workload_share(Client, ShareId, WorkloadId, Input0, Options0) ->
Method = delete,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares/", aws_util:encode_uri(ShareId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
%% ClientRequestToken is moved from the input into the query string.
QueryMapping = [
{<<"ClientRequestToken">>, <<"ClientRequestToken">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Disassociate a lens from a workload.
%%
%% Up to 10 lenses can be disassociated from a workload in a single API
%% operation.
%%
%% The Amazon Web Services Well-Architected Framework lens
%% (`wellarchitected') cannot be removed from a workload.
%% Disassociate lenses from a workload:
%% issues `PATCH /workloads/{WorkloadId}/disassociateLenses'.
disassociate_lenses(Client, WorkloadId, Input) ->
disassociate_lenses(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
disassociate_lenses(Client, WorkloadId, Input0, Options0) ->
Method = patch,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/disassociateLenses"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Issues `GET /lenses/{LensAlias}/export'. The optional `LensVersion'
%% query parameter is taken from QueryMap when present.
export_lens(Client, LensAlias)
  when is_map(Client) ->
  export_lens(Client, LensAlias, #{}, #{}).

export_lens(Client, LensAlias, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
  export_lens(Client, LensAlias, QueryMap, HeadersMap, []).

export_lens(Client, LensAlias, QueryMap, HeadersMap, ExtraOptions)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOptions) ->
  Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/export"],
  ReqOptions = [{send_body_as_binary, false},
                {receive_body_as_binary, false}
                | ExtraOptions],
  %% Only include the query parameter when the caller supplied it.
  Query =
    case maps:get(<<"LensVersion">>, QueryMap, undefined) of
      undefined -> [];
      Version -> [{<<"LensVersion">>, Version}]
    end,
  request(Client, get, Path, Query, [], undefined, ReqOptions, undefined).
%% Get the answer to a specific question in a workload review:
%% issues `GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers/{QuestionId}'.
get_answer(Client, LensAlias, QuestionId, WorkloadId)
when is_map(Client) ->
get_answer(Client, LensAlias, QuestionId, WorkloadId, #{}, #{}).
get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MilestoneNumber' query parameter.
get_answer(Client, LensAlias, QuestionId, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers/", aws_util:encode_uri(QuestionId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% Get an existing lens: issues `GET /lenses/{LensAlias}'.
get_lens(Client, LensAlias)
when is_map(Client) ->
get_lens(Client, LensAlias, #{}, #{}).
get_lens(Client, LensAlias, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_lens(Client, LensAlias, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `LensVersion' query parameter.
get_lens(Client, LensAlias, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/lenses/", aws_util:encode_uri(LensAlias), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"LensVersion">>, maps:get(<<"LensVersion">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% Get lens review:
%% issues `GET /workloads/{WorkloadId}/lensReviews/{LensAlias}'.
get_lens_review(Client, LensAlias, WorkloadId)
when is_map(Client) ->
get_lens_review(Client, LensAlias, WorkloadId, #{}, #{}).
get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MilestoneNumber' query parameter.
get_lens_review(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% Get lens review report:
%% issues `GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/report'.
get_lens_review_report(Client, LensAlias, WorkloadId)
when is_map(Client) ->
get_lens_review_report(Client, LensAlias, WorkloadId, #{}, #{}).
get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MilestoneNumber' query parameter.
get_lens_review_report(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/report"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% Get lens version differences:
%% issues `GET /lenses/{LensAlias}/versionDifference'.
get_lens_version_difference(Client, LensAlias)
when is_map(Client) ->
get_lens_version_difference(Client, LensAlias, #{}, #{}).
get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `BaseLensVersion' and `TargetLensVersion'
%% query parameters.
get_lens_version_difference(Client, LensAlias, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/versionDifference"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"BaseLensVersion">>, maps:get(<<"BaseLensVersion">>, QueryMap, undefined)},
{<<"TargetLensVersion">>, maps:get(<<"TargetLensVersion">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% Issues `GET /workloads/{WorkloadId}/milestones/{MilestoneNumber}'.
get_milestone(Client, MilestoneNumber, WorkloadId)
  when is_map(Client) ->
  get_milestone(Client, MilestoneNumber, WorkloadId, #{}, #{}).

get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
  get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap, []).

%% QueryMap and HeadersMap are accepted for API uniformity; this operation
%% sends no query parameters and no headers.
get_milestone(Client, MilestoneNumber, WorkloadId, QueryMap, HeadersMap, ExtraOptions)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOptions) ->
  Path = ["/workloads/", aws_util:encode_uri(WorkloadId),
          "/milestones/", aws_util:encode_uri(MilestoneNumber), ""],
  ReqOptions = [{send_body_as_binary, false},
                {receive_body_as_binary, false}
                | ExtraOptions],
  request(Client, get, Path, [], [], undefined, ReqOptions, undefined).
%% Get an existing workload: issues `GET /workloads/{WorkloadId}'.
get_workload(Client, WorkloadId)
when is_map(Client) ->
get_workload(Client, WorkloadId, #{}, #{}).
get_workload(Client, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_workload(Client, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap and HeadersMap are accepted for API uniformity; this operation
%% sends no query parameters and no headers.
get_workload(Client, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Import a new lens.
%%
%% The lens cannot be applied to workloads or shared with other Amazon Web
%% Services accounts until it's published with `CreateLensVersion'.
%%
%% A custom lens cannot exceed 500 KB in size.
%% Import a new lens: issues `PUT /importLens'.
import_lens(Client, Input) ->
import_lens(Client, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
import_lens(Client, Input0, Options0) ->
Method = put,
Path = ["/importLens"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% List answers for a lens review:
%% issues `GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers'.
list_answers(Client, LensAlias, WorkloadId)
when is_map(Client) ->
list_answers(Client, LensAlias, WorkloadId, #{}, #{}).
list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MaxResults', `MilestoneNumber',
%% `NextToken' and `PillarId' query parameters.
list_answers(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
{<<"PillarId">>, maps:get(<<"PillarId">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List Trusted Advisor check details by account related to the workload:
%% issues `POST /workloads/{WorkloadId}/checks'.
list_check_details(Client, WorkloadId, Input) ->
list_check_details(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
list_check_details(Client, WorkloadId, Input0, Options0) ->
Method = post,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/checks"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% List Trusted Advisor checks summarized for all accounts related to the
%% workload: issues `POST /workloads/{WorkloadId}/checkSummaries'.
list_check_summaries(Client, WorkloadId, Input) ->
list_check_summaries(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
list_check_summaries(Client, WorkloadId, Input0, Options0) ->
Method = post,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/checkSummaries"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% List lens review improvements:
%% issues `GET /workloads/{WorkloadId}/lensReviews/{LensAlias}/improvements'.
list_lens_review_improvements(Client, LensAlias, WorkloadId)
when is_map(Client) ->
list_lens_review_improvements(Client, LensAlias, WorkloadId, #{}, #{}).
list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MaxResults', `MilestoneNumber',
%% `NextToken' and `PillarId' query parameters.
list_lens_review_improvements(Client, LensAlias, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/improvements"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
{<<"PillarId">>, maps:get(<<"PillarId">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List lens reviews: issues `GET /workloads/{WorkloadId}/lensReviews'.
list_lens_reviews(Client, WorkloadId)
when is_map(Client) ->
list_lens_reviews(Client, WorkloadId, #{}, #{}).
list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MaxResults', `MilestoneNumber' and
%% `NextToken' query parameters.
list_lens_reviews(Client, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"MilestoneNumber">>, maps:get(<<"MilestoneNumber">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List the lens shares associated with the lens:
%% issues `GET /lenses/{LensAlias}/shares'.
list_lens_shares(Client, LensAlias)
when is_map(Client) ->
list_lens_shares(Client, LensAlias, #{}, #{}).
list_lens_shares(Client, LensAlias, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_lens_shares(Client, LensAlias, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MaxResults', `NextToken',
%% `SharedWithPrefix' and `Status' query parameters.
list_lens_shares(Client, LensAlias, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/lenses/", aws_util:encode_uri(LensAlias), "/shares"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
{<<"SharedWithPrefix">>, maps:get(<<"SharedWithPrefix">>, QueryMap, undefined)},
{<<"Status">>, maps:get(<<"Status">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List the available lenses: issues `GET /lenses'.
list_lenses(Client)
when is_map(Client) ->
list_lenses(Client, #{}, #{}).
list_lenses(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_lenses(Client, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `LensName', `LensStatus', `LensType',
%% `MaxResults' and `NextToken' query parameters.
list_lenses(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/lenses"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"LensName">>, maps:get(<<"LensName">>, QueryMap, undefined)},
{<<"LensStatus">>, maps:get(<<"LensStatus">>, QueryMap, undefined)},
{<<"LensType">>, maps:get(<<"LensType">>, QueryMap, undefined)},
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List all milestones for an existing workload:
%% issues `POST /workloads/{WorkloadId}/milestonesSummaries'.
list_milestones(Client, WorkloadId, Input) ->
list_milestones(Client, WorkloadId, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
list_milestones(Client, WorkloadId, Input0, Options0) ->
Method = post,
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/milestonesSummaries"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% List lens notifications: issues `POST /notifications'.
list_notifications(Client, Input) ->
list_notifications(Client, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
list_notifications(Client, Input0, Options0) ->
Method = post,
Path = ["/notifications"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% List the workload invitations: issues `GET /shareInvitations'.
list_share_invitations(Client)
when is_map(Client) ->
list_share_invitations(Client, #{}, #{}).
list_share_invitations(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_share_invitations(Client, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `LensNamePrefix', `MaxResults',
%% `NextToken', `ShareResourceType' and `WorkloadNamePrefix' query parameters.
list_share_invitations(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/shareInvitations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"LensNamePrefix">>, maps:get(<<"LensNamePrefix">>, QueryMap, undefined)},
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
{<<"ShareResourceType">>, maps:get(<<"ShareResourceType">>, QueryMap, undefined)},
{<<"WorkloadNamePrefix">>, maps:get(<<"WorkloadNamePrefix">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the tags for a resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
%% List the tags for a resource: issues `GET /tags/{WorkloadArn}'.
list_tags_for_resource(Client, WorkloadArn)
when is_map(Client) ->
list_tags_for_resource(Client, WorkloadArn, #{}, #{}).
list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap, []).
%% QueryMap and HeadersMap are accepted for API uniformity; this operation
%% sends no query parameters and no headers.
list_tags_for_resource(Client, WorkloadArn, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% List the workload shares associated with the workload:
%% issues `GET /workloads/{WorkloadId}/shares'.
list_workload_shares(Client, WorkloadId)
when is_map(Client) ->
list_workload_shares(Client, WorkloadId, #{}, #{}).
list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap, []).
%% QueryMap may carry the optional `MaxResults', `NextToken',
%% `SharedWithPrefix' and `Status' query parameters.
list_workload_shares(Client, WorkloadId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)},
{<<"SharedWithPrefix">>, maps:get(<<"SharedWithPrefix">>, QueryMap, undefined)},
{<<"Status">>, maps:get(<<"Status">>, QueryMap, undefined)}
],
%% Drop parameters the caller did not supply.
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List workloads.
%%
%% Paginated.
%% List workloads: issues `POST /workloadsSummaries'.
list_workloads(Client, Input) ->
list_workloads(Client, Input, []).
%% Options0 lets callers extend the default request options passed to request/8.
list_workloads(Client, Input0, Options0) ->
Method = post,
Path = ["/workloadsSummaries"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Adds one or more tags to the specified resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
tag_resource(Client, WorkloadArn, Input) ->
    tag_resource(Client, WorkloadArn, Input, []).
tag_resource(Client, WorkloadArn, Input0, Options0) ->
    %% `POST /tags/{WorkloadArn}' with the tag set in the JSON body.
    Method = post,
    Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% Input1/Input2 are generator scaffolding: no header or query
    %% parameters are extracted from the input for this operation.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes specified tags from a resource.
%%
%% The WorkloadArn parameter can be either a workload ARN or a custom lens
%% ARN.
%%
%% `DELETE /tags/WorkloadArn?tagKeys=key1&tagKeys=key2'
untag_resource(Client, WorkloadArn, Input) ->
    untag_resource(Client, WorkloadArn, Input, []).
untag_resource(Client, WorkloadArn, Input0, Options0) ->
    Method = delete,
    Path = ["/tags/", aws_util:encode_uri(WorkloadArn), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    %% The `TagKeys' field is taken out of the input and sent as repeated
    %% `tagKeys' query-string parameters (via aws_request:build_headers/2,
    %% which -- despite its name -- is used here to split the mapping off
    %% the input map).
    QueryMapping = [
                     {<<"tagKeys">>, <<"TagKeys">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update the answer to a specific question in a workload's lens
%% review
%% (`PATCH /workloads/{WorkloadId}/lensReviews/{LensAlias}/answers/{QuestionId}').
update_answer(Client, LensAlias, QuestionId, WorkloadId, Input) ->
    update_answer(Client, LensAlias, QuestionId, WorkloadId, Input, []).
update_answer(Client, LensAlias, QuestionId, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/answers/", aws_util:encode_uri(QuestionId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates whether the Amazon Web Services account is opted into
%% organization sharing features.
update_global_settings(Client, Input) ->
    update_global_settings(Client, Input, []).
update_global_settings(Client, Input0, Options0) ->
    %% `PATCH /global-settings'.
    Method = patch,
    Path = ["/global-settings"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update a lens review of a workload
%% (`PATCH /workloads/{WorkloadId}/lensReviews/{LensAlias}').
update_lens_review(Client, LensAlias, WorkloadId, Input) ->
    update_lens_review(Client, LensAlias, WorkloadId, Input, []).
update_lens_review(Client, LensAlias, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% documentation implied that a workload ARN must be specified.
update_share_invitation(Client, ShareInvitationId, Input) ->
    update_share_invitation(Client, ShareInvitationId, Input, []).
update_share_invitation(Client, ShareInvitationId, Input0, Options0) ->
    %% `PATCH /shareInvitations/{ShareInvitationId}'.
    Method = patch,
    Path = ["/shareInvitations/", aws_util:encode_uri(ShareInvitationId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update an existing workload
%% (`PATCH /workloads/{WorkloadId}').
update_workload(Client, WorkloadId, Input) ->
    update_workload(Client, WorkloadId, Input, []).
update_workload(Client, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Update a workload share
%% (`PATCH /workloads/{WorkloadId}/shares/{ShareId}').
update_workload_share(Client, ShareId, WorkloadId, Input) ->
    update_workload_share(Client, ShareId, WorkloadId, Input, []).
update_workload_share(Client, ShareId, WorkloadId, Input0, Options0) ->
    Method = patch,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/shares/", aws_util:encode_uri(ShareId), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Upgrade a lens review to the latest lens version
%% (`PUT /workloads/{WorkloadId}/lensReviews/{LensAlias}/upgrade').
upgrade_lens_review(Client, LensAlias, WorkloadId, Input) ->
    upgrade_lens_review(Client, LensAlias, WorkloadId, Input, []).
upgrade_lens_review(Client, LensAlias, WorkloadId, Input0, Options0) ->
    Method = put,
    Path = ["/workloads/", aws_util:encode_uri(WorkloadId), "/lensReviews/", aws_util:encode_uri(LensAlias), "/upgrade"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    %% No header or query parameters are derived from the input.
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%%====================================================================
%% Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
    {ok, {integer(), list()}} |
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map(),
    Error :: map().
%% Entry point for every operation: wraps the actual HTTP round trip so
%% that aws_request:request/2 can apply its retry policy from Options.
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Build, sign (SigV4) and execute a single HTTP request against the
%% Well-Architected endpoint using the awsJson-1.1 protocol.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    Client1 = Client#{service => <<"wellarchitected">>},
    Host = build_host(<<"wellarchitected">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% Either pass a caller-supplied binary body through untouched, or
    %% JSON-encode the input map.
    Payload =
      case proplists:get_value(send_body_as_binary, Options) of
        true ->
          maps:get(<<"Body">>, Input, <<"">>);
        false ->
          encode_payload(Input)
      end,
    %% The optional SHA-256 checksum header must be added *before*
    %% signing so that it is covered by the signature.
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                          true ->
                            add_checksum_hash_header(AdditionalHeaders1, Payload);
                          false ->
                            AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    MethodBin = aws_request:method_to_binary(Method),
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepend a base64-encoded SHA-256 checksum of `Body' to the header list.
add_checksum_hash_header(Headers, Body) ->
    Digest = crypto:hash(sha256, Body),
    Checksum = base64:encode(Digest),
    [{<<"X-Amz-CheckSum-SHA256">>, Checksum} | Headers].
%% Classify a hackney response.
%% 3-tuple responses carry no body stream (status + headers only).
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
%% 4-tuple success: read and (optionally) JSON-decode the body.
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
      %% An empty body on plain success becomes an empty map instead of
      %% being handed to jsx (which would fail on <<>>).
      {ok, <<>>} when StatusCode =:= 200;
                      StatusCode =:= SuccessStatusCode ->
        {ok, #{}, {StatusCode, ResponseHeaders, Client}};
      {ok, Body} ->
        Result = case DecodeBody of
                   true ->
                     try
                       jsx:decode(Body)
                     catch
                       Error:Reason:Stack ->
                         erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                     end;
                   false -> #{<<"Body">> => Body}
                 end,
        {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
  %% Retriable error if retries are enabled
  {error, service_unavailable};
%% Any other status: surface the JSON-decoded error body.
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
%% Transport-level failure reported by hackney itself.
handle_response({error, Reason}, _, _DecodeBody) ->
    {error, Reason}.
%% Resolve the request host. The `local' pseudo-region routes to a
%% user-supplied endpoint (or localhost); otherwise the host is
%% `<prefix>.<region>.<endpoint>'.
build_host(_Prefix, #{region := <<"local">>, endpoint := CustomEndpoint}) ->
    CustomEndpoint;
build_host(_Prefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(Prefix, #{region := Region, endpoint := DnsSuffix}) ->
    aws_util:binary_join([Prefix, Region, DnsSuffix], <<".">>).
%% Assemble the full request URL from the client's scheme and port plus
%% the resolved host and iolist path.
%% NOTE(review): the port is always rendered, even when it is the
%% scheme's default.
build_url(Host, Path0, Client) ->
    Proto = aws_client:proto(Client),
    Path = erlang:iolist_to_binary(Path0),
    Port = aws_client:port(Client),
    aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
%% JSON-encode a request body; `undefined' means "no body" and becomes
%% the empty binary.
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
  <<>>;
encode_payload(Input) ->
  jsx:encode(Input).
|
3669e862dcee4fbda9773897d6660b73b7a3e6ca94e2ec959e7e047e6f4d719b | ConsumerDataStandardsAustralia/validation-prototype | Main.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Web.ConsumerData.Au.LambdaBank.Main where
{--
- Config parsing and stuff will happen here eventually. Pretty boring for now.
--}
import Web.ConsumerData.Au.Api.Types
import Text.URI (Authority (..))
import Text.URI.QQ (host, scheme)
import Web.ConsumerData.Au.LambdaBank.Server
-- | Build a 'LinkQualifier' for a local HTTP server on the given port:
--   scheme @http@, host @localhost@, no user info, no extra segments.
fakeQualifier :: Int -> LinkQualifier
fakeQualifier port = LinkQualifier
  [scheme|http|]
  (Authority
    { authUserInfo = Nothing
    , authHost = [host|localhost|]
    , authPort = Just $ fromIntegral port
    })
  []
-- | Run the LambdaBank server on port 8000, qualifying generated links
--   against the same port.
main :: IO ()
main = runServer port (fakeQualifier port)
  where port = 8000
| null | https://raw.githubusercontent.com/ConsumerDataStandardsAustralia/validation-prototype/ff63338b77339ee49fa3e0be5bb9d7f74e50c28b/consumer-data-au-lambdabank/src/Web/ConsumerData/Au/LambdaBank/Main.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE QuasiQuotes #
-
- Config parsing and stuff will happen here eventually. Pretty boring for now.
- | module Web.ConsumerData.Au.LambdaBank.Main where
import Web.ConsumerData.Au.Api.Types
import Text.URI (Authority (..))
import Text.URI.QQ (host, scheme)
import Web.ConsumerData.Au.LambdaBank.Server
fakeQualifier :: Int -> LinkQualifier
fakeQualifier port = LinkQualifier
[scheme|http|]
(Authority
{ authUserInfo = Nothing
, authHost = [host|localhost|]
, authPort = Just $ fromIntegral port
})
[]
main :: IO ()
main = runServer port (fakeQualifier port)
where port = 8000
|
bb481165023b182006b15200abcf07d2d6a8abeb628266ac98b35bf9d4f70f3a | GillianPlatform/Gillian | WProg.mli | type t = {
context : WFun.t list;
predicates : WPred.t list;
lemmas : WLemma.t list;
}
val get_context : t -> WFun.t list
val get_by_id :
?fname:string option ->
t ->
int option ->
[> `None
| `Return of WExpr.t
| `WExpr of WExpr.t
| `WFun of WFun.t
| `WLAssert of WLAssert.t
| `WLCmd of WLCmd.t
| `WLExpr of WLExpr.t
| `WLFormula of WLFormula.t
| `WLemma of WLemma.t
| `WPred of WPred.t
| `WSpec of WSpec.t
| `WStmt of WStmt.t ]
val get_pred : t -> string -> WPred.t option
val get_fun : t -> string -> WFun.t option
val never_called_during_symb : t -> WFun.t list
val pp_context : Format.formatter -> WFun.t list -> unit
val pp : Format.formatter -> t -> unit
val get_function_name_of_element : t -> int -> string
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/42d0e2aae9fa6b0992a5bc300525cc8d360c3c96/wisl/lib/syntax/WProg.mli | ocaml | type t = {
context : WFun.t list;
predicates : WPred.t list;
lemmas : WLemma.t list;
}
val get_context : t -> WFun.t list
val get_by_id :
?fname:string option ->
t ->
int option ->
[> `None
| `Return of WExpr.t
| `WExpr of WExpr.t
| `WFun of WFun.t
| `WLAssert of WLAssert.t
| `WLCmd of WLCmd.t
| `WLExpr of WLExpr.t
| `WLFormula of WLFormula.t
| `WLemma of WLemma.t
| `WPred of WPred.t
| `WSpec of WSpec.t
| `WStmt of WStmt.t ]
val get_pred : t -> string -> WPred.t option
val get_fun : t -> string -> WFun.t option
val never_called_during_symb : t -> WFun.t list
val pp_context : Format.formatter -> WFun.t list -> unit
val pp : Format.formatter -> t -> unit
val get_function_name_of_element : t -> int -> string
| |
da8c9c6ffc59148b2ebdcdca6af5cdfe26f5086476fb7cbdb8814a9b190b7a96 | cmeiklejohn/riak_pg | riak_pg.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2013 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
@author < >
2013 .
%% @doc Application.
-module(riak_pg).
-author('Christopher Meiklejohn <>').
-include("riak_pg.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-define(TIMEOUT, 5000).
-export([delete/1,
join/2,
leave/2,
groups/0,
members/1,
local_members/1,
connected_members/1]).
-export([mk_reqid/0,
wait_for_reqid/2]).
%% Public API
%% @doc Delete a group.
-spec delete(term()) -> ok | {error, timeout}.
delete(Group) ->
  %% The FSM replies asynchronously to self(); block until its reply
  %% arrives or ?TIMEOUT (5s) elapses.
  {ok, ReqId} = riak_pg_delete_fsm:delete(Group),
  wait_for_reqid(ReqId, ?TIMEOUT).
%% @doc Join pid to group.
-spec join(term(), pid()) -> ok | {error, timeout}.
join(Group, Pid) ->
  %% Delegates to the join FSM and waits synchronously for its reply.
  {ok, ReqId} = riak_pg_join_fsm:join(Group, Pid),
  wait_for_reqid(ReqId, ?TIMEOUT).
%% @doc Remove pid from group.
-spec leave(term(), pid()) -> ok | {error, timeout}.
leave(Group, Pid) ->
  %% Delegates to the leave FSM and waits synchronously for its reply.
  {ok, ReqId} = riak_pg_leave_fsm:leave(Group, Pid),
  wait_for_reqid(ReqId, ?TIMEOUT).
%% @doc Return a listing of all registered groups.
%% @todo
-spec groups() -> ok.
groups() ->
  %% Delegates to the groups FSM and waits synchronously for its reply.
  {ok, ReqId} = riak_pg_groups_fsm:groups(),
  wait_for_reqid(ReqId, ?TIMEOUT).
%% @doc Return a listing of members of a particular group.
-spec members(term()) -> {ok, list(pid())} | {error, timeout}.
members(Group) ->
  %% Delegates to the members FSM and waits synchronously for its reply.
  {ok, ReqId} = riak_pg_members_fsm:members(Group),
  wait_for_reqid(ReqId, ?TIMEOUT).
%% @doc Return a listing of local members of a particular group.
-spec local_members(term()) -> {ok, list(pid())} | {error, timeout}.
local_members(Group) ->
{ok, ReqId} = riak_pg_members_fsm:members(Group),
case wait_for_reqid(ReqId, ?TIMEOUT) of
{ok, Members} ->
LocalMembers = lists:filter(fun(Pid) ->
node(Pid) =:= node() end, Members),
{ok, LocalMembers};
{error, Error} ->
{error, Error}
end.
%% @doc Return a listing of connected members of a particular group.
-spec connected_members(term()) -> {ok, list(pid())} | {error, timeout}.
connected_members(Group) ->
{ok, ReqId} = riak_pg_members_fsm:members(Group),
case wait_for_reqid(ReqId, ?TIMEOUT) of
{ok, Members} ->
ConnectedMembers = lists:filter(fun(Pid) ->
lists:member(node(Pid), nodes()) end, Members),
{ok, ConnectedMembers};
{error, Error} ->
{error, Error}
end.
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% @doc Generate a request id.
%% @doc Generate a request id.
%%
%% `erlang:now/0' has been deprecated since OTP 18 (it warns at compile
%% time and is a serialization bottleneck); derive the id from a
%% strictly-unique integer plus monotonic time instead. The result is
%% still a non-negative integer, as callers expect from phash2.
mk_reqid() ->
  erlang:phash2({node(), erlang:monotonic_time(), erlang:unique_integer()}).
%% @doc Wait for a response.
%% @doc Block until a reply tagged with `ReqID' arrives, or give up
%% after `Timeout' milliseconds.
wait_for_reqid(ReqID, Timeout) ->
  receive
    {ReqID, ok, Value} ->
      {ok, Value};
    {ReqID, ok} ->
      ok
  after Timeout ->
      {error, timeout}
  end.
| null | https://raw.githubusercontent.com/cmeiklejohn/riak_pg/32d46bc1909144cea93b8b98a652a00f6520ffdb/src/riak_pg.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Application.
Public API
@doc Delete a group.
@doc Join pid to group.
@doc Remove pid from group.
@doc Return a listing of all registered groups.
@todo
@doc Return a listing of members of a particular group.
@doc Return a listing of local members of a particular group.
@doc Return a listing of connected members of a particular group.
===================================================================
Internal Functions
===================================================================
@doc Generate a request id.
@doc Wait for a response. | Copyright ( c ) 2013 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@author < >
2013 .
-module(riak_pg).
-author('Christopher Meiklejohn <>').
-include("riak_pg.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-define(TIMEOUT, 5000).
-export([delete/1,
join/2,
leave/2,
groups/0,
members/1,
local_members/1,
connected_members/1]).
-export([mk_reqid/0,
wait_for_reqid/2]).
-spec delete(term()) -> ok | {error, timeout}.
delete(Group) ->
{ok, ReqId} = riak_pg_delete_fsm:delete(Group),
wait_for_reqid(ReqId, ?TIMEOUT).
-spec join(term(), pid()) -> ok | {error, timeout}.
join(Group, Pid) ->
{ok, ReqId} = riak_pg_join_fsm:join(Group, Pid),
wait_for_reqid(ReqId, ?TIMEOUT).
-spec leave(term(), pid()) -> ok | {error, timeout}.
leave(Group, Pid) ->
{ok, ReqId} = riak_pg_leave_fsm:leave(Group, Pid),
wait_for_reqid(ReqId, ?TIMEOUT).
-spec groups() -> ok.
groups() ->
{ok, ReqId} = riak_pg_groups_fsm:groups(),
wait_for_reqid(ReqId, ?TIMEOUT).
-spec members(term()) -> {ok, list(pid())} | {error, timeout}.
members(Group) ->
{ok, ReqId} = riak_pg_members_fsm:members(Group),
wait_for_reqid(ReqId, ?TIMEOUT).
-spec local_members(term()) -> {ok, list(pid())} | {error, timeout}.
local_members(Group) ->
{ok, ReqId} = riak_pg_members_fsm:members(Group),
case wait_for_reqid(ReqId, ?TIMEOUT) of
{ok, Members} ->
LocalMembers = lists:filter(fun(Pid) ->
node(Pid) =:= node() end, Members),
{ok, LocalMembers};
{error, Error} ->
{error, Error}
end.
-spec connected_members(term()) -> {ok, list(pid())} | {error, timeout}.
connected_members(Group) ->
{ok, ReqId} = riak_pg_members_fsm:members(Group),
case wait_for_reqid(ReqId, ?TIMEOUT) of
{ok, Members} ->
ConnectedMembers = lists:filter(fun(Pid) ->
lists:member(node(Pid), nodes()) end, Members),
{ok, ConnectedMembers};
{error, Error} ->
{error, Error}
end.
mk_reqid() ->
erlang:phash2(erlang:now()).
wait_for_reqid(ReqID, Timeout) ->
receive
{ReqID, ok} ->
ok;
{ReqID, ok, Val} ->
{ok, Val}
after Timeout ->
{error, timeout}
end.
|
5e678384a976175b925e49b6919ed592f780fac0c4788c806cdef7a70beed914 | openmusic-project/OMChroma | add-4.lisp | ;******************************************************************
; CLASS ADD-4
;******************************************************************
(in-package :om)
; LISP-DEFINED CLASSES SHOULD RESIDE IN THE LIBRARY'S PACKAGE AND
; NOT IN THE USER PACKAGE, WHICH CONTAINS ALL THE CLASSES
; GRAPHICALLY DEFINED
(defclass! add-4
(cs-evt) ; INHERIT FROM CS-EVT
(
; GLOBAL SLOTS (LIGHT BLUE, ON THE LEFT OF THE CLASS):
THE METHOD BELOW TRANSFORMS THEM INTO GLOBAL SLOTS ( " SHOW " UNCHEKED )
; ATTENTION: A GLOBAL SLOT SHOULD NOT HAVE AN INITARG
( source-code :initform
(load-buffer-textfile
(get-orc-source (get-orc "add-4"))
'textfile "append")
:allocation :class
:type textfile
:accessor source-code)
(numchan :initform (or (get-orc-channels (get-orc "add-4")) 1) :allocation :class :accessor numchan)
(cs-inits :initform (get-cs-inits (get-orc "add-4"))
:allocation :class :type list :accessor cs-inits)
(orc-header :initform (list
"; GEN functions **********************************************************"
"; audio wave"
"f1 0 65537 10 1"
"; vibrato wave"
"f2 0 4097 -7 440.0 2048 220.0 2048 440.0"
)
:allocation :class :type list :accessor orc-header)
(InstID :initform 1 :allocation :class :accessor InstID)
; LOCAL SLOTS (RED, CORRESPONDING TO THE P-FIELDS)
; ATTENTION: A GLOBAL SLOT SHOULD HAVE AN INITARG
( amp :type number
:initarg :amp
:initform 1000.0
:accessor amp)
( freq :type number
:initarg :freq
:initform 1.0
:accessor freq)
( aenv :type gen-07
:initarg :aenv
; x-points y-points decimals
:initform (make-cs-table 'Gen-07 '(0 1000 3000 4096) '(0.0 1.0 1.0 0.0) 5 "?" 4097)
:accessor aenv)
( fenv :type gen-07
:initarg :fenv
; x-points y-points decimals
:initform (make-cs-table 'Gen-07 '(0 4096) '(440.0 880.0) 5 "?" 4097)
:accessor fenv)
( atk :type number
:initarg :atk
:initform 0.01
:accessor atk)
( dec :type number
:initarg :dec
:initform 0.01
:accessor dec)
( phs :type number
:initarg :phs
:initform 0.0
:accessor phs)
)
(:documentation
"
;=============================================================================
ADD4.ORC
; SIMPLE ADDITIVE SYNTHESIS ADAPTED TO READ PARTIAL ANALYSIS DATA / MONO
; AMPLITUDE ENVELOPE WITH POSCIL AND ATTACK/RELEASE VALUES TO AVOID CLICKS
; CONTROL OF THE INITIAL PHASE OF THE AUDIO OSCILLATOR
;=============================================================================
; Timbre: simple additive synthesis with variable amplitude and frequency
Synthesis : additive same units
Coded : ms 17/07
; This class reads absolute values for amplitudes and frequencies
coming from analysis data . In this case , set freq to 1.0 ( scaler ) and amplitude
to 1000.0 or 0.0 ( ) . Note that GEN functions should have a negative number
; in order not to be rescaled (GEN 0 -7)
; It can also be used with normalized amp and freq functions, and, in this case,
; amp and freq should have a reasonable value.
NB : NEW STRUCTURE FOR THE AMPLITUDES FROM AUGUST 2008 !
Positive value > 0.0 : linear amplitude ( > 0.0 - 1000.0 )
0.0 or negative value : amplitude in dB ( 0 = maximum value )
The apparently arbitrary amplitude range ( 0 - 1000 , rather than 0 - 1 )
; avoids printing small values with exponential notation
Replaced oscili with poscil ( precise oscillator ) , ms 8/08
Default SR = 96000 , recommended precision : 24 bits
;-----------------------------------------------------------------------------
; p1 = instrument number
p2 = action time [ sec ]
p3 = duration [ sec ]
p4 = max amp [ linear , > 0.0 - 1000.0 or dB , < = 0.0 ]
; p5 = frequency [Hz or scaler]
; p6 = amplitude envelope [GEN number]
; p7 = frequency envelope [GEN number]
p8 = attack time of the amp [ sec ]
p9 = decay time of the amp [ sec ]
; p10 = initial phase of the audio oscillator [rad]
;-----------------------------------------------------------------------------
; COMPULSORY GEN FUNCTIONS :
; f1 audio wave
;_____________________________________________________________________________
; CLASS: ADD-4
GLOBAL KEYWORDS ( default values within parentheses ):
NUMROWS : amount of rows ( components ) in the event ( 1 )
ACTION - TIME : start time of the whole event [ sec ] ( 0.0 )
USER - FUN : user - defined parsing function ( nil )
LOCAL KEYWORDS :
E - DELS : entry delays [ sec ] ( 0.0 )
DURS : duration [ sec ] ( 1.0 )
AMP : amplitude [ , > 0.0 - 1000.0 or dB < - 0.0 ] ( 1000.0 )
: frequency scalre [ 0 - 1 or Hz ] ( 1.0 )
AENV : fun number for the amp envlp [ absolute GEN = negative GEN07 ] ( trapezoid )
FENV : fun number for frequency env [ absolute GEN = negative GEN07 ] ( upward gliss )
ATK : attack time [ sec ] ( 0.01 )
DEC : decay time [ sec ] ( 0.01 )
PHS : initial phase [ rad ] ( 0.0 )
;*****************************************************************************
"
)
(:icon 1001)
)
| null | https://raw.githubusercontent.com/openmusic-project/OMChroma/5ded34f22b59a1a93ea7b87e182c9dbdfa95e047/sources/om6/cs-events/csound/classes/Basic/add-4.lisp | lisp | ******************************************************************
CLASS ADD-4
******************************************************************
LISP-DEFINED CLASSES SHOULD RESIDE IN THE LIBRARY'S PACKAGE AND
NOT IN THE USER PACKAGE, WHICH CONTAINS ALL THE CLASSES
GRAPHICALLY DEFINED
INHERIT FROM CS-EVT
GLOBAL SLOTS (LIGHT BLUE, ON THE LEFT OF THE CLASS):
ATTENTION: A GLOBAL SLOT SHOULD NOT HAVE AN INITARG
LOCAL SLOTS (RED, CORRESPONDING TO THE P-FIELDS)
ATTENTION: A GLOBAL SLOT SHOULD HAVE AN INITARG
x-points y-points decimals
x-points y-points decimals
=============================================================================
SIMPLE ADDITIVE SYNTHESIS ADAPTED TO READ PARTIAL ANALYSIS DATA / MONO
AMPLITUDE ENVELOPE WITH POSCIL AND ATTACK/RELEASE VALUES TO AVOID CLICKS
CONTROL OF THE INITIAL PHASE OF THE AUDIO OSCILLATOR
=============================================================================
Timbre: simple additive synthesis with variable amplitude and frequency
This class reads absolute values for amplitudes and frequencies
in order not to be rescaled (GEN 0 -7)
It can also be used with normalized amp and freq functions, and, in this case,
amp and freq should have a reasonable value.
avoids printing small values with exponential notation
-----------------------------------------------------------------------------
p1 = instrument number
p5 = frequency [Hz or scaler]
p6 = amplitude envelope [GEN number]
p7 = frequency envelope [GEN number]
p10 = initial phase of the audio oscillator [rad]
-----------------------------------------------------------------------------
COMPULSORY GEN FUNCTIONS :
f1 audio wave
_____________________________________________________________________________
CLASS: ADD-4
***************************************************************************** |
(in-package :om)
(defclass! add-4
(
THE METHOD BELOW TRANSFORMS THEM INTO GLOBAL SLOTS ( " SHOW " UNCHEKED )
( source-code :initform
(load-buffer-textfile
(get-orc-source (get-orc "add-4"))
'textfile "append")
:allocation :class
:type textfile
:accessor source-code)
(numchan :initform (or (get-orc-channels (get-orc "add-4")) 1) :allocation :class :accessor numchan)
(cs-inits :initform (get-cs-inits (get-orc "add-4"))
:allocation :class :type list :accessor cs-inits)
(orc-header :initform (list
"; GEN functions **********************************************************"
"; audio wave"
"f1 0 65537 10 1"
"; vibrato wave"
"f2 0 4097 -7 440.0 2048 220.0 2048 440.0"
)
:allocation :class :type list :accessor orc-header)
(InstID :initform 1 :allocation :class :accessor InstID)
( amp :type number
:initarg :amp
:initform 1000.0
:accessor amp)
( freq :type number
:initarg :freq
:initform 1.0
:accessor freq)
( aenv :type gen-07
:initarg :aenv
:initform (make-cs-table 'Gen-07 '(0 1000 3000 4096) '(0.0 1.0 1.0 0.0) 5 "?" 4097)
:accessor aenv)
( fenv :type gen-07
:initarg :fenv
:initform (make-cs-table 'Gen-07 '(0 4096) '(440.0 880.0) 5 "?" 4097)
:accessor fenv)
( atk :type number
:initarg :atk
:initform 0.01
:accessor atk)
( dec :type number
:initarg :dec
:initform 0.01
:accessor dec)
( phs :type number
:initarg :phs
:initform 0.0
:accessor phs)
)
(:documentation
"
ADD4.ORC
Synthesis : additive same units
Coded : ms 17/07
coming from analysis data . In this case , set freq to 1.0 ( scaler ) and amplitude
to 1000.0 or 0.0 ( ) . Note that GEN functions should have a negative number
NB : NEW STRUCTURE FOR THE AMPLITUDES FROM AUGUST 2008 !
Positive value > 0.0 : linear amplitude ( > 0.0 - 1000.0 )
0.0 or negative value : amplitude in dB ( 0 = maximum value )
The apparently arbitrary amplitude range ( 0 - 1000 , rather than 0 - 1 )
Replaced oscili with poscil ( precise oscillator ) , ms 8/08
Default SR = 96000 , recommended precision : 24 bits
p2 = action time [ sec ]
p3 = duration [ sec ]
p4 = max amp [ linear , > 0.0 - 1000.0 or dB , < = 0.0 ]
p8 = attack time of the amp [ sec ]
p9 = decay time of the amp [ sec ]
GLOBAL KEYWORDS ( default values within parentheses ):
NUMROWS : amount of rows ( components ) in the event ( 1 )
ACTION - TIME : start time of the whole event [ sec ] ( 0.0 )
USER - FUN : user - defined parsing function ( nil )
LOCAL KEYWORDS :
E - DELS : entry delays [ sec ] ( 0.0 )
DURS : duration [ sec ] ( 1.0 )
AMP : amplitude [ , > 0.0 - 1000.0 or dB < - 0.0 ] ( 1000.0 )
: frequency scalre [ 0 - 1 or Hz ] ( 1.0 )
AENV : fun number for the amp envlp [ absolute GEN = negative GEN07 ] ( trapezoid )
FENV : fun number for frequency env [ absolute GEN = negative GEN07 ] ( upward gliss )
ATK : attack time [ sec ] ( 0.01 )
DEC : decay time [ sec ] ( 0.01 )
PHS : initial phase [ rad ] ( 0.0 )
"
)
(:icon 1001)
)
|
45f5b902cabbd3887cf93e4f794150464dc638567512d244ebfdfb057a7c4592 | tsloughter/kuberl | kuberl_v2beta2_metric_identifier.erl | -module(kuberl_v2beta2_metric_identifier).
-export([encode/1]).
-export_type([kuberl_v2beta2_metric_identifier/0]).
-type kuberl_v2beta2_metric_identifier() ::
#{ 'name' := binary(),
'selector' => kuberl_v1_label_selector:kuberl_v1_label_selector()
}.
%% Serialise a v2beta2 MetricIdentifier map for the Kubernetes API.
%% NOTE(review): the head requires 'selector' (`:=') even though the
%% exported type declares it optional (`=>'); calling without 'selector'
%% fails with function_clause. Confirm against the generator whether
%% that is intended.
encode(#{ 'name' := Name,
          'selector' := Selector
        }) ->
    #{ 'name' => Name,
       'selector' => Selector
     }.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v2beta2_metric_identifier.erl | erlang | -module(kuberl_v2beta2_metric_identifier).
-export([encode/1]).
-export_type([kuberl_v2beta2_metric_identifier/0]).
-type kuberl_v2beta2_metric_identifier() ::
#{ 'name' := binary(),
'selector' => kuberl_v1_label_selector:kuberl_v1_label_selector()
}.
encode(#{ 'name' := Name,
'selector' := Selector
}) ->
#{ 'name' => Name,
'selector' => Selector
}.
| |
c261937a6a1bb28fdf4ab9dc2c501fd552a35cff413ae2ccc821818946f88a30 | gcv/appengine-magic | mail.clj | (ns test.appengine-magic.services.mail
(:use clojure.test)
(:require [appengine-magic.services.mail :as mail]
[appengine-magic.testing :as ae-testing]))
(use-fixtures :each (ae-testing/local-services :all))
(deftest basics
  ;; Build a minimal message and send it through the local (stubbed)
  ;; App Engine mail service; success means no exception is thrown.
  (let [msg (mail/make-message :to ""
                               :from ""
                               :subject "test"
                               :text-body "hello world")]
    (mail/send msg)))
| null | https://raw.githubusercontent.com/gcv/appengine-magic/facc9fe4945b3becd772d6b053844e572dcc1c73/test/test/appengine_magic/services/mail.clj | clojure | (ns test.appengine-magic.services.mail
(:use clojure.test)
(:require [appengine-magic.services.mail :as mail]
[appengine-magic.testing :as ae-testing]))
(use-fixtures :each (ae-testing/local-services :all))
(deftest basics
(let [msg (mail/make-message :to ""
:from ""
:subject "test"
:text-body "hello world")]
(mail/send msg)))
| |
4c9be7fdc9fd718e9d925e13d924f4eb8aef362db439924d29c12d1338475fb5 | alsonkemp/turbinado | HTML.hs | module Turbinado.View.HTML where
import Control.Monad
import Turbinado.View.Monad
import qualified Text.XHtml.Strict as X
type VHtml = View X.Html
class VHTML a where
toVHtml :: a -> VHtml
toVHtmlFromList :: [a] -> VHtml
toVHtmlFromList xs = do xs' <- mapM toVHtml xs
return $ X.concatHtml xs'
instance VHTML X.Html where
toVHtml = return
instance VHTML VHtml where
toVHtml v = v
instance VHTML Char where
toVHtml a = return $ X.toHtml [a]
toVHtmlFromList = return . X.toHtmlFromList
instance (VHTML a) => VHTML [a] where
toVHtml = toVHtmlFromList
instance VHTML a => VHTML (Maybe a) where
toVHtml m = maybe noVHtml toVHtml m
class ADDATTRS a where
(!) :: a -> [X.HtmlAttr] -> a
instance (ADDATTRS b) => ADDATTRS (a -> b) where
fn ! attr = \ arg -> fn arg ! attr
instance ADDATTRS VHtml where
vh ! attr = do vh' <- vh
return $ vh' X.! attr
--
-- * Html primitives and basic combinators
--
-- | Put something inside an HTML element.
(<<) :: (VHTML a) =>
(VHtml -> b) -- ^ Parent
-> a -- ^ Child
-> b
fn << arg = fn (toVHtml arg)
concatVHtml :: (VHTML a) => [a] -> VHtml
concatVHtml as = do hs <- sequence $ map toVHtml as
return $ X.concatHtml hs
-- | Create a piece of HTML which is the concatenation
of two things which can be made into HTML .
(+++) :: (VHTML a,VHTML b) => a -> b -> VHtml
a +++ b = do a' <- toVHtml a
b' <- toVHtml b
return $ a' X.+++ b'
-- | An empty piece of HTML.
noVHtml :: VHtml
noVHtml = return $ X.noHtml
-- | Constructs an element with a custom name.
tag :: String -- ^ Element name
-> VHtml -- ^ Element contents
-> VHtml
tag str htmls = do hs <- htmls
return $ X.tag str hs
-- | Constructs an element with a custom name, and
-- without any children.
itag :: String -> VHtml
itag str = tag str noVHtml
stringToVHtml :: String -> VHtml
stringToVHtml s = return $ X.stringToHtml s
emptyAttr = X.emptyAttr
intAttr = X.intAttr
strAttr = X.strAttr
htmlAttr = X.htmlAttr
--prettyHtml = X.prettyHtml
| null | https://raw.githubusercontent.com/alsonkemp/turbinado/da2ba7c3443ddf6a51d1ec5b05cb45a85efc0809/Turbinado/View/HTML.hs | haskell |
* Html primitives and basic combinators
| Put something inside an HTML element.
^ Parent
^ Child
| Create a piece of HTML which is the concatenation
| An empty piece of HTML.
| Constructs an element with a custom name.
^ Element name
^ Element contents
| Constructs an element with a custom name, and
without any children.
prettyHtml = X.prettyHtml | module Turbinado.View.HTML where
import Control.Monad
import Turbinado.View.Monad
import qualified Text.XHtml.Strict as X
type VHtml = View X.Html
class VHTML a where
toVHtml :: a -> VHtml
toVHtmlFromList :: [a] -> VHtml
toVHtmlFromList xs = do xs' <- mapM toVHtml xs
return $ X.concatHtml xs'
instance VHTML X.Html where
toVHtml = return
instance VHTML VHtml where
toVHtml v = v
instance VHTML Char where
toVHtml a = return $ X.toHtml [a]
toVHtmlFromList = return . X.toHtmlFromList
instance (VHTML a) => VHTML [a] where
toVHtml = toVHtmlFromList
instance VHTML a => VHTML (Maybe a) where
toVHtml m = maybe noVHtml toVHtml m
class ADDATTRS a where
(!) :: a -> [X.HtmlAttr] -> a
instance (ADDATTRS b) => ADDATTRS (a -> b) where
fn ! attr = \ arg -> fn arg ! attr
instance ADDATTRS VHtml where
vh ! attr = do vh' <- vh
return $ vh' X.! attr
(<<) :: (VHTML a) =>
-> b
fn << arg = fn (toVHtml arg)
concatVHtml :: (VHTML a) => [a] -> VHtml
concatVHtml as = do hs <- sequence $ map toVHtml as
return $ X.concatHtml hs
of two things which can be made into HTML .
(+++) :: (VHTML a,VHTML b) => a -> b -> VHtml
a +++ b = do a' <- toVHtml a
b' <- toVHtml b
return $ a' X.+++ b'
noVHtml :: VHtml
noVHtml = return $ X.noHtml
-> VHtml
tag str htmls = do hs <- htmls
return $ X.tag str hs
itag :: String -> VHtml
itag str = tag str noVHtml
stringToVHtml :: String -> VHtml
stringToVHtml s = return $ X.stringToHtml s
emptyAttr = X.emptyAttr
intAttr = X.intAttr
strAttr = X.strAttr
htmlAttr = X.htmlAttr
|
fa5d4a5c0aae54fcfd9d14d8fe080598b2d32fb24556db5791118a2b02bf2067 | Mallku2/lua-redex-model | phrases_constructors_tests.rkt | #lang racket
(require redex
"phrases_constructors.rkt"
rackunit
rackunit/text-ui)
(define-test-suite phrases-constructors-test-suite
; statements
(check-equal? (concrete-grammar-s (skip))
(term \;))
(check-equal? (concrete-grammar-s (break))
(term break))
(check-equal? (concrete-grammar-s (return (tuple (exps (list (number 1))))))
(term (return (< 1 >))))
(check-equal? (concrete-grammar-s (fun-call (id-name 'x) (exps (list (number 1)))))
(term (x (1))))
(check-equal? (concrete-grammar-s (method-call (id-name 'x) (id-name 'y) (exps (list (number 1)))))
(term (x : y (1))))
(check-equal? (concrete-grammar-s (built-in-call (id-name 'service) (exps (list (number 1)))))
(term (\$builtIn service (1))))
(check-equal? (concrete-grammar-s (conditional (nil) (skip) (break)))
(term (if nil then \; else break end)))
(check-equal? (concrete-grammar-s (while (true) (skip)))
(term (while true do \; end)))
(check-equal? (concrete-grammar-s (local-vars (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1)))
(skip)))
(term (local (x y) = (1) in \; end)))
(check-equal? (concrete-grammar-s (var-assign (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1) (number 2)))))
(term ((x y) = (1 2))))
(check-equal? (concrete-grammar-s (conc-stats (list (skip)
(var-assign (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1) (number 2)))))))
(term (\;
((x y) = (1 2)))))
; expressions
(check-equal? (concrete-grammar-e (nil))
(term nil))
(check-equal? (concrete-grammar-e (true))
(term true))
(check-equal? (concrete-grammar-e (false))
(term false))
(check-equal? (concrete-grammar-e (number 1))
(term 1))
(check-equal? (concrete-grammar-e (number 1.1))
(term 1.1))
(check-equal? (concrete-grammar-e (str "asd"))
(term "asd"))
(check-equal? (concrete-grammar-e (id-name 'asd))
(term asd))
(check-equal? (concrete-grammar-e (id-vararg))
(term <<<))
(check-equal? (concrete-grammar-e (var-table-field (number 1) (number 2)))
(term (1 \[ 2 \])))
(check-equal? (concrete-grammar-e (parent-e (number 1)))
(term (\( 1 \))))
(check-equal? (concrete-grammar-e (binop (add)
(number 1)
(number 2)))
(term (1 + 2)))
(check-equal? (concrete-grammar-e (binop (sub)
(number 1)
(number 2)))
(term (1 - 2)))
(check-equal? (concrete-grammar-e (binop (mul)
(number 1)
(number 2)))
(term (1 * 2)))
(check-equal? (concrete-grammar-e (binop (div)
(number 1)
(number 2)))
(term (1 / 2)))
(check-equal? (concrete-grammar-e (binop (pow)
(number 1)
(number 2)))
(term (1 ^ 2)))
(check-equal? (concrete-grammar-e (binop (mod)
(number 1)
(number 2)))
(term (1 % 2)))
(check-equal? (concrete-grammar-e (binop (lt)
(number 1)
(number 2)))
(term (1 < 2)))
(check-equal? (concrete-grammar-e (binop (le)
(number 1)
(number 2)))
(term (1 <= 2)))
(check-equal? (concrete-grammar-e (binop (gt)
(number 1)
(number 2)))
(term (1 > 2)))
(check-equal? (concrete-grammar-e (binop (ge)
(number 1)
(number 2)))
(term (1 >= 2)))
(check-equal? (concrete-grammar-e (binop (eq)
(number 1)
(number 2)))
(term (1 == 2)))
(check-equal? (concrete-grammar-e (binop (\\and)
(number 1)
(number 2)))
(term (1 and 2)))
(check-equal? (concrete-grammar-e (binop (\\or)
(number 1)
(number 2)))
(term (1 or 2)))
(check-equal? (concrete-grammar-e (binop (str-concat)
(number "1")
(number "2")))
(term ("1" .. "2")))
(check-equal? (concrete-grammar-e (unop (unm)
(number 1)))
(term (- 1)))
(check-equal? (concrete-grammar-e (unop (\\not)
(true)))
(term (not true)))
(check-equal? (concrete-grammar-e (unop (len)
(number "asd")))
(term (\# "asd")))
(check-equal? (concrete-grammar-e (tuple (exps (list (number 1) (number 2)))))
(term (< 1 2 >)))
(check-equal? (concrete-grammar-e (tableconstructor
(fields (list
(kv-table-field
(number 1)
(number 2))
(v-table-field (number 3))))))
(term (\{ (\[ 1 \] = 2) 3 \})))
(check-equal? (concrete-grammar-e (func-def (id-name 'x)
(params (exps (list (id-name 'y))))
(skip)))
(term (function x (y) \; end)))
)
(provide phrases-constructors-test-suite) | null | https://raw.githubusercontent.com/Mallku2/lua-redex-model/13a1b8cacbdc72a1b5cb1a1f140f21cc974d71c3/Desugar/phrases_constructors_tests.rkt | racket | statements
))
else break end)))
end)))
end)))
expressions
end))) | #lang racket
(require redex
"phrases_constructors.rkt"
rackunit
rackunit/text-ui)
(define-test-suite phrases-constructors-test-suite
(check-equal? (concrete-grammar-s (skip))
(check-equal? (concrete-grammar-s (break))
(term break))
(check-equal? (concrete-grammar-s (return (tuple (exps (list (number 1))))))
(term (return (< 1 >))))
(check-equal? (concrete-grammar-s (fun-call (id-name 'x) (exps (list (number 1)))))
(term (x (1))))
(check-equal? (concrete-grammar-s (method-call (id-name 'x) (id-name 'y) (exps (list (number 1)))))
(term (x : y (1))))
(check-equal? (concrete-grammar-s (built-in-call (id-name 'service) (exps (list (number 1)))))
(term (\$builtIn service (1))))
(check-equal? (concrete-grammar-s (conditional (nil) (skip) (break)))
(check-equal? (concrete-grammar-s (while (true) (skip)))
(check-equal? (concrete-grammar-s (local-vars (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1)))
(skip)))
(check-equal? (concrete-grammar-s (var-assign (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1) (number 2)))))
(term ((x y) = (1 2))))
(check-equal? (concrete-grammar-s (conc-stats (list (skip)
(var-assign (exps (list (id-name 'x) (id-name 'y)))
(exps (list (number 1) (number 2)))))))
((x y) = (1 2)))))
(check-equal? (concrete-grammar-e (nil))
(term nil))
(check-equal? (concrete-grammar-e (true))
(term true))
(check-equal? (concrete-grammar-e (false))
(term false))
(check-equal? (concrete-grammar-e (number 1))
(term 1))
(check-equal? (concrete-grammar-e (number 1.1))
(term 1.1))
(check-equal? (concrete-grammar-e (str "asd"))
(term "asd"))
(check-equal? (concrete-grammar-e (id-name 'asd))
(term asd))
(check-equal? (concrete-grammar-e (id-vararg))
(term <<<))
(check-equal? (concrete-grammar-e (var-table-field (number 1) (number 2)))
(term (1 \[ 2 \])))
(check-equal? (concrete-grammar-e (parent-e (number 1)))
(term (\( 1 \))))
(check-equal? (concrete-grammar-e (binop (add)
(number 1)
(number 2)))
(term (1 + 2)))
(check-equal? (concrete-grammar-e (binop (sub)
(number 1)
(number 2)))
(term (1 - 2)))
(check-equal? (concrete-grammar-e (binop (mul)
(number 1)
(number 2)))
(term (1 * 2)))
(check-equal? (concrete-grammar-e (binop (div)
(number 1)
(number 2)))
(term (1 / 2)))
(check-equal? (concrete-grammar-e (binop (pow)
(number 1)
(number 2)))
(term (1 ^ 2)))
(check-equal? (concrete-grammar-e (binop (mod)
(number 1)
(number 2)))
(term (1 % 2)))
(check-equal? (concrete-grammar-e (binop (lt)
(number 1)
(number 2)))
(term (1 < 2)))
(check-equal? (concrete-grammar-e (binop (le)
(number 1)
(number 2)))
(term (1 <= 2)))
(check-equal? (concrete-grammar-e (binop (gt)
(number 1)
(number 2)))
(term (1 > 2)))
(check-equal? (concrete-grammar-e (binop (ge)
(number 1)
(number 2)))
(term (1 >= 2)))
(check-equal? (concrete-grammar-e (binop (eq)
(number 1)
(number 2)))
(term (1 == 2)))
(check-equal? (concrete-grammar-e (binop (\\and)
(number 1)
(number 2)))
(term (1 and 2)))
(check-equal? (concrete-grammar-e (binop (\\or)
(number 1)
(number 2)))
(term (1 or 2)))
(check-equal? (concrete-grammar-e (binop (str-concat)
(number "1")
(number "2")))
(term ("1" .. "2")))
(check-equal? (concrete-grammar-e (unop (unm)
(number 1)))
(term (- 1)))
(check-equal? (concrete-grammar-e (unop (\\not)
(true)))
(term (not true)))
(check-equal? (concrete-grammar-e (unop (len)
(number "asd")))
(term (\# "asd")))
(check-equal? (concrete-grammar-e (tuple (exps (list (number 1) (number 2)))))
(term (< 1 2 >)))
(check-equal? (concrete-grammar-e (tableconstructor
(fields (list
(kv-table-field
(number 1)
(number 2))
(v-table-field (number 3))))))
(term (\{ (\[ 1 \] = 2) 3 \})))
(check-equal? (concrete-grammar-e (func-def (id-name 'x)
(params (exps (list (id-name 'y))))
(skip)))
)
(provide phrases-constructors-test-suite) |
da7f878f66b31b1c0b7ba214ef9c8662af08e70d6e28aebcc50b437ce4de142f | abridgewater/nq-clim | manageable-frame-mixin.lisp | ;;;
nq - clim / frame / manageable - frame - mixin
;;;
;;; Application-frame side support for frame management.
;;;
(cl:defpackage :nq-clim/frame/manageable-frame-mixin
(:use :cl
:nq-clim/frame/application-frame
:nq-clim/frame/manageable-frame-functions)
(:export
"MANAGEABLE-FRAME-MIXIN"))
(cl:in-package :nq-clim/frame/manageable-frame-mixin)
(defclass manageable-frame-mixin ()
((top-level-sheet :initform nil :accessor frame-top-level-sheet)))
EOF
| null | https://raw.githubusercontent.com/abridgewater/nq-clim/11d339fd0ac77b6d624fc5537b170294a191a3de/frame/manageable-frame-mixin.lisp | lisp |
Application-frame side support for frame management.
| nq - clim / frame / manageable - frame - mixin
(cl:defpackage :nq-clim/frame/manageable-frame-mixin
(:use :cl
:nq-clim/frame/application-frame
:nq-clim/frame/manageable-frame-functions)
(:export
"MANAGEABLE-FRAME-MIXIN"))
(cl:in-package :nq-clim/frame/manageable-frame-mixin)
(defclass manageable-frame-mixin ()
((top-level-sheet :initform nil :accessor frame-top-level-sheet)))
EOF
|
e2e884ae6a67358ff0d750c44a2a72d9f1c02f78ca4dbbe217af023d7a28c14a | modular-macros/ocaml-macros | w50.ml | module A : sig end = struct
module L = List
module X1 = struct end
module Y1 = X1
end
| null | https://raw.githubusercontent.com/modular-macros/ocaml-macros/05372c7248b5a7b1aa507b3c581f710380f17fcd/testsuite/tests/warnings/w50.ml | ocaml | module A : sig end = struct
module L = List
module X1 = struct end
module Y1 = X1
end
| |
f3bb83bd7adae4764e5b0b183c332ee3bc002870be250112af9e65207b475c55 | riverford/compound | one_to_many.cljc | (ns compound.secondary-indexes.one-to-many
(:require [compound.custom-key :as cu]
[compound.secondary-indexes :as csi]
[clojure.spec.alpha :as s]))
(s/def ::key ::csi/key)
(s/def ::custom-key ::csi/custom-key)
(s/def ::id any?)
(defmethod csi/spec :compound/one-to-many
[_]
(s/keys :req-un [(or ::key ::custom-key)]
:opt-un [::id]))
(defmethod csi/empty :compound/one-to-many
[index-def]
{})
(defmethod csi/id :compound/one-to-many
[index-def]
(or (:id index-def)
(:custom-key index-def)
(:key index-def)))
(defmethod csi/add :compound/one-to-many
[index index-def added]
(let [{:keys [key custom-key]} index-def
key-fn (csi/key-fn index-def)
new-index (reduce (fn add-items [index item]
(let [k (key-fn item)
existing-items (get index k #{})]
(assoc! index k (conj existing-items item))))
(transient index)
added)]
(persistent! new-index)))
(defmethod csi/remove :compound/one-to-many
[index index-def removed]
(let [{:keys [key custom-key]} index-def
key-fn (csi/key-fn index-def)
new-index (reduce (fn remove-items [index item]
(let [k (key-fn item)
existing-items (get index k #{})
new-items (disj existing-items item)]
(if (empty? new-items)
(dissoc! index k)
(assoc! index k new-items))))
(transient index)
removed)]
(persistent! new-index)))
| null | https://raw.githubusercontent.com/riverford/compound/cab3f3cbdca041b70fdc9c02f474c0b7c8226d73/src/compound/secondary_indexes/one_to_many.cljc | clojure | (ns compound.secondary-indexes.one-to-many
(:require [compound.custom-key :as cu]
[compound.secondary-indexes :as csi]
[clojure.spec.alpha :as s]))
(s/def ::key ::csi/key)
(s/def ::custom-key ::csi/custom-key)
(s/def ::id any?)
(defmethod csi/spec :compound/one-to-many
[_]
(s/keys :req-un [(or ::key ::custom-key)]
:opt-un [::id]))
(defmethod csi/empty :compound/one-to-many
[index-def]
{})
(defmethod csi/id :compound/one-to-many
[index-def]
(or (:id index-def)
(:custom-key index-def)
(:key index-def)))
(defmethod csi/add :compound/one-to-many
[index index-def added]
(let [{:keys [key custom-key]} index-def
key-fn (csi/key-fn index-def)
new-index (reduce (fn add-items [index item]
(let [k (key-fn item)
existing-items (get index k #{})]
(assoc! index k (conj existing-items item))))
(transient index)
added)]
(persistent! new-index)))
(defmethod csi/remove :compound/one-to-many
[index index-def removed]
(let [{:keys [key custom-key]} index-def
key-fn (csi/key-fn index-def)
new-index (reduce (fn remove-items [index item]
(let [k (key-fn item)
existing-items (get index k #{})
new-items (disj existing-items item)]
(if (empty? new-items)
(dissoc! index k)
(assoc! index k new-items))))
(transient index)
removed)]
(persistent! new-index)))
| |
8af9023485d70be660c32de138094060e6da1336c541d5e3800efcfe8fe7b782 | TrustInSoft/tis-interpreter | gui_types.ml | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
type gui_callstack =
| GC_Filtered (* Some results have been hidden by a filter *)
| GC_Consolidated (* Join of all possible callstacks *)
| GC_Single of Value_types.callstack (* Only one callstack possible here *)
One of multiple callstacks
let hash_gui_callstack = function
| GC_Filtered -> 0
| GC_Consolidated -> 1
| GC_Single cs -> 2 * Value_types.Callstack.hash cs
| GC_Callstack cs -> 4 * Value_types.Callstack.hash cs
let compare_gui_callstack cs1 cs2 = match cs1, cs2 with
| GC_Filtered, GC_Filtered -> 0
| GC_Consolidated, GC_Consolidated -> 0
| GC_Single cs1, GC_Single cs2 | GC_Callstack cs1, GC_Callstack cs2 ->
Value_types.Callstack.compare cs1 cs2
| _, GC_Filtered -> 1
| GC_Filtered, _ -> -1
| _, GC_Consolidated -> 1
| GC_Consolidated, _ -> -1
| _, GC_Single _ -> 1
| GC_Single _, _ -> -1
module GCallstackMap = FCMap.Make(struct
type t = gui_callstack
let compare = compare_gui_callstack
end)
type gui_selection =
| GS_TLVal of term | GS_LVal of lval | GS_AbsoluteMem
| GS_Expr of exp | GS_Term of term
| GS_Predicate of Cil_types.predicate Cil_types.named
let pretty_gui_selection fmt = function
| GS_TLVal t | GS_Term t -> Printer.pp_term fmt t
| GS_LVal l -> Printer.pp_lval fmt l
| GS_AbsoluteMem -> Format.pp_print_string fmt "NULL"
| GS_Expr e -> Printer.pp_exp fmt e
| GS_Predicate p -> Printer.pp_predicate fmt p.content
let gui_selection_equal e1 e2 = match e1, e2 with
| GS_TLVal t1, GS_TLVal t2 | GS_Term t1, GS_Term t2 ->
Cil_datatype.Term.equal t1 t2
| GS_LVal lv1, GS_LVal lv2 -> Cil_datatype.Lval.equal lv1 lv2
| GS_AbsoluteMem, GS_AbsoluteMem -> true
| GS_Expr e1, GS_Expr e2 -> Cil_datatype.Exp.equal e1 e2
| GS_Predicate p1, GS_Predicate p2 ->
(* Cil_datatype.Predicate_named.equal not implemented *)
p1.content == p2.content
| (GS_TLVal _ | GS_LVal _ | GS_AbsoluteMem | GS_Expr _ | GS_Term _ |
GS_Predicate _) , _ -> false
type gui_offsetmap_res =
| GO_Bottom (* Bottom memory state *)
| GO_Empty (* Location with Empty validity (e.g. empty struct) *)
State or size was Top
| GO_InvalidLoc (* Location is always invalid *)
| GO_Offsetmap of Cvalue.V_Offsetmap.t (* Normal result *)
let equal_gui_offsetmap_res r1 r2 = match r1, r2 with
| GO_Bottom, GO_Bottom -> true
| GO_Empty, GO_Empty -> true
| GO_Top, GO_Top -> true
| GO_InvalidLoc, GO_InvalidLoc -> true
| GO_Offsetmap o1, GO_Offsetmap o2 -> Cvalue.V_Offsetmap.equal o1 o2
| (GO_Bottom | GO_Empty | GO_Top | GO_InvalidLoc | GO_Offsetmap _), _ -> false
let pretty_gui_offsetmap_res ?typ fmt r =
match r with
| GO_Bottom -> Format.pp_print_string fmt "<BOTTOM>"
| GO_Empty -> Format.pp_print_string fmt "<EMPTY>"
| GO_InvalidLoc -> Format.pp_print_string fmt "<INVALID LOCATION>"
| GO_Top -> Format.pp_print_string fmt "<NO INFORMATION>"
| GO_Offsetmap off ->
Cvalue.V_Offsetmap.pretty_generic ?typ () fmt off;
match typ with
| None -> ()
| Some typ -> Eval_op.pretty_stitched_offsetmap fmt typ off
(* Some cases are impossible because of conflicting sizes *)
let join_gui_offsetmap_res r1 r2 = match r1, r2 with
| GO_Top, _ | _, GO_Top -> GO_Top
| GO_Bottom, x | x, GO_Bottom -> x
| GO_InvalidLoc, x | x, GO_InvalidLoc -> x
| GO_Empty, x | x, GO_Empty -> x
| GO_Offsetmap o1, GO_Offsetmap o2 ->
GO_Offsetmap (Cvalue.V_Offsetmap.join o1 o2)
type gui_res =
| GR_Empty
| GR_Offsm of gui_offsetmap_res * typ option
| GR_Value of Cvalue.V.t * typ option
| GR_Status of Eval_terms.predicate_status
| GR_Zone of Locations.Zone.t
let pretty_gui_res fmt = function
| GR_Empty -> ()
| GR_Offsm (offsm, typ) -> pretty_gui_offsetmap_res ?typ fmt offsm
| GR_Value (v, typ) -> Cvalue.V.pretty_typ typ fmt v
| GR_Status s -> Eval_terms.pretty_predicate_status fmt s
| GR_Zone z -> Locations.Zone.pretty fmt z
let equal_gui_res r1 r2 = match r1, r2 with
| GR_Empty, GR_Empty -> true
| GR_Offsm (o1, typ1), GR_Offsm (o2, typ2) ->
equal_gui_offsetmap_res o1 o2 &&
Extlib.opt_equal Cil_datatype.Typ.equal typ1 typ2
| GR_Value (v1, typ1), GR_Value (v2, typ2) ->
Cvalue.V.equal v1 v2 && Extlib.opt_equal Cil_datatype.Typ.equal typ1 typ2
| GR_Status s1, GR_Status s2 -> Extlib.compare_basic s1 s2 = 0
| GR_Zone z1, GR_Zone z2 -> Locations.Zone.equal z1 z2
| (GR_Empty | GR_Offsm _ | GR_Value _ | GR_Status _ | GR_Zone _), _ -> false
type gui_after = GA_After of gui_res | GA_NA | GA_Unchanged
let equal_gui_after a1 a2 = match a1, a2 with
| GA_NA, GA_NA | GA_Unchanged, GA_Unchanged -> true
| GA_After r1, GA_After r2 -> equal_gui_res r1 r2
| (GA_After _ | GA_NA | GA_Unchanged), _ -> false
type gui_loc =
| GL_Stmt of kernel_function * stmt
| GL_Pre of kernel_function (* pre-state of a function *)
| GL_Post of kernel_function (* post-state of a function *)
let gui_loc_equal lm1 lm2 =
match lm1, lm2 with
| GL_Stmt (_, s1), GL_Stmt (_, s2) -> Cil_datatype.Stmt.equal s1 s2
| GL_Pre kf1, GL_Pre kf2
| GL_Post kf1, GL_Post kf2 -> Kernel_function.equal kf1 kf2
| (GL_Stmt _ | GL_Pre _ | GL_Post _), _ -> false
module Gui_loc = Datatype.Make_with_collections(struct
include Datatype.Serializable_undefined
type t = gui_loc
let name = "Value.Gui_types.Gui_loc"
let structural_descr =
let open Structural_descr in
t_sum [|
[| Kernel_function.packed_descr; Cil_datatype.Stmt.packed_descr; |];
(* GL_Pre *)
[| Kernel_function.packed_descr; |];
(* GL_Post *)
[| Kernel_function.packed_descr; |]; |]
let reprs =
let acc_kf_repr fn acc =
List.fold_left (fun acc kf -> fn kf :: acc) acc Kernel_function.reprs
in
acc_kf_repr (fun kf -> GL_Pre kf)
(acc_kf_repr (fun kf -> GL_Post kf)
(acc_kf_repr
(fun kf ->
let stmt = Kernel_function.find_first_stmt kf in
GL_Stmt (kf, stmt))
[]))
let equal = gui_loc_equal
let hash gl =
match gl with
| GL_Pre kf -> Kernel_function.hash kf
| GL_Post kf -> 7 * Kernel_function.hash kf
| GL_Stmt (kf,s) ->
Kernel_function.hash kf + 17 * Cil_datatype.Stmt.hash s
let compare gl1 gl2 =
match gl1, gl2 with
| GL_Pre kf1, GL_Pre kf2
| GL_Post kf1, GL_Post kf2 -> Kernel_function.compare kf1 kf2
| GL_Stmt (kf1, s1), GL_Stmt (kf2, s2) ->
let c = Kernel_function.compare kf1 kf2 in
if c = 0 then Cil_datatype.Stmt.compare s1 s2
else c
| GL_Pre _, (GL_Post _ | GL_Stmt _) -> 1
| (GL_Post _ | GL_Stmt _), GL_Pre _ -> -1
| GL_Stmt _, GL_Post _ -> 1
| GL_Post _, GL_Stmt _ -> -1
end)
let gui_loc_loc = function
| GL_Stmt (_, stmt) -> Cil_datatype.Stmt.loc stmt
| GL_Pre kf | GL_Post kf -> Kernel_function.get_location kf
let kf_of_gui_loc = function
| GL_Stmt (kf, _) | GL_Pre kf | GL_Post kf -> kf
(* This pretty-printer drops the toplevel kf, which is always the function
in which we are pretty-printing the expression/term *)
let pretty_callstack fmt cs =
match cs with
| [_, Kglobal] -> ()
| (_kf_cur, Kstmt callsite) :: q -> begin
let rec aux callsite = function
| (kf, callsite') :: q -> begin
Format.fprintf fmt "%a (%a)"
Kernel_function.pretty kf
Cil_datatype.Location.pretty (Cil_datatype.Stmt.loc callsite);
match callsite' with
| Kglobal -> ()
| Kstmt callsite' ->
Format.fprintf fmt " ←@ ";
aux callsite' q
end
| _ -> assert false
in
Format.fprintf fmt "@[<hv>";
aux callsite q;
Format.fprintf fmt "@]"
end
| _ -> assert false
(* This pretty-printer prints only the lists of the functions, not
the locations *)
let pretty_callstack_short fmt cs =
match cs with
| [_, Kglobal] -> ()
| (_kf_cur, Kstmt _callsite) :: q ->
Pretty_utils.pp_flowlist ~left:"@[" ~sep:" ←@ " ~right:"@]"
(fun fmt (kf, _) -> Kernel_function.pretty fmt kf) fmt q
| _ -> assert false
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/value/gui_files/gui_types.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
Some results have been hidden by a filter
Join of all possible callstacks
Only one callstack possible here
Cil_datatype.Predicate_named.equal not implemented
Bottom memory state
Location with Empty validity (e.g. empty struct)
Location is always invalid
Normal result
Some cases are impossible because of conflicting sizes
pre-state of a function
post-state of a function
GL_Pre
GL_Post
This pretty-printer drops the toplevel kf, which is always the function
in which we are pretty-printing the expression/term
This pretty-printer prints only the lists of the functions, not
the locations
Local Variables:
compile-command: "make -C ../../.."
End:
| Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
type gui_callstack =
One of multiple callstacks
let hash_gui_callstack = function
| GC_Filtered -> 0
| GC_Consolidated -> 1
| GC_Single cs -> 2 * Value_types.Callstack.hash cs
| GC_Callstack cs -> 4 * Value_types.Callstack.hash cs
let compare_gui_callstack cs1 cs2 = match cs1, cs2 with
| GC_Filtered, GC_Filtered -> 0
| GC_Consolidated, GC_Consolidated -> 0
| GC_Single cs1, GC_Single cs2 | GC_Callstack cs1, GC_Callstack cs2 ->
Value_types.Callstack.compare cs1 cs2
| _, GC_Filtered -> 1
| GC_Filtered, _ -> -1
| _, GC_Consolidated -> 1
| GC_Consolidated, _ -> -1
| _, GC_Single _ -> 1
| GC_Single _, _ -> -1
module GCallstackMap = FCMap.Make(struct
type t = gui_callstack
let compare = compare_gui_callstack
end)
type gui_selection =
| GS_TLVal of term | GS_LVal of lval | GS_AbsoluteMem
| GS_Expr of exp | GS_Term of term
| GS_Predicate of Cil_types.predicate Cil_types.named
let pretty_gui_selection fmt = function
| GS_TLVal t | GS_Term t -> Printer.pp_term fmt t
| GS_LVal l -> Printer.pp_lval fmt l
| GS_AbsoluteMem -> Format.pp_print_string fmt "NULL"
| GS_Expr e -> Printer.pp_exp fmt e
| GS_Predicate p -> Printer.pp_predicate fmt p.content
let gui_selection_equal e1 e2 = match e1, e2 with
| GS_TLVal t1, GS_TLVal t2 | GS_Term t1, GS_Term t2 ->
Cil_datatype.Term.equal t1 t2
| GS_LVal lv1, GS_LVal lv2 -> Cil_datatype.Lval.equal lv1 lv2
| GS_AbsoluteMem, GS_AbsoluteMem -> true
| GS_Expr e1, GS_Expr e2 -> Cil_datatype.Exp.equal e1 e2
| GS_Predicate p1, GS_Predicate p2 ->
p1.content == p2.content
| (GS_TLVal _ | GS_LVal _ | GS_AbsoluteMem | GS_Expr _ | GS_Term _ |
GS_Predicate _) , _ -> false
type gui_offsetmap_res =
State or size was Top
let equal_gui_offsetmap_res r1 r2 = match r1, r2 with
| GO_Bottom, GO_Bottom -> true
| GO_Empty, GO_Empty -> true
| GO_Top, GO_Top -> true
| GO_InvalidLoc, GO_InvalidLoc -> true
| GO_Offsetmap o1, GO_Offsetmap o2 -> Cvalue.V_Offsetmap.equal o1 o2
| (GO_Bottom | GO_Empty | GO_Top | GO_InvalidLoc | GO_Offsetmap _), _ -> false
let pretty_gui_offsetmap_res ?typ fmt r =
match r with
| GO_Bottom -> Format.pp_print_string fmt "<BOTTOM>"
| GO_Empty -> Format.pp_print_string fmt "<EMPTY>"
| GO_InvalidLoc -> Format.pp_print_string fmt "<INVALID LOCATION>"
| GO_Top -> Format.pp_print_string fmt "<NO INFORMATION>"
| GO_Offsetmap off ->
Cvalue.V_Offsetmap.pretty_generic ?typ () fmt off;
match typ with
| None -> ()
| Some typ -> Eval_op.pretty_stitched_offsetmap fmt typ off
let join_gui_offsetmap_res r1 r2 = match r1, r2 with
| GO_Top, _ | _, GO_Top -> GO_Top
| GO_Bottom, x | x, GO_Bottom -> x
| GO_InvalidLoc, x | x, GO_InvalidLoc -> x
| GO_Empty, x | x, GO_Empty -> x
| GO_Offsetmap o1, GO_Offsetmap o2 ->
GO_Offsetmap (Cvalue.V_Offsetmap.join o1 o2)
type gui_res =
| GR_Empty
| GR_Offsm of gui_offsetmap_res * typ option
| GR_Value of Cvalue.V.t * typ option
| GR_Status of Eval_terms.predicate_status
| GR_Zone of Locations.Zone.t
let pretty_gui_res fmt = function
| GR_Empty -> ()
| GR_Offsm (offsm, typ) -> pretty_gui_offsetmap_res ?typ fmt offsm
| GR_Value (v, typ) -> Cvalue.V.pretty_typ typ fmt v
| GR_Status s -> Eval_terms.pretty_predicate_status fmt s
| GR_Zone z -> Locations.Zone.pretty fmt z
let equal_gui_res r1 r2 = match r1, r2 with
| GR_Empty, GR_Empty -> true
| GR_Offsm (o1, typ1), GR_Offsm (o2, typ2) ->
equal_gui_offsetmap_res o1 o2 &&
Extlib.opt_equal Cil_datatype.Typ.equal typ1 typ2
| GR_Value (v1, typ1), GR_Value (v2, typ2) ->
Cvalue.V.equal v1 v2 && Extlib.opt_equal Cil_datatype.Typ.equal typ1 typ2
| GR_Status s1, GR_Status s2 -> Extlib.compare_basic s1 s2 = 0
| GR_Zone z1, GR_Zone z2 -> Locations.Zone.equal z1 z2
| (GR_Empty | GR_Offsm _ | GR_Value _ | GR_Status _ | GR_Zone _), _ -> false
type gui_after = GA_After of gui_res | GA_NA | GA_Unchanged
let equal_gui_after a1 a2 = match a1, a2 with
| GA_NA, GA_NA | GA_Unchanged, GA_Unchanged -> true
| GA_After r1, GA_After r2 -> equal_gui_res r1 r2
| (GA_After _ | GA_NA | GA_Unchanged), _ -> false
type gui_loc =
| GL_Stmt of kernel_function * stmt
let gui_loc_equal lm1 lm2 =
match lm1, lm2 with
| GL_Stmt (_, s1), GL_Stmt (_, s2) -> Cil_datatype.Stmt.equal s1 s2
| GL_Pre kf1, GL_Pre kf2
| GL_Post kf1, GL_Post kf2 -> Kernel_function.equal kf1 kf2
| (GL_Stmt _ | GL_Pre _ | GL_Post _), _ -> false
module Gui_loc = Datatype.Make_with_collections(struct
include Datatype.Serializable_undefined
type t = gui_loc
let name = "Value.Gui_types.Gui_loc"
let structural_descr =
let open Structural_descr in
t_sum [|
[| Kernel_function.packed_descr; Cil_datatype.Stmt.packed_descr; |];
[| Kernel_function.packed_descr; |];
[| Kernel_function.packed_descr; |]; |]
let reprs =
let acc_kf_repr fn acc =
List.fold_left (fun acc kf -> fn kf :: acc) acc Kernel_function.reprs
in
acc_kf_repr (fun kf -> GL_Pre kf)
(acc_kf_repr (fun kf -> GL_Post kf)
(acc_kf_repr
(fun kf ->
let stmt = Kernel_function.find_first_stmt kf in
GL_Stmt (kf, stmt))
[]))
let equal = gui_loc_equal
let hash gl =
match gl with
| GL_Pre kf -> Kernel_function.hash kf
| GL_Post kf -> 7 * Kernel_function.hash kf
| GL_Stmt (kf,s) ->
Kernel_function.hash kf + 17 * Cil_datatype.Stmt.hash s
let compare gl1 gl2 =
match gl1, gl2 with
| GL_Pre kf1, GL_Pre kf2
| GL_Post kf1, GL_Post kf2 -> Kernel_function.compare kf1 kf2
| GL_Stmt (kf1, s1), GL_Stmt (kf2, s2) ->
let c = Kernel_function.compare kf1 kf2 in
if c = 0 then Cil_datatype.Stmt.compare s1 s2
else c
| GL_Pre _, (GL_Post _ | GL_Stmt _) -> 1
| (GL_Post _ | GL_Stmt _), GL_Pre _ -> -1
| GL_Stmt _, GL_Post _ -> 1
| GL_Post _, GL_Stmt _ -> -1
end)
let gui_loc_loc = function
| GL_Stmt (_, stmt) -> Cil_datatype.Stmt.loc stmt
| GL_Pre kf | GL_Post kf -> Kernel_function.get_location kf
let kf_of_gui_loc = function
| GL_Stmt (kf, _) | GL_Pre kf | GL_Post kf -> kf
let pretty_callstack fmt cs =
match cs with
| [_, Kglobal] -> ()
| (_kf_cur, Kstmt callsite) :: q -> begin
let rec aux callsite = function
| (kf, callsite') :: q -> begin
Format.fprintf fmt "%a (%a)"
Kernel_function.pretty kf
Cil_datatype.Location.pretty (Cil_datatype.Stmt.loc callsite);
match callsite' with
| Kglobal -> ()
| Kstmt callsite' ->
Format.fprintf fmt " ←@ ";
aux callsite' q
end
| _ -> assert false
in
Format.fprintf fmt "@[<hv>";
aux callsite q;
Format.fprintf fmt "@]"
end
| _ -> assert false
let pretty_callstack_short fmt cs =
match cs with
| [_, Kglobal] -> ()
| (_kf_cur, Kstmt _callsite) :: q ->
Pretty_utils.pp_flowlist ~left:"@[" ~sep:" ←@ " ~right:"@]"
(fun fmt (kf, _) -> Kernel_function.pretty fmt kf) fmt q
| _ -> assert false
|
a3edd627bfea8e122aaf2ed1354e4147ef6cbe89eb8c53dc9e6c94c737e06f99 | stumpwm/stumpwm-contrib | package.lisp | package.lisp
(defpackage #:mpd
(:use #:cl :stumpwm))
| null | https://raw.githubusercontent.com/stumpwm/stumpwm-contrib/a7dc1c663d04e6c73a4772c8a6ad56a34381096a/minor-mode/mpd/package.lisp | lisp | package.lisp
(defpackage #:mpd
(:use #:cl :stumpwm))
| |
96d018e99de50878a4c1f2754a3ace8d53376335c21f5aeb41045f2f58bfc0f6 | basho/riak_kv | riak_kv_pb_counter.erl | %% -------------------------------------------------------------------
%%
%% riak_kv_pb_counter: Expose counters over Protocol Buffers
%%
Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
@doc < p > The Counter PB service for Riak KV . This covers the
%% following request messages:</p>
%%
%% <pre>
50 - RpbCounterUpdateReq
52 - RpbCounterGetReq
%% </pre>
%%
%% <p>This service produces the following responses:</p>
%%
%% <pre>
51 - RpbCounterUpdateResp - 0 length
53 - RpbCounterGetResp
%% </pre>
%%
%% @end
-module(riak_kv_pb_counter).
-include_lib("riak_pb/include/riak_kv_pb.hrl").
-include_lib("riak_pb/include/riak_pb_kv_codec.hrl").
-include("riak_kv_types.hrl").
-behaviour(riak_api_pb_service).
-export([init/0,
decode/2,
encode/1,
process/2,
process_stream/3]).
-import(riak_pb_kv_codec, [decode_quorum/1]).
-record(state, {client}).
-define(DEFAULT_TIMEOUT, 60000).
%% @doc init/0 callback. Returns the service internal start
%% state.
-spec init() -> any().
init() ->
{ok, C} = riak:local_client(),
#state{client=C}.
@doc decode/2 callback . an incoming message .
decode(Code, Bin) ->
Msg = riak_pb_codec:decode(Code, Bin),
%% no special permissions for counters, just get/put
case Msg of
#rpbcountergetreq{bucket=B} ->
Bucket = bucket_type(B),
{ok, Msg, {"riak_kv.get", Bucket}};
#rpbcounterupdatereq{bucket=B} ->
Bucket = bucket_type(B),
{ok, Msg, {"riak_kv.put", Bucket}}
end.
%% @doc encode/1 callback. Encodes an outgoing response message.
encode(Message) ->
{ok, riak_pb_codec:encode(Message)}.
%% @doc process/2 callback. Handles an incoming request message.
process(#rpbcountergetreq{bucket=B, key=K, r=R0, pr=PR0,
notfound_ok=NFOk,
node_confirms=NC,
basic_quorum=BQ},
#state{client=C} = State) ->
case lists:member(pncounter, riak_core_capability:get({riak_kv, crdt}, [])) of
true ->
R = decode_quorum(R0),
PR = decode_quorum(PR0),
Options = make_option(r, R) ++
make_option(pr, PR) ++
make_option(notfound_ok, NFOk) ++
make_option(node_confirms, NC) ++
make_option(basic_quorum, BQ),
case riak_client:get(B, K, Options, C) of
{ok, O} ->
{{_Ctx, Value}, _} = riak_kv_crdt:value(O, ?V1_COUNTER_TYPE),
{reply, #rpbcountergetresp{value = Value}, State};
{error, notfound} ->
{reply, #rpbcountergetresp{}, State};
{error, Reason} ->
{error, {format,Reason}, State}
end;
false ->
{error, {format, "Counters are not supported"}, State}
end;
process(#rpbcounterupdatereq{bucket=B, key=K, w=W0, dw=DW0, pw=PW0,
node_confirms=NodeConfirms0,
amount=CounterOp,
returnvalue=RetVal},
#state{client=C} = State) ->
case {allow_mult(B), lists:member(pncounter, riak_core_capability:get({riak_kv, crdt}, []))} of
{true, true} ->
O = riak_kv_crdt:new(B, K, ?V1_COUNTER_TYPE),
erlang_protobuffs encodes as 1/0 / undefined
W = decode_quorum(W0),
DW = decode_quorum(DW0),
PW = decode_quorum(PW0),
NodeConfirms = decode_quorum(NodeConfirms0),
Options = [{counter_op, CounterOp}] ++ return_value(RetVal),
Opts =
make_option(w, W) ++
make_option(dw, DW) ++
make_option(node_confirms, NodeConfirms) ++
make_option(pw, PW) ++
[{timeout, default_timeout()},
{retry_put_coordinator_failure, false} | Options],
case riak_client:put(O, Opts, C) of
ok ->
{reply, #rpbcounterupdateresp{}, State};
{ok, RObj} ->
{{_Ctx, Value}, _} = riak_kv_crdt:value(RObj, ?V1_COUNTER_TYPE),
{reply, #rpbcounterupdateresp{value=Value}, State};
{error, notfound} ->
{reply, #rpbcounterupdateresp{}, State};
{error, Reason} ->
{error, {format, Reason}, State}
end;
{_, false} ->
{error, {format, "Counters are not supported"}, State};
{false, true} ->
{error, {format, "Counters require bucket property 'allow_mult=true'"}, State}
end.
return_value(true) ->
[returnbody];
return_value(_) ->
[].
allow_mult(Bucket) ->
proplists:get_value(allow_mult, riak_core_bucket:get_bucket(Bucket)).
%% @doc process_stream/3 callback. This service does not create any
%% streaming responses and so ignores all incoming messages.
process_stream(_,_,State) ->
{ignore, State}.
%% ===================================================================
Internal functions
%% ===================================================================
%% return a key/value tuple that we can ++ to other options so long as the
%% value is not default or undefined -- those values are pulled from the
%% bucket by the get/put FSMs.
make_option(_, undefined) ->
[];
make_option(_, default) ->
[];
make_option(K, V) ->
[{K, V}].
default_timeout() ->
?DEFAULT_TIMEOUT.
%% always construct {Type, Bucket} tuple, filling in default type if needed
bucket_type(B) ->
{<<"default">>, B}.
| null | https://raw.githubusercontent.com/basho/riak_kv/aeef1591704d32230b773d952a2f1543cbfa1889/src/riak_kv_pb_counter.erl | erlang | -------------------------------------------------------------------
riak_kv_pb_counter: Expose counters over Protocol Buffers
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
following request messages:</p>
<pre>
</pre>
<p>This service produces the following responses:</p>
<pre>
</pre>
@end
@doc init/0 callback. Returns the service internal start
state.
no special permissions for counters, just get/put
@doc encode/1 callback. Encodes an outgoing response message.
@doc process/2 callback. Handles an incoming request message.
@doc process_stream/3 callback. This service does not create any
streaming responses and so ignores all incoming messages.
===================================================================
===================================================================
return a key/value tuple that we can ++ to other options so long as the
value is not default or undefined -- those values are pulled from the
bucket by the get/put FSMs.
always construct {Type, Bucket} tuple, filling in default type if needed | Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc < p > The Counter PB service for Riak KV . This covers the
50 - RpbCounterUpdateReq
52 - RpbCounterGetReq
51 - RpbCounterUpdateResp - 0 length
53 - RpbCounterGetResp
-module(riak_kv_pb_counter).
-include_lib("riak_pb/include/riak_kv_pb.hrl").
-include_lib("riak_pb/include/riak_pb_kv_codec.hrl").
-include("riak_kv_types.hrl").
-behaviour(riak_api_pb_service).
-export([init/0,
decode/2,
encode/1,
process/2,
process_stream/3]).
-import(riak_pb_kv_codec, [decode_quorum/1]).
-record(state, {client}).
-define(DEFAULT_TIMEOUT, 60000).
-spec init() -> any().
init() ->
{ok, C} = riak:local_client(),
#state{client=C}.
@doc decode/2 callback . an incoming message .
decode(Code, Bin) ->
Msg = riak_pb_codec:decode(Code, Bin),
case Msg of
#rpbcountergetreq{bucket=B} ->
Bucket = bucket_type(B),
{ok, Msg, {"riak_kv.get", Bucket}};
#rpbcounterupdatereq{bucket=B} ->
Bucket = bucket_type(B),
{ok, Msg, {"riak_kv.put", Bucket}}
end.
encode(Message) ->
{ok, riak_pb_codec:encode(Message)}.
process(#rpbcountergetreq{bucket=B, key=K, r=R0, pr=PR0,
notfound_ok=NFOk,
node_confirms=NC,
basic_quorum=BQ},
#state{client=C} = State) ->
case lists:member(pncounter, riak_core_capability:get({riak_kv, crdt}, [])) of
true ->
R = decode_quorum(R0),
PR = decode_quorum(PR0),
Options = make_option(r, R) ++
make_option(pr, PR) ++
make_option(notfound_ok, NFOk) ++
make_option(node_confirms, NC) ++
make_option(basic_quorum, BQ),
case riak_client:get(B, K, Options, C) of
{ok, O} ->
{{_Ctx, Value}, _} = riak_kv_crdt:value(O, ?V1_COUNTER_TYPE),
{reply, #rpbcountergetresp{value = Value}, State};
{error, notfound} ->
{reply, #rpbcountergetresp{}, State};
{error, Reason} ->
{error, {format,Reason}, State}
end;
false ->
{error, {format, "Counters are not supported"}, State}
end;
process(#rpbcounterupdatereq{bucket=B, key=K, w=W0, dw=DW0, pw=PW0,
node_confirms=NodeConfirms0,
amount=CounterOp,
returnvalue=RetVal},
#state{client=C} = State) ->
case {allow_mult(B), lists:member(pncounter, riak_core_capability:get({riak_kv, crdt}, []))} of
{true, true} ->
O = riak_kv_crdt:new(B, K, ?V1_COUNTER_TYPE),
erlang_protobuffs encodes as 1/0 / undefined
W = decode_quorum(W0),
DW = decode_quorum(DW0),
PW = decode_quorum(PW0),
NodeConfirms = decode_quorum(NodeConfirms0),
Options = [{counter_op, CounterOp}] ++ return_value(RetVal),
Opts =
make_option(w, W) ++
make_option(dw, DW) ++
make_option(node_confirms, NodeConfirms) ++
make_option(pw, PW) ++
[{timeout, default_timeout()},
{retry_put_coordinator_failure, false} | Options],
case riak_client:put(O, Opts, C) of
ok ->
{reply, #rpbcounterupdateresp{}, State};
{ok, RObj} ->
{{_Ctx, Value}, _} = riak_kv_crdt:value(RObj, ?V1_COUNTER_TYPE),
{reply, #rpbcounterupdateresp{value=Value}, State};
{error, notfound} ->
{reply, #rpbcounterupdateresp{}, State};
{error, Reason} ->
{error, {format, Reason}, State}
end;
{_, false} ->
{error, {format, "Counters are not supported"}, State};
{false, true} ->
{error, {format, "Counters require bucket property 'allow_mult=true'"}, State}
end.
return_value(true) ->
[returnbody];
return_value(_) ->
[].
allow_mult(Bucket) ->
proplists:get_value(allow_mult, riak_core_bucket:get_bucket(Bucket)).
process_stream(_,_,State) ->
{ignore, State}.
Internal functions
make_option(_, undefined) ->
[];
make_option(_, default) ->
[];
make_option(K, V) ->
[{K, V}].
default_timeout() ->
?DEFAULT_TIMEOUT.
bucket_type(B) ->
{<<"default">>, B}.
|
52ecad0972d378da811263029494a9009f1b6ddefeec74bd932d2853681bdb91 | GaloisInc/ivory | Type.hs | # LANGUAGE DeriveFunctor #
# LANGUAGE TemplateHaskell #
module Ivory.Language.Syntax.Type where
import Language.Haskell.TH.Lift (deriveLiftMany)
-- Types -----------------------------------------------------------------------
data Type
= TyVoid -- ^ Unit type
| TyInt IntSize -- ^ Signed ints
| TyWord WordSize -- ^ Unsigned ints
| TyIndex Integer -- ^ Indices with an upper bound
^ Booleans
| TyChar -- ^ Characters
| TyFloat -- ^ Floats
| TyDouble -- ^ Doubles
| TyProc Type [Type] -- ^ Procedures
| TyRef Type -- ^ References
| TyConstRef Type -- ^ Constant References
| TyPtr Type -- ^ Pointers
^
| TyArr Int Type -- ^ Arrays
| TyStruct String -- ^ Structures
| TyCArray Type -- ^ C Arrays
| TyOpaque -- ^ Opaque type---not implementable.
deriving (Show, Eq, Ord)
data IntSize
= Int8
| Int16
| Int32
| Int64
deriving (Show,Eq,Ord)
data WordSize
= Word8
| Word16
| Word32
| Word64
deriving (Show,Eq,Ord)
data Typed a = Typed
{ tType :: Type
, tValue :: a
} deriving (Show,Functor,Eq,Ord)
TH Lifting ------------------------------------------------------------------
deriveLiftMany [ ''Type, ''IntSize, ''WordSize, ''Typed ]
| null | https://raw.githubusercontent.com/GaloisInc/ivory/53a0795b4fbeb0b7da0f6cdaccdde18849a78cd6/ivory/src/Ivory/Language/Syntax/Type.hs | haskell | Types -----------------------------------------------------------------------
^ Unit type
^ Signed ints
^ Unsigned ints
^ Indices with an upper bound
^ Characters
^ Floats
^ Doubles
^ Procedures
^ References
^ Constant References
^ Pointers
^ Arrays
^ Structures
^ C Arrays
^ Opaque type---not implementable.
---------------------------------------------------------------- | # LANGUAGE DeriveFunctor #
# LANGUAGE TemplateHaskell #
module Ivory.Language.Syntax.Type where
import Language.Haskell.TH.Lift (deriveLiftMany)
data Type
^ Booleans
^
deriving (Show, Eq, Ord)
data IntSize
= Int8
| Int16
| Int32
| Int64
deriving (Show,Eq,Ord)
data WordSize
= Word8
| Word16
| Word32
| Word64
deriving (Show,Eq,Ord)
data Typed a = Typed
{ tType :: Type
, tValue :: a
} deriving (Show,Functor,Eq,Ord)
deriveLiftMany [ ''Type, ''IntSize, ''WordSize, ''Typed ]
|
ca1d98e9630738f1069a3edf374fdfda23f1b6bc44e38411f683fc49683f0c53 | ocaml/odoc | b.mli | * Module B depends on A and Lib .
type t = Lib.A.t
| null | https://raw.githubusercontent.com/ocaml/odoc/ed60dca7bdc7670953433ba60f63faf008d20a71/test/integration/depends.t/b.mli | ocaml | * Module B depends on A and Lib .
type t = Lib.A.t
| |
267b0ef216b1ced1bd6acfb5f5299756a64eb44d4cfaf77e3b46cc09fed5b68c | stephenpascoe/hs-arrow | UInt8DataType.hs |
|
Copyright : , and
License : LGPL-2.1
Maintainer : ( )
/No description available in the introspection data./
Copyright : Will Thompson, Iñaki García Etxebarria and Jonas Platte
License : LGPL-2.1
Maintainer : Iñaki García Etxebarria ()
/No description available in the introspection data./
-}
#define ENABLE_OVERLOADING (MIN_VERSION_haskell_gi_overloading(1,0,0) \
&& !defined(__HADDOCK_VERSION__))
module GI.Arrow.Objects.UInt8DataType
(
-- * Exported types
UInt8DataType(..) ,
IsUInt8DataType ,
toUInt8DataType ,
noUInt8DataType ,
-- * Methods
* * new # method : new #
uInt8DataTypeNew ,
) where
import Data.GI.Base.ShortPrelude
import qualified Data.GI.Base.ShortPrelude as SP
import qualified Data.GI.Base.Overloading as O
import qualified Prelude as P
import qualified Data.GI.Base.Attributes as GI.Attributes
import qualified Data.GI.Base.ManagedPtr as B.ManagedPtr
import qualified Data.GI.Base.GError as B.GError
import qualified Data.GI.Base.GVariant as B.GVariant
import qualified Data.GI.Base.GValue as B.GValue
import qualified Data.GI.Base.GParamSpec as B.GParamSpec
import qualified Data.GI.Base.CallStack as B.CallStack
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Foreign.Ptr as FP
import {-# SOURCE #-} qualified GI.Arrow.Objects.DataType as Arrow.DataType
import {-# SOURCE #-} qualified GI.Arrow.Objects.FixedWidthDataType as Arrow.FixedWidthDataType
import {-# SOURCE #-} qualified GI.Arrow.Objects.IntegerDataType as Arrow.IntegerDataType
import {-# SOURCE #-} qualified GI.Arrow.Objects.NumericDataType as Arrow.NumericDataType
import qualified GI.GObject.Objects.Object as GObject.Object
-- | Memory-managed wrapper type.
newtype UInt8DataType = UInt8DataType (ManagedPtr UInt8DataType)
foreign import ccall "garrow_uint8_data_type_get_type"
c_garrow_uint8_data_type_get_type :: IO GType
instance GObject UInt8DataType where
gobjectType _ = c_garrow_uint8_data_type_get_type
| Type class for types which can be safely cast to ` UInt8DataType ` , for instance with ` toUInt8DataType ` .
class GObject o => IsUInt8DataType o
#if MIN_VERSION_base(4,9,0)
instance {-# OVERLAPPABLE #-} (GObject a, O.UnknownAncestorError UInt8DataType a) =>
IsUInt8DataType a
#endif
instance IsUInt8DataType UInt8DataType
instance Arrow.IntegerDataType.IsIntegerDataType UInt8DataType
instance Arrow.NumericDataType.IsNumericDataType UInt8DataType
instance Arrow.FixedWidthDataType.IsFixedWidthDataType UInt8DataType
instance Arrow.DataType.IsDataType UInt8DataType
instance GObject.Object.IsObject UInt8DataType
| Cast to ` UInt8DataType ` , for types for which this is known to be safe . For general casts , use ` Data . . ManagedPtr.castTo ` .
toUInt8DataType :: (MonadIO m, IsUInt8DataType o) => o -> m UInt8DataType
toUInt8DataType = liftIO . unsafeCastTo UInt8DataType
| A convenience alias for ` Nothing ` : : ` Maybe ` ` UInt8DataType ` .
noUInt8DataType :: Maybe UInt8DataType
noUInt8DataType = Nothing
#if ENABLE_OVERLOADING
type family ResolveUInt8DataTypeMethod (t :: Symbol) (o :: *) :: * where
ResolveUInt8DataTypeMethod "bindProperty" o = GObject.Object.ObjectBindPropertyMethodInfo
ResolveUInt8DataTypeMethod "bindPropertyFull" o = GObject.Object.ObjectBindPropertyFullMethodInfo
ResolveUInt8DataTypeMethod "equal" o = Arrow.DataType.DataTypeEqualMethodInfo
ResolveUInt8DataTypeMethod "forceFloating" o = GObject.Object.ObjectForceFloatingMethodInfo
ResolveUInt8DataTypeMethod "freezeNotify" o = GObject.Object.ObjectFreezeNotifyMethodInfo
ResolveUInt8DataTypeMethod "getv" o = GObject.Object.ObjectGetvMethodInfo
ResolveUInt8DataTypeMethod "isFloating" o = GObject.Object.ObjectIsFloatingMethodInfo
ResolveUInt8DataTypeMethod "notify" o = GObject.Object.ObjectNotifyMethodInfo
ResolveUInt8DataTypeMethod "notifyByPspec" o = GObject.Object.ObjectNotifyByPspecMethodInfo
ResolveUInt8DataTypeMethod "ref" o = GObject.Object.ObjectRefMethodInfo
ResolveUInt8DataTypeMethod "refSink" o = GObject.Object.ObjectRefSinkMethodInfo
ResolveUInt8DataTypeMethod "runDispose" o = GObject.Object.ObjectRunDisposeMethodInfo
ResolveUInt8DataTypeMethod "stealData" o = GObject.Object.ObjectStealDataMethodInfo
ResolveUInt8DataTypeMethod "stealQdata" o = GObject.Object.ObjectStealQdataMethodInfo
ResolveUInt8DataTypeMethod "thawNotify" o = GObject.Object.ObjectThawNotifyMethodInfo
ResolveUInt8DataTypeMethod "toString" o = Arrow.DataType.DataTypeToStringMethodInfo
ResolveUInt8DataTypeMethod "unref" o = GObject.Object.ObjectUnrefMethodInfo
ResolveUInt8DataTypeMethod "watchClosure" o = GObject.Object.ObjectWatchClosureMethodInfo
ResolveUInt8DataTypeMethod "getBitWidth" o = Arrow.FixedWidthDataType.FixedWidthDataTypeGetBitWidthMethodInfo
ResolveUInt8DataTypeMethod "getData" o = GObject.Object.ObjectGetDataMethodInfo
ResolveUInt8DataTypeMethod "getId" o = Arrow.DataType.DataTypeGetIdMethodInfo
ResolveUInt8DataTypeMethod "getProperty" o = GObject.Object.ObjectGetPropertyMethodInfo
ResolveUInt8DataTypeMethod "getQdata" o = GObject.Object.ObjectGetQdataMethodInfo
ResolveUInt8DataTypeMethod "setData" o = GObject.Object.ObjectSetDataMethodInfo
ResolveUInt8DataTypeMethod "setProperty" o = GObject.Object.ObjectSetPropertyMethodInfo
ResolveUInt8DataTypeMethod l o = O.MethodResolutionFailed l o
instance (info ~ ResolveUInt8DataTypeMethod t UInt8DataType, O.MethodInfo info UInt8DataType p) => O.IsLabelProxy t (UInt8DataType -> p) where
fromLabelProxy _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#if MIN_VERSION_base(4,9,0)
instance (info ~ ResolveUInt8DataTypeMethod t UInt8DataType, O.MethodInfo info UInt8DataType p) => O.IsLabel t (UInt8DataType -> p) where
#if MIN_VERSION_base(4,10,0)
fromLabel = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#else
fromLabel _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#endif
#endif
#endif
#if ENABLE_OVERLOADING
instance O.HasAttributeList UInt8DataType
type instance O.AttributeList UInt8DataType = UInt8DataTypeAttributeList
type UInt8DataTypeAttributeList = ('[ '("dataType", Arrow.DataType.DataTypeDataTypePropertyInfo)] :: [(Symbol, *)])
#endif
#if ENABLE_OVERLOADING
#endif
#if ENABLE_OVERLOADING
type instance O.SignalList UInt8DataType = UInt8DataTypeSignalList
type UInt8DataTypeSignalList = ('[ '("notify", GObject.Object.ObjectNotifySignalInfo)] :: [(Symbol, *)])
#endif
-- method UInt8DataType::new
-- method type : Constructor
: [ ]
-- Lengths : []
-- returnType : Just (TInterface (Name {namespace = "Arrow", name = "UInt8DataType"}))
-- throws : False
-- Skip return : False
foreign import ccall "garrow_uint8_data_type_new" garrow_uint8_data_type_new ::
IO (Ptr UInt8DataType)
{- |
/No description available in the introspection data./
-}
uInt8DataTypeNew ::
(B.CallStack.HasCallStack, MonadIO m) =>
m UInt8DataType
^ _ _ Returns : _ _ The newly created 8 - bit unsigned integer data type .
uInt8DataTypeNew = liftIO $ do
result <- garrow_uint8_data_type_new
checkUnexpectedReturnNULL "uInt8DataTypeNew" result
result' <- (wrapObject UInt8DataType) result
return result'
#if ENABLE_OVERLOADING
#endif
| null | https://raw.githubusercontent.com/stephenpascoe/hs-arrow/86c7c452a8626b1d69a3cffd277078d455823271/gi-arrow/GI/Arrow/Objects/UInt8DataType.hs | haskell | * Exported types
* Methods
# SOURCE #
# SOURCE #
# SOURCE #
# SOURCE #
| Memory-managed wrapper type.
# OVERLAPPABLE #
method UInt8DataType::new
method type : Constructor
Lengths : []
returnType : Just (TInterface (Name {namespace = "Arrow", name = "UInt8DataType"}))
throws : False
Skip return : False
|
/No description available in the introspection data./
|
|
Copyright : , and
License : LGPL-2.1
Maintainer : ( )
/No description available in the introspection data./
Copyright : Will Thompson, Iñaki García Etxebarria and Jonas Platte
License : LGPL-2.1
Maintainer : Iñaki García Etxebarria ()
/No description available in the introspection data./
-}
#define ENABLE_OVERLOADING (MIN_VERSION_haskell_gi_overloading(1,0,0) \
&& !defined(__HADDOCK_VERSION__))
module GI.Arrow.Objects.UInt8DataType
(
UInt8DataType(..) ,
IsUInt8DataType ,
toUInt8DataType ,
noUInt8DataType ,
* * new # method : new #
uInt8DataTypeNew ,
) where
import Data.GI.Base.ShortPrelude
import qualified Data.GI.Base.ShortPrelude as SP
import qualified Data.GI.Base.Overloading as O
import qualified Prelude as P
import qualified Data.GI.Base.Attributes as GI.Attributes
import qualified Data.GI.Base.ManagedPtr as B.ManagedPtr
import qualified Data.GI.Base.GError as B.GError
import qualified Data.GI.Base.GVariant as B.GVariant
import qualified Data.GI.Base.GValue as B.GValue
import qualified Data.GI.Base.GParamSpec as B.GParamSpec
import qualified Data.GI.Base.CallStack as B.CallStack
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Foreign.Ptr as FP
import qualified GI.GObject.Objects.Object as GObject.Object
newtype UInt8DataType = UInt8DataType (ManagedPtr UInt8DataType)
foreign import ccall "garrow_uint8_data_type_get_type"
c_garrow_uint8_data_type_get_type :: IO GType
instance GObject UInt8DataType where
gobjectType _ = c_garrow_uint8_data_type_get_type
| Type class for types which can be safely cast to ` UInt8DataType ` , for instance with ` toUInt8DataType ` .
class GObject o => IsUInt8DataType o
#if MIN_VERSION_base(4,9,0)
IsUInt8DataType a
#endif
instance IsUInt8DataType UInt8DataType
instance Arrow.IntegerDataType.IsIntegerDataType UInt8DataType
instance Arrow.NumericDataType.IsNumericDataType UInt8DataType
instance Arrow.FixedWidthDataType.IsFixedWidthDataType UInt8DataType
instance Arrow.DataType.IsDataType UInt8DataType
instance GObject.Object.IsObject UInt8DataType
| Cast to ` UInt8DataType ` , for types for which this is known to be safe . For general casts , use ` Data . . ManagedPtr.castTo ` .
toUInt8DataType :: (MonadIO m, IsUInt8DataType o) => o -> m UInt8DataType
toUInt8DataType = liftIO . unsafeCastTo UInt8DataType
| A convenience alias for ` Nothing ` : : ` Maybe ` ` UInt8DataType ` .
noUInt8DataType :: Maybe UInt8DataType
noUInt8DataType = Nothing
#if ENABLE_OVERLOADING
type family ResolveUInt8DataTypeMethod (t :: Symbol) (o :: *) :: * where
ResolveUInt8DataTypeMethod "bindProperty" o = GObject.Object.ObjectBindPropertyMethodInfo
ResolveUInt8DataTypeMethod "bindPropertyFull" o = GObject.Object.ObjectBindPropertyFullMethodInfo
ResolveUInt8DataTypeMethod "equal" o = Arrow.DataType.DataTypeEqualMethodInfo
ResolveUInt8DataTypeMethod "forceFloating" o = GObject.Object.ObjectForceFloatingMethodInfo
ResolveUInt8DataTypeMethod "freezeNotify" o = GObject.Object.ObjectFreezeNotifyMethodInfo
ResolveUInt8DataTypeMethod "getv" o = GObject.Object.ObjectGetvMethodInfo
ResolveUInt8DataTypeMethod "isFloating" o = GObject.Object.ObjectIsFloatingMethodInfo
ResolveUInt8DataTypeMethod "notify" o = GObject.Object.ObjectNotifyMethodInfo
ResolveUInt8DataTypeMethod "notifyByPspec" o = GObject.Object.ObjectNotifyByPspecMethodInfo
ResolveUInt8DataTypeMethod "ref" o = GObject.Object.ObjectRefMethodInfo
ResolveUInt8DataTypeMethod "refSink" o = GObject.Object.ObjectRefSinkMethodInfo
ResolveUInt8DataTypeMethod "runDispose" o = GObject.Object.ObjectRunDisposeMethodInfo
ResolveUInt8DataTypeMethod "stealData" o = GObject.Object.ObjectStealDataMethodInfo
ResolveUInt8DataTypeMethod "stealQdata" o = GObject.Object.ObjectStealQdataMethodInfo
ResolveUInt8DataTypeMethod "thawNotify" o = GObject.Object.ObjectThawNotifyMethodInfo
ResolveUInt8DataTypeMethod "toString" o = Arrow.DataType.DataTypeToStringMethodInfo
ResolveUInt8DataTypeMethod "unref" o = GObject.Object.ObjectUnrefMethodInfo
ResolveUInt8DataTypeMethod "watchClosure" o = GObject.Object.ObjectWatchClosureMethodInfo
ResolveUInt8DataTypeMethod "getBitWidth" o = Arrow.FixedWidthDataType.FixedWidthDataTypeGetBitWidthMethodInfo
ResolveUInt8DataTypeMethod "getData" o = GObject.Object.ObjectGetDataMethodInfo
ResolveUInt8DataTypeMethod "getId" o = Arrow.DataType.DataTypeGetIdMethodInfo
ResolveUInt8DataTypeMethod "getProperty" o = GObject.Object.ObjectGetPropertyMethodInfo
ResolveUInt8DataTypeMethod "getQdata" o = GObject.Object.ObjectGetQdataMethodInfo
ResolveUInt8DataTypeMethod "setData" o = GObject.Object.ObjectSetDataMethodInfo
ResolveUInt8DataTypeMethod "setProperty" o = GObject.Object.ObjectSetPropertyMethodInfo
ResolveUInt8DataTypeMethod l o = O.MethodResolutionFailed l o
instance (info ~ ResolveUInt8DataTypeMethod t UInt8DataType, O.MethodInfo info UInt8DataType p) => O.IsLabelProxy t (UInt8DataType -> p) where
fromLabelProxy _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#if MIN_VERSION_base(4,9,0)
instance (info ~ ResolveUInt8DataTypeMethod t UInt8DataType, O.MethodInfo info UInt8DataType p) => O.IsLabel t (UInt8DataType -> p) where
#if MIN_VERSION_base(4,10,0)
fromLabel = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#else
fromLabel _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#endif
#endif
#endif
#if ENABLE_OVERLOADING
instance O.HasAttributeList UInt8DataType
type instance O.AttributeList UInt8DataType = UInt8DataTypeAttributeList
type UInt8DataTypeAttributeList = ('[ '("dataType", Arrow.DataType.DataTypeDataTypePropertyInfo)] :: [(Symbol, *)])
#endif
#if ENABLE_OVERLOADING
#endif
#if ENABLE_OVERLOADING
type instance O.SignalList UInt8DataType = UInt8DataTypeSignalList
type UInt8DataTypeSignalList = ('[ '("notify", GObject.Object.ObjectNotifySignalInfo)] :: [(Symbol, *)])
#endif
: [ ]
foreign import ccall "garrow_uint8_data_type_new" garrow_uint8_data_type_new ::
IO (Ptr UInt8DataType)
uInt8DataTypeNew ::
(B.CallStack.HasCallStack, MonadIO m) =>
m UInt8DataType
^ _ _ Returns : _ _ The newly created 8 - bit unsigned integer data type .
uInt8DataTypeNew = liftIO $ do
result <- garrow_uint8_data_type_new
checkUnexpectedReturnNULL "uInt8DataTypeNew" result
result' <- (wrapObject UInt8DataType) result
return result'
#if ENABLE_OVERLOADING
#endif
|
ccfc2a0d005923d688a46e6db9d7ee93fff41441f2d50165a22a35e3483751a4 | headwinds/reagent-reframe-material-ui | state.cljs | (ns devtools.formatters.state)
; - state management --------------------------------------------------------------------------------------------------------
;
; we have to maintain some state:
; a) to prevent infinite recursion in some pathological cases (-devtools/issues/2)
; b) to keep track of printed objects to visually signal circular data structures
;
; We dynamically bind *current-config* to the config passed from "outside" when entering calls to our API methods.
; Initially the state is empty, but we accumulate there a history of seen values when rendering individual values
in depth - first traversal order . See alt - printer - impl where we re - bind * current - config * for each traversal level .
; But there is a catch. For larger data structures our printing methods usually do not print everything at once.
; We can include so called "object references" which are just placeholders which can be expanded later
by DevTools UI ( when user clicks a disclosure triangle ) .
; For proper continuation in rendering of those references we have to carry our existing state over.
; We use "config" feature of custom formatters system to pass current state to future API calls.
(def ^:dynamic *current-state* nil)
(defn valid-current-state? []
(some? *current-state*))
(defn get-default-state []
{})
(defn get-current-state []
{:pre [(valid-current-state?)]}
*current-state*)
(defn update-current-state! [f & args]
{:pre [(valid-current-state?)]}
(set! *current-state* (apply f *current-state* args)))
; -- high level API ---------------------------------------------------------------------------------------------------------
(defn push-object-to-current-history! [object]
(update-current-state! update :history conj object))
(defn get-current-history []
(:history (get-current-state)))
(defn is-circular? [object]
(let [history (get-current-history)]
(some #(identical? % object) history)))
(defn ^bool prevent-recursion? []
(boolean (:prevent-recursion (get-current-state))))
(defn set-prevent-recursion [state val]
(if (some? val)
(assoc state :prevent-recursion val)
(dissoc state :prevent-recursion)))
(defn get-managed-print-level []
(:managed-print-level (get-current-state)))
(defn set-managed-print-level [state val]
(if (some? val)
(assoc state :managed-print-level val)
(dissoc state :managed-print-level)))
(defn get-depth-budget []
(:depth-budget (get-current-state)))
(defn set-depth-budget [state val]
(if (some? val)
(assoc state :depth-budget val)
(dissoc state :depth-budget)))
(defn reset-depth-limits [state]
(-> state
(set-depth-budget nil)
(set-managed-print-level nil)))
| null | https://raw.githubusercontent.com/headwinds/reagent-reframe-material-ui/8a6fba82a026cfedca38491becac85751be9a9d4/resources/public/js/out/devtools/formatters/state.cljs | clojure | - state management --------------------------------------------------------------------------------------------------------
we have to maintain some state:
a) to prevent infinite recursion in some pathological cases (-devtools/issues/2)
b) to keep track of printed objects to visually signal circular data structures
We dynamically bind *current-config* to the config passed from "outside" when entering calls to our API methods.
Initially the state is empty, but we accumulate there a history of seen values when rendering individual values
But there is a catch. For larger data structures our printing methods usually do not print everything at once.
We can include so called "object references" which are just placeholders which can be expanded later
For proper continuation in rendering of those references we have to carry our existing state over.
We use "config" feature of custom formatters system to pass current state to future API calls.
-- high level API --------------------------------------------------------------------------------------------------------- | (ns devtools.formatters.state)
in depth - first traversal order . See alt - printer - impl where we re - bind * current - config * for each traversal level .
by DevTools UI ( when user clicks a disclosure triangle ) .
(def ^:dynamic *current-state* nil)
(defn valid-current-state? []
(some? *current-state*))
(defn get-default-state []
{})
(defn get-current-state []
{:pre [(valid-current-state?)]}
*current-state*)
(defn update-current-state! [f & args]
{:pre [(valid-current-state?)]}
(set! *current-state* (apply f *current-state* args)))
(defn push-object-to-current-history! [object]
(update-current-state! update :history conj object))
(defn get-current-history []
(:history (get-current-state)))
(defn is-circular? [object]
(let [history (get-current-history)]
(some #(identical? % object) history)))
(defn ^bool prevent-recursion? []
(boolean (:prevent-recursion (get-current-state))))
(defn set-prevent-recursion [state val]
(if (some? val)
(assoc state :prevent-recursion val)
(dissoc state :prevent-recursion)))
(defn get-managed-print-level []
(:managed-print-level (get-current-state)))
(defn set-managed-print-level [state val]
(if (some? val)
(assoc state :managed-print-level val)
(dissoc state :managed-print-level)))
(defn get-depth-budget []
(:depth-budget (get-current-state)))
(defn set-depth-budget [state val]
(if (some? val)
(assoc state :depth-budget val)
(dissoc state :depth-budget)))
(defn reset-depth-limits [state]
(-> state
(set-depth-budget nil)
(set-managed-print-level nil)))
|
35488525d9336ba55fe12444106a112c29744dad1348f1867f9aaed49d53efb5 | aws-beam/aws-erlang | aws_mwaa.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
%% See https://github.com/aws-beam/aws-codegen for more details.

%% @doc Amazon Managed Workflows for Apache Airflow
%%
%% This section contains the Amazon Managed Workflows for Apache Airflow
%% (MWAA) API reference documentation.
%%
%% For more information, see What Is Amazon MWAA?.
%% Endpoints
%%
%% <ul> <li> `api.airflow.{region}.amazonaws.com' - This endpoint is used
%% for environment management.
%%
%% <ul> <li> CreateEnvironment
%%
%% </li> <li> DeleteEnvironment
%%
%% </li> <li> GetEnvironment
%%
%% </li> <li> ListEnvironments
%%
%% </li> <li> ListTagsForResource
%%
< /li > < li > TagResource
%%
< /li > < li > UntagResource
%%
%% </li> <li> UpdateEnvironment
%%
%% </li> </ul> </li> <li> `env.airflow.{region}.amazonaws.com' - This
%% endpoint is used to operate the Airflow environment.
%%
< ul > < li > CreateCliToken
%%
< /li > < li > CreateWebLoginToken
%%
%% </li> </ul> </li> <li> `ops.airflow.{region}.amazonaws.com' - This
%% endpoint is used to push environment metrics that track environment
%% health.
%%
< ul > < li > PublishMetrics
%%
%% </li> </ul> </li> </ul> Regions
%%
For a list of regions that Amazon MWAA supports , see Region availability
in the Amazon MWAA User Guide .
-module(aws_mwaa).
-export([create_cli_token/3,
create_cli_token/4,
create_environment/3,
create_environment/4,
create_web_login_token/3,
create_web_login_token/4,
delete_environment/3,
delete_environment/4,
get_environment/2,
get_environment/4,
get_environment/5,
list_environments/1,
list_environments/3,
list_environments/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
publish_metrics/3,
publish_metrics/4,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_environment/3,
update_environment/4]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Creates a CLI token for the Airflow CLI.
%%
%% To learn more, see Creating an Apache Airflow CLI token.
create_cli_token(Client, Name, Input) ->
    create_cli_token(Client, Name, Input, []).

create_cli_token(Client, Name, Input, ExtraOptions) ->
    %% POST /clitoken/{Name}; the operation defines no query or header
    %% parameters, and the input map is sent as the JSON body.
    Path = ["/clitoken/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | ExtraOptions],
    request(Client, post, Path, [], [], Input, Options, 200).
%% @doc Creates an Amazon Managed Workflows for Apache Airflow (MWAA)
%% environment.
create_environment(Client, Name, Input) ->
    create_environment(Client, Name, Input, []).

create_environment(Client, Name, Input0, Options0) ->
    %% Note: environment creation is a PUT on /environments/{Name} — the
    %% environment name is the resource identifier — not a POST.
    Method = put,
    Path = ["/environments/", aws_util:encode_uri(Name), ""],
    SuccessStatusCode = 200,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% The generated header/query pipeline below is a no-op for this
    %% operation: no extra headers, no query parameters, input unchanged.
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a web login token for the Airflow Web UI.
%%
%% To learn more, see Creating an Apache Airflow web login token.
create_web_login_token(Client, Name, Input) ->
    create_web_login_token(Client, Name, Input, []).

create_web_login_token(Client, Name, Input, ExtraOptions) ->
    %% POST /webtoken/{Name}; no query or header parameters.
    Path = ["/webtoken/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | ExtraOptions],
    request(Client, post, Path, [], [], Input, Options, 200).
%% @doc Deletes an Amazon Managed Workflows for Apache Airflow (MWAA)
%% environment.
delete_environment(Client, Name, Input) ->
    delete_environment(Client, Name, Input, []).

delete_environment(Client, Name, Input, ExtraOptions) ->
    %% DELETE /environments/{Name}; the input map still travels as the body.
    Path = ["/environments/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | ExtraOptions],
    request(Client, delete, Path, [], [], Input, Options, 200).
%% @doc Describes an Amazon Managed Workflows for Apache Airflow (MWAA)
%% environment.
get_environment(Client, Name)
  when is_map(Client) ->
    get_environment(Client, Name, #{}, #{}).

get_environment(Client, Name, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_environment(Client, Name, QueryMap, HeadersMap, []).

get_environment(Client, Name, QueryMap, HeadersMap, ExtraOptions)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOptions) ->
    %% GET /environments/{Name}. QueryMap/HeadersMap exist for API
    %% uniformity; this operation defines no such parameters.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | ExtraOptions],
    Path = ["/environments/", aws_util:encode_uri(Name), ""],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Lists the Amazon Managed Workflows for Apache Airflow (MWAA)
%% environments.
list_environments(Client)
  when is_map(Client) ->
    list_environments(Client, #{}, #{}).

list_environments(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_environments(Client, QueryMap, HeadersMap, []).

list_environments(Client, QueryMap, HeadersMap, ExtraOptions)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOptions) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | ExtraOptions],
    %% Optional pagination controls: only parameters whose value is not
    %% `undefined' are put on the query string.
    Query = [{Key, Value}
             || Key <- [<<"MaxResults">>, <<"NextToken">>],
                Value <- [maps:get(Key, QueryMap, undefined)],
                Value =/= undefined],
    request(Client, get, ["/environments"], Query, [], undefined, Options, 200).
%% @doc Lists the key-value tag pairs associated to the Amazon Managed
%% Workflows for Apache Airflow (MWAA) environment.
%%
%% For example, `"Environment": "Staging"'.
list_tags_for_resource(Client, ResourceArn)
  when is_map(Client) ->
    list_tags_for_resource(Client, ResourceArn, #{}, #{}).

list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, []).

list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, ExtraOptions)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOptions) ->
    %% GET /tags/{ResourceArn}; no query or header parameters are defined.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | ExtraOptions],
    Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Internal only.
%%
%% Publishes environment health metrics to Amazon CloudWatch.
publish_metrics(Client, EnvironmentName, Input) ->
    publish_metrics(Client, EnvironmentName, Input, []).

publish_metrics(Client, EnvironmentName, Input0, Options0) ->
    %% POST /metrics/environments/{EnvironmentName}; the metric data rides
    %% in the JSON body.
    Method = post,
    Path = ["/metrics/environments/", aws_util:encode_uri(EnvironmentName), ""],
    SuccessStatusCode = 200,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% No extra headers or query parameters; input passes through unchanged.
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Associates key-value tag pairs to your Amazon Managed Workflows for
%% Apache Airflow (MWAA) environment.
tag_resource(Client, ResourceArn, Input) ->
    tag_resource(Client, ResourceArn, Input, []).

tag_resource(Client, ResourceArn, Input, ExtraOptions) ->
    %% POST /tags/{ResourceArn}; tags are carried in the JSON body.
    Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | ExtraOptions],
    request(Client, post, Path, [], [], Input, Options, 200).
%% @doc Removes key-value tag pairs associated to your Amazon Managed
%% Workflows for Apache Airflow (MWAA) environment.
%%
%% For example, `"Environment": "Staging"'.
untag_resource(Client, ResourceArn, Input) ->
    untag_resource(Client, ResourceArn, Input, []).

untag_resource(Client, ResourceArn, Input0, Options0) ->
    Method = delete,
    Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
    SuccessStatusCode = 200,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    %% `tagKeys' is sent on the query string. Despite its name,
    %% aws_request:build_headers/2 appears to be the generated helper reused
    %% here to split the mapped keys out of the input and into the returned
    %% query list — NOTE(review): confirm against aws-codegen.
    QueryMapping = [
                     {<<"tagKeys">>, <<"tagKeys">>}
                   ],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates an Amazon Managed Workflows for Apache Airflow (MWAA)
%% environment.
update_environment(Client, Name, Input) ->
    update_environment(Client, Name, Input, []).

update_environment(Client, Name, Input, ExtraOptions) ->
    %% PATCH /environments/{Name}; the updated settings form the JSON body.
    Path = ["/environments/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | ExtraOptions],
    request(Client, patch, Path, [], [], Input, Options, 200).
%%====================================================================
%% Internal functions
%%====================================================================

%% Thin retry wrapper: aws_request:request/2 decides (from Options)
%% whether and how often to re-run the request closure.
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
    {ok, {integer(), list()}} |
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map(),
    Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Builds, signs (SigV4) and executes a single HTTP request against the
%% `airflow' endpoint, then normalises the hackney response.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    Client1 = Client#{service => <<"airflow">>},
    Host = build_host(<<"airflow">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% Either pass the caller-supplied binary body through untouched, or
    %% JSON-encode the input map.
    Payload =
      case proplists:get_value(send_body_as_binary, Options) of
        true ->
          maps:get(<<"Body">>, Input, <<"">>);
        false ->
          encode_payload(Input)
      end,
    %% Optionally advertise a SHA-256 digest of the payload as a header.
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                          true ->
                            add_checksum_hash_header(AdditionalHeaders1, Payload);
                          false ->
                            AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    %% Signing must happen after all headers and the payload are final.
    MethodBin = aws_request:method_to_binary(Method),
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepends an `X-Amz-CheckSum-SHA256' header carrying the base64-encoded
%% SHA-256 digest of Body.
add_checksum_hash_header(Headers, Body) ->
    Digest = crypto:hash(sha256, Body),
    [{<<"X-Amz-CheckSum-SHA256">>, base64:encode(Digest)} | Headers].
%% Normalises a hackney response.
%%
%% hackney yields a 3-tuple when there is no body stream and a 4-tuple
%% (carrying a client handle) when a body can be fetched. Success is any
%% of the common 2xx codes or the operation-specific SuccessStatusCode.
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    %% Body-less success.
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    %% Body-less failure.
    {error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
      {ok, <<>>} when StatusCode =:= 200;
                      StatusCode =:= SuccessStatusCode ->
        %% Empty body on a plain success: report an empty result map.
        {ok, #{}, {StatusCode, ResponseHeaders, Client}};
      {ok, Body} ->
        %% Decode JSON unless the caller asked for the raw binary, in which
        %% case the body is wrapped under the <<"Body">> key.
        Result = case DecodeBody of
                   true ->
                     try
                       jsx:decode(Body)
                     catch
                       Error:Reason:Stack ->
                         erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                     end;
                   false -> #{<<"Body">> => Body}
                 end,
        {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
    {error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    %% Any other status: surface the decoded JSON error payload.
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
handle_response({error, Reason}, _, _DecodeBody) ->
    %% Transport-level failure from hackney (timeout, closed, nxdomain, ...).
    {error, Reason}.
%% Chooses the host name for the request. The pseudo-region `local'
%% bypasses AWS naming: it resolves to the explicitly configured endpoint
%% when present, otherwise to localhost. Real regions produce
%% "<service-prefix>.<region>.<endpoint>".
build_host(_Prefix, #{region := <<"local">>, endpoint := Host}) ->
    Host;
build_host(_Prefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(Prefix, #{region := Region, endpoint := Suffix}) ->
    aws_util:binary_join([Prefix, Region, Suffix], <<".">>).
%% Assembles "<proto>://<host>:<port><path>" for the prepared request.
build_url(Host, Path0, Client) ->
    Scheme = aws_client:proto(Client),
    Port = aws_client:port(Client),
    Path = erlang:iolist_to_binary(Path0),
    aws_util:binary_join([Scheme, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
%% JSON-encodes the request input; an absent body becomes the empty binary.
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
    <<>>;
encode_payload(Map) ->
    jsx:encode(Map).
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_mwaa.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
(MWAA) API reference documentation.
Endpoints
<ul> <li> `api.airflow.{region}.amazonaws.com' - This endpoint is used
for environment management.
<ul> <li> CreateEnvironment
</li> <li> DeleteEnvironment
</li> <li> GetEnvironment
</li> <li> ListEnvironments
</li> <li> ListTagsForResource
</li> <li> UpdateEnvironment
</li> </ul> </li> <li> `env.airflow.{region}.amazonaws.com' - This
endpoint is used to operate the Airflow environment.
</li> </ul> </li> <li> `ops.airflow.{region}.amazonaws.com' - This
endpoint is used to push environment metrics that track environment
health.
</li> </ul> </li> </ul> Regions
====================================================================
API
====================================================================
@doc Creates a CLI token for the Airflow CLI.
environment.
@doc Creates a web login token for the Airflow Web UI.
environment.
environment.
environments.
For example, `"Environment": "Staging"'.
@doc Internal only.
For example, `"Environment": "Staging"'.
environment.
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
@doc Amazon Managed Workflows for Apache Airflow
This section contains the Amazon Managed Workflows for Apache Airflow
For more information , see What Is Amazon MWAA ? .
< /li > < li > TagResource
< /li > < li > UntagResource
< ul > < li > CreateCliToken
< /li > < li > CreateWebLoginToken
< ul > < li > PublishMetrics
For a list of regions that Amazon MWAA supports , see Region availability
in the Amazon MWAA User Guide .
-module(aws_mwaa).
-export([create_cli_token/3,
create_cli_token/4,
create_environment/3,
create_environment/4,
create_web_login_token/3,
create_web_login_token/4,
delete_environment/3,
delete_environment/4,
get_environment/2,
get_environment/4,
get_environment/5,
list_environments/1,
list_environments/3,
list_environments/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
publish_metrics/3,
publish_metrics/4,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_environment/3,
update_environment/4]).
-include_lib("hackney/include/hackney_lib.hrl").
To learn more , see Creating an Apache Airflow CLI token .
create_cli_token(Client, Name, Input) ->
create_cli_token(Client, Name, Input, []).
create_cli_token(Client, Name, Input0, Options0) ->
Method = post,
Path = ["/clitoken/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Creates an Amazon Managed Workflows for Apache Airflow ( MWAA )
create_environment(Client, Name, Input) ->
create_environment(Client, Name, Input, []).
create_environment(Client, Name, Input0, Options0) ->
Method = put,
Path = ["/environments/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
To learn more , see Creating an Apache Airflow web login token .
create_web_login_token(Client, Name, Input) ->
create_web_login_token(Client, Name, Input, []).
create_web_login_token(Client, Name, Input0, Options0) ->
Method = post,
Path = ["/webtoken/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Deletes an Amazon Managed Workflows for Apache Airflow ( MWAA )
delete_environment(Client, Name, Input) ->
delete_environment(Client, Name, Input, []).
delete_environment(Client, Name, Input0, Options0) ->
Method = delete,
Path = ["/environments/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Describes an Amazon Managed Workflows for Apache Airflow ( MWAA )
get_environment(Client, Name)
when is_map(Client) ->
get_environment(Client, Name, #{}, #{}).
get_environment(Client, Name, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_environment(Client, Name, QueryMap, HeadersMap, []).
get_environment(Client, Name, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/environments/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
@doc Lists the Amazon Managed Workflows for Apache Airflow ( MWAA )
list_environments(Client)
when is_map(Client) ->
list_environments(Client, #{}, #{}).
list_environments(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_environments(Client, QueryMap, HeadersMap, []).
list_environments(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/environments"],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"MaxResults">>, maps:get(<<"MaxResults">>, QueryMap, undefined)},
{<<"NextToken">>, maps:get(<<"NextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
@doc Lists the key - value tag pairs associated to the Amazon Managed
Workflows for Apache Airflow ( MWAA ) environment .
list_tags_for_resource(Client, ResourceArn)
when is_map(Client) ->
list_tags_for_resource(Client, ResourceArn, #{}, #{}).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, []).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
Publishes environment health metrics to Amazon CloudWatch .
publish_metrics(Client, EnvironmentName, Input) ->
publish_metrics(Client, EnvironmentName, Input, []).
publish_metrics(Client, EnvironmentName, Input0, Options0) ->
Method = post,
Path = ["/metrics/environments/", aws_util:encode_uri(EnvironmentName), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Associates key - value tag pairs to your Amazon Managed Workflows for
Apache Airflow ( MWAA ) environment .
tag_resource(Client, ResourceArn, Input) ->
tag_resource(Client, ResourceArn, Input, []).
tag_resource(Client, ResourceArn, Input0, Options0) ->
Method = post,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Removes key - value tag pairs associated to your Amazon Managed
Workflows for Apache Airflow ( MWAA ) environment .
untag_resource(Client, ResourceArn, Input) ->
untag_resource(Client, ResourceArn, Input, []).
untag_resource(Client, ResourceArn, Input0, Options0) ->
Method = delete,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
QueryMapping = [
{<<"tagKeys">>, <<"tagKeys">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Updates an Amazon Managed Workflows for Apache Airflow ( MWAA )
update_environment(Client, Name, Input) ->
update_environment(Client, Name, Input, []).
update_environment(Client, Name, Input0, Options0) ->
Method = patch,
Path = ["/environments/", aws_util:encode_uri(Name), ""],
SuccessStatusCode = 200,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
Internal functions
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
list(), map() | undefined, list(), pos_integer() | undefined) ->
{ok, {integer(), list()}} |
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map(),
Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
aws_request:request(RequestFun, Options).
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
Client1 = Client#{service => <<"airflow">>},
Host = build_host(<<"airflow">>, Client1),
URL0 = build_url(Host, Path, Client1),
URL = aws_request:add_query(URL0, Query),
AdditionalHeaders1 = [ {<<"Host">>, Host}
, {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
],
Payload =
case proplists:get_value(send_body_as_binary, Options) of
true ->
maps:get(<<"Body">>, Input, <<"">>);
false ->
encode_payload(Input)
end,
AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
true ->
add_checksum_hash_header(AdditionalHeaders1, Payload);
false ->
AdditionalHeaders1
end,
Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
MethodBin = aws_request:method_to_binary(Method),
SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
handle_response(Response, SuccessStatusCode, DecodeBody).
add_checksum_hash_header(Headers, Body) ->
[ {<<"X-Amz-CheckSum-SHA256">>, base64:encode(crypto:hash(sha256, Body))}
| Headers
].
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
{ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
{error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
case hackney:body(Client) of
{ok, <<>>} when StatusCode =:= 200;
StatusCode =:= SuccessStatusCode ->
{ok, #{}, {StatusCode, ResponseHeaders, Client}};
{ok, Body} ->
Result = case DecodeBody of
true ->
try
jsx:decode(Body)
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
false -> #{<<"Body">> => Body}
end,
{ok, Result, {StatusCode, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
when StatusCode =:= 503 ->
Retriable error if retries are enabled
{error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
{ok, Body} = hackney:body(Client),
try
DecodedError = jsx:decode(Body),
{error, DecodedError, {StatusCode, ResponseHeaders, Client}}
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
handle_response({error, Reason}, _, _DecodeBody) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
build_url(Host, Path0, Client) ->
Proto = aws_client:proto(Client),
Path = erlang:iolist_to_binary(Path0),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
<<>>;
encode_payload(Input) ->
jsx:encode(Input).
|
15f20d022cab9a181c651a823b0f8b37f49778f44bd9400f9eb0b2365ab19d98 | melhadad/fuf | icon3.lisp | ;;; -*- Mode:Lisp; Syntax:Common-Lisp; Package:FUG5 -*-
;;; ------------------------------------------------------------
;;; File : ICON3.LISP
Description : Simple inputs to test
;;; Author   :
;;; Created  : 27-Nov-88
;;; Modified : 09-Nov-90
;;; Language : Common Lisp
;;; Package  : FUG5
;;; ------------------------------------------------------------
(in-package "FUG5")
(defun isetup-con3 ()
  "Register the ICON3 example inputs: four discourse-segment test cases
exercising assertions with and without argumentative-orientation (AO)
constraints."
  ;; Drop any previously registered tests before installing this set.
  (clear-tests)
  ;; a1: two coordinated directives whose AOs clash (dishonesty vs.
  ;; honesty), which is expected to surface as the connective "but".
  (def-test a1
    "John takes a book from Mary but John is honest."
    ((cat discourse-segment)
     (subordinate
      ((directive
        ((th ~(John Mary Book Transfer))
         (u u1)
         (if ((force assert)))
         (ao ((scale dishonesty)
              (conclusion
               ((process-type attributive)
                (carrier === John)
                (attribute === dishonest)))))
         (pc ((cat clause)
              (process-type action)
              (concept Transfer)
              (agent ((lex "John") (np-type proper)))
              (benef ((lex "Mary") (np-type proper)))
              (medium ((lex "book")
                       (definite no)
                       (np-type common)))))))))
     (directive
      ((th ~(John Honest))
       (u u2)
       (if ((force assert)))
       (ao ((scale honesty)))
       (pc ((cat clause)
            (process-type attributive)
            (carrier ((lex "John") (np-type proper)))
            (attribute ((cat adj)
                        (lex "honest")))))))))
  ;; a2: single directive whose dishonesty AO is expected to select the
  ;; loaded verb "steals" instead of neutral "takes".
  (def-test a2
    "John steals a book from Mary."
    ((cat discourse-segment)
     (directive ((th ~(John Mary Book Transfer))
                 (if ((force assert)))
                 (ao ((scale dishonesty)
                      (conclusion
                       ((process-type attributive)
                        (carrier === John)
                        (attribute === dishonest)))))
                 (pc ((cat clause)
                      (process-type action)
                      (concept Transfer)
                      (agent ((lex "John") (np-type proper)))
                      (benef ((lex "Mary") (np-type proper)))
                      (medium ((lex "book")
                               (definite no)
                               (np-type common)))))))))
  ;; a3: an honesty-oriented AO over the same Transfer content has no
  ;; realization, so generation is expected to fail ("<fail>").
  (def-test a3
    "<fail>"
    ((cat discourse-segment)
     (directive ((th ~(John Mary Book Transfer))
                 (if ((force assert)))
                 (ao ((scale honesty)
                      (conclusion
                       ((process-type attributive)
                        (carrier === John)
                        (attribute === honest)))))
                 (pc ((cat clause)
                      (process-type action)
                      (concept Transfer)
                      (agent ((lex "John") (np-type proper)))
                      (benef ((lex "Mary") (np-type proper)))
                      (medium ((lex "book")
                               (definite no)
                               (np-type common)))))))))
  ;; a4: the same content with no AO constraint yields neutral "takes".
  (def-test a4
    "John takes a book from Mary."
    ((cat discourse-segment)
     (directive ((th ~(John Mary Book Transfer))
                 (if ((force assert)))
                 (pc ((cat clause)
                      (process-type action)
                      (concept Transfer)
                      (agent ((lex "John") (np-type proper)))
                      (benef ((lex "Mary") (np-type proper)))
                      (medium ((lex "book")
                               (definite no)
                               (np-type common)))))))))
  (format t "~%icon3 installed. 4 tests~%")
  (values))
| null | https://raw.githubusercontent.com/melhadad/fuf/57bd0e31afc6aaa03b85f45f4c7195af701508b8/examples/icon3.lisp | lisp | -*- Mode:Lisp; Syntax:Common-Lisp; Package:FUG5 -*-
------------------------------------------------------------
File : ICON3.LISP
Modified : 09-Nov-90
Language : Common Lisp
------------------------------------------------------------ | Description : Simple inputs to test
Author :
Created : 27 - Nov-88
Package : FUG5
(in-package "FUG5")
(defun isetup-con3 ()
(clear-tests)
(def-test a1
"John takes a book from Mary but John is honest."
((cat discourse-segment)
(subordinate
((directive
((th ~(John Mary Book Transfer))
(u u1)
(if ((force assert)))
(ao ((scale dishonesty)
(conclusion
((process-type attributive)
(carrier === John)
(attribute === dishonest)))))
(pc ((cat clause)
(process-type action)
(concept Transfer)
(agent ((lex "John") (np-type proper)))
(benef ((lex "Mary") (np-type proper)))
(medium ((lex "book")
(definite no)
(np-type common)))))))))
(directive
((th ~(John Honest))
(u u2)
(if ((force assert)))
(ao ((scale honesty)))
(pc ((cat clause)
(process-type attributive)
(carrier ((lex "John") (np-type proper)))
(attribute ((cat adj)
(lex "honest")))))))))
(def-test a2
"John steals a book from Mary."
((cat discourse-segment)
(directive ((th ~(John Mary Book Transfer))
(if ((force assert)))
(ao ((scale dishonesty)
(conclusion
((process-type attributive)
(carrier === John)
(attribute === dishonest)))))
(pc ((cat clause)
(process-type action)
(concept Transfer)
(agent ((lex "John") (np-type proper)))
(benef ((lex "Mary") (np-type proper)))
(medium ((lex "book")
(definite no)
(np-type common)))))))))
(def-test a3
"<fail>"
((cat discourse-segment)
(directive ((th ~(John Mary Book Transfer))
(if ((force assert)))
(ao ((scale honesty)
(conclusion
((process-type attributive)
(carrier === John)
(attribute === honest)))))
(pc ((cat clause)
(process-type action)
(concept Transfer)
(agent ((lex "John") (np-type proper)))
(benef ((lex "Mary") (np-type proper)))
(medium ((lex "book")
(definite no)
(np-type common)))))))))
(def-test a4
"John takes a book from Mary."
((cat discourse-segment)
(directive ((th ~(John Mary Book Transfer))
(if ((force assert)))
(pc ((cat clause)
(process-type action)
(concept Transfer)
(agent ((lex "John") (np-type proper)))
(benef ((lex "Mary") (np-type proper)))
(medium ((lex "book")
(definite no)
(np-type common)))))))))
(format t "~%icon3 installed. 4 tests~%")
(values))
|
74bd3b80630ffbf6b83ab59b5c2a48c0660f3fd3d0ef6cfb958697d26275c6ce | frodwith/cl-urbit | convert.lisp | (defpackage #:urbit/tests/convert
(:use #:cl #:fiveam #:named-readtables #:urbit/tests
#:urbit/nock/cord #:urbit/hoon/tape
#:urbit/hoon/syntax #:urbit/nock/equality))
(in-package #:urbit/tests/convert)
(in-readtable hoon)
(def-suite convert-tests
:description "test conversions between lisp data and nouns"
:in all-tests)
(in-suite convert-tests)
(test cords
(is (= 97 (string->cord "a")))
(is (= 1953718630 (string->cord "fast")))
(is (string= "dEtErMiNiStIc"
(cord->string 7866321123923179247042781529444))))
(test tapes
(is (same [102 111 111 0] (string->tape "foo")))
(is (string= "foo" (tape->string [102 111 111 0]))))
(defun gen-non-null-ascii ()
  "Random ASCII character with code 1-127 (never NUL)."
  (code-char (+ 1 (random 127))))
(defun gen-ascii ()
  "Random ASCII character with code 0-127."
  (code-char (random 128)))
;; Round-trip properties over random ASCII strings.
(test property
  (for-all ((s (gen-string :elements #'gen-ascii)))
    (is (string= s (tape->string (string->tape s)))))
  ;; nulls aren't preserved by cords, so this generator excludes NUL
  (for-all ((s (gen-string :elements #'gen-non-null-ascii)))
    (is (string= s (cord->string (string->cord s))))))
| null | https://raw.githubusercontent.com/frodwith/cl-urbit/65af924ee58c4c974056f369158bbc1401308fea/t/convert.lisp | lisp | nulls aren't preserved by cords | (defpackage #:urbit/tests/convert
(:use #:cl #:fiveam #:named-readtables #:urbit/tests
#:urbit/nock/cord #:urbit/hoon/tape
#:urbit/hoon/syntax #:urbit/nock/equality))
(in-package #:urbit/tests/convert)
(in-readtable hoon)
(def-suite convert-tests
:description "test conversions between lisp data and nouns"
:in all-tests)
(in-suite convert-tests)
(test cords
(is (= 97 (string->cord "a")))
(is (= 1953718630 (string->cord "fast")))
(is (string= "dEtErMiNiStIc"
(cord->string 7866321123923179247042781529444))))
(test tapes
(is (same [102 111 111 0] (string->tape "foo")))
(is (string= "foo" (tape->string [102 111 111 0]))))
(defun gen-non-null-ascii ()
(code-char (1+ (random 127))))
(defun gen-ascii ()
(code-char (random 128)))
(test property
(for-all ((s (gen-string :elements #'gen-ascii)))
(is (string= s (tape->string (string->tape s)))))
(for-all ((s (gen-string :elements #'gen-non-null-ascii)))
(is (string= s (cord->string (string->cord s))))))
|
dd7bb44d0f86b332592199d72373a43929c36fbcb6c44d6187e9e87d0c217471 | oden-lang/oden | Type.hs | | A representation of a subset of the Go Programming Language type system ,
-- based on the Go language specification. Types not needed in Oden are excluded.
module Oden.Go.Type where
import Oden.Go.Identifier
data StructField = StructField Identifier Type
deriving (Show, Eq)
data Returns = Returns [Type]
deriving (Show, Eq)
data Parameters = Parameters [Type] Bool
deriving (Show, Eq)
data InterfaceMethodSpec = Method Identifier Parameters Returns
| Embed Identifier
deriving (Show, Eq)
data Type = Basic Identifier Bool
| Pointer Type
| Array Int Type
| Slice Type
| Signature (Maybe Type) Parameters Returns
| Struct [StructField]
| Named String Identifier Type
| Interface [InterfaceMethodSpec]
| Unsupported String -- Temporary solution for the Importer.
deriving (Show, Eq)
| null | https://raw.githubusercontent.com/oden-lang/oden/10c99b59c8b77c4db51ade9a4d8f9573db7f4d14/src/Oden/Go/Type.hs | haskell | Temporary solution for the Importer. | | A representation of a subset of the Go Programming Language type system ,
based on . Types not needed in are excluded .
module Oden.Go.Type where
import Oden.Go.Identifier
data StructField = StructField Identifier Type
deriving (Show, Eq)
data Returns = Returns [Type]
deriving (Show, Eq)
data Parameters = Parameters [Type] Bool
deriving (Show, Eq)
data InterfaceMethodSpec = Method Identifier Parameters Returns
| Embed Identifier
deriving (Show, Eq)
data Type = Basic Identifier Bool
| Pointer Type
| Array Int Type
| Slice Type
| Signature (Maybe Type) Parameters Returns
| Struct [StructField]
| Named String Identifier Type
| Interface [InterfaceMethodSpec]
deriving (Show, Eq)
|
86cf0596f20dde98db86656736ce4edadff816681ef25583fb42d9c151d83001 | chrovis/cljam | sequence.clj | (ns cljam.io.sequence
"Functions to read and write formats representing sequences such as FASTA and
TwoBit."
(:refer-clojure :exclude [indexed?])
(:require [cljam.io.fasta.core :as fa-core]
[cljam.io.fasta.writer :as fa-writer]
[cljam.io.protocols :as protocols]
[cljam.io.twobit.reader :as tb-reader]
[cljam.io.twobit.writer :as tb-writer]
[cljam.io.util :as io-util])
(:import java.io.Closeable
cljam.io.fasta.reader.FASTAReader
cljam.io.fasta.writer.FASTAWriter
cljam.io.twobit.reader.TwoBitReader
cljam.io.twobit.writer.TwoBitWriter))
;; Reading
;; -------
(defn ^FASTAReader fasta-reader
"Returns an open cljam.io.fasta.reader.FASTAReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(fa-core/reader f))
(defn ^TwoBitReader twobit-reader
"Returns an open cljam.io.twobit.reader.TwoBitReader of f. Should be used
inside with-open to ensure the reader is properly closed."
[f]
(tb-reader/reader f))
(defn ^Closeable reader
"Selects suitable reader from f's extension, returning the open reader. Opens
a new reader if the arg represents a file such as String path, java.io.File,
or java.net.URL. If a reader is given, clones the reader. This function
supports FASTA and TwoBit formats."
[f]
(cond
(io-util/fasta-reader? f) (fa-core/clone-reader f)
(io-util/twobit-reader? f) (tb-reader/clone-reader f)
:else (case (try
(io-util/file-type f)
(catch IllegalArgumentException _
(io-util/file-type-from-contents f)))
:fasta (fasta-reader f)
:2bit (twobit-reader f)
(throw (IllegalArgumentException. "Invalid file type")))))
(defn read-sequence
"Reads sequence in region of FASTA/TwoBit file."
([rdr region] (protocols/read-sequence rdr region))
([rdr region option] (protocols/read-sequence rdr region option)))
(defn read-all-sequences
"Reads all sequences of FASTA/TwoBit file."
([rdr] (protocols/read-all-sequences rdr))
([rdr option] (protocols/read-all-sequences rdr option)))
(defn read-seq-summaries
"Returns summaries of sequences in FASTA/TwoBit file. Returns a vector of maps
containing `:name` and `:len`."
[rdr]
(protocols/read-seq-summaries rdr))
(defn read-indices
"Reads metadata of indexed sequences. Returns a vector of maps containing
`:name`, `:len` and other format-specific keys. Forces loading all indices."
[rdr]
(protocols/read-indices rdr))
(defn indexed?
"Returns true if the reader can be randomly accessed, false if not. Note this
function immediately realizes a delayed index."
[rdr]
(protocols/indexed? rdr))
;; Writing
;; -------
(defn ^FASTAWriter fasta-writer
"Returns an open cljam.io.fasta.writer.FASTAWriter of f with options:
:cols - Maximum number of characters written in one row.
:create-index? - If true, .fai will be created simultaneously.
Should be used inside with-open to ensure the writer is properly closed."
([f]
(fasta-writer f {}))
([f options]
(fa-writer/writer f options)))
(defn ^TwoBitWriter twobit-writer
"Returns an open cljam.io.twobit.writer.TwoBitWriter of f with options:
:index - metadata of indexed sequences. The amount of memory usage can be
reduced if index is supplied.
Should be used inside with-open to ensure the writer is properly closed."
([f]
(twobit-writer f {}))
([f options]
(tb-writer/writer f options)))
(defn ^Closeable writer
"Selects suitable writer from f's extension, returning the open writer. This
function supports FASTA and TwoBit format."
[f & options]
(case (io-util/file-type f)
:fasta (apply fasta-writer f options)
:2bit (apply twobit-writer f options)
(throw (IllegalArgumentException. "Invalid file type"))))
(defn write-sequences
"Writes all sequences to FASTA/TwoBit file."
[wtr seqs]
(protocols/write-sequences wtr seqs))
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/src/cljam/io/sequence.clj | clojure | Reading
-------
Writing
------- | (ns cljam.io.sequence
"Functions to read and write formats representing sequences such as FASTA and
TwoBit."
(:refer-clojure :exclude [indexed?])
(:require [cljam.io.fasta.core :as fa-core]
[cljam.io.fasta.writer :as fa-writer]
[cljam.io.protocols :as protocols]
[cljam.io.twobit.reader :as tb-reader]
[cljam.io.twobit.writer :as tb-writer]
[cljam.io.util :as io-util])
(:import java.io.Closeable
cljam.io.fasta.reader.FASTAReader
cljam.io.fasta.writer.FASTAWriter
cljam.io.twobit.reader.TwoBitReader
cljam.io.twobit.writer.TwoBitWriter))
(defn ^FASTAReader fasta-reader
"Returns an open cljam.io.fasta.reader.FASTAReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(fa-core/reader f))
(defn ^TwoBitReader twobit-reader
"Returns an open cljam.io.twobit.reader.TwoBitReader of f. Should be used
inside with-open to ensure the reader is properly closed."
[f]
(tb-reader/reader f))
(defn ^Closeable reader
"Selects suitable reader from f's extension, returning the open reader. Opens
a new reader if the arg represents a file such as String path, java.io.File,
or java.net.URL. If a reader is given, clones the reader. This function
supports FASTA and TwoBit formats."
[f]
(cond
(io-util/fasta-reader? f) (fa-core/clone-reader f)
(io-util/twobit-reader? f) (tb-reader/clone-reader f)
:else (case (try
(io-util/file-type f)
(catch IllegalArgumentException _
(io-util/file-type-from-contents f)))
:fasta (fasta-reader f)
:2bit (twobit-reader f)
(throw (IllegalArgumentException. "Invalid file type")))))
(defn read-sequence
"Reads sequence in region of FASTA/TwoBit file."
([rdr region] (protocols/read-sequence rdr region))
([rdr region option] (protocols/read-sequence rdr region option)))
(defn read-all-sequences
"Reads all sequences of FASTA/TwoBit file."
([rdr] (protocols/read-all-sequences rdr))
([rdr option] (protocols/read-all-sequences rdr option)))
(defn read-seq-summaries
"Returns summaries of sequences in FASTA/TwoBit file. Returns a vector of maps
containing `:name` and `:len`."
[rdr]
(protocols/read-seq-summaries rdr))
(defn read-indices
"Reads metadata of indexed sequences. Returns a vector of maps containing
`:name`, `:len` and other format-specific keys. Forces loading all indices."
[rdr]
(protocols/read-indices rdr))
(defn indexed?
"Returns true if the reader can be randomly accessed, false if not. Note this
function immediately realizes a delayed index."
[rdr]
(protocols/indexed? rdr))
(defn ^FASTAWriter fasta-writer
"Returns an open cljam.io.fasta.writer.FASTAWriter of f with options:
:cols - Maximum number of characters written in one row.
:create-index? - If true, .fai will be created simultaneously.
Should be used inside with-open to ensure the writer is properly closed."
([f]
(fasta-writer f {}))
([f options]
(fa-writer/writer f options)))
(defn ^TwoBitWriter twobit-writer
"Returns an open cljam.io.twobit.writer.TwoBitWriter of f with options:
:index - metadata of indexed sequences. The amount of memory usage can be
reduced if index is supplied.
Should be used inside with-open to ensure the writer is properly closed."
([f]
(twobit-writer f {}))
([f options]
(tb-writer/writer f options)))
(defn ^Closeable writer
"Selects suitable writer from f's extension, returning the open writer. This
function supports FASTA and TwoBit format."
[f & options]
(case (io-util/file-type f)
:fasta (apply fasta-writer f options)
:2bit (apply twobit-writer f options)
(throw (IllegalArgumentException. "Invalid file type"))))
(defn write-sequences
"Writes all sequences to FASTA/TwoBit file."
[wtr seqs]
(protocols/write-sequences wtr seqs))
|
9fdd8fbb202122a3efc6e3771c8e7889514596427f9290ffeae40bccdfd6c789 | ocurrent/ocaml-ci | router.mli | val create :
github:Controller.Backend.t option ->
gitlab:Controller.Backend.t option ->
Dream.handler
| null | https://raw.githubusercontent.com/ocurrent/ocaml-ci/ce2e992f069687023b8170728683520de4badbd5/web-ui/router.mli | ocaml | val create :
github:Controller.Backend.t option ->
gitlab:Controller.Backend.t option ->
Dream.handler
| |
d854506db61737d1c61b8d39663f7e01b9ab2912084b9aa33f82918c427c4966 | AngelitoJ/Hydra-DHCP | pool.erl | Hydra DHCP Server project
( C ) 2014
-module(pool).
-export([new/0, new/1, get_next/1, remove_pid/2]).
%% make a new pool of Pids
new() -> new([]).
new([]) -> empty;
new(Pids) when is_list(Pids) -> {[],Pids}.
POOL : get next Pid in round - robin fashion or nothing if no available Pids
get_next(empty) -> empty;
get_next({Used, []}) -> get_next({[],Used}); %% Recycle the used list and try again
Return the next Pid and cycle the list
delete a faulty Pid from the available list of Pids and return the new pool or ' empty '
remove_pid(empty, Pid) when is_pid(Pid) ->
empty;
remove_pid({Used, Rest},Pid) when is_pid(Pid) ->
case {Used -- [Pid], Rest -- [Pid]} of
{[],[]} -> empty;
{List1,List2} -> {List1,List2}
end.
| null | https://raw.githubusercontent.com/AngelitoJ/Hydra-DHCP/b1d38d791926d5a7ab940b567c00c780e1bb43af/src/utils/pool.erl | erlang | make a new pool of Pids
Recycle the used list and try again | Hydra DHCP Server project
( C ) 2014
-module(pool).
-export([new/0, new/1, get_next/1, remove_pid/2]).
new() -> new([]).
new([]) -> empty;
new(Pids) when is_list(Pids) -> {[],Pids}.
POOL : get next Pid in round - robin fashion or nothing if no available Pids
get_next(empty) -> empty;
Return the next Pid and cycle the list
delete a faulty Pid from the available list of Pids and return the new pool or ' empty '
remove_pid(empty, Pid) when is_pid(Pid) ->
empty;
remove_pid({Used, Rest},Pid) when is_pid(Pid) ->
case {Used -- [Pid], Rest -- [Pid]} of
{[],[]} -> empty;
{List1,List2} -> {List1,List2}
end.
|
09a4144ee978203d7482c3abc1f60608fdec82768027c57263a5c794fcb16222 | IagoAbal/eba | gcc.ml | open Batteries
let rec filter_cpp_args = function
| []
-> []
| (a::args)
when String.(starts_with a "-D"
|| starts_with a "-U"
|| starts_with a "-I")
-> a :: filter_cpp_args args
| ("-include" as a1)::a2::args
-> a1 :: a2 :: filter_cpp_args args
| __skip::args
-> filter_cpp_args args
let find_c_file args =
try
let fp = List.find (fun s -> String.ends_with s ".c") args in
Utils.check_if_file_exists fp;
fp
with
Not_found ->
Printf.eprintf "eba: no input .c files";
exit 1
let gcc args =
(* should use Fpath.of_string *)
let c_file = Fpath.v @@ find_c_file args in
let dir, fname = Fpath.split_base c_file in
let cpp_dir = Fpath.(v "_eba" // dir) in
(* should check for failure *)
ignore @@ Sys.command (Printf.sprintf "mkdir -p %s" @@ Fpath.to_string cpp_dir);
let cpp_args = filter_cpp_args args in
let cpp_file = Fpath.(cpp_dir // fname) in
(* should check for failure *)
ignore @@ Sys.command Fpath.(Printf.sprintf
"gcc -E -o %s %s %s"
(to_string cpp_file)
(String.concat " " cpp_args)
(to_string c_file));
Fpath.to_string cpp_file
| null | https://raw.githubusercontent.com/IagoAbal/eba/81ab71efff1ea407a7b5a98f7e8fd8a9d8d60815/src/gcc.ml | ocaml | should use Fpath.of_string
should check for failure
should check for failure | open Batteries
let rec filter_cpp_args = function
| []
-> []
| (a::args)
when String.(starts_with a "-D"
|| starts_with a "-U"
|| starts_with a "-I")
-> a :: filter_cpp_args args
| ("-include" as a1)::a2::args
-> a1 :: a2 :: filter_cpp_args args
| __skip::args
-> filter_cpp_args args
let find_c_file args =
try
let fp = List.find (fun s -> String.ends_with s ".c") args in
Utils.check_if_file_exists fp;
fp
with
Not_found ->
Printf.eprintf "eba: no input .c files";
exit 1
let gcc args =
let c_file = Fpath.v @@ find_c_file args in
let dir, fname = Fpath.split_base c_file in
let cpp_dir = Fpath.(v "_eba" // dir) in
ignore @@ Sys.command (Printf.sprintf "mkdir -p %s" @@ Fpath.to_string cpp_dir);
let cpp_args = filter_cpp_args args in
let cpp_file = Fpath.(cpp_dir // fname) in
ignore @@ Sys.command Fpath.(Printf.sprintf
"gcc -E -o %s %s %s"
(to_string cpp_file)
(String.concat " " cpp_args)
(to_string c_file));
Fpath.to_string cpp_file
|
62003c7152a144c6c1d8ee471235ab200df1a59a1c0621b56757d8d04991ad9a | marigold-dev/deku | network_protocol.mli | open Deku_crypto
open Deku_concepts
exception Invalid_handshake
exception Invalid_message_size
module Connection : sig
type connection
type t = connection
val owner : connection -> Key.t
val read : connection -> Network_message.t
val write : connection -> Network_message.t -> unit
end
module Client : sig
exception Invalid_host
val connect :
identity:Identity.t ->
net:#Eio.Net.t ->
host:string ->
port:int ->
(Connection.t -> 'a) ->
'a
end
module Server : sig
val listen :
identity:Identity.t ->
net:#Eio.Net.t ->
port:int ->
on_error:(exn -> unit) ->
(Connection.t -> unit) ->
'a
end
val test : unit -> unit
| null | https://raw.githubusercontent.com/marigold-dev/deku/21ec2bf05ce688c5b6012be16545175e75403f06/deku-p/src/core/network/network_protocol.mli | ocaml | open Deku_crypto
open Deku_concepts
exception Invalid_handshake
exception Invalid_message_size
module Connection : sig
type connection
type t = connection
val owner : connection -> Key.t
val read : connection -> Network_message.t
val write : connection -> Network_message.t -> unit
end
module Client : sig
exception Invalid_host
val connect :
identity:Identity.t ->
net:#Eio.Net.t ->
host:string ->
port:int ->
(Connection.t -> 'a) ->
'a
end
module Server : sig
val listen :
identity:Identity.t ->
net:#Eio.Net.t ->
port:int ->
on_error:(exn -> unit) ->
(Connection.t -> unit) ->
'a
end
val test : unit -> unit
| |
531cc1bd04c61d2b0b6f8c515f312a01d9b304825064ce4e1c1291ea14ed3bd7 | marcoheisig/sealable-metaobjects | domain.lisp | (in-package #:sealable-metaobjects)
(defclass domain ()
((%specializers
:initform (required-argument :specializers)
:initarg :specializers
:reader domain-specializers)
(%arity
:initform (required-argument :arity)
:initarg :arity
:reader domain-arity)))
(defmethod print-object ((domain domain) stream)
(print-unreadable-object (domain stream :type t)
(format stream "~{~S~^ ~}"
(mapcar #'specializer-type (domain-specializers domain)))))
(defun make-domain (specializers &aux (arity (list-length specializers)))
(dolist (specializer specializers)
(check-type specializer specializer))
(make-instance 'domain
:specializers specializers
:arity arity))
(defmethod ensure-domain ((domain domain))
domain)
(defmethod ensure-domain ((sequence sequence))
(make-domain
(map 'list #'ensure-specializer sequence)))
(defmethod method-domain ((method method))
(make-domain (method-specializers method)))
(defmethod domain-equal
((domain-1 domain)
(domain-2 domain))
(and (= (domain-arity domain-1)
(domain-arity domain-2))
(every #'eq
(domain-specializers domain-1)
(domain-specializers domain-2))))
(defmethod domain-intersectionp
((domain-1 domain)
(domain-2 domain))
(assert (= (domain-arity domain-1)
(domain-arity domain-2)))
(every #'specializer-intersectionp
(domain-specializers domain-1)
(domain-specializers domain-2)))
(defmethod domain-subsetp
((domain-1 domain)
(domain-2 domain))
(assert (= (domain-arity domain-1)
(domain-arity domain-2)))
(every #'specializer-subsetp
(domain-specializers domain-1)
(domain-specializers domain-2)))
| null | https://raw.githubusercontent.com/marcoheisig/sealable-metaobjects/e09ec97252e0844528f61abdc0c7ee256875f8ee/code/domain.lisp | lisp | (in-package #:sealable-metaobjects)
(defclass domain ()
((%specializers
:initform (required-argument :specializers)
:initarg :specializers
:reader domain-specializers)
(%arity
:initform (required-argument :arity)
:initarg :arity
:reader domain-arity)))
(defmethod print-object ((domain domain) stream)
(print-unreadable-object (domain stream :type t)
(format stream "~{~S~^ ~}"
(mapcar #'specializer-type (domain-specializers domain)))))
(defun make-domain (specializers &aux (arity (list-length specializers)))
(dolist (specializer specializers)
(check-type specializer specializer))
(make-instance 'domain
:specializers specializers
:arity arity))
(defmethod ensure-domain ((domain domain))
domain)
(defmethod ensure-domain ((sequence sequence))
(make-domain
(map 'list #'ensure-specializer sequence)))
(defmethod method-domain ((method method))
(make-domain (method-specializers method)))
(defmethod domain-equal
((domain-1 domain)
(domain-2 domain))
(and (= (domain-arity domain-1)
(domain-arity domain-2))
(every #'eq
(domain-specializers domain-1)
(domain-specializers domain-2))))
(defmethod domain-intersectionp
((domain-1 domain)
(domain-2 domain))
(assert (= (domain-arity domain-1)
(domain-arity domain-2)))
(every #'specializer-intersectionp
(domain-specializers domain-1)
(domain-specializers domain-2)))
(defmethod domain-subsetp
((domain-1 domain)
(domain-2 domain))
(assert (= (domain-arity domain-1)
(domain-arity domain-2)))
(every #'specializer-subsetp
(domain-specializers domain-1)
(domain-specializers domain-2)))
| |
a37e62f1056db7ae650b66d0146ff8291679a27db91e36b661b0293ecbccd1f8 | kyleburton/sandbox | numbers.clj | (ns scratchpad.numbers)
(comment
(loop [arg 9
ii 0
max 99]
(cond
(> ii max)
:done
:otherwise
(do
(printf "%s * %s = %s\n" arg ii (* arg ii))
(if (= 657 (* arg ii))
(printf "LAUNCHING MISSILE TO RUSSIA!!!\n"))
(recur arg (inc ii) max))))
)
(defn sq [xx]
(* xx xx))
(defn sqrt [xx]
(Math/sqrt xx))
(defn is-right-triangle? [aa bb cc]
(= (sq cc)
(+ (sq aa) (sq bb))))
(defn is-right-triangle-with-details? [aa bb cc]
{:aa aa
:bb bb
:cc cc
:sq-aa (sq aa)
:sq-bb (sq bb)
:sq-cc (sq cc)
:aa-sq-bb-sq (+ (sq aa) (sq bb))
:delta (- (sq cc) (+ (sq aa) (sq bb)))
:is-right-triangle? (is-right-triangle? aa bb cc)})
(defn find-cc [aa bb]
(sqrt (+ (sq aa) (sq bb))))
(defn find-aa [bb cc]
(sqrt (- (sq cc) (sq bb))))
(defn find-aa-with-details [bb cc]
(let [aa (find-aa bb cc)]
{:aa aa
:bb bb
:cc cc
:sq-aa (sq aa)
:sq-bb (sq bb)
:sq-cc (sq cc)}))
(defn triangle->area
([aa bb _cc]
(triangle->area aa bb))
([aa bb]
(* 1/2 aa bb)))
(comment
(find-cc 3 4)
(find-aa 3 5)
4.0
(find-aa 4 5)
3.0
(is-right-triangle? 3 4 5)
(triangle->area 3 4 5)
6N
(triangle->area 3 4)
(find-aa 17 19)
8.48528137423857
(find-cc 5 14)
14.866068747318506
(find-aa 6.4 12.2)
(is-right-triangle? 6.4 12 12.2)
false
(is-right-triangle-with-details? 6.4 12 12.2)
(find-cc 8 5)
9.433981132056603
)
| null | https://raw.githubusercontent.com/kyleburton/sandbox/cccbcc9a97026336691063a0a7eb59293a35c31a/examples/clojure/cider-scratchpad/src/scratchpad/numbers.clj | clojure | (ns scratchpad.numbers)
(comment
(loop [arg 9
ii 0
max 99]
(cond
(> ii max)
:done
:otherwise
(do
(printf "%s * %s = %s\n" arg ii (* arg ii))
(if (= 657 (* arg ii))
(printf "LAUNCHING MISSILE TO RUSSIA!!!\n"))
(recur arg (inc ii) max))))
)
(defn sq [xx]
(* xx xx))
(defn sqrt [xx]
(Math/sqrt xx))
(defn is-right-triangle? [aa bb cc]
(= (sq cc)
(+ (sq aa) (sq bb))))
(defn is-right-triangle-with-details? [aa bb cc]
{:aa aa
:bb bb
:cc cc
:sq-aa (sq aa)
:sq-bb (sq bb)
:sq-cc (sq cc)
:aa-sq-bb-sq (+ (sq aa) (sq bb))
:delta (- (sq cc) (+ (sq aa) (sq bb)))
:is-right-triangle? (is-right-triangle? aa bb cc)})
(defn find-cc [aa bb]
(sqrt (+ (sq aa) (sq bb))))
(defn find-aa [bb cc]
(sqrt (- (sq cc) (sq bb))))
(defn find-aa-with-details [bb cc]
(let [aa (find-aa bb cc)]
{:aa aa
:bb bb
:cc cc
:sq-aa (sq aa)
:sq-bb (sq bb)
:sq-cc (sq cc)}))
(defn triangle->area
([aa bb _cc]
(triangle->area aa bb))
([aa bb]
(* 1/2 aa bb)))
(comment
(find-cc 3 4)
(find-aa 3 5)
4.0
(find-aa 4 5)
3.0
(is-right-triangle? 3 4 5)
(triangle->area 3 4 5)
6N
(triangle->area 3 4)
(find-aa 17 19)
8.48528137423857
(find-cc 5 14)
14.866068747318506
(find-aa 6.4 12.2)
(is-right-triangle? 6.4 12 12.2)
false
(is-right-triangle-with-details? 6.4 12 12.2)
(find-cc 8 5)
9.433981132056603
)
| |
793ed7c559a0beb0c4649b44bbc5ec64652954d6bcef08dbc5369f0f740fa4c3 | tonyrog/can | can_tp.erl | @author < >
( C ) 2020 ,
%%% @doc
%%% Implement simple ISO-TP client
%%% @end
Created : 9 Jun 2020 by < >
-module(can_tp).
-export([read/2]).
-include("../include/can.hrl").
-define(SINGLE, 0).
-define(FIRST, 1).
-define(NEXT, 2).
-define(FLOW, 3).
-define(CONTINUE, 0).
-define(WAIT, 1).
-define(ABORT, 2).
read(_ID,Timeout) -> %% fixme check match ID and ID1!!! depend on broadcast
receive
Frame = #can_frame{id=ID1,data= <<?SINGLE:4,_/bitstring>>} ->
single_frame(ID1,Frame);
Frame = #can_frame{id=ID1,data= <<?FIRST:4,_/bitstring>>} ->
first_frame(ID1,Frame)
after Timeout ->
timeout
end.
single_frame(ID,#can_frame{data= <<?SINGLE:4,Len:4,
Data:Len/binary,_/binary>>}) ->
{ID,Data}.
first_frame(ID,#can_frame{data=(<<?FIRST:4, Size:12, Data:6/binary>>)}) ->
can:send(ID-8,
<<?FLOW:4, ?CONTINUE:4, 0, 1, 16#CC,16#CC,16#CC,16#CC,16#CC>>),
read_next(ID, Size-6, 1, Data).
read_next(ID, Remain, I, Buf) ->
receive
#can_frame{id=ID, data=(<<?NEXT:4,I:4, Data/binary>>)} ->
Size = byte_size(Data),
if Remain > Size ->
read_next(ID, Remain-Size, (I+1) band 16#f,
<<Buf/binary,Data/binary>>);
Remain =:= Size ->
{ID, <<Buf/binary,Data/binary>>};
true ->
<<Data1:Remain/binary,_/binary>> = Data,
{ID, <<Buf/binary,Data1/binary>>}
end
end.
| null | https://raw.githubusercontent.com/tonyrog/can/003e23c1a067657b010199a6b86c7cc49046b0b0/src/can_tp.erl | erlang | @doc
Implement simple ISO-TP client
@end
fixme check match ID and ID1!!! depend on broadcast | @author < >
( C ) 2020 ,
Created : 9 Jun 2020 by < >
-module(can_tp).
-export([read/2]).
-include("../include/can.hrl").
-define(SINGLE, 0).
-define(FIRST, 1).
-define(NEXT, 2).
-define(FLOW, 3).
-define(CONTINUE, 0).
-define(WAIT, 1).
-define(ABORT, 2).
receive
Frame = #can_frame{id=ID1,data= <<?SINGLE:4,_/bitstring>>} ->
single_frame(ID1,Frame);
Frame = #can_frame{id=ID1,data= <<?FIRST:4,_/bitstring>>} ->
first_frame(ID1,Frame)
after Timeout ->
timeout
end.
single_frame(ID,#can_frame{data= <<?SINGLE:4,Len:4,
Data:Len/binary,_/binary>>}) ->
{ID,Data}.
first_frame(ID,#can_frame{data=(<<?FIRST:4, Size:12, Data:6/binary>>)}) ->
can:send(ID-8,
<<?FLOW:4, ?CONTINUE:4, 0, 1, 16#CC,16#CC,16#CC,16#CC,16#CC>>),
read_next(ID, Size-6, 1, Data).
read_next(ID, Remain, I, Buf) ->
receive
#can_frame{id=ID, data=(<<?NEXT:4,I:4, Data/binary>>)} ->
Size = byte_size(Data),
if Remain > Size ->
read_next(ID, Remain-Size, (I+1) band 16#f,
<<Buf/binary,Data/binary>>);
Remain =:= Size ->
{ID, <<Buf/binary,Data/binary>>};
true ->
<<Data1:Remain/binary,_/binary>> = Data,
{ID, <<Buf/binary,Data1/binary>>}
end
end.
|
4e227cb8bebe5cd8621e33d11c1645df9af9a3b3d56a0c6cf6cfe2750c4efa67 | michaelklishin/neocons | labels.clj | Copyright ( c ) 2011 - 2015 , , and The ClojureWerkz
Team
;;
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns clojurewerkz.neocons.rest.labels
"Operations on labels (Neo4J 2.0+)"
(:require [clj-http.client :as http]
[cheshire.core :as json]
[clojure.string :as string]
[clojurewerkz.neocons.rest :as rest]
[clojurewerkz.neocons.rest.conversion :as conv]
[clojurewerkz.neocons.rest.records :as records]
[clojurewerkz.support.http.statuses :refer [missing?]])
(:import [clojurewerkz.neocons.rest Connection])
(:refer-clojure :exclude [node replace remove rest]))
(defn- get-location-url
[node]
(str (:location-uri node) "/labels"))
(defn add
"Adds a string label or a list of labels (string or keyword) to a node.
See -api-node-labels.html#rest-api-adding-a-label-to-a-node"
[^Connection connection node labels]
(rest/POST
connection
(get-location-url node)
:body (json/encode (conv/kw-to-string labels))))
(defn replace
"This removes any existing labels for the node and adds the labels passes to the function.
See -api-node-labels.html#rest-api-replacing-labels-on-a-node"
[^Connection connection node labels]
(conv/string-to-kw
(rest/PUT
connection
(get-location-url node)
:body (json/encode labels))))
(defn remove
"This removes the specified label from the node.
See -api-node-labels.html#rest-api-removing-a-label-from-a-node"
[^Connection connection node label]
(rest/DELETE
connection
(str (get-location-url node) "/" (conv/encode-kw-to-string label))))
(defn- get-labels
[^Connection connection ^String uri]
(let [{:keys [status headers body]} (rest/GET connection uri)]
(when-not (missing? status)
(conv/string-to-kw
(json/decode body true)))))
(defn get-all-labels
"This function gets all labels in the database if no argument is passed.
If a node is passed, then it returns all the labels associated with the node.
See -api-node-labels.html#rest-api-listing-labels-for-a-node
and -api-node-labels.html#rest-api-list-all-labels"
([^Connection connection]
(get-labels connection (str (get-in connection [:endpoint :uri]) "labels")))
([^Connection connection node]
(get-labels connection (get-location-url node))))
(defn- encode-params
[^Connection connection ^String label ^String x y]
(str (get-in connection [:endpoint :uri])
"label/"
(conv/encode-kw-to-string label)
"/nodes"
(when (and x y)
(str "?"
(http/generate-query-string
[[(conv/kw-to-string x) (json/encode y)]])))))
(defn get-all-nodes
"This returns all the nodes which have a particular label.
See -api-node-labels.html#rest-api-get-all-nodes-with-a-label
You can also pass a property name and value you want to filter the nodes on.
See -api-node-labels.html#rest-api-get-nodes-by-label-and-property"
([^Connection connection label]
(get-all-nodes connection label nil nil))
([^Connection connection label prop-name prop-value]
(let [base-uri (encode-params connection label prop-name prop-value)
{:keys [status headers body]} (rest/GET connection base-uri)]
(when-not (missing? status)
(map records/instantiate-node-from (json/decode body true))))))
| null | https://raw.githubusercontent.com/michaelklishin/neocons/30f30e95686a01f7a34082600bc1221877c2acbd/src/clojure/clojurewerkz/neocons/rest/labels.clj | clojure |
The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) 2011 - 2015 , , and The ClojureWerkz
Team
Eclipse Public License 1.0 ( -1.0.php )
(ns clojurewerkz.neocons.rest.labels
"Operations on labels (Neo4J 2.0+)"
(:require [clj-http.client :as http]
[cheshire.core :as json]
[clojure.string :as string]
[clojurewerkz.neocons.rest :as rest]
[clojurewerkz.neocons.rest.conversion :as conv]
[clojurewerkz.neocons.rest.records :as records]
[clojurewerkz.support.http.statuses :refer [missing?]])
(:import [clojurewerkz.neocons.rest Connection])
(:refer-clojure :exclude [node replace remove rest]))
(defn- get-location-url
[node]
(str (:location-uri node) "/labels"))
(defn add
"Adds a string label or a list of labels (string or keyword) to a node.
See -api-node-labels.html#rest-api-adding-a-label-to-a-node"
[^Connection connection node labels]
(rest/POST
connection
(get-location-url node)
:body (json/encode (conv/kw-to-string labels))))
(defn replace
"This removes any existing labels for the node and adds the labels passes to the function.
See -api-node-labels.html#rest-api-replacing-labels-on-a-node"
[^Connection connection node labels]
(conv/string-to-kw
(rest/PUT
connection
(get-location-url node)
:body (json/encode labels))))
(defn remove
"This removes the specified label from the node.
See -api-node-labels.html#rest-api-removing-a-label-from-a-node"
[^Connection connection node label]
(rest/DELETE
connection
(str (get-location-url node) "/" (conv/encode-kw-to-string label))))
(defn- get-labels
[^Connection connection ^String uri]
(let [{:keys [status headers body]} (rest/GET connection uri)]
(when-not (missing? status)
(conv/string-to-kw
(json/decode body true)))))
(defn get-all-labels
"This function gets all labels in the database if no argument is passed.
If a node is passed, then it returns all the labels associated with the node.
See -api-node-labels.html#rest-api-listing-labels-for-a-node
and -api-node-labels.html#rest-api-list-all-labels"
([^Connection connection]
(get-labels connection (str (get-in connection [:endpoint :uri]) "labels")))
([^Connection connection node]
(get-labels connection (get-location-url node))))
(defn- encode-params
[^Connection connection ^String label ^String x y]
(str (get-in connection [:endpoint :uri])
"label/"
(conv/encode-kw-to-string label)
"/nodes"
(when (and x y)
(str "?"
(http/generate-query-string
[[(conv/kw-to-string x) (json/encode y)]])))))
(defn get-all-nodes
"This returns all the nodes which have a particular label.
See -api-node-labels.html#rest-api-get-all-nodes-with-a-label
You can also pass a property name and value you want to filter the nodes on.
See -api-node-labels.html#rest-api-get-nodes-by-label-and-property"
([^Connection connection label]
(get-all-nodes connection label nil nil))
([^Connection connection label prop-name prop-value]
(let [base-uri (encode-params connection label prop-name prop-value)
{:keys [status headers body]} (rest/GET connection base-uri)]
(when-not (missing? status)
(map records/instantiate-node-from (json/decode body true))))))
|
3f34b9ff871d8b18f5f1e9ebcdaa5159760b9e9c9234e539b24243f05ed042b0 | erlangonrails/devdb | mathlib.erl | 2007 - 2010 fuer Informationstechnik Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
%% @doc Math utility functions.
@version $ I d : mathlib.erl 906 2010 - 07 - 23 14:09:20Z schuett $
-module(mathlib).
-author('').
-vsn('$Id: mathlib.erl 906 2010-07-23 14:09:20Z schuett $').
-export([closestPoints/1, euclideanDistance/1, euclideanDistance/2, u/1,
vecAdd/2, vecSub/2, vecMult/2, vecWeightedAvg/4, zeros/1, median/1,
aggloClustering/3]).
-type(vector() :: [number(),...]).
-type(centroid() :: vector()).
%% @doc Median of an unsorted non-empty list of numbers, i.e. a vector.
-spec median(vector()) -> number().
median(L) ->
L1 = lists:sort(L),
N = length(L1),
case N rem 2 of
1 -> lists:nth(round(N / 2), L1);
0 -> (lists:nth(trunc(N / 2), L1) + lists:nth(trunc(N / 2) + 1, L1)) / 2
end.
%% @doc Add two equal-length vectors X, Y component-wise, i.e. X + Y.
-spec vecAdd(X::[number(),...], Y::[number(),...]) -> [number(),...].
vecAdd(Xs, Ys) ->
    [X + Y || {X, Y} <- lists:zip(Xs, Ys)].
%% @doc Subtract two equal-length vectors component-wise, i.e. X - Y.
-spec vecSub(X::[number(),...], Y::[number(),...]) -> [number(),...].
vecSub(Xs, Ys) ->
    [X - Y || {X, Y} <- lists:zip(Xs, Ys)].
%% @doc Multiply vector V with a scalar S.
-spec vecMult(V::[number(),...], S::float()) -> [number(),...].
vecMult(V, Scale) ->
    [Scale * Component || Component <- V].
%% @doc Weighted average of two vectors: (V1*W1 + V2*W2) * (1 / (W1 + W2)).
%% Multiplies by the reciprocal (not a division) to keep the exact same
%% floating point behaviour as the original composition of vecMult/vecAdd.
-spec vecWeightedAvg(V1::[number(),...], V2::[number(),...], W1::float(), W2::float()) -> [number(),...].
vecWeightedAvg(V1, V2, W1, W2) ->
    InvTotal = 1 / (W1 + W2),
    [(A * W1 + B * W2) * InvTotal || {A, B} <- lists:zip(V1, V2)].
%% @doc Euclidean distance between the origin and V (the 2-norm of V).
-spec euclideanDistance(V::[number(),...]) -> Distance::float().
euclideanDistance(V) ->
    SumOfSquares = lists:foldl(fun(C, Acc) -> Acc + math:pow(C, 2) end, 0.0, V),
    math:sqrt(SumOfSquares).
%% @doc Euclidean distance between two vectors.
%% Relies on util:zipfoldl/5, a project helper not visible in this file:
%% presumably it folds over both vectors in lock-step, combining each
%% squared component difference into the accumulator -- TODO confirm.
-spec euclideanDistance(V::vector(), W::vector()) -> Distance::float().
euclideanDistance(V, W) ->
    math:sqrt(util:zipfoldl(fun(Vi, Wi) -> math:pow(Vi - Wi, 2) end,
                            fun(Dist, OldDist) -> OldDist + Dist end,
                            V, W, 0.0)).
%% @doc Unit vector u(v) = v / ||v||.
%% Undefined (badarith) for the zero vector, whose norm is 0.
-spec u(V::vector()) -> UV::vector().
u(V) ->
    Norm = euclideanDistance(V),
    vecMult(V, 1 / Norm).
%% @doc Find the pair of closest centroids and return their distance and
%% (1-based) indices; yields {-1, -1, -1} when fewer than two centroids exist.
-spec closestPoints(Centroids::[centroid()])
        -> {Min::number(), I::pos_integer(), J::pos_integer()} | {-1, -1, -1}.
closestPoints([A, B | _] = Centroids) ->
    %% Seed the search with the distance of the first pair (indices 1 and 2).
    closestPointsForI(Centroids, 1, 2, euclideanDistance(A, B), 1, 2);
closestPoints(_TooFew) ->
    {-1, -1, -1}.
%% Outer loop of the pairwise scan: for the centroid at index I, compare
%% against all later centroids (handled by closestPointsForJ), then advance.
-spec closestPointsForI(Centroids::[centroid()], I::pos_integer(), J::pos_integer(),
                        Min::number(), MinI::pos_integer(), MinJ::pos_integer())
        -> {DistMin::number(), IMin::pos_integer(), JMin::pos_integer()}.
closestPointsForI([], _I, _J, Best, BestI, BestJ) ->
    {Best, BestI, BestJ};
closestPointsForI([Pivot | Later], I, J, Best, BestI, BestJ) ->
    {Best1, BestI1, BestJ1} = closestPointsForJ(Pivot, Later, I, J, Best, BestI, BestJ),
    closestPointsForI(Later, I + 1, J + 1, Best1, BestI1, BestJ1).
%% Inner loop: compare Pivot (at index I) against each later centroid
%% (at running index J), keeping the smallest distance seen so far.
-spec closestPointsForJ(First::centroid(), Rest::[centroid()],
                        I::pos_integer(), J::pos_integer(),
                        Min::number(), MinI::pos_integer(), MinJ::pos_integer())
        -> {DistMin::number(), IMin::pos_integer(), JMin::pos_integer()}.
closestPointsForJ(_Pivot, [], _I, _J, Best, BestI, BestJ) ->
    {Best, BestI, BestJ};
closestPointsForJ(Pivot, [Other | More], I, J, Best, BestI, BestJ) ->
    {Best1, BestI1, BestJ1} =
        condExchange(Best, BestI, BestJ, euclideanDistance(Pivot, Other), I, J),
    closestPointsForJ(Pivot, More, I, J + 1, Best1, BestI1, BestJ1).
%% @doc Keep whichever of the current minimum and the candidate distance is
%% smaller, together with its index pair; ties keep the existing minimum.
-spec condExchange(Min::number(), MinI::pos_integer(), MinJ::pos_integer(),
                   Dist::number(), DistI::pos_integer(), DistJ::pos_integer())
        -> {DistMin::number(), IMin::integer(), JMin::integer()}.
condExchange(CurMin, CurI, CurJ, Candidate, _I, _J) when CurMin =< Candidate ->
    {CurMin, CurI, CurJ};
condExchange(_CurMin, _CurI, _CurJ, Candidate, I, J) ->
    {Candidate, I, J}.
%% @doc Create a list with N zeros.
-spec zeros(N::0) -> [];
           (N::pos_integer()) -> [0,...].
zeros(0) ->
    [];
zeros(N) ->
    lists:duplicate(N, 0).
%% @doc Agglomerative clustering: repeatedly merge the two closest
%% centroids while their distance is within Radius (see the helper).
-spec aggloClustering(Centroids::[centroid()], Sizes::vector(),
                      Radius::number()) -> {[centroid()], vector()}.
aggloClustering(Centroids, Sizes, Radius) ->
    {Dist, A, B} = closestPoints(Centroids),
    aggloClusteringHelper(Centroids, Sizes, Radius, Dist, A, B).
-spec aggloClusteringHelper
        (Centroids::[centroid(),...], Sizes::vector(), Radius::number(),
         Min::number(), I::pos_integer(), J::pos_integer()) -> {[centroid()], vector()};
        (Centroids::[centroid()], Sizes::vector(), Radius::number(),
         Min::-1, I::-1, J::-1) -> {[centroid()], vector()}.
%% Note: closestPoints/1 creates Min, I, J and only returns {-1, -1, -1} if
%% Centroids contains less than two elements. This is not the case in the first
%% pattern and we can thus assume these values are pos_integer().
aggloClusteringHelper([_,_|_] = Centroids, [_,_|_] = Sizes, Radius, Min, I, J) when Min =< Radius ->
    %% Merge the two closest centroids (indices I and J) into their
    %% size-weighted average, then recurse on the reduced set.
    C1 = lists:nth(I, Centroids),
    C2 = lists:nth(J, Centroids),
    S1 = lists:nth(I, Sizes),
    S2 = lists:nth(J, Sizes),
    %% tools:rmvTwo/3 is a project helper not visible here; presumably it
    %% removes the elements at positions I and J -- TODO confirm.
    Centroids1 = [vecWeightedAvg(C1, C2, S1, S2) | tools:rmvTwo(Centroids, I, J)],
    {Min1, I1, J1} = closestPoints(Centroids1),
    aggloClusteringHelper(Centroids1, [S1 + S2 | tools:rmvTwo(Sizes, I, J)],
                          Radius, Min1, I1, J1);
%% Termination: fewer than two clusters left, or closest pair farther than Radius.
aggloClusteringHelper(Centroids, Sizes, _Radius, _Min, _I, _J) ->
    {Centroids, Sizes}.
| null | https://raw.githubusercontent.com/erlangonrails/devdb/0e7eaa6bd810ec3892bfc3d933439560620d0941/dev/scalaris/src/mathlib.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Math utility functions.
@doc Median of an unsorted non-empty list of numbers, i.e. a vector.
@doc Multiply vector V with a scalar S.
@doc Find indices of closest centroids.
@doc Update smallest distance and its indices.
pattern and we can thus assume these values are pos_integer(). | 2007 - 2010 fuer Informationstechnik Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
@version $ I d : mathlib.erl 906 2010 - 07 - 23 14:09:20Z schuett $
-module(mathlib).
-author('').
-vsn('$Id: mathlib.erl 906 2010-07-23 14:09:20Z schuett $').
-export([closestPoints/1, euclideanDistance/1, euclideanDistance/2, u/1,
vecAdd/2, vecSub/2, vecMult/2, vecWeightedAvg/4, zeros/1, median/1,
aggloClustering/3]).
-type(vector() :: [number(),...]).
-type(centroid() :: vector()).
-spec median(vector()) -> number().
median(L) ->
L1 = lists:sort(L),
N = length(L1),
case N rem 2 of
1 -> lists:nth(round(N / 2), L1);
0 -> (lists:nth(trunc(N / 2), L1) + lists:nth(trunc(N / 2) + 1, L1)) / 2
end.
@doc Add two vectors X , Y , i.e. X + Y.
-spec vecAdd(X::vector(), Y::vector()) -> vector().
vecAdd(X, Y) ->
lists:zipwith(fun(Xi, Yi) -> Xi + Yi end, X, Y).
@doc Substract two vectors X , Y , i.e.
-spec vecSub(X::vector(), Y::vector()) -> vector().
vecSub(X, Y) ->
lists:zipwith(fun(Xi, Yi) -> Xi - Yi end, X, Y).
-spec vecMult(V::vector(), S::float()) -> vector().
vecMult(V, S) ->
lists:map(fun(X) -> S*X end, V).
-spec vecWeightedAvg(V1::vector(), V2::vector(), W1::float(), W2::float()) -> vector().
vecWeightedAvg(V1, V2, W1, W2) ->
vecMult(vecAdd(vecMult(V1, W1), vecMult(V2, W2)), 1 / (W1 + W2)).
@doc Euclidean distance between origin and V.
-spec euclideanDistance(V::vector()) -> Distance::float().
euclideanDistance(V) ->
math:sqrt(lists:foldl(fun(Vi, OldDist) -> OldDist + math:pow(Vi, 2) end,
0.0, V)).
@doc Euclidean distance between two vectors .
-spec euclideanDistance(V::vector(), W::vector()) -> Distance::float().
euclideanDistance(V, W) ->
math:sqrt(util:zipfoldl(fun(Vi, Wi) -> math:pow(Vi - Wi, 2) end,
fun(Dist, OldDist) -> OldDist + Dist end,
V, W, 0.0)).
@doc Unit vector u(v ) = v/||v||
-spec u(V::vector()) -> UV::vector().
u(V) ->
vecMult(V, 1 / euclideanDistance(V)).
-spec closestPoints(Centroids::[centroid()])
-> {Min::number(), I::pos_integer(), J::pos_integer()} | {-1, -1, -1}.
closestPoints([C1, C2 | Rest]) ->
closestPointsForI([C1, C2 | Rest], 1, 2, euclideanDistance(C1, C2), 1, 2);
closestPoints(_) ->
{-1, -1, -1}.
-spec closestPointsForI(Centroids::[centroid()], I::pos_integer(), J::pos_integer(),
Min::number(), MinI::pos_integer(), MinJ::pos_integer())
-> {DistMin::number(), IMin::pos_integer(), JMin::pos_integer()}.
closestPointsForI([First | Rest], I, J, Min, MinI, MinJ) ->
{Min1, MinI1, MinJ1} = closestPointsForJ(First, Rest, I, J, Min, MinI, MinJ),
I1 = I + 1,
J1 = J + 1,
closestPointsForI(Rest, I1, J1, Min1, MinI1, MinJ1);
closestPointsForI([], _, _, Min, I, J) ->
{Min, I, J}.
-spec closestPointsForJ(First::centroid(), Rest::[centroid()],
I::pos_integer(), J::pos_integer(),
Min::number(), MinI::pos_integer(), MinJ::pos_integer())
-> {DistMin::number(), IMin::pos_integer(), JMin::pos_integer()}.
closestPointsForJ(First, [Centroid | Rest], I, J, Min, MinI, MinJ) ->
Dist = euclideanDistance(First, Centroid),
{Min1, MinI1, MinJ1} = condExchange(Min, MinI, MinJ, Dist, I, J),
J1 = J + 1,
closestPointsForJ(First, Rest, I, J1, Min1, MinI1, MinJ1);
closestPointsForJ(_, [], _, _, Min, MinI, MinJ) ->
{Min, MinI, MinJ}.
-spec condExchange(Min::number(), MinI::pos_integer(), MinJ::pos_integer(),
Dist::number(), DistI::pos_integer(), DistJ::pos_integer())
-> {DistMin::number(), IMin::integer(), JMin::integer()}.
condExchange(Min, I, J, Dist, _, _) when Min =< Dist ->
{Min, I, J};
condExchange(_, _, _, Dist, I, J) ->
{Dist, I, J}.
@doc Create a list with N zeros .
-spec zeros(N::0) -> [];
(N::pos_integer()) -> [0,...].
zeros(0) ->
[];
zeros(N) ->
[0 || _ <- lists:seq(1,N)].
@doc Get closest centroids and merge them if their distance is within Radius .
-spec aggloClustering(Centroids::[centroid()], Sizes::vector(),
Radius::number()) -> {[centroid()], vector()}.
aggloClustering(Centroids, Sizes, Radius) ->
{Min, I, J} = closestPoints(Centroids),
aggloClusteringHelper(Centroids, Sizes, Radius, Min, I, J).
-spec aggloClusteringHelper
(Centroids::[centroid(),...], Sizes::vector(), Radius::number(),
Min::number(), I::pos_integer(), J::pos_integer()) -> {[centroid()], vector()};
(Centroids::[centroid()], Sizes::vector(), Radius::number(),
Min::-1, I::-1, J::-1) -> {[centroid()], vector()}.
Note : closestPoints/1 creates , I , J and only returns { -1 , -1 , -1 } if
Centroids contains less than two elements . This is not the case in the first
aggloClusteringHelper([_,_|_] = Centroids, [_,_|_] = Sizes, Radius, Min, I, J) when Min =< Radius ->
C1 = lists:nth(I, Centroids),
C2 = lists:nth(J, Centroids),
S1 = lists:nth(I, Sizes),
S2 = lists:nth(J, Sizes),
Centroids1 = [vecWeightedAvg(C1, C2, S1, S2) | tools:rmvTwo(Centroids, I, J)],
{Min1, I1, J1} = closestPoints(Centroids1),
aggloClusteringHelper(Centroids1, [S1 + S2 | tools:rmvTwo(Sizes, I, J)],
Radius, Min1, I1, J1);
aggloClusteringHelper(Centroids, Sizes, _Radius, _Min, _I, _J) ->
{Centroids, Sizes}.
|
feb5e12341e9b64f515de351acb0c75fa57eb836493c8f0074867d5f894267c4 | podenv/podenv | TH.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ImportQualifiedPost #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE NoImplicitPrelude #
-- makeLenses does not produce those
# OPTIONS_GHC -Wno - missing - signatures #
# OPTIONS_GHC -fno - warn - missing - export - lists #
| This module defines data types and lenses for the podenv dhall schemas .
module Podenv.Dhall.TH where
import Data.Either.Validation
import Data.Void
import Dhall qualified
import Dhall.Core (Expr ())
import Dhall.Core qualified as Dhall
import Dhall.TH qualified
import Lens.Family.TH (makeLensesBy)
import Podenv.Prelude
| The hub submodule commit , this is only used for the PODENV environment value
hubCommit :: Expr Void Void
hubCommit = $(Dhall.TH.staticDhallExpression "env:HUB_COMMIT as Text ? ./.git/modules/hub/HEAD as Text")
-- | Embed static dhall code
podenvPackage :: Expr Void Void
podenvPackage = $(Dhall.TH.staticDhallExpression "./hub/package.dhall")
appType, appDefault, runtimeType, containerBuildDefault, capsDefault :: Expr Void Void
appType = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Application.dhall).Type")
runtimeType = $(Dhall.TH.staticDhallExpression "./hub/schemas/Runtime.dhall")
containerBuildDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/ContainerBuild.dhall).default")
appDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Application.dhall).default")
capsDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Capabilities.dhall).default")
| Generate Haskell Types from Dhall Types .
-- See: -1.40.0/docs/Dhall-TH.html
Dhall.TH.makeHaskellTypes
( let mainPath name = "(./hub/schemas/" <> name <> ".dhall).Type"
main' cname name = Dhall.TH.SingleConstructor cname cname $ mainPath name
main name = main' name name
in [ main "Capabilities",
main "Application",
main "ApplicationResource",
main "ContainerBuild",
main "Flakes",
Dhall.TH.SingleConstructor "LabelKV" "LabelKV" "{mapKey : Text, mapValue : Text}",
main "Metadata",
Dhall.TH.MultipleConstructors "Runtime" "./hub/schemas/Runtime.dhall",
Dhall.TH.MultipleConstructors "Network" "./hub/schemas/Network.dhall"
]
)
$(makeLensesBy (lensName "cap") ''Capabilities)
$(makeLensesBy (lensName "app") ''Application)
$(makeLensesBy (lensName "ar") ''ApplicationResource)
$(makeLensesBy (lensName "meta") ''Metadata)
$(makeLensesBy (lensName "cb") ''ContainerBuild)
deriving instance Show Runtime
deriving instance Eq Runtime
deriving instance Show ContainerBuild
deriving instance Eq ContainerBuild
deriving instance Show Flakes
deriving instance Eq Flakes
deriving instance Show Capabilities
deriving instance Eq Capabilities
deriving instance Show Application
deriving instance Eq Application
deriving instance Show LabelKV
deriving instance Eq LabelKV
deriving instance Show Metadata
deriving instance Eq Metadata
deriving instance Show ApplicationResource
deriving instance Eq ApplicationResource
deriving instance Show Network
deriving instance Eq Network
extractDhallDefault :: (HasCallStack, Dhall.FromDhall a) => Expr Void Void -> a
extractDhallDefault def = case Dhall.extract Dhall.auto (Dhall.renote def) of
Success app -> app
Failure v -> error $ "Invalid defaults: " <> show v
| null | https://raw.githubusercontent.com/podenv/podenv/6964a88e264e060ed8307443a4dbfa13ddf6ff85/src/Podenv/Dhall/TH.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
makeLenses does not produce those
| Embed static dhall code
See: -1.40.0/docs/Dhall-TH.html | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ImportQualifiedPost #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -Wno - missing - signatures #
# OPTIONS_GHC -fno - warn - missing - export - lists #
| This module defines data types and lenses for the podenv dhall schemas .
module Podenv.Dhall.TH where
import Data.Either.Validation
import Data.Void
import Dhall qualified
import Dhall.Core (Expr ())
import Dhall.Core qualified as Dhall
import Dhall.TH qualified
import Lens.Family.TH (makeLensesBy)
import Podenv.Prelude
| The hub submodule commit , this is only used for the PODENV environment value
hubCommit :: Expr Void Void
hubCommit = $(Dhall.TH.staticDhallExpression "env:HUB_COMMIT as Text ? ./.git/modules/hub/HEAD as Text")
podenvPackage :: Expr Void Void
podenvPackage = $(Dhall.TH.staticDhallExpression "./hub/package.dhall")
appType, appDefault, runtimeType, containerBuildDefault, capsDefault :: Expr Void Void
appType = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Application.dhall).Type")
runtimeType = $(Dhall.TH.staticDhallExpression "./hub/schemas/Runtime.dhall")
containerBuildDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/ContainerBuild.dhall).default")
appDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Application.dhall).default")
capsDefault = $(Dhall.TH.staticDhallExpression "(./hub/schemas/Capabilities.dhall).default")
| Generate Haskell Types from Dhall Types .
Dhall.TH.makeHaskellTypes
( let mainPath name = "(./hub/schemas/" <> name <> ".dhall).Type"
main' cname name = Dhall.TH.SingleConstructor cname cname $ mainPath name
main name = main' name name
in [ main "Capabilities",
main "Application",
main "ApplicationResource",
main "ContainerBuild",
main "Flakes",
Dhall.TH.SingleConstructor "LabelKV" "LabelKV" "{mapKey : Text, mapValue : Text}",
main "Metadata",
Dhall.TH.MultipleConstructors "Runtime" "./hub/schemas/Runtime.dhall",
Dhall.TH.MultipleConstructors "Network" "./hub/schemas/Network.dhall"
]
)
$(makeLensesBy (lensName "cap") ''Capabilities)
$(makeLensesBy (lensName "app") ''Application)
$(makeLensesBy (lensName "ar") ''ApplicationResource)
$(makeLensesBy (lensName "meta") ''Metadata)
$(makeLensesBy (lensName "cb") ''ContainerBuild)
deriving instance Show Runtime
deriving instance Eq Runtime
deriving instance Show ContainerBuild
deriving instance Eq ContainerBuild
deriving instance Show Flakes
deriving instance Eq Flakes
deriving instance Show Capabilities
deriving instance Eq Capabilities
deriving instance Show Application
deriving instance Eq Application
deriving instance Show LabelKV
deriving instance Eq LabelKV
deriving instance Show Metadata
deriving instance Eq Metadata
deriving instance Show ApplicationResource
deriving instance Eq ApplicationResource
deriving instance Show Network
deriving instance Eq Network
extractDhallDefault :: (HasCallStack, Dhall.FromDhall a) => Expr Void Void -> a
extractDhallDefault def = case Dhall.extract Dhall.auto (Dhall.renote def) of
Success app -> app
Failure v -> error $ "Invalid defaults: " <> show v
|
55792bbddea0486ad78565a8aa01f8832187d58037b2a750ca68fd6250d970b3 | ekmett/category-extras | CoKleisli.hs | # OPTIONS_GHC -cpp #
-------------------------------------------------------------------------------------------
-- |
-- Module : Control.Arrow.CoKleisli
Copyright : 2008
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
-- Portability : portable
--
-------------------------------------------------------------------------------------------
module Control.Arrow.CoKleisli
( CoKleisli(..)
) where
import Prelude hiding (id,(.))
import Control.Category
import Control.Comonad
import Control.Arrow
newtype CoKleisli w a b = CoKleisli { runCoKleisli :: w a -> b }
instance Functor (CoKleisli w a) where
fmap f (CoKleisli g) = CoKleisli (f . g)
instance Comonad w => Arrow (CoKleisli w) where
arr f = CoKleisli (f . extract)
CoKleisli a &&& CoKleisli b = CoKleisli (a &&& b)
CoKleisli a *** CoKleisli b = CoKleisli (a . fmap fst &&& b . fmap snd)
first a = a *** CoKleisli extract
second a = CoKleisli extract *** a
#if __GLASGOW_HASKELL__ < 609
CoKleisli a >>> CoKleisli b = CoKleisli (b . fmap a . duplicate)
#endif
instance Comonad w => Category (CoKleisli w) where
id = CoKleisli extract
CoKleisli b . CoKleisli a = CoKleisli (b . fmap a . duplicate)
| null | https://raw.githubusercontent.com/ekmett/category-extras/f0f3ca38a3dfcb49d39aa2bb5b31b719f2a5b1ae/Control/Arrow/CoKleisli.hs | haskell | -----------------------------------------------------------------------------------------
|
Module : Control.Arrow.CoKleisli
License : BSD3
Stability : experimental
Portability : portable
----------------------------------------------------------------------------------------- | # OPTIONS_GHC -cpp #
Copyright : 2008
Maintainer : < >
module Control.Arrow.CoKleisli
( CoKleisli(..)
) where
import Prelude hiding (id,(.))
import Control.Category
import Control.Comonad
import Control.Arrow
newtype CoKleisli w a b = CoKleisli { runCoKleisli :: w a -> b }
instance Functor (CoKleisli w a) where
fmap f (CoKleisli g) = CoKleisli (f . g)
instance Comonad w => Arrow (CoKleisli w) where
arr f = CoKleisli (f . extract)
CoKleisli a &&& CoKleisli b = CoKleisli (a &&& b)
CoKleisli a *** CoKleisli b = CoKleisli (a . fmap fst &&& b . fmap snd)
first a = a *** CoKleisli extract
second a = CoKleisli extract *** a
#if __GLASGOW_HASKELL__ < 609
CoKleisli a >>> CoKleisli b = CoKleisli (b . fmap a . duplicate)
#endif
instance Comonad w => Category (CoKleisli w) where
id = CoKleisli extract
CoKleisli b . CoKleisli a = CoKleisli (b . fmap a . duplicate)
|
8637f41fd41b2bd9f3b46b36f34dadc031e49d773dc977ea484b937b09a24bf0 | camfort/camfort | InferSpec.hs | module Camfort.Specification.Units.Analysis.InferSpec (spec) where
import System.FilePath ((</>))
import Control.Lens
import Test.Hspec hiding (Spec)
import qualified Test.Hspec as Test
import Language.Fortran.Util.ModFile (ModFile, emptyModFiles)
import Camfort.Analysis hiding (describe)
import Camfort.Analysis.ModFile (genModFiles, readParseSrcDir)
import Camfort.Specification.Units.Analysis (compileUnits)
import Camfort.Specification.Units.Analysis.Infer (inferUnits)
import Camfort.Specification.Units.Monad
(LiteralsOpt(..), unitOpts0, uoLiterals, runUnitAnalysis, UnitEnv(..))
spec :: Test.Spec
spec =
describe "fixtures integration tests" $ do
it "infers correctly based on simple addition" $
"example-simple-1.f90" `unitsInferReportIs` exampleInferSimple1Report
describe "Polymorphic functions" $
it "squarePoly1" $
"squarePoly1.f90" `unitsInferReportIs` squarePoly1Report
describe "Recursive functions" $
it "Recursive Addition is OK" $
"recursive1.f90" `unitsInferReportIs` recursive1Report
describe "Explicitly annotated parametric polymorphic unit variables" $ do
it "inside-outside" $
"insideOutside.f90" `unitsInferReportIs` insideOutsideReport
it "eapVarScope" $
"eapVarScope.f90" `unitsInferReportIs` eapVarScopeReport
it "eapVarApp" $
"eapVarApp.f90" `unitsInferReportIs` eapVarAppReport
describe "Implicit parametric polymorphic unit variables" $
it "inferPoly1" $
"inferPoly1.f90" `unitsInferReportIs` inferPoly1Report
describe "Intrinsic functions" $
it "sqrtPoly" $
"sqrtPoly.f90" `unitsInferReportIs` sqrtPolyReport
describe "Intrinsic function transfer (explicit cast)" $
it "transfer" $
"transfer.f90" `unitsInferReportIs` transferReport
describe "GCD of powers" $
it "gcd1" $
"gcd1.f90" `unitsInferReportIs` gcd1Report
describe "literals" $ do
it "literal-zero" $
"literal-zero.f90" `unitsInferReportIs` literalZeroReport
it "literal-nonzero" $
"literal-nonzero.f90" `unitsInferReportIs` literalNonZeroReport
it "literal-nonzero" $
"literal-nonzero2.f90" `unitsInferReportIs` literalNonZero2Report
it "do-loop1" $
"do-loop1.f90" `unitsInferReportIs` doLoop1Report
it "do-loop2" $
"do-loop2.f90" `unitsInferReportIs` doLoop2Report
describe "cross module analysis" $ do
it "with literals" $
unitsInferReportWithMod ["cross-module-b/cross-module-b1.f90"] "cross-module-b/cross-module-b2.f90"
crossModuleBReport
fixturesDir :: String
fixturesDir = "tests" </> "fixtures" </> "Specification" </> "Units"
-- | Assert that the report of performing units inference on a file is as expected.
unitsInferReportIs :: String -> String -> Expectation
unitsInferReportIs fileName expectedReport = do
unitsInferReportWithMod [] fileName expectedReport
-- | Assert that the report of performing units inference on a file is as expected (with mod files).
unitsInferReportWithMod :: [String] -> String -> String -> Expectation
unitsInferReportWithMod modNames fileName expectedReport = do
let file = fixturesDir </> fileName
modPaths = fmap (fixturesDir </>) modNames
modFiles <- mapM mkTestModFile modPaths
[(pf,_)] <- readParseSrcDir Nothing modFiles file []
let uEnv = UnitEnv { unitOpts = uOpts, unitProgramFile = pf }
report <- runAnalysisT file (logOutputNone True) LogError modFiles $ runUnitAnalysis uEnv $ inferUnits
let res = report ^?! arResult . _ARSuccess
show res `shouldBe` expectedReport
where uOpts = unitOpts0 { uoLiterals = LitMixed }
| Helper for producing a basic ModFile from a ( terminal ) module file .
mkTestModFile :: String -> IO ModFile
mkTestModFile file = head <$> genModFiles Nothing emptyModFiles compileUnits unitOpts0 file []
exampleInferSimple1Report :: String
exampleInferSimple1Report =
"\ntests/fixtures/Specification/Units/example-simple-1.f90:\n\
\ 3:14 unit s :: x\n\
\ 3:17 unit s :: y\n"
inferReport :: String -> String -> String
inferReport fname res = concat ["\n", fixturesDir </> fname, ":\n", res]
squarePoly1Report :: String
squarePoly1Report = inferReport "squarePoly1.f90"
" 4:11 unit m**2 :: x\n\
\ 5:11 unit s**2 :: y\n\
\ 7:11 unit m :: a\n\
\ 9:11 unit s :: b\n\
\ 13:3 unit ('b)**2 :: square\n\
\ 14:13 unit 'b :: n\n\
\ 17:3 unit ('a)**2 :: squarep\n\
\ 18:13 unit 'a :: m\n"
recursive1Report :: String
recursive1Report = inferReport "recursive1.f90"
" 3:14 unit 1 :: x\n\
\ 3:21 unit m :: y\n\
\ 3:28 unit m :: z\n\
\ 7:3 unit 'a :: r\n\
\ 8:16 unit 1 :: n\n\
\ 8:19 unit 'a :: b\n"
insideOutsideReport :: String
insideOutsideReport = inferReport "insideOutside.f90"
" 5:13 unit 'a :: x\n\
\ 5:16 unit 'a :: k\n\
\ 5:19 unit ('a)**2 :: m\n\
\ 5:22 unit ('a)**2 :: outside\n\
\ 12:15 unit 'a :: y\n\
\ 12:18 unit ('a)**2 :: inside\n"
eapVarScopeReport :: String
eapVarScopeReport = inferReport "eapVarScope.f90"
" 5:13 unit 'a :: x\n\
\ 5:16 unit ('a)**3 :: k\n\
\ 5:19 unit ('a)**3 :: f\n\
\ 11:13 unit 'a :: y\n\
\ 11:16 unit 'a :: j\n\
\ 11:19 unit 'a :: g\n"
eapVarAppReport :: String
eapVarAppReport = inferReport "eapVarApp.f90"
" 5:13 unit 'a :: fx\n\
\ 5:17 unit 'a :: fj\n\
\ 5:21 unit ('a)**2 :: fk\n\
\ 5:25 unit ('a)**4 :: fl\n\
\ 5:29 unit ('a)**2 :: f\n\
\ 13:13 unit 'b :: gx\n\
\ 13:17 unit 'b :: gn\n\
\ 13:21 unit 'b :: gm\n\
\ 13:25 unit 'b :: g\n\
\ 20:13 unit m :: hx\n\
\ 20:17 unit m**2 :: h\n\
\ 20:20 unit m**2 :: hy\n"
inferPoly1Report :: String
inferPoly1Report = inferReport "inferPoly1.f90"
" 4:13 unit 'c :: x1\n\
\ 4:17 unit 'c :: id\n\
\ 8:13 unit 'f :: x2\n\
\ 8:17 unit ('f)**2 :: sqr\n\
\ 12:13 unit 'a :: x3\n\
\ 12:17 unit 'b :: y3\n\
\ 12:21 unit 'a :: fst\n\
\ 16:13 unit 'e :: x4\n\
\ 16:17 unit 'd :: y4\n\
\ 16:21 unit 'd :: snd\n"
sqrtPolyReport :: String
sqrtPolyReport = inferReport "sqrtPoly.f90"
" 4:11 unit m :: x\n\
\ 6:11 unit s :: y\n\
\ 8:11 unit j :: z\n\
\ 9:14 unit m**2 :: a\n\
\ 10:14 unit s**4 :: b\n\
\ 11:14 unit j**2 :: c\n\
\ 16:3 unit ('a)**2 :: square\n\
\ 17:13 unit 'a :: n\n"
transferReport :: String
transferReport = inferReport "transfer.f90"
" 4:11 unit m :: x\n\
\ 6:11 unit s :: y\n"
gcd1Report :: String
gcd1Report = inferReport "gcd1.f90"
" 3:3 unit ('a)**12 :: g\n\
\ 4:13 unit ('a)**2 :: x\n\
\ 4:16 unit ('a)**3 :: y\n"
literalZeroReport :: String
literalZeroReport = inferReport "literal-zero.f90"
" 3:11 unit m :: a\n\
\ 3:14 unit m :: b\n\
\ 9:3 unit 'a :: f\n\
\ 11:13 unit 'a :: x\n"
literalNonZeroReport :: String
literalNonZeroReport = inferReport "literal-nonzero.f90"
" 2:11 unit m s :: a\n\
\ 2:14 unit m s :: b\n\
\ 8:3 unit m s :: f\n\
\ 10:13 unit m s :: x\n"
literalNonZero2Report :: String
literalNonZero2Report = inferReport "literal-nonzero2.f90"
" 3:11 unit m :: a\n\
\ 3:14 unit m :: b\n\
\ 3:17 unit m :: c\n\
\ 3:20 unit m :: d\n\
\ 10:3 unit m :: f\n\
\ 11:13 unit m :: x\n"
doLoop1Report :: String
doLoop1Report = inferReport "do-loop1.f90"
" 3:11 unit m :: x\n\
\ 3:14 unit m :: y\n\
\ 4:14 unit m :: i\n\
\ 10:3 unit 1 :: f\n\
\ 11:13 unit 1 :: x\n\
\ 11:16 unit 1 :: y\n\
\ 12:16 unit 1 :: i\n"
doLoop2Report :: String
doLoop2Report = inferReport "do-loop2.f90"
" 3:11 unit m :: x\n\
\ 3:14 unit m :: y\n\
\ 4:14 unit m :: i\n\
\ 10:3 unit 1 :: f\n\
\ 11:13 unit 1 :: x\n\
\ 11:16 unit 1 :: y\n\
\ 12:16 unit 1 :: i\n\
\ 19:3 unit 1 :: g\n\
\ 20:13 unit 1 :: x\n\
\ 20:16 unit 1 :: y\n\
\ 21:16 unit 1 :: i\n\
\ 28:3 unit 'a :: h\n\
\ 29:13 unit 'a :: x\n\
\ 29:16 unit 'a :: y\n\
\ 30:16 unit 'a :: i\n"
crossModuleBReport :: String
crossModuleBReport =
"\ntests/fixtures/Specification/Units/cross-module-b/cross-module-b2.f90:\n\
\ 6:24 unit c :: foo\n\
\ 9:13 unit c :: tc\n\
\ 9:17 unit k :: t\n"
| null | https://raw.githubusercontent.com/camfort/camfort/3421e85f6fbbcaa6503a266b3fae029a09d2ff24/tests/Camfort/Specification/Units/Analysis/InferSpec.hs | haskell | | Assert that the report of performing units inference on a file is as expected.
| Assert that the report of performing units inference on a file is as expected (with mod files). | module Camfort.Specification.Units.Analysis.InferSpec (spec) where
import System.FilePath ((</>))
import Control.Lens
import Test.Hspec hiding (Spec)
import qualified Test.Hspec as Test
import Language.Fortran.Util.ModFile (ModFile, emptyModFiles)
import Camfort.Analysis hiding (describe)
import Camfort.Analysis.ModFile (genModFiles, readParseSrcDir)
import Camfort.Specification.Units.Analysis (compileUnits)
import Camfort.Specification.Units.Analysis.Infer (inferUnits)
import Camfort.Specification.Units.Monad
(LiteralsOpt(..), unitOpts0, uoLiterals, runUnitAnalysis, UnitEnv(..))
spec :: Test.Spec
spec =
describe "fixtures integration tests" $ do
it "infers correctly based on simple addition" $
"example-simple-1.f90" `unitsInferReportIs` exampleInferSimple1Report
describe "Polymorphic functions" $
it "squarePoly1" $
"squarePoly1.f90" `unitsInferReportIs` squarePoly1Report
describe "Recursive functions" $
it "Recursive Addition is OK" $
"recursive1.f90" `unitsInferReportIs` recursive1Report
describe "Explicitly annotated parametric polymorphic unit variables" $ do
it "inside-outside" $
"insideOutside.f90" `unitsInferReportIs` insideOutsideReport
it "eapVarScope" $
"eapVarScope.f90" `unitsInferReportIs` eapVarScopeReport
it "eapVarApp" $
"eapVarApp.f90" `unitsInferReportIs` eapVarAppReport
describe "Implicit parametric polymorphic unit variables" $
it "inferPoly1" $
"inferPoly1.f90" `unitsInferReportIs` inferPoly1Report
describe "Intrinsic functions" $
it "sqrtPoly" $
"sqrtPoly.f90" `unitsInferReportIs` sqrtPolyReport
describe "Intrinsic function transfer (explicit cast)" $
it "transfer" $
"transfer.f90" `unitsInferReportIs` transferReport
describe "GCD of powers" $
it "gcd1" $
"gcd1.f90" `unitsInferReportIs` gcd1Report
describe "literals" $ do
it "literal-zero" $
"literal-zero.f90" `unitsInferReportIs` literalZeroReport
it "literal-nonzero" $
"literal-nonzero.f90" `unitsInferReportIs` literalNonZeroReport
it "literal-nonzero" $
"literal-nonzero2.f90" `unitsInferReportIs` literalNonZero2Report
it "do-loop1" $
"do-loop1.f90" `unitsInferReportIs` doLoop1Report
it "do-loop2" $
"do-loop2.f90" `unitsInferReportIs` doLoop2Report
describe "cross module analysis" $ do
it "with literals" $
unitsInferReportWithMod ["cross-module-b/cross-module-b1.f90"] "cross-module-b/cross-module-b2.f90"
crossModuleBReport
fixturesDir :: String
fixturesDir = "tests" </> "fixtures" </> "Specification" </> "Units"
unitsInferReportIs :: String -> String -> Expectation
unitsInferReportIs fileName expectedReport = do
unitsInferReportWithMod [] fileName expectedReport
unitsInferReportWithMod :: [String] -> String -> String -> Expectation
unitsInferReportWithMod modNames fileName expectedReport = do
let file = fixturesDir </> fileName
modPaths = fmap (fixturesDir </>) modNames
modFiles <- mapM mkTestModFile modPaths
[(pf,_)] <- readParseSrcDir Nothing modFiles file []
let uEnv = UnitEnv { unitOpts = uOpts, unitProgramFile = pf }
report <- runAnalysisT file (logOutputNone True) LogError modFiles $ runUnitAnalysis uEnv $ inferUnits
let res = report ^?! arResult . _ARSuccess
show res `shouldBe` expectedReport
where uOpts = unitOpts0 { uoLiterals = LitMixed }
| Helper for producing a basic ModFile from a ( terminal ) module file .
mkTestModFile :: String -> IO ModFile
mkTestModFile file = head <$> genModFiles Nothing emptyModFiles compileUnits unitOpts0 file []
exampleInferSimple1Report :: String
exampleInferSimple1Report =
"\ntests/fixtures/Specification/Units/example-simple-1.f90:\n\
\ 3:14 unit s :: x\n\
\ 3:17 unit s :: y\n"
inferReport :: String -> String -> String
inferReport fname res = concat ["\n", fixturesDir </> fname, ":\n", res]
squarePoly1Report :: String
squarePoly1Report = inferReport "squarePoly1.f90"
" 4:11 unit m**2 :: x\n\
\ 5:11 unit s**2 :: y\n\
\ 7:11 unit m :: a\n\
\ 9:11 unit s :: b\n\
\ 13:3 unit ('b)**2 :: square\n\
\ 14:13 unit 'b :: n\n\
\ 17:3 unit ('a)**2 :: squarep\n\
\ 18:13 unit 'a :: m\n"
recursive1Report :: String
recursive1Report = inferReport "recursive1.f90"
" 3:14 unit 1 :: x\n\
\ 3:21 unit m :: y\n\
\ 3:28 unit m :: z\n\
\ 7:3 unit 'a :: r\n\
\ 8:16 unit 1 :: n\n\
\ 8:19 unit 'a :: b\n"
insideOutsideReport :: String
insideOutsideReport = inferReport "insideOutside.f90"
" 5:13 unit 'a :: x\n\
\ 5:16 unit 'a :: k\n\
\ 5:19 unit ('a)**2 :: m\n\
\ 5:22 unit ('a)**2 :: outside\n\
\ 12:15 unit 'a :: y\n\
\ 12:18 unit ('a)**2 :: inside\n"
eapVarScopeReport :: String
eapVarScopeReport = inferReport "eapVarScope.f90"
" 5:13 unit 'a :: x\n\
\ 5:16 unit ('a)**3 :: k\n\
\ 5:19 unit ('a)**3 :: f\n\
\ 11:13 unit 'a :: y\n\
\ 11:16 unit 'a :: j\n\
\ 11:19 unit 'a :: g\n"
eapVarAppReport :: String
eapVarAppReport = inferReport "eapVarApp.f90"
" 5:13 unit 'a :: fx\n\
\ 5:17 unit 'a :: fj\n\
\ 5:21 unit ('a)**2 :: fk\n\
\ 5:25 unit ('a)**4 :: fl\n\
\ 5:29 unit ('a)**2 :: f\n\
\ 13:13 unit 'b :: gx\n\
\ 13:17 unit 'b :: gn\n\
\ 13:21 unit 'b :: gm\n\
\ 13:25 unit 'b :: g\n\
\ 20:13 unit m :: hx\n\
\ 20:17 unit m**2 :: h\n\
\ 20:20 unit m**2 :: hy\n"
inferPoly1Report :: String
inferPoly1Report = inferReport "inferPoly1.f90"
" 4:13 unit 'c :: x1\n\
\ 4:17 unit 'c :: id\n\
\ 8:13 unit 'f :: x2\n\
\ 8:17 unit ('f)**2 :: sqr\n\
\ 12:13 unit 'a :: x3\n\
\ 12:17 unit 'b :: y3\n\
\ 12:21 unit 'a :: fst\n\
\ 16:13 unit 'e :: x4\n\
\ 16:17 unit 'd :: y4\n\
\ 16:21 unit 'd :: snd\n"
sqrtPolyReport :: String
sqrtPolyReport = inferReport "sqrtPoly.f90"
" 4:11 unit m :: x\n\
\ 6:11 unit s :: y\n\
\ 8:11 unit j :: z\n\
\ 9:14 unit m**2 :: a\n\
\ 10:14 unit s**4 :: b\n\
\ 11:14 unit j**2 :: c\n\
\ 16:3 unit ('a)**2 :: square\n\
\ 17:13 unit 'a :: n\n"
transferReport :: String
transferReport = inferReport "transfer.f90"
" 4:11 unit m :: x\n\
\ 6:11 unit s :: y\n"
gcd1Report :: String
gcd1Report = inferReport "gcd1.f90"
" 3:3 unit ('a)**12 :: g\n\
\ 4:13 unit ('a)**2 :: x\n\
\ 4:16 unit ('a)**3 :: y\n"
literalZeroReport :: String
literalZeroReport = inferReport "literal-zero.f90"
" 3:11 unit m :: a\n\
\ 3:14 unit m :: b\n\
\ 9:3 unit 'a :: f\n\
\ 11:13 unit 'a :: x\n"
literalNonZeroReport :: String
literalNonZeroReport = inferReport "literal-nonzero.f90"
" 2:11 unit m s :: a\n\
\ 2:14 unit m s :: b\n\
\ 8:3 unit m s :: f\n\
\ 10:13 unit m s :: x\n"
literalNonZero2Report :: String
literalNonZero2Report = inferReport "literal-nonzero2.f90"
" 3:11 unit m :: a\n\
\ 3:14 unit m :: b\n\
\ 3:17 unit m :: c\n\
\ 3:20 unit m :: d\n\
\ 10:3 unit m :: f\n\
\ 11:13 unit m :: x\n"
doLoop1Report :: String
doLoop1Report = inferReport "do-loop1.f90"
" 3:11 unit m :: x\n\
\ 3:14 unit m :: y\n\
\ 4:14 unit m :: i\n\
\ 10:3 unit 1 :: f\n\
\ 11:13 unit 1 :: x\n\
\ 11:16 unit 1 :: y\n\
\ 12:16 unit 1 :: i\n"
doLoop2Report :: String
doLoop2Report = inferReport "do-loop2.f90"
" 3:11 unit m :: x\n\
\ 3:14 unit m :: y\n\
\ 4:14 unit m :: i\n\
\ 10:3 unit 1 :: f\n\
\ 11:13 unit 1 :: x\n\
\ 11:16 unit 1 :: y\n\
\ 12:16 unit 1 :: i\n\
\ 19:3 unit 1 :: g\n\
\ 20:13 unit 1 :: x\n\
\ 20:16 unit 1 :: y\n\
\ 21:16 unit 1 :: i\n\
\ 28:3 unit 'a :: h\n\
\ 29:13 unit 'a :: x\n\
\ 29:16 unit 'a :: y\n\
\ 30:16 unit 'a :: i\n"
crossModuleBReport :: String
crossModuleBReport =
"\ntests/fixtures/Specification/Units/cross-module-b/cross-module-b2.f90:\n\
\ 6:24 unit c :: foo\n\
\ 9:13 unit c :: tc\n\
\ 9:17 unit k :: t\n"
|
838c75ceb019edf862ad68bfcbc6b6f5af350fb74ae390261a115afb83353f10 | pink-gorilla/demo-goldly | time.clj | (ns time
(:require
[taoensso.timbre :as log :refer [tracef debug debugf info infof warn error errorf]]
[modular.date :refer [now-str]]
[clojure.core.async :as async :refer [<! <!! >! >!! put! chan go go-loop]]
[modular.ws.core :refer [send! send-all! send-response connected-uids]]))
(defn start-time-pusher! []
(go-loop []
5 seconds
(let [snow (now-str)]
(debug "sending time: " snow)
(send-all! [:demo/time {:time snow}]))
(recur)))
(start-time-pusher!)
| null | https://raw.githubusercontent.com/pink-gorilla/demo-goldly/3266bfe422ced382647c02ffd61831915980d43e/src/time.clj | clojure | (ns time
(:require
[taoensso.timbre :as log :refer [tracef debug debugf info infof warn error errorf]]
[modular.date :refer [now-str]]
[clojure.core.async :as async :refer [<! <!! >! >!! put! chan go go-loop]]
[modular.ws.core :refer [send! send-all! send-response connected-uids]]))
(defn start-time-pusher! []
(go-loop []
5 seconds
(let [snow (now-str)]
(debug "sending time: " snow)
(send-all! [:demo/time {:time snow}]))
(recur)))
(start-time-pusher!)
| |
123b90556365d3ac443f8528421c7cd22c441b92823935c3938c4168a058dccd | racketscript/racketscript | if-body-wcm.rkt | #lang racket
(define (extract-current-continuation-marks key)
(continuation-mark-set->list
(current-continuation-marks)
key))
(define (main)
(with-continuation-mark 'key 'mark-main
(if (add1 0)
(with-continuation-mark 'key 'mark-if
(extract-current-continuation-marks 'key))
#f)))
(displayln (main))
| null | https://raw.githubusercontent.com/racketscript/racketscript/f94006d11338a674ae10f6bd83fc53e6806d07d8/tests/wcm/if-body-wcm.rkt | racket | #lang racket
(define (extract-current-continuation-marks key)
(continuation-mark-set->list
(current-continuation-marks)
key))
(define (main)
(with-continuation-mark 'key 'mark-main
(if (add1 0)
(with-continuation-mark 'key 'mark-if
(extract-current-continuation-marks 'key))
#f)))
(displayln (main))
| |
400e7f2747f0c9b3fd05ea103503aeba62b874ba8216adc2f61119de3e530ade | manuel-serrano/bigloo | rgc.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / recette / rgc.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Thu Sep 8 11:03:03 1994 * /
* Last change : We d Apr 18 18:55:04 2012 ( serrano ) * /
;* ------------------------------------------------------------- */
* tests * /
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* Le module */
;*---------------------------------------------------------------------*/
(module rgc
(import (main "main.scm"))
(include "test.sch")
(export (test-rgc)))
cette grammaire plante tous les bigloo ( a la compilation )
;; jusqu'a la version 1.6c
(regular-grammar
((sign (in #\+ #\-))
(optsign (>= 1 sign))
(octdigit (in ("07"))))
((: #\0 optsign octdigit)
0))
(regular-grammar ()
((: #\a (? #\b) #\c)
0))
(regular-grammar ()
((: (? #\a) #\b #\c)
0))
(regular-grammar ()
((: #\a #\b (? #\c))
0))
une lors du permier boot de bigloo1.8
(define *std-grammar*
(regular-grammar ((chiffre (in ("09")))
(lettre (in ("azAZ") #a128 #a255))
(special (in "!@~$%^&*></.-_+\|=?:"))
(id (: (or lettre chiffre special)
(* (or lettre chiffre special #\, #\' #\`)))))
((: #\# #\a chiffre chiffre chiffre)
;; character ascii forms
0)
((: ";" (* all))
;; commets
(ignore))
((: #\# (or id (: #\. (+ #\.))) #\()
;; typed vectors beginning
1)
(else
2)))
(define *number*
(regular-grammar ()
((: (submatch (+ digit)) "." (submatch (+ digit)))
(cons (string->integer (the-submatch 1))
(string->integer (the-submatch 2))))))
(define *number2*
(regular-grammar ()
((: (submatch (* digit)) "." (submatch (* digit)))
(cons (string->integer (the-submatch 1))
(string->integer (the-submatch 2))))))
(define *symbol*
(regular-grammar ()
((+ (in ("az")))
(the-subsymbol 1 (-fx (the-length) 1)))
((: (in ("09")) (+ (in ("az"))))
(the-symbol))))
(define (recette-suffix string)
(string-case string
((: (* all) "." (submatch (+ (out "."))))
(the-submatch 1))
(else
"")))
(define (test-rgc= str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((= 2 (: #\; (* all) #\newline))
(the-string))
(else
""))))
(read/rp gram port)))
(define (test-rgc-substring str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((: #\" (+ alpha) #\")
(string=? (the-substring 1 (-fx (the-length) 1))
(the-substring 1 -1)))
(else
#f))))
(read/rp gram port)))
(define (test-rgc>= str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((>= 2 (: #\; (* all) #\newline))
(the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-and str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (and (#\a #\b) "09abcd")) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-and-2 str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (and "am" "nz")) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-but str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (but ("09ad") ("ce"))) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-** str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((** 3 6 #\a) (the-string)))))
(read/rp gram port)))
(define (rgc-etc str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((... 3 "begin") (the-string)))))
(read/rp gram port)))
(define (rgc-submatch str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((: (submatch (* #\space))
(submatch (+ #\+))
(submatch (* #\space)))
(string-append (the-submatch 1) (the-submatch 3))))))
(read/rp gram port)))
(define (test-bof)
(with-input-from-string "abcd"
(lambda ()
(read/rp
(regular-grammar ()
((bof all)
(let ((c (the-character)))
(cons `(bof ,c) (ignore))))
((eof all)
(let ((c (the-character)))
(cons `(eof ,c) (ignore))))
((bol all)
(let ((c (the-character)))
(cons `(bol ,c) (ignore))))
(else
(let ((char (the-failure)))
(cond ((eof-object? char) '())
(else
(cons `(char ,char) (ignore)))))))
(current-input-port)))))
(define (test-read-chars bufsize strsize)
(let ((p (if (number? bufsize)
(open-input-file "misc/input.txt" bufsize)
(open-input-file "misc/input.txt"))))
(unwind-protect
(let loop ((str (read-chars strsize p))
(acc '()))
(if (or (eof-object? str) (string=? str ""))
(apply string-append (reverse! acc))
(loop (read-chars strsize p)
(cons str acc))))
(close-input-port p))))
(define (test-read-chars2 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(begin
(read-chars n p)
(input-port-position p))
(close-input-port p))))
(define (test-read-chars3 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(let* ((s1 (read-chars (* 2 n) p))
(s2 (read-chars (* 2 n) p)))
(string-append s1 s2))
(close-input-port p))))
(define (test-read-chars4 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(read-chars (* 2 n) p)
(close-input-port p))))
(define (test-read-chars5 s)
(with-input-from-string s
(lambda ()
(let loop ((buf (read-chars 2))
(old #f))
(if (eof-object? buf)
old
(loop (read-chars 2) buf))))))
;*---------------------------------------------------------------------*/
;* test-unread ... */
;*---------------------------------------------------------------------*/
(define (test-unread)
(string-case ",1"
((: #\, #\1)
(unread-char! (string-ref (the-string) 1) (the-port))
(read (the-port)))))
;*---------------------------------------------------------------------*/
;* test-rgc ... */
;*---------------------------------------------------------------------*/
(define (test-rgc)
(test-module "rgc" "rgc.scm")
(test "submatch+"
(read/rp *number* (open-input-string "3.1415")) '(3 . 1415))
(test "submatch*"
(read/rp *number2* (open-input-string "3.1415")) '(3 . 1415))
(test "symbol.1"
(read/rp *symbol* (open-input-string "abcdefgh")) 'bcdefg)
(test "symbol.2"
(read/rp *symbol* (open-input-string "0abcdefgh")) '0abcdefgh)
(test "string-case" (recette-suffix "toto.org.scm") "scm")
(test "rgc ="
(test-rgc= #";1line\n;2line\n;3line\n;4line\n")
#";1line\n;2line\n")
(test "rgc substring"
(test-rgc-substring "\"foo\"")
#t)
(let ((str #";1line\n;2line\n;3line\n;4line\n"))
(test "rgc >="
(test-rgc>= str)
str))
(test "rgc and" (rgc-and "aaaabbbbccc") "aaaabbbb")
(test "rgc and" (rgc-and-2 "aaaabbbbccc") "")
(test "rgc but" (rgc-but "aaaabbbbccc") "aaaabbbb")
(test "rgc **" (rgc-** "aaaaaaaaaaabbbbccc") "aaaaaa")
(test "rgc ..." (rgc-etc "begin") "beg")
(test "rgc submatch" (rgc-submatch " +++ ") " ")
(test "fixnum" (read/rp (regular-grammar () ((: digit) (the-fixnum)))
(open-input-string "1234"))
1)
(test "fixnum" (read/rp (regular-grammar () ((+ digit) (the-fixnum)))
(open-input-string "1234"))
1234)
(test "bof" (test-bof) '((bof #\a) (char #\b) (char #\c) (eof #\d)))
(let ((res (test-read-chars #f 8192)))
(test "read-chars.1" (test-read-chars 10 1) res)
(test "read-chars.2" (test-read-chars 10 2) res)
(test "read-chars.3" (test-read-chars 10 8) res)
(test "read-chars.4" (test-read-chars 10 9) res)
(test "read-chars.5" (test-read-chars 10 10) res)
(test "read-chars.6" (test-read-chars 10 11) res)
(test "read-chars.7" (test-read-chars 10 111) res)
(test "read-chars.8" (test-read-chars2 3 3 0) 3)
(test "read-chars.8b" (test-read-chars2 3 3 3) 6)
(test "read-chars.8c" (test-read-chars2 3 10 3) 6)
(test "read-chars.9" (test-read-chars3 3 3 0)
(test-read-chars4 6 3 0))
(test "read-chars.9b" (test-read-chars3 3 3 10)
(test-read-chars4 6 3 10))
(test "read-chars.9c" (test-read-chars3 10 3 10)
(test-read-chars4 20 3 10))
(test "read-chars.9d" (test-read-chars3 10 3 0)
(test-read-chars4 20 3 0))
(test "read-chars.10" (test-read-chars5 "123") "3")
(test "read-chars.11" (test-read-chars5 "12") "12")
(test "read-chars.12" (test-read-chars5 "1") "1"))
(test "unread-chars" (test-unread) 1))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/eb650ed4429155f795a32465e009706bbf1b8d74/recette/rgc.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
*=====================================================================*/
*---------------------------------------------------------------------*/
* Le module */
*---------------------------------------------------------------------*/
jusqu'a la version 1.6c
character ascii forms
commets
typed vectors beginning
(* all) #\newline))
(* all) #\newline))
*---------------------------------------------------------------------*/
* test-unread ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* test-rgc ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / recette / rgc.scm * /
* Author : * /
* Creation : Thu Sep 8 11:03:03 1994 * /
* Last change : We d Apr 18 18:55:04 2012 ( serrano ) * /
* tests * /
(module rgc
(import (main "main.scm"))
(include "test.sch")
(export (test-rgc)))
cette grammaire plante tous les bigloo ( a la compilation )
(regular-grammar
((sign (in #\+ #\-))
(optsign (>= 1 sign))
(octdigit (in ("07"))))
((: #\0 optsign octdigit)
0))
(regular-grammar ()
((: #\a (? #\b) #\c)
0))
(regular-grammar ()
((: (? #\a) #\b #\c)
0))
(regular-grammar ()
((: #\a #\b (? #\c))
0))
une lors du permier boot de bigloo1.8
(define *std-grammar*
(regular-grammar ((chiffre (in ("09")))
(lettre (in ("azAZ") #a128 #a255))
(special (in "!@~$%^&*></.-_+\|=?:"))
(id (: (or lettre chiffre special)
(* (or lettre chiffre special #\, #\' #\`)))))
((: #\# #\a chiffre chiffre chiffre)
0)
((: ";" (* all))
(ignore))
((: #\# (or id (: #\. (+ #\.))) #\()
1)
(else
2)))
(define *number*
(regular-grammar ()
((: (submatch (+ digit)) "." (submatch (+ digit)))
(cons (string->integer (the-submatch 1))
(string->integer (the-submatch 2))))))
(define *number2*
(regular-grammar ()
((: (submatch (* digit)) "." (submatch (* digit)))
(cons (string->integer (the-submatch 1))
(string->integer (the-submatch 2))))))
(define *symbol*
(regular-grammar ()
((+ (in ("az")))
(the-subsymbol 1 (-fx (the-length) 1)))
((: (in ("09")) (+ (in ("az"))))
(the-symbol))))
(define (recette-suffix string)
(string-case string
((: (* all) "." (submatch (+ (out "."))))
(the-submatch 1))
(else
"")))
(define (test-rgc= str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
(the-string))
(else
""))))
(read/rp gram port)))
(define (test-rgc-substring str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((: #\" (+ alpha) #\")
(string=? (the-substring 1 (-fx (the-length) 1))
(the-substring 1 -1)))
(else
#f))))
(read/rp gram port)))
(define (test-rgc>= str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
(the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-and str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (and (#\a #\b) "09abcd")) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-and-2 str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (and "am" "nz")) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-but str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((+ (but ("09ad") ("ce"))) (the-string))
(else
""))))
(read/rp gram port)))
(define (rgc-** str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((** 3 6 #\a) (the-string)))))
(read/rp gram port)))
(define (rgc-etc str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((... 3 "begin") (the-string)))))
(read/rp gram port)))
(define (rgc-submatch str)
(let ((port (open-input-string str))
(gram (regular-grammar ()
((: (submatch (* #\space))
(submatch (+ #\+))
(submatch (* #\space)))
(string-append (the-submatch 1) (the-submatch 3))))))
(read/rp gram port)))
(define (test-bof)
(with-input-from-string "abcd"
(lambda ()
(read/rp
(regular-grammar ()
((bof all)
(let ((c (the-character)))
(cons `(bof ,c) (ignore))))
((eof all)
(let ((c (the-character)))
(cons `(eof ,c) (ignore))))
((bol all)
(let ((c (the-character)))
(cons `(bol ,c) (ignore))))
(else
(let ((char (the-failure)))
(cond ((eof-object? char) '())
(else
(cons `(char ,char) (ignore)))))))
(current-input-port)))))
(define (test-read-chars bufsize strsize)
(let ((p (if (number? bufsize)
(open-input-file "misc/input.txt" bufsize)
(open-input-file "misc/input.txt"))))
(unwind-protect
(let loop ((str (read-chars strsize p))
(acc '()))
(if (or (eof-object? str) (string=? str ""))
(apply string-append (reverse! acc))
(loop (read-chars strsize p)
(cons str acc))))
(close-input-port p))))
(define (test-read-chars2 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(begin
(read-chars n p)
(input-port-position p))
(close-input-port p))))
(define (test-read-chars3 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(let* ((s1 (read-chars (* 2 n) p))
(s2 (read-chars (* 2 n) p)))
(string-append s1 s2))
(close-input-port p))))
(define (test-read-chars4 n bf of)
(let ((p (open-input-file "misc/input.txt" bf)))
(when (> of 0) (read-chars of p))
(unwind-protect
(read-chars (* 2 n) p)
(close-input-port p))))
(define (test-read-chars5 s)
(with-input-from-string s
(lambda ()
(let loop ((buf (read-chars 2))
(old #f))
(if (eof-object? buf)
old
(loop (read-chars 2) buf))))))
(define (test-unread)
(string-case ",1"
((: #\, #\1)
(unread-char! (string-ref (the-string) 1) (the-port))
(read (the-port)))))
(define (test-rgc)
(test-module "rgc" "rgc.scm")
(test "submatch+"
(read/rp *number* (open-input-string "3.1415")) '(3 . 1415))
(test "submatch*"
(read/rp *number2* (open-input-string "3.1415")) '(3 . 1415))
(test "symbol.1"
(read/rp *symbol* (open-input-string "abcdefgh")) 'bcdefg)
(test "symbol.2"
(read/rp *symbol* (open-input-string "0abcdefgh")) '0abcdefgh)
(test "string-case" (recette-suffix "toto.org.scm") "scm")
(test "rgc ="
(test-rgc= #";1line\n;2line\n;3line\n;4line\n")
#";1line\n;2line\n")
(test "rgc substring"
(test-rgc-substring "\"foo\"")
#t)
(let ((str #";1line\n;2line\n;3line\n;4line\n"))
(test "rgc >="
(test-rgc>= str)
str))
(test "rgc and" (rgc-and "aaaabbbbccc") "aaaabbbb")
(test "rgc and" (rgc-and-2 "aaaabbbbccc") "")
(test "rgc but" (rgc-but "aaaabbbbccc") "aaaabbbb")
(test "rgc **" (rgc-** "aaaaaaaaaaabbbbccc") "aaaaaa")
(test "rgc ..." (rgc-etc "begin") "beg")
(test "rgc submatch" (rgc-submatch " +++ ") " ")
(test "fixnum" (read/rp (regular-grammar () ((: digit) (the-fixnum)))
(open-input-string "1234"))
1)
(test "fixnum" (read/rp (regular-grammar () ((+ digit) (the-fixnum)))
(open-input-string "1234"))
1234)
(test "bof" (test-bof) '((bof #\a) (char #\b) (char #\c) (eof #\d)))
(let ((res (test-read-chars #f 8192)))
(test "read-chars.1" (test-read-chars 10 1) res)
(test "read-chars.2" (test-read-chars 10 2) res)
(test "read-chars.3" (test-read-chars 10 8) res)
(test "read-chars.4" (test-read-chars 10 9) res)
(test "read-chars.5" (test-read-chars 10 10) res)
(test "read-chars.6" (test-read-chars 10 11) res)
(test "read-chars.7" (test-read-chars 10 111) res)
(test "read-chars.8" (test-read-chars2 3 3 0) 3)
(test "read-chars.8b" (test-read-chars2 3 3 3) 6)
(test "read-chars.8c" (test-read-chars2 3 10 3) 6)
(test "read-chars.9" (test-read-chars3 3 3 0)
(test-read-chars4 6 3 0))
(test "read-chars.9b" (test-read-chars3 3 3 10)
(test-read-chars4 6 3 10))
(test "read-chars.9c" (test-read-chars3 10 3 10)
(test-read-chars4 20 3 10))
(test "read-chars.9d" (test-read-chars3 10 3 0)
(test-read-chars4 20 3 0))
(test "read-chars.10" (test-read-chars5 "123") "3")
(test "read-chars.11" (test-read-chars5 "12") "12")
(test "read-chars.12" (test-read-chars5 "1") "1"))
(test "unread-chars" (test-unread) 1))
|
d67f3381b1af0a1c525259140a5404d5492d93f317acfb29488e704efc67f599 | guicho271828/alien | relaxation.lisp |
(in-package :alien)
(deftype relaxer ()
"a class of functions that returns a relaxed version of an operator."
`(function (op) op))
(ftype* relaxed-sg (simple-array op) relaxer &optional t (values sg (simple-array op)))
(defun relaxed-sg (ops relaxer &optional (simplify t))
"Relaxes a SG using a relaxer function.
Returns two values: a relaxed SG and a vector of relaxed ops.
The original SG and operators are not destructively modified.
Operators with no effects are removed from the results and does not belong to the SG.
If SIMPLIFY is non-nil (default), operators which becomes identical are pruned.
Setting this to NIL is useful when you want to keep the original op id.
"
(let ((relaxed-ops (map 'vector relaxer ops)))
(when simplify
(setf relaxed-ops (delete-duplicates relaxed-ops :test #'equalp)))
(values (generate-sg relaxed-ops)
(coerce relaxed-ops
'(simple-array op)))))
;; these definitions should come before solve-common,
;; otherwise with-parsed-information5 does not know it should be treated as a special variable
(defvar *delete-relaxed-sg* nil "Relaxed successor generators.")
(defvar *delete-relaxed-ops* nil "Relaxed operators.")
(defvar *delete-relaxed-op-size* nil "Relaxed operator size.")
(ftype* delete-relax-op op op)
(defun delete-relax-op (op)
(ematch op
((op pre eff)
(let ((relaxed-pre (remove-if #'minusp pre))
(relaxed-eff (delete-relax-effects eff)))
(make-op :pre relaxed-pre
:eff relaxed-eff)))))
(ftype* delete-relax-effects (array effect) (simple-array effect))
(defun delete-relax-effects (effects)
(coerce
(iter (for e in-vector effects)
(ematch e
((effect con eff)
(unless (minusp eff)
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector)))))
'(simple-array effect)))
(defun ensure-delete-relaxed-sg (&optional (simplify t))
(unless (symbol-value '*delete-relaxed-sg*)
(log:info "instantiating delete-relaxed successor generator")
(setf (values *delete-relaxed-sg*
*delete-relaxed-ops*)
(relaxed-sg *instantiated-ops* #'delete-relax-op simplify)
*delete-relaxed-op-size*
(length *delete-relaxed-ops*))
(log:info "~11@a: ~a" "op" (length *instantiated-ops*))
(log:info "~11@a: ~a" "relaxed op" (length *delete-relaxed-ops*))))
;; these functions are not used.
(defvar *random-semi-delete-relaxed-sg* nil "Semi-relaxed successor generators.")
(defvar *random-semi-delete-relaxed-ops* nil "Semi-relaxed operators.")
(defvar *random-semi-delete-relaxed-op-size* nil "Semi-relaxed operator size.")
(defvar *random-semi-relax-ratio* 0.8)
(ftype* random-semi-delete-relax-op op op)
(defun random-semi-delete-relax-op (op)
(ematch op
((op pre eff)
(let ((relaxed-pre (remove-if #'minusp pre))
(relaxed-eff (random-semi-delete-relax-effects eff)))
(make-op :pre relaxed-pre
:eff relaxed-eff)))))
(ftype* random-semi-delete-relax-effects (array effect) (simple-array effect))
(defun random-semi-delete-relax-effects (effects)
(coerce
(iter (for e in-vector effects)
(ematch e
((effect con eff)
(if (minusp eff)
(when (< *random-semi-relax-ratio* (random 1.0))
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector))
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector)))))
'(simple-array effect)))
(defun ensure-random-semi-delete-relaxed-sg ()
(unless (symbol-value '*random-semi-delete-relaxed-sg*)
(log:info "instantiating randomly semi-delete-relaxed successor generator")
(setf (values *random-semi-delete-relaxed-sg*
*random-semi-delete-relaxed-ops*)
(relaxed-sg *instantiated-ops* #'random-semi-delete-relax-op)
*random-semi-delete-relaxed-op-size*
(length *random-semi-delete-relaxed-ops*))
(iter outer
(for op in-vector *instantiated-ops*)
(match op
((op eff)
(iter (for e in-vector eff)
(match e
((effect eff)
(in outer
(counting (minusp eff) into result)))))))
(finally
(log:info "~a deletes (original)" result)))
(iter outer
(for op in-vector *random-semi-delete-relaxed-ops*)
(match op
((op eff)
(iter (for e in-vector eff)
(match e
((effect eff)
(in outer
(counting (minusp eff) into result)))))))
(finally
(log:info "~a deletes (semi-relaxed)" result)))
(log:info "~11@a: ~a" "op" (length *instantiated-ops*))
(log:info "~11@a: ~a" "relaxed op" (length *random-semi-delete-relaxed-ops*))))
(defvar *delete-only-sg* nil "Successor generators which contains delete-effects only.")
(defvar *delete-only-ops* nil "Operators which contains delete-effects only.")
(defvar *delete-only-op-size* nil "delete-only operator size.")
(ftype* delete-only-op op op)
(defun delete-only-op (op)
(ematch op
((op pre eff)
(let ((relaxed-pre (remove-if #'minusp pre))
(relaxed-eff (delete-only-effects eff)))
(make-op :pre relaxed-pre
:eff relaxed-eff)))))
(ftype* delete-only-effects (array effect) (simple-array effect))
(defun delete-only-effects (effects)
(coerce
(iter (for e in-vector effects)
(ematch e
((effect con eff)
(when (minusp eff)
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector)))))
'(simple-array effect)))
(defun ensure-delete-only-sg (&optional (simplify t))
(unless (symbol-value '*delete-only-sg*)
(log:info "instantiating delete-only successor generator")
(setf (values *delete-only-sg*
*delete-only-ops*)
(relaxed-sg *instantiated-ops* #'delete-only-op simplify)
*delete-only-op-size*
(length *delete-only-ops*))
(log:info "~11@a: ~a" "op" (length *instantiated-ops*))
(log:info "~11@a: ~a" "delete-only op" (length *delete-only-ops*))))
| null | https://raw.githubusercontent.com/guicho271828/alien/6987d034426b8928adf67f498dc6cb06fd7f923e/search/relaxation.lisp | lisp | these definitions should come before solve-common,
otherwise with-parsed-information5 does not know it should be treated as a special variable
these functions are not used. |
(in-package :alien)
(deftype relaxer ()
"a class of functions that returns a relaxed version of an operator."
`(function (op) op))
(ftype* relaxed-sg (simple-array op) relaxer &optional t (values sg (simple-array op)))
(defun relaxed-sg (ops relaxer &optional (simplify t))
"Relaxes a SG using a relaxer function.
Returns two values: a relaxed SG and a vector of relaxed ops.
The original SG and operators are not destructively modified.
Operators with no effects are removed from the results and does not belong to the SG.
If SIMPLIFY is non-nil (default), operators which becomes identical are pruned.
Setting this to NIL is useful when you want to keep the original op id.
"
(let ((relaxed-ops (map 'vector relaxer ops)))
(when simplify
(setf relaxed-ops (delete-duplicates relaxed-ops :test #'equalp)))
(values (generate-sg relaxed-ops)
(coerce relaxed-ops
'(simple-array op)))))
(defvar *delete-relaxed-sg* nil "Relaxed successor generators.")
(defvar *delete-relaxed-ops* nil "Relaxed operators.")
(defvar *delete-relaxed-op-size* nil "Relaxed operator size.")
(ftype* delete-relax-op op op)
(defun delete-relax-op (op)
(ematch op
((op pre eff)
(let ((relaxed-pre (remove-if #'minusp pre))
(relaxed-eff (delete-relax-effects eff)))
(make-op :pre relaxed-pre
:eff relaxed-eff)))))
(ftype* delete-relax-effects (array effect) (simple-array effect))
(defun delete-relax-effects (effects)
(coerce
(iter (for e in-vector effects)
(ematch e
((effect con eff)
(unless (minusp eff)
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector)))))
'(simple-array effect)))
(defun ensure-delete-relaxed-sg (&optional (simplify t))
(unless (symbol-value '*delete-relaxed-sg*)
(log:info "instantiating delete-relaxed successor generator")
(setf (values *delete-relaxed-sg*
*delete-relaxed-ops*)
(relaxed-sg *instantiated-ops* #'delete-relax-op simplify)
*delete-relaxed-op-size*
(length *delete-relaxed-ops*))
(log:info "~11@a: ~a" "op" (length *instantiated-ops*))
(log:info "~11@a: ~a" "relaxed op" (length *delete-relaxed-ops*))))
(defvar *random-semi-delete-relaxed-sg* nil "Semi-relaxed successor generators.")
(defvar *random-semi-delete-relaxed-ops* nil "Semi-relaxed operators.")
(defvar *random-semi-delete-relaxed-op-size* nil "Semi-relaxed operator size.")
(defvar *random-semi-relax-ratio* 0.8)
(ftype* random-semi-delete-relax-op op op)
(defun random-semi-delete-relax-op (op)
(ematch op
((op pre eff)
(let ((relaxed-pre (remove-if #'minusp pre))
(relaxed-eff (random-semi-delete-relax-effects eff)))
(make-op :pre relaxed-pre
:eff relaxed-eff)))))
(ftype* random-semi-delete-relax-effects (array effect) (simple-array effect))
(defun random-semi-delete-relax-effects (effects)
(coerce
(iter (for e in-vector effects)
(ematch e
((effect con eff)
(if (minusp eff)
(when (< *random-semi-relax-ratio* (random 1.0))
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector))
(collecting
(make-effect :con (remove-if #'minusp con)
:eff eff)
result-type vector)))))
'(simple-array effect)))
;; Lazily build the randomized semi-delete-relaxed successor generator and
;; operator vector from *INSTANTIATED-OPS*, then log how many delete effects
;; survive the relaxation compared to the original operator set.
;; NOTE(review): unlike ENSURE-DELETE-ONLY-SG (below), RELAXED-SG is called
;; here without a SIMPLIFY argument -- confirm whether that is intentional.
(defun ensure-random-semi-delete-relaxed-sg ()
  (unless (symbol-value '*random-semi-delete-relaxed-sg*)
    (log:info "instantiating randomly semi-delete-relaxed successor generator")
    (setf (values *random-semi-delete-relaxed-sg*
                  *random-semi-delete-relaxed-ops*)
          (relaxed-sg *instantiated-ops* #'random-semi-delete-relax-op)
          *random-semi-delete-relaxed-op-size*
          (length *random-semi-delete-relaxed-ops*))
    ;; Count delete effects in the ORIGINAL operators (nested iterate walks
    ;; every effect of every op; COUNTING accumulates in the outer loop).
    (iter outer
          (for op in-vector *instantiated-ops*)
          (match op
            ((op eff)
             (iter (for e in-vector eff)
                   (match e
                     ((effect eff)
                      (in outer
                          (counting (minusp eff) into result)))))))
          (finally
           (log:info "~a deletes (original)" result)))
    ;; Same count over the SEMI-RELAXED operators, for comparison.
    (iter outer
          (for op in-vector *random-semi-delete-relaxed-ops*)
          (match op
            ((op eff)
             (iter (for e in-vector eff)
                   (match e
                     ((effect eff)
                      (in outer
                          (counting (minusp eff) into result)))))))
          (finally
           (log:info "~a deletes (semi-relaxed)" result)))
    (log:info "~11@a: ~a" "op" (length *instantiated-ops*))
    (log:info "~11@a: ~a" "relaxed op" (length *random-semi-delete-relaxed-ops*))))
;; Globals for the delete-only projection (operators reduced to their delete
;; effects).  Populated lazily by ENSURE-DELETE-ONLY-SG.
(defvar *delete-only-sg* nil "Successor generators which contains delete-effects only.")
(defvar *delete-only-ops* nil "Operators which contains delete-effects only.")
(defvar *delete-only-op-size* nil "delete-only operator size.")
(ftype* delete-only-op op op)
;; Project a ground operator onto its delete effects: delete preconditions are
;; removed and only delete effects are retained (see DELETE-ONLY-EFFECTS).
(defun delete-only-op (op)
  (ematch op
    ((op pre eff)
     (let ((relaxed-pre (remove-if #'minusp pre))
           (relaxed-eff (delete-only-effects eff)))
       (make-op :pre relaxed-pre
                :eff relaxed-eff)))))
(ftype* delete-only-effects (array effect) (simple-array effect))
;; Keep only the delete effects (negative EFF) of EFFECTS, stripping delete
;; literals from each retained effect's condition.  Add effects are dropped
;; (the WHEN collects nothing for them).
(defun delete-only-effects (effects)
  (coerce
   (iter (for e in-vector effects)
         (ematch e
           ((effect con eff)
            (when (minusp eff)
              (collecting
               (make-effect :con (remove-if #'minusp con)
                            :eff eff)
               result-type vector)))))
   '(simple-array effect)))
;; Lazily build the delete-only successor generator and operator vector from
;; *INSTANTIATED-OPS*.  SIMPLIFY is forwarded to RELAXED-SG (default T),
;; matching the signature used by the plain delete-relaxation above.
(defun ensure-delete-only-sg (&optional (simplify t))
  (unless (symbol-value '*delete-only-sg*)
    (log:info "instantiating delete-only successor generator")
    (setf (values *delete-only-sg*
                  *delete-only-ops*)
          (relaxed-sg *instantiated-ops* #'delete-only-op simplify)
          *delete-only-op-size*
          (length *delete-only-ops*))
    (log:info "~11@a: ~a" "op" (length *instantiated-ops*))
    (log:info "~11@a: ~a" "delete-only op" (length *delete-only-ops*))))
|
377a065fd25ef40814e6723c85ad42c5e523cdb9423464e62ac3b08e3754aff6 | rabbitmq/rabbit-stress | rabbit_http_stress.erl | -module(rabbit_http_stress).
-export([main/1]).

%% getopt option spec: {Key, ShortFlag, LongName, ArgSpec, HelpText}.
%% Note: the 'host' option's long flag is "url" even though its key is 'host',
%% and 'node' has no default -- proplists:get_value/2 returns undefined if
%% it is not supplied.
-define(OPTIONS, [
    {host, $H, "url", {string, "localhost"}, "HTTP host to connect to."},
    {port, $P, "port", {integer, 15672}, "Port to connect to."},
    {total_requests, $r, "total_requests", {integer, 1000}, "Total number of requests for each target"},
    {parallel, $p, "parallel", {integer, 5}, "Number of requests for each target to run in parallel"},
    {report_memory, $M, "report_memory", {boolean, false}, "Report memory breakdown during run"},
    {node, $n, "node", atom, "Node name of tested broker node. Required to report memory"},
    {self_node_name, $N, "self_node_name", {atom, http_test}, "Name of the test node."}
]).
%% Escript entry point.  With "-h", print the usage banner; otherwise parse
%% the command line with getopt and run the stress test.
main(["-h"]) ->
    getopt:usage(?OPTIONS, "rabbit_http_stress");
main(Args) ->
    case getopt:parse(?OPTIONS, Args) of
        {ok, {Options, []}} ->
            run_test(Options);
        {ok, {_, Invalid}} ->
            %% Trailing arguments that getopt could not attribute to an option.
            io:format("Invalid options ~p~n"
                      "Run 'rabbit_http_stress -h' to see available options~n",
                      [Invalid]);
        {error, {Reason, Data}} ->
            %% getopt:parse/2 returns {error, {Reason, Data}} for malformed
            %% input (e.g. a non-integer port).  Without this clause the
            %% escript died with a case_clause instead of a helpful message.
            io:format("Error: ~p ~p~n"
                      "Run 'rabbit_http_stress -h' to see available options~n",
                      [Reason, Data])
    end.
%% Run the stress test described by the parsed Options proplist.  Starts
%% Erlang distribution under the configured test node name, then either wraps
%% the HTTP measurement in periodic broker-memory reporting or runs it
%% directly.
run_test(Options) ->
    Host = proplists:get_value(host, Options),
    Port = proplists:get_value(port, Options),
    Total = proplists:get_value(total_requests, Options),
    Parallel = proplists:get_value(parallel, Options),
    %% Node is undefined unless -n was given; only needed when ReportMemory.
    Node = proplists:get_value(node, Options),
    ReportMemory = proplists:get_value(report_memory, Options),
    SelfNode = proplists:get_value(self_node_name, Options),
    rabbit_stress:start_distribution(SelfNode),
    TestFun = fun() -> http_api_measure:start_test(Host, Port, Total, Parallel) end,
    case ReportMemory of
        true ->
            %% Sample broker memory every 5000 ms while the test runs.
            rabbit_stress:with_memory(Node, 5000, TestFun);
        false ->
            TestFun()
    end.
| null | https://raw.githubusercontent.com/rabbitmq/rabbit-stress/9655c700c62b6271f9d226924d983c14b95ed0c2/src/rabbit_http_stress.erl | erlang | -module(rabbit_http_stress).
-export([main/1]).

%% getopt option spec: {Key, ShortFlag, LongName, ArgSpec, HelpText}.
-define(OPTIONS, [
    {host, $H, "url", {string, "localhost"}, "HTTP host to connect to."},
    {port, $P, "port", {integer, 15672}, "Port to connect to."},
    {total_requests, $r, "total_requests", {integer, 1000}, "Total number of requests for each target"},
    {parallel, $p, "parallel", {integer, 5}, "Number of requests for each target to run in parallel"},
    {report_memory, $M, "report_memory", {boolean, false}, "Report memory breakdown during run"},
    {node, $n, "node", atom, "Node name of tested broker node. Required to report memory"},
    {self_node_name, $N, "self_node_name", {atom, http_test}, "Name of the test node."}
]).

%% Escript entry point: "-h" prints usage, anything else is parsed and run.
main(["-h"]) ->
    getopt:usage(?OPTIONS, "rabbit_http_stress");
main(Args) ->
    case getopt:parse(?OPTIONS, Args) of
        {ok, {Options, []}} ->
            run_test(Options);
        {ok, {_, Invalid}} ->
            %% NOTE(review): getopt:parse/2 can also return
            %% {error, {Reason, Data}}; that shape is unhandled here and
            %% crashes with case_clause on malformed input.
            io:format("Invalid options ~p~n"
                      "Run 'rabbit_http_stress -h' to see available options~n",
                      [Invalid])
    end.

%% Start distribution and run the HTTP measurement, optionally wrapped in
%% periodic (5000 ms) broker-memory reporting.
run_test(Options) ->
    Host = proplists:get_value(host, Options),
    Port = proplists:get_value(port, Options),
    Total = proplists:get_value(total_requests, Options),
    Parallel = proplists:get_value(parallel, Options),
    Node = proplists:get_value(node, Options),
    ReportMemory = proplists:get_value(report_memory, Options),
    SelfNode = proplists:get_value(self_node_name, Options),
    rabbit_stress:start_distribution(SelfNode),
    TestFun = fun() -> http_api_measure:start_test(Host, Port, Total, Parallel) end,
    case ReportMemory of
        true ->
            rabbit_stress:with_memory(Node, 5000, TestFun);
        false ->
            TestFun()
    end.
| |
f38b5fef86bf8cb00873a4405e41babbdbe9a37bf9dead234674afaa3f9dd06f | fluree/db | event_bus.cljc | (ns fluree.db.event-bus
(:require #?(:clj [clojure.core.async :as async]
:cljs [cljs.core.async :as async])
[fluree.db.util.core :as util]))
#?(:clj (set! *warn-on-reflection* true))
;; handles a pub/sub mechanism for pushing out different events to external query peers or internal listeners
;; all events keyed by network + db
;; Shape: {subject {channel subscribed-at-millis}}, where subject is either a
;; db-ident [network ledger-id] or an event type (see subscribe-db /
;; subscribe-event below).
(def sub-state (atom {}))

;; Drops every subscription (used to reset the bus wholesale).
(defn reset-sub
  []
  (reset! sub-state {}))
(defn publish
  "Pushes the event tuple [event-type dbv data] onto every channel subscribed
  either to the db identity dbv or to the event type. dbv must be a db-ident
  of the form [network ledger-id]. Channels that are closed (async/put!
  returns false) are pruned from sub-state as a side effect."
  [event-type dbv data]
  (let [db-subs (keys (get @sub-state dbv))
        evt-subs (keys (get @sub-state event-type))]
    ;; Deliver to db-ident subscribers; drop any closed channel.
    (doseq [sub db-subs]
      (when-not (async/put! sub [event-type dbv data])
        (swap! sub-state update dbv dissoc sub)))
    ;; Deliver to event-type subscribers; same pruning on closed channels.
    (doseq [sub evt-subs]
      (when-not (async/put! sub [event-type dbv data])
        (swap! sub-state update event-type dissoc sub)))))
(defn subscribe-db
  "Subscribes to all events for a specific db-ident"
  [dbv c]
  ;; Records channel c under dbv together with the subscription time (ms).
  (swap! sub-state assoc-in [dbv c] (util/current-time-millis))
  nil)

(defn unsubscribe-db
  "Unsubscribes channel from db."
  [dbv c]
  (swap! sub-state update dbv dissoc c)
  nil)
(defn subscribe-event
  "Subscribes to all events of a specified event type"
  [event-type c]
  ;; Same bookkeeping as subscribe-db, keyed by event type instead of db-ident.
  (swap! sub-state assoc-in [event-type c] (util/current-time-millis))
  nil)
(defn unsubscribe-event
  "Unsubscribes channel from event updates. Returns nil."
  [event-type c]
  (swap! sub-state update event-type dissoc c)
  ;; Return nil like subscribe-db/unsubscribe-db/subscribe-event instead of
  ;; leaking the whole internal sub-state map to callers.
  nil)
| null | https://raw.githubusercontent.com/fluree/db/da8a0f5cf1dc0919d03b1c1e81e89354942d10ae/src/fluree/db/event_bus.cljc | clojure | handles a pub/sub mechanism for pushing out different events to external query peers or internal listeners
all events keyed by network + db | (ns fluree.db.event-bus
(:require #?(:clj [clojure.core.async :as async]
:cljs [cljs.core.async :as async])
[fluree.db.util.core :as util]))
#?(:clj (set! *warn-on-reflection* true))
;; Pub/sub registry: {subject {channel subscribed-at-millis}}, where subject
;; is a db-ident [network ledger-id] or an event type.
(def sub-state (atom {}))

;; Drops every subscription.
(defn reset-sub
  []
  (reset! sub-state {}))

(defn publish
  "'subject' currently only supports db-ident and
  must be in the form of [network ledger-id]"
  [event-type dbv data]
  (let [db-subs (keys (get @sub-state dbv))
        evt-subs (keys (get @sub-state event-type))]
    ;; Deliver to db-ident subscribers; a false put! means the channel is
    ;; closed, so it is pruned from sub-state.
    (doseq [sub db-subs]
      (when-not (async/put! sub [event-type dbv data])
        (swap! sub-state update dbv dissoc sub)))
    ;; Same delivery/pruning for event-type subscribers.
    (doseq [sub evt-subs]
      (when-not (async/put! sub [event-type dbv data])
        (swap! sub-state update event-type dissoc sub)))))

(defn subscribe-db
  "Subscribes to all events for a specific db-ident"
  [dbv c]
  (swap! sub-state assoc-in [dbv c] (util/current-time-millis))
  nil)

(defn unsubscribe-db
  "Unsubscribes channel from db."
  [dbv c]
  (swap! sub-state update dbv dissoc c)
  nil)

(defn subscribe-event
  "Subscribes to all events of a specified event type"
  [event-type c]
  (swap! sub-state assoc-in [event-type c] (util/current-time-millis))
  nil)

(defn unsubscribe-event
  "Unsubscribes channel from event updates."
  [event-type c]
  ;; NOTE(review): returns the swapped map, unlike the other fns which
  ;; return nil -- confirm no caller relies on this.
  (swap! sub-state update event-type dissoc c))
|
ccbcc09ba23740fcce5b3425ef5e75dd84d64f048c0f8f912bc349d4ed1ff00b | 15Galan/asignatura-204 | PriorityQueueDemo.hs | -------------------------------------------------------------------------------
-- Simple client module using a Priority Queue
--
Data Structures . en Informática . UMA .
, 2012
-------------------------------------------------------------------------------
module Demos.PriorityQueue.PriorityQueueDemo where
import DataStructures.PriorityQueue.MaxiphobicHeapPriorityQueue
import . Heap . WBLHPriorityQueue
import . PriorityQueue . LinearPriorityQueue
import DataStructures.PriorityQueue.PriorityQueueAxioms(priorityQueueAxioms)
-- | Example queue built from the elements 1, 3 and 2 (enqueued in that
-- order, since foldr applies from the right).
q1 :: PQueue Int
q1 = foldr enqueue empty [2, 3, 1]
-- | Number of elements in the queue, computed by dequeuing until empty.
size :: (Ord a) => PQueue a -> Int
size s = if isEmpty s then 0 else 1 + size (dequeue s)
| null | https://raw.githubusercontent.com/15Galan/asignatura-204/894f33ff8e0f52a75d8f9ff15155c656f1a8f771/Pr%C3%A1cticas/Demos/PriorityQueue/PriorityQueueDemo.hs | haskell | -----------------------------------------------------------------------------
Simple client module using a Priority Queue
----------------------------------------------------------------------------- | Data Structures . en Informática . UMA .
, 2012
module Demos.PriorityQueue.PriorityQueueDemo where
import DataStructures.PriorityQueue.MaxiphobicHeapPriorityQueue
import . Heap . WBLHPriorityQueue
import . PriorityQueue . LinearPriorityQueue
import DataStructures.PriorityQueue.PriorityQueueAxioms(priorityQueueAxioms)
-- | Example queue: 1, 3 and 2 enqueued in that order.
q1 :: PQueue Int
q1 = enqueue 2 (enqueue 3 (enqueue 1 empty))

-- | Number of elements, counted by repeatedly dequeuing (O(n) dequeues).
size :: (Ord a) => PQueue a -> Int
size s
  | isEmpty s = 0
  | otherwise = 1 + size (dequeue s)
|
f5a3aee7c2b83797c39836843ac46c1ccdf8ad60e4000137cb2f923a54d99923 | input-output-hk/ouroboros-network | NoHardForks.hs | module Ouroboros.Consensus.HardFork.Combinator.Abstract.NoHardForks (
NoHardForks (..)
, noHardForksEpochInfo
) where
import Data.Functor.Identity (runIdentity)
import Cardano.Slotting.EpochInfo
import Ouroboros.Consensus.Config
import Ouroboros.Consensus.HardFork.History as History
import Ouroboros.Consensus.Ledger.Abstract
import Ouroboros.Consensus.HardFork.Combinator.Abstract.SingleEraBlock
import Ouroboros.Consensus.HardFork.Combinator.PartialConfig
{-------------------------------------------------------------------------------
Blocks that don't /have/ any transitions
-------------------------------------------------------------------------------}
-- | Class of blocks that don't /have/ any era transitions: a single,
-- fixed era for the whole chain.
class SingleEraBlock blk => NoHardForks blk where
  -- | Extract 'EraParams' from the top-level config
  --
  -- The HFC itself does not care about this, as it must be given the full
  -- shape across eras.
  getEraParams :: TopLevelConfig blk -> EraParams

  -- | Construct partial ledger config from full ledger config
  --
  -- See also 'toPartialConsensusConfig'
  toPartialLedgerConfig :: proxy blk
                        -> LedgerConfig blk -> PartialLedgerConfig blk
-- | Build an 'EpochInfo' for a block type without hard forks: the era
-- parameters are fixed, so a pure 'fixedEpochInfo' suffices; we only lift
-- its 'Identity' results into the caller's monad.
noHardForksEpochInfo :: (Monad m, NoHardForks blk)
                     => TopLevelConfig blk
                     -> EpochInfo m
noHardForksEpochInfo cfg =
    let params = getEraParams cfg
        fixed  = fixedEpochInfo (History.eraEpochSize params)
                                (History.eraSlotLength params)
    in hoistEpochInfo (pure . runIdentity) fixed
Blocks that don't /have/ any transitions
------------------------------------------------------------------------------
| Extract 'EraParams' from the top-level config
| Construct partial ledger config from full ledger config
See also 'toPartialConsensusConfig' | module Ouroboros.Consensus.HardFork.Combinator.Abstract.NoHardForks (
NoHardForks (..)
, noHardForksEpochInfo
) where
import Data.Functor.Identity (runIdentity)
import Cardano.Slotting.EpochInfo
import Ouroboros.Consensus.Config
import Ouroboros.Consensus.HardFork.History as History
import Ouroboros.Consensus.Ledger.Abstract
import Ouroboros.Consensus.HardFork.Combinator.Abstract.SingleEraBlock
import Ouroboros.Consensus.HardFork.Combinator.PartialConfig
class SingleEraBlock blk => NoHardForks blk where
The HFC itself does not care about this , as it must be given the full shape
across eras .
getEraParams :: TopLevelConfig blk -> EraParams
toPartialLedgerConfig :: proxy blk
-> LedgerConfig blk -> PartialLedgerConfig blk
noHardForksEpochInfo :: (Monad m, NoHardForks blk)
=> TopLevelConfig blk
-> EpochInfo m
noHardForksEpochInfo cfg =
hoistEpochInfo (pure . runIdentity)
$ fixedEpochInfo
(History.eraEpochSize params)
(History.eraSlotLength params)
where
params :: EraParams
params = getEraParams cfg
|
0a573f2742c783bd963977c017274dc3b19541ffea17e33e474c74ea4e65c567 | input-output-hk/project-icarus-importer | Types.hs | -- | Delegation-related local types.
module Pos.Delegation.Types
( DlgPayload (..)
, DlgUndo (..)
, DlgMemPool
, ProxySKBlockInfo
, module Pos.Core.Delegation
, isRevokePsk
, DlgBlock
, DlgBlund
) where
import Universum
import qualified Data.Text.Buildable as Buildable
import Formatting (bprint, (%))
import Serokell.Util.Text (listJson)
import Pos.Core (ComponentBlock (..), ProxySKHeavy, StakeholderId)
import Pos.Core.Delegation (DlgPayload (..), checkDlgPayload)
import Pos.Crypto (ProxySecretKey, PublicKey, isSelfSignedPsk)
-- | Undo for the delegation component.
data DlgUndo = DlgUndo
    { duPsks :: ![ProxySKHeavy]
      -- ^ PSKs we've modified when applying the block (by deleting or
      -- overwriting). There should be no duplicates, every psk must
      -- have a unique issuer.
    , duPrevEpochPosted :: !(HashSet StakeholderId)
      -- ^ Set of stakeholders that posted in epoch i. This field
      -- should be present only for genesis block of epoch i+1.
    } deriving (Eq, Show, Generic)

instance NFData DlgUndo

-- | Human-readable rendering of the undo data (psk list + poster set).
instance Buildable DlgUndo where
    build DlgUndo{..} =
        bprint ("DlgUndo:"%
                "\n duPsks: "%listJson%
                "\n duPrevEpochPosted: "%listJson)
        duPsks duPrevEpochPosted

-- | Map from issuer public keys to related heavy certs.
type DlgMemPool = HashMap PublicKey ProxySKHeavy

-- | Heavyweight PSK with real leader public key (because heavyweight
-- psks have redelegation feature, so pskIssuerPk hPsk /= leader in
-- general case). This is used to create a block header only.
type ProxySKBlockInfo = Maybe (ProxySKHeavy, PublicKey)

-- | Checks if given psk revokes delegation (issuer == delegate).
isRevokePsk :: ProxySecretKey w -> Bool
isRevokePsk = isSelfSignedPsk

----------------------------------------------------------------------------
-- Block/blund aliases specialized to the delegation payload.
----------------------------------------------------------------------------

type DlgBlock = ComponentBlock DlgPayload
type DlgBlund = (DlgBlock, DlgUndo)
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/delegation/src/Pos/Delegation/Types.hs | haskell | | Delegation-related local types.
| Undo for the delegation component.
have a unique issuer.
^ Set of stakeholders that posted in epoch i. This field
should be present only for genesis block of epoch i+1.
| Map from issuer public keys to related heavy certs.
| Heavyweight PSK with real leader public key (because heavyweight
psks have redelegation feature, so pskIssuerPk hPsk /= leader in
general case). This is used to create a block header only.
--------------------------------------------------------------------------
-------------------------------------------------------------------------- |
module Pos.Delegation.Types
( DlgPayload (..)
, DlgUndo (..)
, DlgMemPool
, ProxySKBlockInfo
, module Pos.Core.Delegation
, isRevokePsk
, DlgBlock
, DlgBlund
) where
import Universum
import qualified Data.Text.Buildable as Buildable
import Formatting (bprint, (%))
import Serokell.Util.Text (listJson)
import Pos.Core (ComponentBlock (..), ProxySKHeavy, StakeholderId)
import Pos.Core.Delegation (DlgPayload (..), checkDlgPayload)
import Pos.Crypto (ProxySecretKey, PublicKey, isSelfSignedPsk)
data DlgUndo = DlgUndo
{ duPsks :: ![ProxySKHeavy]
^ PSKs we 've modified when applying the block ( by deleting or
overwriting ) . There should be no duplicates , every psk must
, duPrevEpochPosted :: !(HashSet StakeholderId)
} deriving (Eq, Show, Generic)
instance NFData DlgUndo
instance Buildable DlgUndo where
build DlgUndo{..} =
bprint ("DlgUndo:"%
"\n duPsks: "%listJson%
"\n duPrevEpochPosted: "%listJson)
duPsks duPrevEpochPosted
type DlgMemPool = HashMap PublicKey ProxySKHeavy
type ProxySKBlockInfo = Maybe (ProxySKHeavy, PublicKey)
| Checks if given revokes delegation ( issuer = = delegate ) .
isRevokePsk :: ProxySecretKey w -> Bool
isRevokePsk = isSelfSignedPsk
type DlgBlock = ComponentBlock DlgPayload
type DlgBlund = (DlgBlock, DlgUndo)
|
b15499c1008cb5fbeeb05f99398416eb78e3a97495265836818c9e20611ee7b6 | CatalaLang/catala | print.ml | This file is part of the Catala compiler , a specification language for tax
and social benefits computation rules . Copyright ( C ) 2020 , contributor :
< >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
and social benefits computation rules. Copyright (C) 2020 Inria, contributor:
Denis Merigoux <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
open Catala_utils
open Definitions
(* [typ_needs_parens ty] tells whether [ty] must be parenthesized when it
   appears as a sub-type: arrows and arrays do, everything else does not. *)
let typ_needs_parens (ty : typ) : bool =
  match Marked.unmark ty with
  | TArrow _ -> true
  | TArray _ -> true
  | _ -> false
(* Prints a qualified identifier, separating components with '.' and coloring
   uppercase-starting components (modules/constructors) in red. *)
let uid_list (fmt : Format.formatter) (infos : Uid.MarkedString.info list) :
    unit =
  Format.pp_print_list
    ~pp_sep:(fun fmt () -> Format.pp_print_char fmt '.')
    (fun fmt info ->
      Cli.format_with_style
        (if String.begins_with_uppercase (Marked.unmark info) then
         [ANSITerminal.red]
        else [])
        fmt
        (Uid.MarkedString.to_string info))
    fmt infos

(* Styled one-shot printers: each prints [s] in a fixed ANSI color. *)
let keyword (fmt : Format.formatter) (s : string) : unit =
  Cli.format_with_style [ANSITerminal.red] fmt s

let base_type (fmt : Format.formatter) (s : string) : unit =
  Cli.format_with_style [ANSITerminal.yellow] fmt s

let punctuation (fmt : Format.formatter) (s : string) : unit =
  Cli.format_with_style [ANSITerminal.cyan] fmt s

let op_style (fmt : Format.formatter) (s : string) : unit =
  Cli.format_with_style [ANSITerminal.green] fmt s

let lit_style (fmt : Format.formatter) (s : string) : unit =
  Cli.format_with_style [ANSITerminal.yellow] fmt s

(* Prints the surface-language keyword for a literal type. *)
let tlit (fmt : Format.formatter) (l : typ_lit) : unit =
  base_type fmt
    (match l with
    | TUnit -> "unit"
    | TBool -> "bool"
    | TInt -> "integer"
    | TRat -> "decimal"
    | TMoney -> "money"
    | TDuration -> "duration"
    | TDate -> "date")

(* Prints a location: scope variables print their own name, subscope
   variables print as "subscope.var", toplevel definitions print their name. *)
let location (type a) (fmt : Format.formatter) (l : a glocation) : unit =
  match l with
  | DesugaredScopeVar (v, _st) -> ScopeVar.format_t fmt (Marked.unmark v)
  | ScopelangScopeVar v -> ScopeVar.format_t fmt (Marked.unmark v)
  | SubScopeVar (_, subindex, subvar) ->
    Format.fprintf fmt "%a.%a" SubScopeName.format_t (Marked.unmark subindex)
      ScopeVar.format_t (Marked.unmark subvar)
  | ToplevelVar v -> TopdefName.format_t fmt (Marked.unmark v)

(* Enum constructors are printed in magenta. *)
let enum_constructor (fmt : Format.formatter) (c : EnumConstructor.t) : unit =
  Cli.format_with_style [ANSITerminal.magenta] fmt
    (Format.asprintf "%a" EnumConstructor.format_t c)
(* Pretty-prints a type.  When [ctx] is provided, struct and enum types are
   expanded to show their fields/constructors; when it is [None] only the
   type name is printed. *)
let rec typ (ctx : decl_ctx option) (fmt : Format.formatter) (ty : typ) : unit =
  let typ = typ ctx in
  (* Parenthesize sub-types that would otherwise be ambiguous (arrows, arrays). *)
  let typ_with_parens (fmt : Format.formatter) (t : typ) =
    if typ_needs_parens t then Format.fprintf fmt "(%a)" typ t else typ fmt t
  in
  match Marked.unmark ty with
  | TLit l -> tlit fmt l
  | TTuple ts ->
    Format.fprintf fmt "@[<hov 2>(%a)@]"
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "@ %a@ " op_style "*")
         typ)
      ts
  | TStruct s -> (
    match ctx with
    | None -> Format.fprintf fmt "@[<hov 2>%a@]" StructName.format_t s
    | Some ctx ->
      (* With a context, print the struct name followed by its full field list. *)
      Format.fprintf fmt "@[<hov 2>%a@ %a%a%a@]" StructName.format_t s
        punctuation "{"
        (Format.pp_print_list
           ~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
           (fun fmt (field, mty) ->
             Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\""
               StructField.format_t field punctuation "\"" punctuation ":" typ
               mty))
        (StructField.Map.bindings (StructName.Map.find s ctx.ctx_structs))
        punctuation "}")
  | TEnum e -> (
    match ctx with
    | None -> Format.fprintf fmt "@[<hov 2>%a@]" EnumName.format_t e
    | Some ctx ->
      (* With a context, print the enum name followed by its constructors. *)
      Format.fprintf fmt "@[<hov 2>%a%a%a%a@]" EnumName.format_t e punctuation
        "["
        (Format.pp_print_list
           ~pp_sep:(fun fmt () -> Format.fprintf fmt "@ %a@ " punctuation "|")
           (fun fmt (case, mty) ->
             Format.fprintf fmt "%a%a@ %a" enum_constructor case punctuation ":"
               typ mty))
        (EnumConstructor.Map.bindings (EnumName.Map.find e ctx.ctx_enums))
        punctuation "]")
  | TOption t -> Format.fprintf fmt "@[<hov 2>%a@ %a@]" base_type "option" typ t
  (* Single-argument arrows print without parentheses around the domain list. *)
  | TArrow ([t1], t2) ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" typ_with_parens t1 op_style "→"
      typ t2
  | TArrow (t1, t2) ->
    Format.fprintf fmt "@[<hov 2>%a%a%a@ %a@ %a@]" op_style "("
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " op_style ",")
         typ_with_parens)
      t1 op_style ")" op_style "→" typ t2
  | TArray t1 ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@]" base_type "collection" typ t1
  | TAny -> base_type fmt "any"
(* Prints a literal value.  Money formatting depends on the configured
   locale ($ prefix for En, € / PLN suffixes for Fr / Pl); decimals are
   truncated to [Cli.max_prec_digits]. *)
let lit (type a) (fmt : Format.formatter) (l : a glit) : unit =
  match l with
  | LBool b -> lit_style fmt (string_of_bool b)
  | LInt i -> lit_style fmt (Runtime.integer_to_string i)
  | LEmptyError -> lit_style fmt "∅ "
  | LUnit -> lit_style fmt "()"
  | LRat i ->
    lit_style fmt
      (Runtime.decimal_to_string ~max_prec_digits:!Cli.max_prec_digits i)
  | LMoney e -> (
    match !Cli.locale_lang with
    | En -> lit_style fmt (Format.asprintf "$%s" (Runtime.money_to_string e))
    | Fr -> lit_style fmt (Format.asprintf "%s €" (Runtime.money_to_string e))
    | Pl -> lit_style fmt (Format.asprintf "%s PLN" (Runtime.money_to_string e))
    )
  | LDate d -> lit_style fmt (Runtime.date_to_string d)
  | LDuration d -> lit_style fmt (Runtime.duration_to_string d)

(* Prints the two-column symbol associated with a log entry kind. *)
let log_entry (fmt : Format.formatter) (entry : log_entry) : unit =
  Format.fprintf fmt "@<2>%a"
    (fun fmt -> function
      | VarDef _ -> Cli.format_with_style [ANSITerminal.blue] fmt "≔ "
      | BeginCall -> Cli.format_with_style [ANSITerminal.yellow] fmt "→ "
      | EndCall -> Cli.format_with_style [ANSITerminal.yellow] fmt "← "
      | PosRecordIfTrueBool ->
        Cli.format_with_style [ANSITerminal.green] fmt "☛ ")
    entry
(* ASCII name/symbol for each operator.  The suffix character encodes the
   operand type of the specialised variants: ! int, . rat, $ money, @ date,
   ^ duration (as the constructor names below demonstrate). *)
let operator_to_string : type a k. (a, k) Op.t -> string = function
  | Not -> "~"
  | Length -> "length"
  | GetDay -> "get_day"
  | GetMonth -> "get_month"
  | GetYear -> "get_year"
  | FirstDayOfMonth -> "first_day_of_month"
  | LastDayOfMonth -> "last_day_of_month"
  | ToRat -> "to_rat"
  | ToRat_int -> "to_rat_int"
  | ToRat_mon -> "to_rat_mon"
  | ToMoney -> "to_mon"
  | ToMoney_rat -> "to_mon_rat"
  | Round -> "round"
  | Round_rat -> "round_rat"
  | Round_mon -> "round_mon"
  | Log _ -> "Log"
  | Minus -> "-"
  | Minus_int -> "-!"
  | Minus_rat -> "-."
  | Minus_mon -> "-$"
  | Minus_dur -> "-^"
  | And -> "&&"
  | Or -> "||"
  | Xor -> "xor"
  | Eq -> "="
  | Map -> "map"
  | Reduce -> "reduce"
  | Concat -> "++"
  | Filter -> "filter"
  | Add -> "+"
  | Add_int_int -> "+!"
  | Add_rat_rat -> "+."
  | Add_mon_mon -> "+$"
  | Add_dat_dur -> "+@"
  | Add_dur_dur -> "+^"
  | Sub -> "-"
  | Sub_int_int -> "-!"
  | Sub_rat_rat -> "-."
  | Sub_mon_mon -> "-$"
  | Sub_dat_dat -> "-@"
  | Sub_dat_dur -> "-@^"
  | Sub_dur_dur -> "-^"
  | Mult -> "*"
  | Mult_int_int -> "*!"
  | Mult_rat_rat -> "*."
  | Mult_mon_rat -> "*$"
  | Mult_dur_int -> "*^"
  | Div -> "/"
  | Div_int_int -> "/!"
  | Div_rat_rat -> "/."
  | Div_mon_mon -> "/$"
  | Div_mon_rat -> "/$."
  | Lt -> "<"
  | Lt_int_int -> "<!"
  | Lt_rat_rat -> "<."
  | Lt_mon_mon -> "<$"
  | Lt_dur_dur -> "<^"
  | Lt_dat_dat -> "<@"
  | Lte -> "<="
  | Lte_int_int -> "<=!"
  | Lte_rat_rat -> "<=."
  | Lte_mon_mon -> "<=$"
  | Lte_dur_dur -> "<=^"
  | Lte_dat_dat -> "<=@"
  | Gt -> ">"
  | Gt_int_int -> ">!"
  | Gt_rat_rat -> ">."
  | Gt_mon_mon -> ">$"
  | Gt_dur_dur -> ">^"
  | Gt_dat_dat -> ">@"
  | Gte -> ">="
  | Gte_int_int -> ">=!"
  | Gte_rat_rat -> ">=."
  | Gte_mon_mon -> ">=$"
  | Gte_dur_dur -> ">=^"
  | Gte_dat_dat -> ">=@"
  | Eq_int_int -> "=!"
  | Eq_rat_rat -> "=."
  | Eq_mon_mon -> "=$"
  | Eq_dur_dur -> "=^"
  | Eq_dat_dat -> "=@"
  | Fold -> "fold"

(* Prints an operator; Log operators additionally show the entry kind and the
   dot-separated qualified name being logged. *)
let operator (type k) (fmt : Format.formatter) (op : ('a, k) operator) : unit =
  match op with
  | Log (entry, infos) ->
    Format.fprintf fmt "%a@[<hov 2>[%a|%a]@]" op_style "log" log_entry entry
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt ".")
         (fun fmt info -> Uid.MarkedString.format fmt info))
      infos
  | op -> Format.fprintf fmt "%a" op_style (operator_to_string op)
(* Prints the name of a runtime exception, in operator style. *)
let except (fmt : Format.formatter) (exn : except) : unit =
  let name =
    match exn with
    | EmptyError -> "EmptyError"
    | ConflictError -> "ConflictError"
    | Crash -> "Crash"
    | NoValueProvided -> "NoValueProvided"
  in
  op_style fmt name
(* Variable printers: the debug variant appends Bindlib's uid to
   disambiguate same-named variables. *)
let var_debug fmt v =
  Format.fprintf fmt "%s_%d" (Bindlib.name_of v) (Bindlib.uid_of v)

let var fmt v = Format.pp_print_string fmt (Bindlib.name_of v)

(* Whether an expression must be parenthesized when printed as a sub-term
   (lambdas and struct literals). *)
let needs_parens (type a) (e : (a, _) gexpr) : bool =
  match Marked.unmark e with EAbs _ | EStruct _ -> true | _ -> false
(* Core expression printer, generic over AST passes ([(a, 't) gexpr]).
   [ctx] is an optional declaration context threaded to [typ] so that types
   inside [let]/lambdas can be expanded; [bnd_ctx] is the Bindlib context
   used to open binders with fresh, non-clashing names; when [debug] is true,
   variables are printed with their Bindlib uid and Log operator applications
   stay visible (they are elided otherwise). *)
let rec expr_aux :
    type a.
    ?debug:bool ->
    decl_ctx option ->
    Bindlib.ctxt ->
    Format.formatter ->
    (a, 't) gexpr ->
    unit =
 fun ?(debug = false) ctx bnd_ctx fmt e ->
  (* [exprb] recurses with an updated binding context; [expr] with the
     current one. *)
  let exprb bnd_ctx e = expr_aux ~debug ctx bnd_ctx e in
  let expr e = exprb bnd_ctx e in
  let var = if debug then var_debug else var in
  let with_parens fmt e =
    if needs_parens e then (
      punctuation fmt "(";
      expr fmt e;
      punctuation fmt ")")
    else expr fmt e
  in
  match Marked.unmark e with
  | EVar v -> var fmt v
  | ETuple es ->
    Format.fprintf fmt "@[<hov 2>%a%a%a@]" punctuation "("
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt ",@ ")
         (fun fmt e -> expr fmt e))
      es punctuation ")"
  | EArray es ->
    Format.fprintf fmt "@[<hov 2>%a%a%a@]" punctuation "["
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt ";@ ")
         (fun fmt e -> expr fmt e))
      es punctuation "]"
  | ETupleAccess { e; index; _ } ->
    expr fmt e;
    punctuation fmt ".";
    Format.pp_print_int fmt index
  | ELit l -> lit fmt l
  (* Immediately-applied lambda: rendered as a chain of "let x : t = arg in"
     bindings followed by the body. *)
  | EApp { f = EAbs { binder; tys }, _; args } ->
    let xs, body, bnd_ctx = Bindlib.unmbind_in bnd_ctx binder in
    let expr = exprb bnd_ctx in
    let xs_tau = List.mapi (fun i tau -> xs.(i), tau) tys in
    let xs_tau_arg = List.map2 (fun (x, tau) arg -> x, tau, arg) xs_tau args in
    Format.fprintf fmt "%a%a"
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "")
         (fun fmt (x, tau, arg) ->
           Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@ %a@ %a@ %a@]@\n" keyword
             "let" var x punctuation ":" (typ ctx) tau punctuation "=" expr arg
             keyword "in"))
      xs_tau_arg expr body
  | EAbs { binder; tys } ->
    (* Lambda: "λ (x : t) … → body", opening the binder for fresh names. *)
    let xs, body, bnd_ctx = Bindlib.unmbind_in bnd_ctx binder in
    let expr = exprb bnd_ctx in
    let xs_tau = List.mapi (fun i tau -> xs.(i), tau) tys in
    Format.fprintf fmt "@[<hov 2>%a @[<hov 2>%a@] %a@ %a@]" punctuation "λ"
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "@ ")
         (fun fmt (x, tau) ->
           Format.fprintf fmt "%a%a%a %a%a" punctuation "(" var x punctuation
             ":" (typ ctx) tau punctuation ")"))
      xs_tau punctuation "→" expr body
  (* Map/Filter applications print prefix; other binary operators infix. *)
  | EApp { f = EOp { op = (Map | Filter) as op; _ }, _; args = [arg1; arg2] } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" operator op with_parens arg1
      with_parens arg2
  | EApp { f = EOp { op; _ }, _; args = [arg1; arg2] } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" with_parens arg1 operator op
      with_parens arg2
  (* Outside debug mode, log wrappers are transparent. *)
  | EApp { f = EOp { op = Log _; _ }, _; args = [arg1] } when not debug ->
    expr fmt arg1
  | EApp { f = EOp { op; _ }, _; args = [arg1] } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@]" operator op with_parens arg1
  | EApp { f; args } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@]" expr f
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "@ ")
         with_parens)
      args
  | EIfThenElse { cond; etrue; efalse } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@ %a@ %a@]" keyword "if" expr
      cond keyword "then" expr etrue keyword "else" expr efalse
  | EOp { op; _ } -> operator fmt op
  (* Default terms: "⟨excepts | just ⊢ cons⟩", omitting the exception list
     when it is empty. *)
  | EDefault { excepts; just; cons } ->
    if List.length excepts = 0 then
      Format.fprintf fmt "@[<hov 2>%a%a@ %a@ %a%a@]" punctuation "⟨" expr just
        punctuation "⊢" expr cons punctuation "⟩"
    else
      Format.fprintf fmt "@[<hov 2>%a%a@ %a@ %a@ %a@ %a%a@]" punctuation "⟨"
        (Format.pp_print_list
           ~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ",")
           expr)
        excepts punctuation "|" expr just punctuation "⊢" expr cons punctuation
        "⟩"
  | EErrorOnEmpty e' ->
    Format.fprintf fmt "%a@ %a" op_style "error_empty" with_parens e'
  | EAssert e' ->
    Format.fprintf fmt "@[<hov 2>%a@ %a%a%a@]" keyword "assert" punctuation "("
      expr e' punctuation ")"
  | ECatch { body; exn; handler } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a ->@ %a@]" keyword "try"
      with_parens body keyword "with" except exn with_parens handler
  | ERaise exn ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@]" keyword "raise" except exn
  | ELocation loc -> location fmt loc
  (* Desugared (name-based) struct access: field name printed quoted. *)
  | EDStructAccess { e; field; _ } ->
    Format.fprintf fmt "%a%a%a%a%a" expr e punctuation "." punctuation "\""
      IdentName.format_t field punctuation "\""
  | EStruct { name; fields } ->
    Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@]" StructName.format_t name
      punctuation "{"
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
         (fun fmt (field_name, field_expr) ->
           Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\""
             StructField.format_t field_name punctuation "\"" punctuation "="
             expr field_expr))
      (StructField.Map.bindings fields)
      punctuation "}"
  | EStructAccess { e; field; _ } ->
    Format.fprintf fmt "%a%a%a%a%a" expr e punctuation "." punctuation "\""
      StructField.format_t field punctuation "\""
  | EInj { e; cons; _ } ->
    Format.fprintf fmt "%a@ %a" EnumConstructor.format_t cons expr e
  | EMatch { e; cases; _ } ->
    Format.fprintf fmt "@[<hov 0>%a@ @[<hov 2>%a@]@ %a@ %a@]" keyword "match"
      expr e keyword "with"
      (Format.pp_print_list
         ~pp_sep:(fun fmt () -> Format.fprintf fmt "@\n")
         (fun fmt (cons_name, case_expr) ->
           Format.fprintf fmt "@[<hov 2>%a %a@ %a@ %a@]" punctuation "|"
             enum_constructor cons_name punctuation "→" expr case_expr))
      (EnumConstructor.Map.bindings cases)
  (* Scope call: "Scope of { "var" = expr; … }", built with explicit boxes. *)
  | EScopeCall { scope; args } ->
    Format.pp_open_hovbox fmt 2;
    ScopeName.format_t fmt scope;
    Format.pp_print_space fmt ();
    keyword fmt "of";
    Format.pp_print_space fmt ();
    Format.pp_open_hvbox fmt 2;
    punctuation fmt "{";
    Format.pp_print_list
      ~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
      (fun fmt (field_name, field_expr) ->
        Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\"" ScopeVar.format_t
          field_name punctuation "\"" punctuation "=" expr field_expr)
      fmt
      (ScopeVar.Map.bindings args);
    Format.pp_close_box fmt ();
    punctuation fmt "}";
    Format.pp_close_box fmt ()
(* Public entry points.  The [_debug] variants print without a declaration
   context (structs/enums are not expanded) and, for expressions, default to
   the uid-annotated variable printer when ~debug:true is passed. *)
let typ_debug = typ None
let typ ctx = typ (Some ctx)
let expr_debug ?debug = expr_aux ?debug None Bindlib.empty_ctxt
let expr ?debug ctx = expr_aux ?debug (Some ctx) Bindlib.empty_ctxt
| null | https://raw.githubusercontent.com/CatalaLang/catala/5bd140ae5fb2a997a578b9cd67a932c4a8733526/compiler/shared_ast/print.ml | ocaml | This file is part of the Catala compiler , a specification language for tax
and social benefits computation rules . Copyright ( C ) 2020 , contributor :
< >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
and social benefits computation rules. Copyright (C) 2020 Inria, contributor:
Denis Merigoux <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
open Catala_utils
open Definitions
let typ_needs_parens (ty : typ) : bool =
match Marked.unmark ty with TArrow _ | TArray _ -> true | _ -> false
let uid_list (fmt : Format.formatter) (infos : Uid.MarkedString.info list) :
unit =
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.pp_print_char fmt '.')
(fun fmt info ->
Cli.format_with_style
(if String.begins_with_uppercase (Marked.unmark info) then
[ANSITerminal.red]
else [])
fmt
(Uid.MarkedString.to_string info))
fmt infos
let keyword (fmt : Format.formatter) (s : string) : unit =
Cli.format_with_style [ANSITerminal.red] fmt s
let base_type (fmt : Format.formatter) (s : string) : unit =
Cli.format_with_style [ANSITerminal.yellow] fmt s
let punctuation (fmt : Format.formatter) (s : string) : unit =
Cli.format_with_style [ANSITerminal.cyan] fmt s
let op_style (fmt : Format.formatter) (s : string) : unit =
Cli.format_with_style [ANSITerminal.green] fmt s
let lit_style (fmt : Format.formatter) (s : string) : unit =
Cli.format_with_style [ANSITerminal.yellow] fmt s
let tlit (fmt : Format.formatter) (l : typ_lit) : unit =
base_type fmt
(match l with
| TUnit -> "unit"
| TBool -> "bool"
| TInt -> "integer"
| TRat -> "decimal"
| TMoney -> "money"
| TDuration -> "duration"
| TDate -> "date")
let location (type a) (fmt : Format.formatter) (l : a glocation) : unit =
match l with
| DesugaredScopeVar (v, _st) -> ScopeVar.format_t fmt (Marked.unmark v)
| ScopelangScopeVar v -> ScopeVar.format_t fmt (Marked.unmark v)
| SubScopeVar (_, subindex, subvar) ->
Format.fprintf fmt "%a.%a" SubScopeName.format_t (Marked.unmark subindex)
ScopeVar.format_t (Marked.unmark subvar)
| ToplevelVar v -> TopdefName.format_t fmt (Marked.unmark v)
let enum_constructor (fmt : Format.formatter) (c : EnumConstructor.t) : unit =
Cli.format_with_style [ANSITerminal.magenta] fmt
(Format.asprintf "%a" EnumConstructor.format_t c)
let rec typ (ctx : decl_ctx option) (fmt : Format.formatter) (ty : typ) : unit =
let typ = typ ctx in
let typ_with_parens (fmt : Format.formatter) (t : typ) =
if typ_needs_parens t then Format.fprintf fmt "(%a)" typ t else typ fmt t
in
match Marked.unmark ty with
| TLit l -> tlit fmt l
| TTuple ts ->
Format.fprintf fmt "@[<hov 2>(%a)@]"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "@ %a@ " op_style "*")
typ)
ts
| TStruct s -> (
match ctx with
| None -> Format.fprintf fmt "@[<hov 2>%a@]" StructName.format_t s
| Some ctx ->
Format.fprintf fmt "@[<hov 2>%a@ %a%a%a@]" StructName.format_t s
punctuation "{"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
(fun fmt (field, mty) ->
Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\""
StructField.format_t field punctuation "\"" punctuation ":" typ
mty))
(StructField.Map.bindings (StructName.Map.find s ctx.ctx_structs))
punctuation "}")
| TEnum e -> (
match ctx with
| None -> Format.fprintf fmt "@[<hov 2>%a@]" EnumName.format_t e
| Some ctx ->
Format.fprintf fmt "@[<hov 2>%a%a%a%a@]" EnumName.format_t e punctuation
"["
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "@ %a@ " punctuation "|")
(fun fmt (case, mty) ->
Format.fprintf fmt "%a%a@ %a" enum_constructor case punctuation ":"
typ mty))
(EnumConstructor.Map.bindings (EnumName.Map.find e ctx.ctx_enums))
punctuation "]")
| TOption t -> Format.fprintf fmt "@[<hov 2>%a@ %a@]" base_type "option" typ t
| TArrow ([t1], t2) ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" typ_with_parens t1 op_style "→"
typ t2
| TArrow (t1, t2) ->
Format.fprintf fmt "@[<hov 2>%a%a%a@ %a@ %a@]" op_style "("
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " op_style ",")
typ_with_parens)
t1 op_style ")" op_style "→" typ t2
| TArray t1 ->
Format.fprintf fmt "@[<hov 2>%a@ %a@]" base_type "collection" typ t1
| TAny -> base_type fmt "any"
let lit (type a) (fmt : Format.formatter) (l : a glit) : unit =
match l with
| LBool b -> lit_style fmt (string_of_bool b)
| LInt i -> lit_style fmt (Runtime.integer_to_string i)
| LEmptyError -> lit_style fmt "∅ "
| LUnit -> lit_style fmt "()"
| LRat i ->
lit_style fmt
(Runtime.decimal_to_string ~max_prec_digits:!Cli.max_prec_digits i)
| LMoney e -> (
match !Cli.locale_lang with
| En -> lit_style fmt (Format.asprintf "$%s" (Runtime.money_to_string e))
| Fr -> lit_style fmt (Format.asprintf "%s €" (Runtime.money_to_string e))
| Pl -> lit_style fmt (Format.asprintf "%s PLN" (Runtime.money_to_string e))
)
| LDate d -> lit_style fmt (Runtime.date_to_string d)
| LDuration d -> lit_style fmt (Runtime.duration_to_string d)
let log_entry (fmt : Format.formatter) (entry : log_entry) : unit =
Format.fprintf fmt "@<2>%a"
(fun fmt -> function
| VarDef _ -> Cli.format_with_style [ANSITerminal.blue] fmt "≔ "
| BeginCall -> Cli.format_with_style [ANSITerminal.yellow] fmt "→ "
| EndCall -> Cli.format_with_style [ANSITerminal.yellow] fmt "← "
| PosRecordIfTrueBool ->
Cli.format_with_style [ANSITerminal.green] fmt "☛ ")
entry
let operator_to_string : type a k. (a, k) Op.t -> string = function
| Not -> "~"
| Length -> "length"
| GetDay -> "get_day"
| GetMonth -> "get_month"
| GetYear -> "get_year"
| FirstDayOfMonth -> "first_day_of_month"
| LastDayOfMonth -> "last_day_of_month"
| ToRat -> "to_rat"
| ToRat_int -> "to_rat_int"
| ToRat_mon -> "to_rat_mon"
| ToMoney -> "to_mon"
| ToMoney_rat -> "to_mon_rat"
| Round -> "round"
| Round_rat -> "round_rat"
| Round_mon -> "round_mon"
| Log _ -> "Log"
| Minus -> "-"
| Minus_int -> "-!"
| Minus_rat -> "-."
| Minus_mon -> "-$"
| Minus_dur -> "-^"
| And -> "&&"
| Or -> "||"
| Xor -> "xor"
| Eq -> "="
| Map -> "map"
| Reduce -> "reduce"
| Concat -> "++"
| Filter -> "filter"
| Add -> "+"
| Add_int_int -> "+!"
| Add_rat_rat -> "+."
| Add_mon_mon -> "+$"
| Add_dat_dur -> "+@"
| Add_dur_dur -> "+^"
| Sub -> "-"
| Sub_int_int -> "-!"
| Sub_rat_rat -> "-."
| Sub_mon_mon -> "-$"
| Sub_dat_dat -> "-@"
| Sub_dat_dur -> "-@^"
| Sub_dur_dur -> "-^"
| Mult -> "*"
| Mult_int_int -> "*!"
| Mult_rat_rat -> "*."
| Mult_mon_rat -> "*$"
| Mult_dur_int -> "*^"
| Div -> "/"
| Div_int_int -> "/!"
| Div_rat_rat -> "/."
| Div_mon_mon -> "/$"
| Div_mon_rat -> "/$."
| Lt -> "<"
| Lt_int_int -> "<!"
| Lt_rat_rat -> "<."
| Lt_mon_mon -> "<$"
| Lt_dur_dur -> "<^"
| Lt_dat_dat -> "<@"
| Lte -> "<="
| Lte_int_int -> "<=!"
| Lte_rat_rat -> "<=."
| Lte_mon_mon -> "<=$"
| Lte_dur_dur -> "<=^"
| Lte_dat_dat -> "<=@"
| Gt -> ">"
| Gt_int_int -> ">!"
| Gt_rat_rat -> ">."
| Gt_mon_mon -> ">$"
| Gt_dur_dur -> ">^"
| Gt_dat_dat -> ">@"
| Gte -> ">="
| Gte_int_int -> ">=!"
| Gte_rat_rat -> ">=."
| Gte_mon_mon -> ">=$"
| Gte_dur_dur -> ">=^"
| Gte_dat_dat -> ">=@"
| Eq_int_int -> "=!"
| Eq_rat_rat -> "=."
| Eq_mon_mon -> "=$"
| Eq_dur_dur -> "=^"
| Eq_dat_dat -> "=@"
| Fold -> "fold"
let operator (type k) (fmt : Format.formatter) (op : ('a, k) operator) : unit =
match op with
| Log (entry, infos) ->
Format.fprintf fmt "%a@[<hov 2>[%a|%a]@]" op_style "log" log_entry entry
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt ".")
(fun fmt info -> Uid.MarkedString.format fmt info))
infos
| op -> Format.fprintf fmt "%a" op_style (operator_to_string op)
let except (fmt : Format.formatter) (exn : except) : unit =
op_style fmt
(match exn with
| EmptyError -> "EmptyError"
| ConflictError -> "ConflictError"
| Crash -> "Crash"
| NoValueProvided -> "NoValueProvided")
let var_debug fmt v =
Format.fprintf fmt "%s_%d" (Bindlib.name_of v) (Bindlib.uid_of v)
let var fmt v = Format.pp_print_string fmt (Bindlib.name_of v)
let needs_parens (type a) (e : (a, _) gexpr) : bool =
match Marked.unmark e with EAbs _ | EStruct _ -> true | _ -> false
let rec expr_aux :
type a.
?debug:bool ->
decl_ctx option ->
Bindlib.ctxt ->
Format.formatter ->
(a, 't) gexpr ->
unit =
fun ?(debug = false) ctx bnd_ctx fmt e ->
let exprb bnd_ctx e = expr_aux ~debug ctx bnd_ctx e in
let expr e = exprb bnd_ctx e in
let var = if debug then var_debug else var in
let with_parens fmt e =
if needs_parens e then (
punctuation fmt "(";
expr fmt e;
punctuation fmt ")")
else expr fmt e
in
match Marked.unmark e with
| EVar v -> var fmt v
| ETuple es ->
Format.fprintf fmt "@[<hov 2>%a%a%a@]" punctuation "("
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt ",@ ")
(fun fmt e -> expr fmt e))
es punctuation ")"
| EArray es ->
Format.fprintf fmt "@[<hov 2>%a%a%a@]" punctuation "["
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt ";@ ")
(fun fmt e -> expr fmt e))
es punctuation "]"
| ETupleAccess { e; index; _ } ->
expr fmt e;
punctuation fmt ".";
Format.pp_print_int fmt index
| ELit l -> lit fmt l
| EApp { f = EAbs { binder; tys }, _; args } ->
let xs, body, bnd_ctx = Bindlib.unmbind_in bnd_ctx binder in
let expr = exprb bnd_ctx in
let xs_tau = List.mapi (fun i tau -> xs.(i), tau) tys in
let xs_tau_arg = List.map2 (fun (x, tau) arg -> x, tau, arg) xs_tau args in
Format.fprintf fmt "%a%a"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "")
(fun fmt (x, tau, arg) ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@ %a@ %a@ %a@]@\n" keyword
"let" var x punctuation ":" (typ ctx) tau punctuation "=" expr arg
keyword "in"))
xs_tau_arg expr body
| EAbs { binder; tys } ->
let xs, body, bnd_ctx = Bindlib.unmbind_in bnd_ctx binder in
let expr = exprb bnd_ctx in
let xs_tau = List.mapi (fun i tau -> xs.(i), tau) tys in
Format.fprintf fmt "@[<hov 2>%a @[<hov 2>%a@] %a@ %a@]" punctuation "λ"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "@ ")
(fun fmt (x, tau) ->
Format.fprintf fmt "%a%a%a %a%a" punctuation "(" var x punctuation
":" (typ ctx) tau punctuation ")"))
xs_tau punctuation "→" expr body
| EApp { f = EOp { op = (Map | Filter) as op; _ }, _; args = [arg1; arg2] } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" operator op with_parens arg1
with_parens arg2
| EApp { f = EOp { op; _ }, _; args = [arg1; arg2] } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@]" with_parens arg1 operator op
with_parens arg2
| EApp { f = EOp { op = Log _; _ }, _; args = [arg1] } when not debug ->
expr fmt arg1
| EApp { f = EOp { op; _ }, _; args = [arg1] } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@]" operator op with_parens arg1
| EApp { f; args } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@]" expr f
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "@ ")
with_parens)
args
| EIfThenElse { cond; etrue; efalse } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@ %a@ %a@]" keyword "if" expr
cond keyword "then" expr etrue keyword "else" expr efalse
| EOp { op; _ } -> operator fmt op
| EDefault { excepts; just; cons } ->
if List.length excepts = 0 then
Format.fprintf fmt "@[<hov 2>%a%a@ %a@ %a%a@]" punctuation "⟨" expr just
punctuation "⊢" expr cons punctuation "⟩"
else
Format.fprintf fmt "@[<hov 2>%a%a@ %a@ %a@ %a@ %a%a@]" punctuation "⟨"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ",")
expr)
excepts punctuation "|" expr just punctuation "⊢" expr cons punctuation
"⟩"
| EErrorOnEmpty e' ->
Format.fprintf fmt "%a@ %a" op_style "error_empty" with_parens e'
| EAssert e' ->
Format.fprintf fmt "@[<hov 2>%a@ %a%a%a@]" keyword "assert" punctuation "("
expr e' punctuation ")"
| ECatch { body; exn; handler } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a ->@ %a@]" keyword "try"
with_parens body keyword "with" except exn with_parens handler
| ERaise exn ->
Format.fprintf fmt "@[<hov 2>%a@ %a@]" keyword "raise" except exn
| ELocation loc -> location fmt loc
| EDStructAccess { e; field; _ } ->
Format.fprintf fmt "%a%a%a%a%a" expr e punctuation "." punctuation "\""
IdentName.format_t field punctuation "\""
| EStruct { name; fields } ->
Format.fprintf fmt "@[<hov 2>%a@ %a@ %a@ %a@]" StructName.format_t name
punctuation "{"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
(fun fmt (field_name, field_expr) ->
Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\""
StructField.format_t field_name punctuation "\"" punctuation "="
expr field_expr))
(StructField.Map.bindings fields)
punctuation "}"
| EStructAccess { e; field; _ } ->
Format.fprintf fmt "%a%a%a%a%a" expr e punctuation "." punctuation "\""
StructField.format_t field punctuation "\""
| EInj { e; cons; _ } ->
Format.fprintf fmt "%a@ %a" EnumConstructor.format_t cons expr e
| EMatch { e; cases; _ } ->
Format.fprintf fmt "@[<hov 0>%a@ @[<hov 2>%a@]@ %a@ %a@]" keyword "match"
expr e keyword "with"
(Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "@\n")
(fun fmt (cons_name, case_expr) ->
Format.fprintf fmt "@[<hov 2>%a %a@ %a@ %a@]" punctuation "|"
enum_constructor cons_name punctuation "→" expr case_expr))
(EnumConstructor.Map.bindings cases)
| EScopeCall { scope; args } ->
Format.pp_open_hovbox fmt 2;
ScopeName.format_t fmt scope;
Format.pp_print_space fmt ();
keyword fmt "of";
Format.pp_print_space fmt ();
Format.pp_open_hvbox fmt 2;
punctuation fmt "{";
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.fprintf fmt "%a@ " punctuation ";")
(fun fmt (field_name, field_expr) ->
Format.fprintf fmt "%a%a%a%a@ %a" punctuation "\"" ScopeVar.format_t
field_name punctuation "\"" punctuation "=" expr field_expr)
fmt
(ScopeVar.Map.bindings args);
Format.pp_close_box fmt ();
punctuation fmt "}";
Format.pp_close_box fmt ()
let typ_debug = typ None
let typ ctx = typ (Some ctx)
let expr_debug ?debug = expr_aux ?debug None Bindlib.empty_ctxt
let expr ?debug ctx = expr_aux ?debug (Some ctx) Bindlib.empty_ctxt
| |
f83d98feda41860c37f0c11d58131a7909db23d68b1447eef54d75a3332b730f | yallop/ocaml-pb | test_messages.ml | open Pb
module Enum =
struct
module E = (val enum "Enum")
let one = E.constant "one" 1_l
let two = E.constant "two" 2_l
end
module Small =
struct
module S = (val message "Small")
let s = S.optional string "small_s" 100
let i = S.optional int64 "small_i" 200
end
module TwoString =
struct
module T = (val message "TwoString")
let s = T.required string "two_s" 1000
let b = T.required string "two_b" 2000
end
module Comprehensive =
struct
module C = (val message "Comprehensive")
let repeated_uint32 =
C.repeated uint32 "repeated_uint32" 1
let required_int32 =
C.required int32 "required_int32" 2
let required_Small =
C.required (msg Small.S.t) "required_Small" 3
let required_double =
C.required double "required_double" 4
let optional_sfixed32 =
C.optional sfixed32 "optional_sfixed32" 5
let optional_fixed32 =
C.optional fixed32 "optional_fixed32" 6
let repeated_bytes =
C.repeated bytes "repeated_bytes" 7
let repeated_bool =
C.repeated bool "repeated_bool" 8
let repeated_sfixed64 =
C.repeated sfixed64 "repeated_sfixed64" 9
let optional_bool =
C.optional bool "optional_bool" 10
let required_uint32 =
C.required uint32 "required_uint32" 11
let optional_double =
C.optional double "optional_double" 12
let required_int64 =
C.required int64 "required_int64" 13
let required_uint64 =
C.required uint64 "required_uint64" 14
let required_string =
C.required string "required_string" 15
let required_bytes =
C.required bytes "required_bytes" 16
let optional_bytes =
C.optional bytes "optional_bytes" 17
let optional_sint64 =
C.optional sint64 "optional_sint64" 18
let repeated_sint64 =
C.repeated sint64 "repeated_sint64" 19
let repeated_fixed32 =
C.repeated fixed32 "repeated_fixed32" 20
let optional_Small =
C.optional (msg Small.S.t) "optional_Small" 21
let optional_int32 =
C.optional int32 "optional_int32" 22
let optional_fixed64 =
C.optional fixed64 "optional_fixed64" 23
let optional_enum =
C.optional Enum.E.t "optional_enum" 24
let required_float =
C.required float "required_float" 25
let optional_sfixed64 =
C.optional sfixed64 "optional_sfixed64" 26
let required_sfixed32 =
C.required sfixed32 "required_sfixed32" 27
let required_bool =
C.required bool "required_bool" 28
let repeated_fixed64 =
C.repeated fixed64 "repeated_fixed64" 29
let optional_sint32 =
C.optional sint32 "optional_sint32" 30
let repeated_int64 =
C.repeated int64 "repeated_int64" 31
let required_fixed64 =
C.required fixed64 "required_fixed64" 32
let repeated_enum =
C.repeated Enum.E.t "repeated_enum" 33
let optional_int64 =
C.optional int64 "optional_int64" 34
let repeated_float =
C.repeated float "repeated_float" 35
let repeated_sint32 =
C.repeated sint32 "repeated_sint32" 36
let repeated_uint64 =
C.repeated uint64 "repeated_uint64" 37
let repeated_Small =
C.repeated (msg Small.S.t) "repeated_Small" 38
let repeated_double =
C.repeated double "repeated_double" 39
let repeated_string =
C.repeated string "repeated_string" 40
let required_sfixed64 =
C.required sfixed64 "required_sfixed64" 41
let required_sint64 =
C.required sint64 "required_sint64" 42
let optional_string =
C.optional string "optional_string" 43
let optional_uint32 =
C.optional uint32 "optional_uint32" 44
let repeated_sfixed32 =
C.repeated sfixed32 "repeated_sfixed32" 45
let optional_float =
C.optional float "optional_float" 46
let optional_uint64 =
C.optional uint64 "optional_uint64" 47
let required_enum =
C.required Enum.E.t "required_enum" 48
let required_sint32 =
C.required sint32 "required_sint32" 49
let required_fixed32 =
C.required fixed32 "required_fixed32" 50
let repeated_int32 =
C.repeated int32 "repeated_int32" 51
end
| null | https://raw.githubusercontent.com/yallop/ocaml-pb/b6cbb5c57ef6dc00ccf67af00634a14b451af742/pb/test/test_messages.ml | ocaml | open Pb
module Enum =
struct
module E = (val enum "Enum")
let one = E.constant "one" 1_l
let two = E.constant "two" 2_l
end
module Small =
struct
module S = (val message "Small")
let s = S.optional string "small_s" 100
let i = S.optional int64 "small_i" 200
end
module TwoString =
struct
module T = (val message "TwoString")
let s = T.required string "two_s" 1000
let b = T.required string "two_b" 2000
end
module Comprehensive =
struct
module C = (val message "Comprehensive")
let repeated_uint32 =
C.repeated uint32 "repeated_uint32" 1
let required_int32 =
C.required int32 "required_int32" 2
let required_Small =
C.required (msg Small.S.t) "required_Small" 3
let required_double =
C.required double "required_double" 4
let optional_sfixed32 =
C.optional sfixed32 "optional_sfixed32" 5
let optional_fixed32 =
C.optional fixed32 "optional_fixed32" 6
let repeated_bytes =
C.repeated bytes "repeated_bytes" 7
let repeated_bool =
C.repeated bool "repeated_bool" 8
let repeated_sfixed64 =
C.repeated sfixed64 "repeated_sfixed64" 9
let optional_bool =
C.optional bool "optional_bool" 10
let required_uint32 =
C.required uint32 "required_uint32" 11
let optional_double =
C.optional double "optional_double" 12
let required_int64 =
C.required int64 "required_int64" 13
let required_uint64 =
C.required uint64 "required_uint64" 14
let required_string =
C.required string "required_string" 15
let required_bytes =
C.required bytes "required_bytes" 16
let optional_bytes =
C.optional bytes "optional_bytes" 17
let optional_sint64 =
C.optional sint64 "optional_sint64" 18
let repeated_sint64 =
C.repeated sint64 "repeated_sint64" 19
let repeated_fixed32 =
C.repeated fixed32 "repeated_fixed32" 20
let optional_Small =
C.optional (msg Small.S.t) "optional_Small" 21
let optional_int32 =
C.optional int32 "optional_int32" 22
let optional_fixed64 =
C.optional fixed64 "optional_fixed64" 23
let optional_enum =
C.optional Enum.E.t "optional_enum" 24
let required_float =
C.required float "required_float" 25
let optional_sfixed64 =
C.optional sfixed64 "optional_sfixed64" 26
let required_sfixed32 =
C.required sfixed32 "required_sfixed32" 27
let required_bool =
C.required bool "required_bool" 28
let repeated_fixed64 =
C.repeated fixed64 "repeated_fixed64" 29
let optional_sint32 =
C.optional sint32 "optional_sint32" 30
let repeated_int64 =
C.repeated int64 "repeated_int64" 31
let required_fixed64 =
C.required fixed64 "required_fixed64" 32
let repeated_enum =
C.repeated Enum.E.t "repeated_enum" 33
let optional_int64 =
C.optional int64 "optional_int64" 34
let repeated_float =
C.repeated float "repeated_float" 35
let repeated_sint32 =
C.repeated sint32 "repeated_sint32" 36
let repeated_uint64 =
C.repeated uint64 "repeated_uint64" 37
let repeated_Small =
C.repeated (msg Small.S.t) "repeated_Small" 38
let repeated_double =
C.repeated double "repeated_double" 39
let repeated_string =
C.repeated string "repeated_string" 40
let required_sfixed64 =
C.required sfixed64 "required_sfixed64" 41
let required_sint64 =
C.required sint64 "required_sint64" 42
let optional_string =
C.optional string "optional_string" 43
let optional_uint32 =
C.optional uint32 "optional_uint32" 44
let repeated_sfixed32 =
C.repeated sfixed32 "repeated_sfixed32" 45
let optional_float =
C.optional float "optional_float" 46
let optional_uint64 =
C.optional uint64 "optional_uint64" 47
let required_enum =
C.required Enum.E.t "required_enum" 48
let required_sint32 =
C.required sint32 "required_sint32" 49
let required_fixed32 =
C.required fixed32 "required_fixed32" 50
let repeated_int32 =
C.repeated int32 "repeated_int32" 51
end
| |
20a516cffab75e57886a5eabfac75e2ab5c1cccb0d83661acf55d155932ecb97 | PapenfussLab/bioshake | Picard.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Bioshake.Picard where
import Bioshake
import Bioshake.Internal.Picard
import Bioshake.TH
import Development.Shake
import Development.Shake.FilePath
$(makeSingleThread ''MarkDups [''Sorted, ''PairedEnd, ''IsBam] 'buildMarkDups)
$ markDups duplicates using picard tools
$(makeSingleThread ''DeDup [''Sorted, ''PairedEnd, ''IsBam] 'buildDeDup)
{- $markDups De-duplicate using picard tools -}
$(makeSingleThread ''FixMates [''Sorted, ''PairedEnd, ''IsBam] 'buildFixMates)
| null | https://raw.githubusercontent.com/PapenfussLab/bioshake/afeb7219b171e242b6e9bb9e99e2f80c0a099aff/Bioshake/Picard.hs | haskell | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators #
$markDups De-duplicate using picard tools | # LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE MultiParamTypeClasses #
module Bioshake.Picard where
import Bioshake
import Bioshake.Internal.Picard
import Bioshake.TH
import Development.Shake
import Development.Shake.FilePath
$(makeSingleThread ''MarkDups [''Sorted, ''PairedEnd, ''IsBam] 'buildMarkDups)
$ markDups duplicates using picard tools
$(makeSingleThread ''DeDup [''Sorted, ''PairedEnd, ''IsBam] 'buildDeDup)
$(makeSingleThread ''FixMates [''Sorted, ''PairedEnd, ''IsBam] 'buildFixMates)
|
ea740743d8cf8022b228517b4d8d5bbaa000ac63ff5dd327a906cfd8d5a10c1d | ghc/testsuite | tcfail067.hs | # LANGUAGE DatatypeContexts #
module ShouldFail where
infixr 1 `rangeOf`
data Ord a => SubRange a = SubRange (a, a) a
type IntSubRange = SubRange Int
subRangeValue :: SubRange a -> a
subRangeValue (SubRange (lower, upper) value) = value
subRange :: SubRange a -> (a, a)
subRange (SubRange r value) = r
newRange :: (Ord a, Show a) => (a, a) -> a -> SubRange a
newRange r value = checkRange (SubRange r value)
checkRange :: (Ord a, Show a) => SubRange a -> SubRange a
checkRange (SubRange (lower, upper) value)
= if (value < lower) || (value > upper) then
error ("### sub range error. range = " ++ show lower ++
".." ++ show upper ++ " value = " ++ show value ++ "\n")
else
SubRange (lower, upper) value
instance Eq a => Eq (SubRange a) where
(==) a b = subRangeValue a == subRangeValue b
instance (Ord a) => Ord (SubRange a) where
(<) = relOp (<)
(<=) = relOp (<=)
(>=) = relOp (>=)
(>) = relOp (>)
relOp :: Ord a => (a->a->Bool) -> SubRange a -> SubRange a -> Bool
relOp op a b = (subRangeValue a) `op` (subRangeValue b)
rangeOf :: (Ord a, Show a) => SubRange a -> SubRange a -> SubRange a
rangeOf a b = checkRange (SubRange (subRange b) (subRangeValue a))
showRange :: Show a => SubRange a -> String
showRange (SubRange (lower, upper) value)
= show value ++ " :" ++ show lower ++ ".." ++ show upper
showRangePair :: (Show a, Show b) => (SubRange a, SubRange b) -> String
showRangePair (a, b)
= "(" ++ showRange a ++ ", " ++ showRange b ++ ")"
showRangeTriple :: (Show a, Show b, Show c) =>
(SubRange a, SubRange b, SubRange c) -> String
showRangeTriple (a, b, c)
= "(" ++ showRange a ++ ", " ++ showRange b ++ ", " ++ showRange c ++ ")"
instance Num a => Num (SubRange a) where
negate = numSubRangeNegate
(+) = numSubRangeAdd
(-) = numSubRangeSubtract
(*) = numSubRangeMultiply
fromInteger a = SubRange (fromInteger a, fromInteger a) (fromInteger a)
numSubRangeNegate :: (Ord a, Show a, Num a) => SubRange a -> SubRange a
numSubRangeNegate (SubRange (lower, upper) value)
= checkRange (SubRange (lower, upper) (-value))
numSubRangeBinOp :: Num a => (a -> a -> a) ->
SubRange a -> SubRange a -> SubRange a
numSubRangeBinOp op a b
= SubRange (result, result) result
where
result = (subRangeValue a) `op` (subRangeValue b)
-- partain:
numSubRangeAdd, numSubRangeSubtract, numSubRangeMultiply :: Num a => SubRange a -> SubRange a -> SubRange a
numSubRangeAdd = numSubRangeBinOp (+)
numSubRangeSubtract = numSubRangeBinOp (-)
numSubRangeMultiply = numSubRangeBinOp (*)
unsignedBits :: Int -> (Int, Int)
unsignedBits n = (0, 2^n-1)
signedBits :: Int -> (Int, Int)
signedBits n = (-2^(n-1), 2^(n-1)-1)
si_n :: Int -> Int -> IntSubRange
si_n bits value = SubRange (signedBits bits) value
si8, si10, si16 :: Int -> IntSubRange
si8 = si_n 8
si10 = si_n 10
si16 = si_n 16
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/tcfail067.hs | haskell | partain: | # LANGUAGE DatatypeContexts #
module ShouldFail where
infixr 1 `rangeOf`
data Ord a => SubRange a = SubRange (a, a) a
type IntSubRange = SubRange Int
subRangeValue :: SubRange a -> a
subRangeValue (SubRange (lower, upper) value) = value
subRange :: SubRange a -> (a, a)
subRange (SubRange r value) = r
newRange :: (Ord a, Show a) => (a, a) -> a -> SubRange a
newRange r value = checkRange (SubRange r value)
checkRange :: (Ord a, Show a) => SubRange a -> SubRange a
checkRange (SubRange (lower, upper) value)
= if (value < lower) || (value > upper) then
error ("### sub range error. range = " ++ show lower ++
".." ++ show upper ++ " value = " ++ show value ++ "\n")
else
SubRange (lower, upper) value
instance Eq a => Eq (SubRange a) where
(==) a b = subRangeValue a == subRangeValue b
instance (Ord a) => Ord (SubRange a) where
(<) = relOp (<)
(<=) = relOp (<=)
(>=) = relOp (>=)
(>) = relOp (>)
relOp :: Ord a => (a->a->Bool) -> SubRange a -> SubRange a -> Bool
relOp op a b = (subRangeValue a) `op` (subRangeValue b)
rangeOf :: (Ord a, Show a) => SubRange a -> SubRange a -> SubRange a
rangeOf a b = checkRange (SubRange (subRange b) (subRangeValue a))
showRange :: Show a => SubRange a -> String
showRange (SubRange (lower, upper) value)
= show value ++ " :" ++ show lower ++ ".." ++ show upper
showRangePair :: (Show a, Show b) => (SubRange a, SubRange b) -> String
showRangePair (a, b)
= "(" ++ showRange a ++ ", " ++ showRange b ++ ")"
showRangeTriple :: (Show a, Show b, Show c) =>
(SubRange a, SubRange b, SubRange c) -> String
showRangeTriple (a, b, c)
= "(" ++ showRange a ++ ", " ++ showRange b ++ ", " ++ showRange c ++ ")"
instance Num a => Num (SubRange a) where
negate = numSubRangeNegate
(+) = numSubRangeAdd
(-) = numSubRangeSubtract
(*) = numSubRangeMultiply
fromInteger a = SubRange (fromInteger a, fromInteger a) (fromInteger a)
numSubRangeNegate :: (Ord a, Show a, Num a) => SubRange a -> SubRange a
numSubRangeNegate (SubRange (lower, upper) value)
= checkRange (SubRange (lower, upper) (-value))
numSubRangeBinOp :: Num a => (a -> a -> a) ->
SubRange a -> SubRange a -> SubRange a
numSubRangeBinOp op a b
= SubRange (result, result) result
where
result = (subRangeValue a) `op` (subRangeValue b)
numSubRangeAdd, numSubRangeSubtract, numSubRangeMultiply :: Num a => SubRange a -> SubRange a -> SubRange a
numSubRangeAdd = numSubRangeBinOp (+)
numSubRangeSubtract = numSubRangeBinOp (-)
numSubRangeMultiply = numSubRangeBinOp (*)
unsignedBits :: Int -> (Int, Int)
unsignedBits n = (0, 2^n-1)
signedBits :: Int -> (Int, Int)
signedBits n = (-2^(n-1), 2^(n-1)-1)
si_n :: Int -> Int -> IntSubRange
si_n bits value = SubRange (signedBits bits) value
si8, si10, si16 :: Int -> IntSubRange
si8 = si_n 8
si10 = si_n 10
si16 = si_n 16
|
dece9f49d4cabc465097b4f548ab6ae7c45b6cd5e36f32797c971e71c7660d5b | racket/typed-racket | info.rkt | #lang info
(define collection 'use-pkg-name)
(define deps '("redex-lib"
"sandbox-lib"
("base" #:version "6.2.900.16")
("typed-racket-lib" #:version "1.10")
"typed-racket-more"
"2d"
"typed-racket-compatibility"
"racket-index"
"rackunit-lib"
"compatibility-lib"
"racket-test-core"
))
(define build-deps '("scheme-lib"
"base"
"racket-benchmarks"
"compiler-lib"
"htdp-lib"
))
(define update-implies '("typed-racket-lib"
"typed-racket-more"
"typed-racket-compatibility"))
(define pkg-desc "tests for \"typed-racket\"")
(define pkg-authors '(samth stamourv endobson asumu "" "moonsolo+"))
(define version "1.10")
;; Collection info
(define name "Typed Racket Test Suite")
(define test-timeouts
'(("optimizer/run.rkt" 1200)
("run.rkt" 1800)
("with-tr-contracts.rkt" 3000)))
;; No need to compile the actual integration tests, just the harness.
(define compile-omit-paths
'("succeed"
"external"
"fail"
"unit-tests/shallow-rewrite-expansion"
"xfail"
"racketcs-eval-server.rkt"
"optimizer" ;; FIXME: should be improved by stamourv
"gui/succeed"))
(define test-omit-paths '("fail"
"external/fail"
"unit-tests/shallow-rewrite-expansion"
"xfail"))
(define test-command-line-arguments
'(("succeed/priority-queue.scm" ())
("succeed/hw01.scm" ())
("succeed/foo.scm" ())
("succeed/batched-queue.scm" ())))
(define license
'(Apache-2.0 OR MIT))
| null | https://raw.githubusercontent.com/racket/typed-racket/6ae375e58dd482c72be26d408e0d74de28611a59/typed-racket-test/info.rkt | racket | Collection info
No need to compile the actual integration tests, just the harness.
FIXME: should be improved by stamourv | #lang info
(define collection 'use-pkg-name)
(define deps '("redex-lib"
"sandbox-lib"
("base" #:version "6.2.900.16")
("typed-racket-lib" #:version "1.10")
"typed-racket-more"
"2d"
"typed-racket-compatibility"
"racket-index"
"rackunit-lib"
"compatibility-lib"
"racket-test-core"
))
(define build-deps '("scheme-lib"
"base"
"racket-benchmarks"
"compiler-lib"
"htdp-lib"
))
(define update-implies '("typed-racket-lib"
"typed-racket-more"
"typed-racket-compatibility"))
(define pkg-desc "tests for \"typed-racket\"")
(define pkg-authors '(samth stamourv endobson asumu "" "moonsolo+"))
(define version "1.10")
(define name "Typed Racket Test Suite")
(define test-timeouts
'(("optimizer/run.rkt" 1200)
("run.rkt" 1800)
("with-tr-contracts.rkt" 3000)))
(define compile-omit-paths
'("succeed"
"external"
"fail"
"unit-tests/shallow-rewrite-expansion"
"xfail"
"racketcs-eval-server.rkt"
"gui/succeed"))
(define test-omit-paths '("fail"
"external/fail"
"unit-tests/shallow-rewrite-expansion"
"xfail"))
(define test-command-line-arguments
'(("succeed/priority-queue.scm" ())
("succeed/hw01.scm" ())
("succeed/foo.scm" ())
("succeed/batched-queue.scm" ())))
(define license
'(Apache-2.0 OR MIT))
|
45c79303b88cee428cd287cab5c9ff6b2a5745422e701291082ce5f686ec03ee | yitzchak/common-lisp-jupyter | installer.lisp | (in-package #:jupyter/common-lisp)
(defclass cl-installer (jupyter:installer)
((load-system :accessor installer-load-system
:initarg :load-system
:type (or null string)))
(:default-initargs
:class 'kernel
:language +language+
:debugger t
:resources
(mapcar #'asdf:component-pathname
(asdf:component-children
(or (asdf:find-component :common-lisp-jupyter (list "res" (format nil "~(~A~)" (uiop:implementation-type))))
(asdf:find-component :common-lisp-jupyter '("res" "cl")))))
:systems '(:common-lisp-jupyter)))
(defclass system-installer (jupyter:system-installer cl-installer)
()
(:documentation "common-lisp-jupyter system installer."))
(defclass system-bundle-installer (jupyter:system-bundle-installer cl-installer)
()
(:documentation "common-lisp-jupyter system bundle installer."))
(defclass user-installer (jupyter:user-installer cl-installer)
()
(:documentation "common-lisp-jupyter user installer."))
(defclass user-image-installer (jupyter:user-image-installer cl-installer)
()
(:documentation "common-lisp-jupyter user image installer."))
(defclass user-roswell-installer (jupyter:user-installer cl-installer)
()
(:documentation "common-lisp-jupyter roswell installer."))
(defmethod jupyter:command-line ((instance user-installer))
"Get the command line for a user installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type))))
(when (installer-load-system instance)
(list +eval-flag+ (installer-load-system instance)))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance system-installer))
"Get the command line for a user installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type))))
(when (installer-load-system instance)
(list +eval-flag+ (installer-load-system instance)))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance system-bundle-installer))
"Get the command line for a system bundle installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type)))
+load-flag+ (namestring (jupyter:installer-path instance :bundle))
+eval-flag+ (if (find-package :quicklisp)
"(ql:quickload :common-lisp-jupyter)"
"(asdf:load-system :common-lisp-jupyter)"))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance user-roswell-installer))
"Get the command line for a roswell installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append
(if (or implementation (uiop:os-windows-p))
'("ros")
'("cl-jupyter"))
(when implementation
(list "--lisp" implementation))
(when (or implementation (uiop:os-windows-p))
(list (namestring
(merge-pathnames
(make-pathname :directory '(:relative ".roswell" "bin")
:name "cl-jupyter")
Get the home from % USERPROFILE% if on Windows to avoid MSYS home
(uiop:getenv-absolute-directory "USERPROFILE")
(truename (user-homedir-pathname)))))))
'("{connection_file}"))))
(defun install (&key bin-path implementation system bundle local prefix jupyter program
(load-system t))
"Install Common Lisp kernel based on the current implementation.
- `bin-path` specifies path to LISP binary.
- `implementation` toggles including implementation details in kernel name.
- `system` toggles system versus user installation.
- `bundle` creates a quicklisp bundle for system installations.
- `local` toggles `/usr/local/share versus` `/usr/share` for system installations.
- `prefix` key specifies directory prefix for packaging.
- `jupyter` key specifies the root under which the Jupyter folder is found. Is automatically determined if not provided."
(jupyter:install
(make-instance
(cond
((and system bundle)
'system-bundle-installer)
(system
'system-installer)
(t
'user-installer))
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+display-name+)
:implementation bin-path
:local local
:load-system (cond ((or (null load-system)
(stringp load-system))
load-system)
((find-package :quicklisp)
"(ql:quickload :common-lisp-jupyter)")
(t
"(asdf:load-system :common-lisp-jupyter)"))
:kernel-name
(if implementation
(format nil "~A_~(~A~)" +language+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+language+)
:prefix prefix
:jupyter-path jupyter
:program-path program)))
(defun install-image (&key implementation prefix jupyter program)
"Install Common Lisp kernel based on image of current implementation.
- `implementation` toggles including implementation details in kernel name.
- `prefix` key specifies directory prefix for packaging.
- `jupyter` key specifies the root under which the Jupyter folder is found. Is automatically determined if not provided."
(jupyter:install
(make-instance 'user-image-installer
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+display-name+)
:kernel-name
(if implementation
(format nil "~A_~(~A~)" +language+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+language+)
:prefix prefix
:jupyter-path jupyter
:program-path program)))
(defun install-roswell (&key implementation)
"Install Common Lisp kernel using Roswell. `implementation` key toggles
including implementation details in kernel name."
(jupyter:install
(make-instance 'user-roswell-installer
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ implementation)
+display-name+)
:implementation implementation
:kernel-name
(if implementation
(format nil "~A_~A" +language+ (substitute #\_ #\/ implementation))
+language+))))
| null | https://raw.githubusercontent.com/yitzchak/common-lisp-jupyter/abe87bc5857236f4add3d00bd4a54f80b6582795/src/cl-jupyter/installer.lisp | lisp | (in-package #:jupyter/common-lisp)
(defclass cl-installer (jupyter:installer)
((load-system :accessor installer-load-system
:initarg :load-system
:type (or null string)))
(:default-initargs
:class 'kernel
:language +language+
:debugger t
:resources
(mapcar #'asdf:component-pathname
(asdf:component-children
(or (asdf:find-component :common-lisp-jupyter (list "res" (format nil "~(~A~)" (uiop:implementation-type))))
(asdf:find-component :common-lisp-jupyter '("res" "cl")))))
:systems '(:common-lisp-jupyter)))
(defclass system-installer (jupyter:system-installer cl-installer)
()
(:documentation "common-lisp-jupyter system installer."))
(defclass system-bundle-installer (jupyter:system-bundle-installer cl-installer)
()
(:documentation "common-lisp-jupyter system bundle installer."))
(defclass user-installer (jupyter:user-installer cl-installer)
()
(:documentation "common-lisp-jupyter user installer."))
(defclass user-image-installer (jupyter:user-image-installer cl-installer)
()
(:documentation "common-lisp-jupyter user image installer."))
(defclass user-roswell-installer (jupyter:user-installer cl-installer)
()
(:documentation "common-lisp-jupyter roswell installer."))
(defmethod jupyter:command-line ((instance user-installer))
"Get the command line for a user installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type))))
(when (installer-load-system instance)
(list +eval-flag+ (installer-load-system instance)))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance system-installer))
"Get the command line for a user installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type))))
(when (installer-load-system instance)
(list +eval-flag+ (installer-load-system instance)))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance system-bundle-installer))
"Get the command line for a system bundle installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append (list (or implementation
(first (uiop:raw-command-line-arguments))
(format nil "~(~A~)" (uiop:implementation-type)))
+load-flag+ (namestring (jupyter:installer-path instance :bundle))
+eval-flag+ (if (find-package :quicklisp)
"(ql:quickload :common-lisp-jupyter)"
"(asdf:load-system :common-lisp-jupyter)"))
(list +eval-flag+ "(jupyter:run-kernel 'jupyter/common-lisp:kernel)")
(when +user-options+
(list +user-options+))
(list "{connection_file}"))))
(defmethod jupyter:command-line ((instance user-roswell-installer))
"Get the command line for a roswell installation."
(let ((implementation (jupyter:installer-implementation instance)))
(append
(if (or implementation (uiop:os-windows-p))
'("ros")
'("cl-jupyter"))
(when implementation
(list "--lisp" implementation))
(when (or implementation (uiop:os-windows-p))
(list (namestring
(merge-pathnames
(make-pathname :directory '(:relative ".roswell" "bin")
:name "cl-jupyter")
Get the home from % USERPROFILE% if on Windows to avoid MSYS home
(uiop:getenv-absolute-directory "USERPROFILE")
(truename (user-homedir-pathname)))))))
'("{connection_file}"))))
(defun install (&key bin-path implementation system bundle local prefix jupyter program
(load-system t))
"Install Common Lisp kernel based on the current implementation.
- `bin-path` specifies path to LISP binary.
- `implementation` toggles including implementation details in kernel name.
- `system` toggles system versus user installation.
- `bundle` creates a quicklisp bundle for system installations.
- `local` toggles `/usr/local/share versus` `/usr/share` for system installations.
- `prefix` key specifies directory prefix for packaging.
- `jupyter` key specifies the root under which the Jupyter folder is found. Is automatically determined if not provided."
(jupyter:install
(make-instance
(cond
((and system bundle)
'system-bundle-installer)
(system
'system-installer)
(t
'user-installer))
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+display-name+)
:implementation bin-path
:local local
:load-system (cond ((or (null load-system)
(stringp load-system))
load-system)
((find-package :quicklisp)
"(ql:quickload :common-lisp-jupyter)")
(t
"(asdf:load-system :common-lisp-jupyter)"))
:kernel-name
(if implementation
(format nil "~A_~(~A~)" +language+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+language+)
:prefix prefix
:jupyter-path jupyter
:program-path program)))
(defun install-image (&key implementation prefix jupyter program)
"Install Common Lisp kernel based on image of current implementation.
- `implementation` toggles including implementation details in kernel name.
- `prefix` key specifies directory prefix for packaging.
- `jupyter` key specifies the root under which the Jupyter folder is found. Is automatically determined if not provided."
(jupyter:install
(make-instance 'user-image-installer
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+display-name+)
:kernel-name
(if implementation
(format nil "~A_~(~A~)" +language+ (if (stringp implementation)
implementation
(lisp-implementation-type)))
+language+)
:prefix prefix
:jupyter-path jupyter
:program-path program)))
(defun install-roswell (&key implementation)
"Install Common Lisp kernel using Roswell. `implementation` key toggles
including implementation details in kernel name."
(jupyter:install
(make-instance 'user-roswell-installer
:display-name
(if implementation
(format nil "~A (~A)" +display-name+ implementation)
+display-name+)
:implementation implementation
:kernel-name
(if implementation
(format nil "~A_~A" +language+ (substitute #\_ #\/ implementation))
+language+))))
| |
a3afb011fb359cb4cb23876411371ad92bfcc3b16d7d14d816e7203eff9b0d9f | BardurArantsson/cqrs | Repository.hs | module Data.CQRS.Repository
( -- * Repository
Repository
, newRepository
-- * Settings
, Settings
, setSnapshotFrequency
, setClock
, defaultSettings
) where
import Data.CQRS.Internal.Repository
| null | https://raw.githubusercontent.com/BardurArantsson/cqrs/2491d83e2bcd68c883aaea33cdce6c5ea8c0cd1a/cqrs-core/src/Data/CQRS/Repository.hs | haskell | * Repository
* Settings | module Data.CQRS.Repository
Repository
, newRepository
, Settings
, setSnapshotFrequency
, setClock
, defaultSettings
) where
import Data.CQRS.Internal.Repository
|
e377e41622ab68d0670440d553e76722d6215951d76274e776f4d53dbeebaf80 | babashka/babashka | zip_test.cljc | (ns rewrite-clj.zip-test
"This test namespace originated from rewrite-cljs."
(:require [clojure.string :as string]
[clojure.test :refer [deftest testing is are]]
[rewrite-clj.node :as n]
[rewrite-clj.zip :as z]))
(deftest of-string-simple-sexpr
(let [sexpr "(+ 1 2)"]
(is (= sexpr (-> sexpr z/of-string z/root-string)))))
(deftest manipulate-sexpr
(let [sexpr
(string/join
"\n" [""
" ^{:dynamic true} (+ 1 1"
" (+ 2 2)"
" (reduce + [1 3 4]))"])
expected
(string/join
"\n" [""
" ^{:dynamic true} (+ 1 1"
" (+ 2 2)"
" (reduce + [6 7 [1 2]]))"])]
(is (= expected (-> (z/of-string sexpr {:track-position? true})
should find [ 1 3 4 ] col 19 points to element 4 in vector
replaces [ 1 3 4 ] with [ 5 6 7 ]
appends [ 1 2 ] to [ 5 6 7 ] giving [ 5 6 [ 1 2 ] ]
navigate to 5
remove 5 giving [ 6 7 [ 1 2 ] ]
z/root-string)))))
(deftest t-rowcolumn-positions-from-position-tracking-zipper
;; if you update this test, please also review/update:
;; rewrite-clj.parser-test.t-rowcolumn-metadata-from-clojure-tools-reader
(let [s (str
;12345678901234
"(defn f\n"
" [x]\n"
" (println x))")
positions (->> (z/of-string s {:track-position? true})
(iterate z/next)
(take-while #(not (z/end? %)))
(reduce (fn [acc zloc]
(let [[start end] (z/position-span zloc)]
(assoc acc start {:node (z/node zloc) :end-pos end})))
{}))]
(are [?pos ?end ?t ?s ?sexpr]
(let [{:keys [node end-pos]} (positions ?pos)]
(is (= ?t (n/tag node)))
(is (= ?s (n/string node)))
(is (= ?sexpr (n/sexpr node)))
(is (= ?end end-pos)))
[1 1] [3 15] :list s '(defn f [x] (println x))
[1 2] [1 6] :token "defn" 'defn
[1 7] [1 8] :token "f" 'f
[2 3] [2 6] :vector "[x]" '[x]
[2 4] [2 5] :token "x" 'x
[3 3] [3 14] :list "(println x)" '(println x)
[3 4] [3 11] :token "println" 'println
[3 12] [3 13] :token "x" 'x)))
(deftest namespaced-keywords
(is (= ":dill" (-> ":dill" z/of-string z/root-string)))
(is (= "::dill" (-> "::dill" z/of-string z/root-string)))
(is (= ":dill/dall" (-> ":dill/dall" z/of-string z/root-string)))
(is (= "::dill/dall" (-> "::dill/dall" z/of-string z/root-string)))
(is (= ":%dill.*" (-> ":%dill.*" z/of-string z/root-string))))
(deftest sexpr-udpates-correctly-for-namespaced-map-keys
(testing "on parse"
(is (= '(:prefix/a 1 :prefix/b 2 prefix/c 3)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
z/child-sexprs))))
(testing "on insert new key val"
(is (= '(:prefix/a 1 :prefix/b 2 prefix/c 3 prefix/d 4)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
(z/append-child 'd)
(z/append-child 4)
z/up ;; changes and also nsmap context are applied when moving up to nsmap
z/down
z/rightmost
z/child-sexprs))))
(testing "on update existing key val"
(is (= '(:prefix/a 1 :prefix/b2 2 prefix/c 3)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
z/down
z/right
z/right
(z/replace :b2)
z/up ;; changes and also nsmap context are applied when moving up to nsmap
z/up
z/down
z/rightmost
z/child-sexprs))))
(testing "on update creating unbalanced map (which rewrite-clj allows) context is cleared/applied as appropriate"
(is (= '(:prefix/hi :a prefix/b :c prefix/d e prefix/f)
(-> "#:prefix {:a b :c d e f}"
z/of-string
z/down
z/rightmost
(z/insert-child :hi)
z/up ;; changes and also nsmap context are applied when moving up to nsmap
z/down
z/rightmost
z/child-sexprs))))
(testing "namespaced map qualifier can be changed and affect sexpr of its map keys"
(is (= '(:??_ns-alias_??/a 1 :??_ns-alias_??/b 2 :c 3)
(-> "#:prefix {:a 1 :b 2 :_/c 3}"
z/of-string
z/down
(z/replace (n/map-qualifier-node true "ns-alias"))
z/up
z/down
z/rightmost
z/child-sexprs))))
(testing "node context can be be explicitly removed when moving node out of namespaced map"
(is (= '[{:prefix/b 2 :prefix/c 3}
{:a 1 :z 99}]
(let [zloc (-> "[#:prefix {:a 1 :b 2 :c 3}{:z 99}]"
z/of-string
z/down
z/down
z/rightmost
z/down)
move-me1 (-> zloc z/node n/map-context-clear) ;; if we don't clear the map context it will remain
zloc (-> zloc z/remove z/down)
move-me2 (-> zloc z/node)
zloc (z/remove zloc)]
(-> zloc
z/up
z/right
(z/insert-child move-me2)
(z/insert-child move-me1)
z/up
z/sexpr)))))
(testing "node context can be explicitly reapplied to entire zloc downward"
(is (= '[{:prefix/b 2 :prefix/c 3}
{:a 1 :z 99}]
(let [zloc (-> "[#:prefix {:a 1 :b 2 :c 3}{:z 99}]"
z/of-string
z/down
z/down
z/rightmost
z/down)
move-me1 (-> zloc z/node) ;; notice we don't clear context here
zloc (-> zloc z/remove z/down)
move-me2 (-> zloc z/node)
zloc (z/remove zloc)]
(-> zloc
z/up
z/right
(z/insert-child move-me2)
(z/insert-child move-me1)
z/up
z/reapply-context ;; but we do reapply context to tree before doing a sexpr
z/sexpr))))))
(deftest t-sexpr-able
;; spot check, more thorough testing done on node tests
(is (= false (-> "," z/of-string z/next* z/sexpr-able?)))
(is (= true (-> "heyy" z/of-string z/sexpr-able?))))
| null | https://raw.githubusercontent.com/babashka/babashka/3dfc15f5a40efaec07cba991892c1207a352fab4/test-resources/lib_tests/rewrite_clj/zip_test.cljc | clojure | if you update this test, please also review/update:
rewrite-clj.parser-test.t-rowcolumn-metadata-from-clojure-tools-reader
12345678901234
changes and also nsmap context are applied when moving up to nsmap
changes and also nsmap context are applied when moving up to nsmap
changes and also nsmap context are applied when moving up to nsmap
if we don't clear the map context it will remain
notice we don't clear context here
but we do reapply context to tree before doing a sexpr
spot check, more thorough testing done on node tests | (ns rewrite-clj.zip-test
"This test namespace originated from rewrite-cljs."
(:require [clojure.string :as string]
[clojure.test :refer [deftest testing is are]]
[rewrite-clj.node :as n]
[rewrite-clj.zip :as z]))
(deftest of-string-simple-sexpr
(let [sexpr "(+ 1 2)"]
(is (= sexpr (-> sexpr z/of-string z/root-string)))))
(deftest manipulate-sexpr
(let [sexpr
(string/join
"\n" [""
" ^{:dynamic true} (+ 1 1"
" (+ 2 2)"
" (reduce + [1 3 4]))"])
expected
(string/join
"\n" [""
" ^{:dynamic true} (+ 1 1"
" (+ 2 2)"
" (reduce + [6 7 [1 2]]))"])]
(is (= expected (-> (z/of-string sexpr {:track-position? true})
should find [ 1 3 4 ] col 19 points to element 4 in vector
replaces [ 1 3 4 ] with [ 5 6 7 ]
appends [ 1 2 ] to [ 5 6 7 ] giving [ 5 6 [ 1 2 ] ]
navigate to 5
remove 5 giving [ 6 7 [ 1 2 ] ]
z/root-string)))))
(deftest t-rowcolumn-positions-from-position-tracking-zipper
(let [s (str
"(defn f\n"
" [x]\n"
" (println x))")
positions (->> (z/of-string s {:track-position? true})
(iterate z/next)
(take-while #(not (z/end? %)))
(reduce (fn [acc zloc]
(let [[start end] (z/position-span zloc)]
(assoc acc start {:node (z/node zloc) :end-pos end})))
{}))]
(are [?pos ?end ?t ?s ?sexpr]
(let [{:keys [node end-pos]} (positions ?pos)]
(is (= ?t (n/tag node)))
(is (= ?s (n/string node)))
(is (= ?sexpr (n/sexpr node)))
(is (= ?end end-pos)))
[1 1] [3 15] :list s '(defn f [x] (println x))
[1 2] [1 6] :token "defn" 'defn
[1 7] [1 8] :token "f" 'f
[2 3] [2 6] :vector "[x]" '[x]
[2 4] [2 5] :token "x" 'x
[3 3] [3 14] :list "(println x)" '(println x)
[3 4] [3 11] :token "println" 'println
[3 12] [3 13] :token "x" 'x)))
(deftest namespaced-keywords
(is (= ":dill" (-> ":dill" z/of-string z/root-string)))
(is (= "::dill" (-> "::dill" z/of-string z/root-string)))
(is (= ":dill/dall" (-> ":dill/dall" z/of-string z/root-string)))
(is (= "::dill/dall" (-> "::dill/dall" z/of-string z/root-string)))
(is (= ":%dill.*" (-> ":%dill.*" z/of-string z/root-string))))
(deftest sexpr-udpates-correctly-for-namespaced-map-keys
(testing "on parse"
(is (= '(:prefix/a 1 :prefix/b 2 prefix/c 3)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
z/child-sexprs))))
(testing "on insert new key val"
(is (= '(:prefix/a 1 :prefix/b 2 prefix/c 3 prefix/d 4)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
(z/append-child 'd)
(z/append-child 4)
z/down
z/rightmost
z/child-sexprs))))
(testing "on update existing key val"
(is (= '(:prefix/a 1 :prefix/b2 2 prefix/c 3)
(-> "#:prefix {:a 1 :b 2 c 3}"
z/of-string
z/down
z/rightmost
z/down
z/right
z/right
(z/replace :b2)
z/up
z/down
z/rightmost
z/child-sexprs))))
(testing "on update creating unbalanced map (which rewrite-clj allows) context is cleared/applied as appropriate"
(is (= '(:prefix/hi :a prefix/b :c prefix/d e prefix/f)
(-> "#:prefix {:a b :c d e f}"
z/of-string
z/down
z/rightmost
(z/insert-child :hi)
z/down
z/rightmost
z/child-sexprs))))
(testing "namespaced map qualifier can be changed and affect sexpr of its map keys"
(is (= '(:??_ns-alias_??/a 1 :??_ns-alias_??/b 2 :c 3)
(-> "#:prefix {:a 1 :b 2 :_/c 3}"
z/of-string
z/down
(z/replace (n/map-qualifier-node true "ns-alias"))
z/up
z/down
z/rightmost
z/child-sexprs))))
(testing "node context can be be explicitly removed when moving node out of namespaced map"
(is (= '[{:prefix/b 2 :prefix/c 3}
{:a 1 :z 99}]
(let [zloc (-> "[#:prefix {:a 1 :b 2 :c 3}{:z 99}]"
z/of-string
z/down
z/down
z/rightmost
z/down)
zloc (-> zloc z/remove z/down)
move-me2 (-> zloc z/node)
zloc (z/remove zloc)]
(-> zloc
z/up
z/right
(z/insert-child move-me2)
(z/insert-child move-me1)
z/up
z/sexpr)))))
(testing "node context can be explicitly reapplied to entire zloc downward"
(is (= '[{:prefix/b 2 :prefix/c 3}
{:a 1 :z 99}]
(let [zloc (-> "[#:prefix {:a 1 :b 2 :c 3}{:z 99}]"
z/of-string
z/down
z/down
z/rightmost
z/down)
zloc (-> zloc z/remove z/down)
move-me2 (-> zloc z/node)
zloc (z/remove zloc)]
(-> zloc
z/up
z/right
(z/insert-child move-me2)
(z/insert-child move-me1)
z/up
z/sexpr))))))
(deftest t-sexpr-able
(is (= false (-> "," z/of-string z/next* z/sexpr-able?)))
(is (= true (-> "heyy" z/of-string z/sexpr-able?))))
|
f77c2e80a599b3fe0f5bdec0d509da4639a8a0d7775925462618ab0d6915d16a | nokijp/pietc | SyntaxTestHelper.hs | module SyntaxTestHelper
( rl, rr, dl, dr, ll, lr, ul, ur
) where
import Language.Piet.Syntax
rl :: DPCC
rl = DPCC DPRight CCLeft
rr :: DPCC
rr = DPCC DPRight CCRight
dl :: DPCC
dl = DPCC DPDown CCLeft
dr :: DPCC
dr = DPCC DPDown CCRight
ll :: DPCC
ll = DPCC DPLeft CCLeft
lr :: DPCC
lr = DPCC DPLeft CCRight
ul :: DPCC
ul = DPCC DPUp CCLeft
ur :: DPCC
ur = DPCC DPUp CCRight
| null | https://raw.githubusercontent.com/nokijp/pietc/da6699c286a7b428b01211c8e467ce4ec1f7967e/test/SyntaxTestHelper.hs | haskell | module SyntaxTestHelper
( rl, rr, dl, dr, ll, lr, ul, ur
) where
import Language.Piet.Syntax
rl :: DPCC
rl = DPCC DPRight CCLeft
rr :: DPCC
rr = DPCC DPRight CCRight
dl :: DPCC
dl = DPCC DPDown CCLeft
dr :: DPCC
dr = DPCC DPDown CCRight
ll :: DPCC
ll = DPCC DPLeft CCLeft
lr :: DPCC
lr = DPCC DPLeft CCRight
ul :: DPCC
ul = DPCC DPUp CCLeft
ur :: DPCC
ur = DPCC DPUp CCRight
| |
4a1ebd689165361850d6047aaab638026dea8133e626687be6808f259b5d4007 | Nike-Inc/bartlett | Configuration.hs | |
Module : Configuration
Description : Configuration management for Bartlett
Copyright : ( c ) Nike , Inc. , 2016 - present
License : :
Stability : stable
Configuration management for .
Module : Configuration
Description : Configuration management for Bartlett
Copyright : (c) Nike, Inc., 2016-present
License : BSD3
Maintainer :
Stability : stable
Configuration management for Bartlett.
-}
module Bartlett.Configuration (
-- * Configuration Management
defaultConfigLoc,
getConfiguration,
-- * Convenience Accessors
getUsername,
getJenkinsInstance,
getStorePassword
) where
import Bartlett.Types
import Bartlett.Util (toText)
import Data.ByteString.Lazy.Char8 (toStrict)
import qualified Data.Configurator as C
import Data.Configurator.Types
import System.FilePath ((</>))
import URI.ByteString (parseURI, strictURIParserOptions)
-- | Default config file location
defaultConfigLoc :: FilePath
defaultConfigLoc = "$(HOME)" </> ".bartlett.cfg"
-- | Attempt to retrieve the default configuration.
--
-- Returns an empty configuration if it could not load the default.
getConfiguration :: Profile -> IO Config
getConfiguration p =
C.subconfig (toText p) <$> C.load [Optional defaultConfigLoc]
-- | Retrieve the username for the given profile.
getUsername :: Config -> IO (Maybe Username)
getUsername cfg =
C.lookup cfg (toText "username")
| Retrieve the instance for the given profile .
getJenkinsInstance :: Config -> IO (Maybe JenkinsInstance)
getJenkinsInstance cfg = do
ioInst <- C.lookup cfg (toText "jenkins_instance")
case ioInst of
Nothing ->
return Nothing
Just inst ->
case parseURI strictURIParserOptions (toStrict inst) of
Left _ ->
return Nothing
Right i ->
return $ Just i
-- | Get the value determining whether the user's password should be stored.
getStorePassword :: Config -> IO (Maybe Bool)
getStorePassword cfg =
C.lookup cfg (toText "store_password")
| null | https://raw.githubusercontent.com/Nike-Inc/bartlett/b8874ae5b1c3867672956936c3f90a6237404b85/src/Bartlett/Configuration.hs | haskell | * Configuration Management
* Convenience Accessors
| Default config file location
| Attempt to retrieve the default configuration.
Returns an empty configuration if it could not load the default.
| Retrieve the username for the given profile.
| Get the value determining whether the user's password should be stored. | |
Module : Configuration
Description : Configuration management for Bartlett
Copyright : ( c ) Nike , Inc. , 2016 - present
License : :
Stability : stable
Configuration management for .
Module : Configuration
Description : Configuration management for Bartlett
Copyright : (c) Nike, Inc., 2016-present
License : BSD3
Maintainer :
Stability : stable
Configuration management for Bartlett.
-}
module Bartlett.Configuration (
defaultConfigLoc,
getConfiguration,
getUsername,
getJenkinsInstance,
getStorePassword
) where
import Bartlett.Types
import Bartlett.Util (toText)
import Data.ByteString.Lazy.Char8 (toStrict)
import qualified Data.Configurator as C
import Data.Configurator.Types
import System.FilePath ((</>))
import URI.ByteString (parseURI, strictURIParserOptions)
defaultConfigLoc :: FilePath
defaultConfigLoc = "$(HOME)" </> ".bartlett.cfg"
getConfiguration :: Profile -> IO Config
getConfiguration p =
C.subconfig (toText p) <$> C.load [Optional defaultConfigLoc]
getUsername :: Config -> IO (Maybe Username)
getUsername cfg =
C.lookup cfg (toText "username")
| Retrieve the instance for the given profile .
getJenkinsInstance :: Config -> IO (Maybe JenkinsInstance)
getJenkinsInstance cfg = do
ioInst <- C.lookup cfg (toText "jenkins_instance")
case ioInst of
Nothing ->
return Nothing
Just inst ->
case parseURI strictURIParserOptions (toStrict inst) of
Left _ ->
return Nothing
Right i ->
return $ Just i
getStorePassword :: Config -> IO (Maybe Bool)
getStorePassword cfg =
C.lookup cfg (toText "store_password")
|
8ad49fe2e88fc87439db6b5ee41aeb57a7757c136b6c608c4f3cefa5bbf9bc48 | huangjs/cl | dmap.lisp | ;;; ----------------------------------------------------------------
DMAP - Lite
;;; ----------------------------------------------------------------
;;;
Everything you wanted in a Direct Memory Access Parser and less .
;;;
;;; Change log
;;; ----------
;;; 06/07/2006: changed monitors to support local with-monitors macro,
merged remove - monitor and remove - all - monitors functionality [ CKR ]
;;; 06/06/2006: changed monitors to use tag ids, because equality not
guaranteed for functions [ CKR ]
06/06/2006 : Updated packaging code to standard CL2 [ CKR ]
11/16/1998 Added : ADD argument to call to find - instances [ CKR ]
;;; PACKAGES
;;; --------
(defpackage #:dmap
(:use #:common-lisp #:tables #:mops)
(:export #:parse #:defphrase
#:reset-cseqs #:remove-all-cseqs #:remove-cseqs
#:add-monitor #:remove-monitors
#:with-monitors)
)
(in-package #:dmap)
;;; ----------------------------------------------------------------
;;; Globals (external)
;;; ----------------------------------------------------------------
(defvar *dmap-concept-package* nil
"Default package for DMAP concept symbols")
;;; ----------------------------------------------------------------
;;; Globals (internal)
;;; ----------------------------------------------------------------
(defvar *dmap-pos* 0 "Global text position for DMAP")
(defvar *monitors* nil)
;;; ----------------------------------------------------------------
;;; Structures
;;; ----------------------------------------------------------------
(defstruct cseq base seq start end slots)
(defstruct (monitor (:type list)) base tag function)
;;; ----------------------------------------------------------------
;;; Defining concept sequences
;;; ----------------------------------------------------------------
(deftable base-cseqs)
(deftable target-cseqs)
(defmacro defphrase (base &rest seq)
(if (and (eql base (car seq)) (null (cdr seq)))
(error "Can't DEFPHRASE ~S to itself" base)
`(progn (install-cseq (make-cseq :base ',base :seq ',seq))
',seq)))
(defun install-cseq (cseq)
(push cseq (target-cseqs (cseq-target cseq))))
;;; ----------------------------------------------------------------
;;; Removing concept sequences
;;; ----------------------------------------------------------------
(defun reset-cseqs ()
(remove-cseqs
#'(lambda (cseq) (not (null (cseq-start cseq))))))
(defun remove-all-cseqs ()
(clear-table (base-cseqs))
(clear-table (target-cseqs)))
(defun remove-cseqs (pred)
(remove-base-cseqs pred)
(remove-target-cseqs pred))
(defun remove-base-cseqs (pred)
(map-table #'(lambda (base cseqs)
(setf (base-cseqs base)
(delete-if pred cseqs)))
(base-cseqs)))
(defun remove-target-cseqs (pred)
(map-table #'(lambda (target cseqs)
(setf (target-cseqs target)
(delete-if pred cseqs)))
(target-cseqs)))
;;; ----------------------------------------------------------------
PARSE
;;; ----------------------------------------------------------------
(defun parse (sent &key package)
(dolist (w sent)
(increment-text-position)
(reference w (text-position) (text-position) package)))
(defun reference (item start end package)
(dolist (abst (all-absts-of item))
(mapc #'(lambda (monitor)
(when (eql abst (monitor-base monitor))
(funcall (monitor-function monitor) item start end)))
*monitors*)
(advance-cseqs (target-cseqs abst) item start end package)))
(defun advance-cseqs (cseqs item start end package)
(dolist (cseq cseqs)
(when (cseq-applies-p cseq start)
(advance-cseq cseq item start end package))))
(defun cseq-applies-p (cseq start)
(or (null (cseq-end cseq))
(= (1+ (cseq-end cseq)) start)))
(defun advance-cseq (cseq item start end package)
(let ((base (cseq-base cseq))
(seq (rest (cseq-seq cseq)))
(slots (extend-slots cseq item))
(start (or (cseq-start cseq) start)))
(if (null seq)
(reference-instances base slots start end package)
(install-cseq (make-cseq :base base :seq seq :slots slots
:start start :end (text-position))))))
(defun extend-slots (cseq item)
(let ((spec (car (cseq-seq cseq)))
(slots (cseq-slots cseq)))
(if (consp spec)
(if (abstp item (cseq-target cseq))
slots
(list* (car spec) item slots))
slots)))
(defun reference-instances (base slots start end package)
(dolist (instance (find-instances base slots :add t :package package))
(reference instance start end package)))
(defun cseq-target (cseq)
(let ((spec (car (cseq-seq cseq))))
(if (consp spec)
(let ((base (cseq-base cseq))
(role (car spec)))
(or (inherit-filler base role)
(error "~S not a role in ~S" role base)))
spec)))
(defun text-position () *dmap-pos*)
(defun increment-text-position () (incf *dmap-pos*))
(defun reset-text-position () (setq *dmap-pos* 0))
;;; ----------------------------------------------------------------
;;; Monitors
;;; ----------------------------------------------------------------
(defmacro with-monitors (monitors &rest body)
`(let ((*monitors* (list ,@(mapcar #'expand-monitor monitors)))) ,@body))
(defun expand-monitor (monitor)
(if (atom monitor)
(expand-monitor (list monitor))
(destructuring-bind
(base &optional (tag :print) (function '#'print-monitor))
monitor
`(make-monitor :base ',base :tag ',tag :function ,function))))
(defun print-monitor (item start end)
(declare (ignore start end))
(print item))
(defun add-monitor (base &optional (tag :print) (function #'print-monitor))
(remove-monitor base tag)
(let ((monitor (make-monitor :base base :tag tag :function function)))
(push monitor *monitors*)
monitor))
(defun remove-monitors (&key (base nil base-p) (tag nil tag-p))
(setq *monitors*
(delete-if #'(lambda (monitor)
(and (or (null base)
(eql base (monitor-base monitor)))
(or (null tag-p)
(eql tag (monitor-tag monitor)))))
*monitors*)))
;;; ----------------------------------------------------------------
;;; End of module
;;; ----------------------------------------------------------------
(provide "dmap")
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/other-code/cs325/www.cs.northwestern.edu/academics/courses/325/programs/dmap.lisp | lisp | ----------------------------------------------------------------
----------------------------------------------------------------
Change log
----------
06/07/2006: changed monitors to support local with-monitors macro,
06/06/2006: changed monitors to use tag ids, because equality not
PACKAGES
--------
----------------------------------------------------------------
Globals (external)
----------------------------------------------------------------
----------------------------------------------------------------
Globals (internal)
----------------------------------------------------------------
----------------------------------------------------------------
Structures
----------------------------------------------------------------
----------------------------------------------------------------
Defining concept sequences
----------------------------------------------------------------
----------------------------------------------------------------
Removing concept sequences
----------------------------------------------------------------
----------------------------------------------------------------
----------------------------------------------------------------
----------------------------------------------------------------
Monitors
----------------------------------------------------------------
----------------------------------------------------------------
End of module
---------------------------------------------------------------- | DMAP - Lite
Everything you wanted in a Direct Memory Access Parser and less .
merged remove - monitor and remove - all - monitors functionality [ CKR ]
guaranteed for functions [ CKR ]
06/06/2006 : Updated packaging code to standard CL2 [ CKR ]
11/16/1998 Added : ADD argument to call to find - instances [ CKR ]
(defpackage #:dmap
(:use #:common-lisp #:tables #:mops)
(:export #:parse #:defphrase
#:reset-cseqs #:remove-all-cseqs #:remove-cseqs
#:add-monitor #:remove-monitors
#:with-monitors)
)
(in-package #:dmap)
(defvar *dmap-concept-package* nil
"Default package for DMAP concept symbols")
(defvar *dmap-pos* 0 "Global text position for DMAP")
(defvar *monitors* nil)
(defstruct cseq base seq start end slots)
(defstruct (monitor (:type list)) base tag function)
(deftable base-cseqs)
(deftable target-cseqs)
(defmacro defphrase (base &rest seq)
(if (and (eql base (car seq)) (null (cdr seq)))
(error "Can't DEFPHRASE ~S to itself" base)
`(progn (install-cseq (make-cseq :base ',base :seq ',seq))
',seq)))
(defun install-cseq (cseq)
(push cseq (target-cseqs (cseq-target cseq))))
(defun reset-cseqs ()
(remove-cseqs
#'(lambda (cseq) (not (null (cseq-start cseq))))))
(defun remove-all-cseqs ()
(clear-table (base-cseqs))
(clear-table (target-cseqs)))
(defun remove-cseqs (pred)
(remove-base-cseqs pred)
(remove-target-cseqs pred))
(defun remove-base-cseqs (pred)
(map-table #'(lambda (base cseqs)
(setf (base-cseqs base)
(delete-if pred cseqs)))
(base-cseqs)))
(defun remove-target-cseqs (pred)
(map-table #'(lambda (target cseqs)
(setf (target-cseqs target)
(delete-if pred cseqs)))
(target-cseqs)))
PARSE
(defun parse (sent &key package)
(dolist (w sent)
(increment-text-position)
(reference w (text-position) (text-position) package)))
(defun reference (item start end package)
(dolist (abst (all-absts-of item))
(mapc #'(lambda (monitor)
(when (eql abst (monitor-base monitor))
(funcall (monitor-function monitor) item start end)))
*monitors*)
(advance-cseqs (target-cseqs abst) item start end package)))
(defun advance-cseqs (cseqs item start end package)
(dolist (cseq cseqs)
(when (cseq-applies-p cseq start)
(advance-cseq cseq item start end package))))
(defun cseq-applies-p (cseq start)
(or (null (cseq-end cseq))
(= (1+ (cseq-end cseq)) start)))
(defun advance-cseq (cseq item start end package)
(let ((base (cseq-base cseq))
(seq (rest (cseq-seq cseq)))
(slots (extend-slots cseq item))
(start (or (cseq-start cseq) start)))
(if (null seq)
(reference-instances base slots start end package)
(install-cseq (make-cseq :base base :seq seq :slots slots
:start start :end (text-position))))))
(defun extend-slots (cseq item)
(let ((spec (car (cseq-seq cseq)))
(slots (cseq-slots cseq)))
(if (consp spec)
(if (abstp item (cseq-target cseq))
slots
(list* (car spec) item slots))
slots)))
(defun reference-instances (base slots start end package)
(dolist (instance (find-instances base slots :add t :package package))
(reference instance start end package)))
(defun cseq-target (cseq)
(let ((spec (car (cseq-seq cseq))))
(if (consp spec)
(let ((base (cseq-base cseq))
(role (car spec)))
(or (inherit-filler base role)
(error "~S not a role in ~S" role base)))
spec)))
(defun text-position () *dmap-pos*)
(defun increment-text-position () (incf *dmap-pos*))
(defun reset-text-position () (setq *dmap-pos* 0))
(defmacro with-monitors (monitors &rest body)
`(let ((*monitors* (list ,@(mapcar #'expand-monitor monitors)))) ,@body))
(defun expand-monitor (monitor)
(if (atom monitor)
(expand-monitor (list monitor))
(destructuring-bind
(base &optional (tag :print) (function '#'print-monitor))
monitor
`(make-monitor :base ',base :tag ',tag :function ,function))))
(defun print-monitor (item start end)
(declare (ignore start end))
(print item))
(defun add-monitor (base &optional (tag :print) (function #'print-monitor))
(remove-monitor base tag)
(let ((monitor (make-monitor :base base :tag tag :function function)))
(push monitor *monitors*)
monitor))
(defun remove-monitors (&key (base nil base-p) (tag nil tag-p))
(setq *monitors*
(delete-if #'(lambda (monitor)
(and (or (null base)
(eql base (monitor-base monitor)))
(or (null tag-p)
(eql tag (monitor-tag monitor)))))
*monitors*)))
(provide "dmap")
|
3df31521870406543f1ccb58720fdfca7de509ca284fee4083176513675fc691 | vaibhavsagar/duffer | Spec.hs | # LANGUAGE TypeApplications #
# LANGUAGE ScopedTypeVariables #
import Control.Monad (zipWithM_)
import Data.Aeson (encode, decode, ToJSON, FromJSON)
import Data.ByteString (hGetContents)
import Data.ByteString.UTF8 (lines, toString)
import Data.Coerce (Coercible, coerce)
import Data.Foldable (traverse_)
import Data.Maybe (fromJust)
import System.Process (CreateProcess(..), StdStream(..), createProcess
,shell)
import Test.Hspec (hspec, expectationFailure, parallel, describe, it
,shouldBe, Expectation, SpecWith)
import Prelude hiding (lines)
import Duffer.Unified (readObject)
import Duffer.Loose.Objects (Ref)
import Duffer.WithRepo (withRepo)
import Duffer.JSON (GitObjectJSON(..), RefJSON(..))
repo :: String
repo = "../.git"
gitDir :: String
gitDir = "GIT_DIR=" ++ repo ++ " "
main :: IO ()
main = let objectTypes = ["blob", "tree", "commit", "tag"] in
traverse objectsOfType objectTypes >>=
hspec . parallel . describe "JSON" . testJSON objectTypes
testJSON :: [String] -> [[Ref]] -> SpecWith ()
testJSON types partitionedRefs = describe "decoding and encoding" $ do
zipWithM_ describeDecodingEncodingAll types partitionedRefs
testRefs $ concat partitionedRefs
describeDecodingEncodingAll :: String -> [Ref] -> SpecWith ()
describeDecodingEncodingAll oType =
it ("correctly decodes and encodes all " ++ oType ++ "s") .
traverse_ (decodeEncodeObject repo)
decodeEncodeObject :: FilePath -> Ref -> Expectation
decodeEncodeObject path ref = withRepo path (readObject ref) >>= maybe
(expectationFailure $ toString ref ++ "not read")
((roundTrip . GitObjectJSON) >>= shouldBe)
testRefs :: [Ref] -> SpecWith ()
testRefs = it "correctly decodes and encodes all refs" .
traverse_ ((roundTrip . RefJSON) >>= shouldBe)
roundTrip :: forall a b. (Coercible a b, FromJSON a, ToJSON a) => a -> b
roundTrip = coerce @a . fromJust . decode . encode
objectsOfType :: String -> IO [Ref]
objectsOfType objectType = fmap lines $
cmd (gitDir ++ "git rev-list --objects --all")
>|> (gitDir ++
"git cat-file --batch-check='%(objectname) %(objecttype) %(rest)'")
>|> ("grep '^[^ ]* " ++ objectType ++ "'")
>|> "cut -d' ' -f1"
>>= hGetContents
where
cmd command = createProcess (shell command) {std_out = CreatePipe} >>=
\(_, Just handle, _, _) -> return handle
(>|>) handle command = withPipe =<< handle
where withPipe pipe = createProcess (shell command)
{std_out = CreatePipe, std_in = UseHandle pipe} >>=
\(_, Just handle', _, _) -> return handle'
| null | https://raw.githubusercontent.com/vaibhavsagar/duffer/bad9c6c6cf09f717e22d3d65f95564236896d610/duffer-json/test/Spec.hs | haskell | # LANGUAGE TypeApplications #
# LANGUAGE ScopedTypeVariables #
import Control.Monad (zipWithM_)
import Data.Aeson (encode, decode, ToJSON, FromJSON)
import Data.ByteString (hGetContents)
import Data.ByteString.UTF8 (lines, toString)
import Data.Coerce (Coercible, coerce)
import Data.Foldable (traverse_)
import Data.Maybe (fromJust)
import System.Process (CreateProcess(..), StdStream(..), createProcess
,shell)
import Test.Hspec (hspec, expectationFailure, parallel, describe, it
,shouldBe, Expectation, SpecWith)
import Prelude hiding (lines)
import Duffer.Unified (readObject)
import Duffer.Loose.Objects (Ref)
import Duffer.WithRepo (withRepo)
import Duffer.JSON (GitObjectJSON(..), RefJSON(..))
repo :: String
repo = "../.git"
gitDir :: String
gitDir = "GIT_DIR=" ++ repo ++ " "
main :: IO ()
main = let objectTypes = ["blob", "tree", "commit", "tag"] in
traverse objectsOfType objectTypes >>=
hspec . parallel . describe "JSON" . testJSON objectTypes
testJSON :: [String] -> [[Ref]] -> SpecWith ()
testJSON types partitionedRefs = describe "decoding and encoding" $ do
zipWithM_ describeDecodingEncodingAll types partitionedRefs
testRefs $ concat partitionedRefs
describeDecodingEncodingAll :: String -> [Ref] -> SpecWith ()
describeDecodingEncodingAll oType =
it ("correctly decodes and encodes all " ++ oType ++ "s") .
traverse_ (decodeEncodeObject repo)
decodeEncodeObject :: FilePath -> Ref -> Expectation
decodeEncodeObject path ref = withRepo path (readObject ref) >>= maybe
(expectationFailure $ toString ref ++ "not read")
((roundTrip . GitObjectJSON) >>= shouldBe)
testRefs :: [Ref] -> SpecWith ()
testRefs = it "correctly decodes and encodes all refs" .
traverse_ ((roundTrip . RefJSON) >>= shouldBe)
roundTrip :: forall a b. (Coercible a b, FromJSON a, ToJSON a) => a -> b
roundTrip = coerce @a . fromJust . decode . encode
objectsOfType :: String -> IO [Ref]
objectsOfType objectType = fmap lines $
cmd (gitDir ++ "git rev-list --objects --all")
>|> (gitDir ++
"git cat-file --batch-check='%(objectname) %(objecttype) %(rest)'")
>|> ("grep '^[^ ]* " ++ objectType ++ "'")
>|> "cut -d' ' -f1"
>>= hGetContents
where
cmd command = createProcess (shell command) {std_out = CreatePipe} >>=
\(_, Just handle, _, _) -> return handle
(>|>) handle command = withPipe =<< handle
where withPipe pipe = createProcess (shell command)
{std_out = CreatePipe, std_in = UseHandle pipe} >>=
\(_, Just handle', _, _) -> return handle'
| |
3e8b91424e5028000a6ffcd3dec1fa2b7ea9feab72a0d9c0887c9bdcafbeb35a | ZHaskell/zoovisitor | Utils.hs | module ZooKeeper.Recipe.Utils
( -- * Types
SequenceNumWithGUID(..)
, mkSequenceNumWithGUID
, extractSeqNum
, ZkRecipeException (..)
* operations
, createSeqEphemeralZNode
) where
import Control.Exception
import qualified Data.List as L
import Z.Data.CBytes (CBytes)
import qualified Z.Data.CBytes as CB
import ZooKeeper
import ZooKeeper.Exception
import ZooKeeper.Types
--------------------------------------------------------------------------------
| Represenets a name of a SEQUENCE|EPHEMERAL znode . It contains two parts ,
a GUID , and a sequence number . The GUID is used for handleing recoverable
-- exceptions so we only care about the sequence number part when we comparing
two of them .
newtype SequenceNumWithGUID = SequenceNumWithGUID
{ unSequenceNumWithGUID :: CBytes
}
mkSequenceNumWithGUID :: CBytes -> SequenceNumWithGUID
mkSequenceNumWithGUID = SequenceNumWithGUID
instance Eq SequenceNumWithGUID where
(SequenceNumWithGUID s1) == (SequenceNumWithGUID s2) =
extractSeqNum s1 == extractSeqNum s2
instance Ord SequenceNumWithGUID where
(SequenceNumWithGUID s1) <= (SequenceNumWithGUID s2) =
extractSeqNum s1 <= extractSeqNum s2
instance Show SequenceNumWithGUID where
show (SequenceNumWithGUID s) = CB.unpack s
| Exrtact only the sequence number part from an ` SequenceNumWithGUID ` .
extractSeqNum :: CBytes -> CBytes
extractSeqNum = CB.pack . reverse . takeWhile (/= '_') . reverse . CB.unpack
--------------------------------------------------------------------------------
-- | Creates a sequential and ephemeral znode with specified prefix
and GUID . The created znode is as ` prefixPath / GUID - n_0000000001 ` .
Note that it uses a GUID to handle recoverable exceptions , see
-- [this](#sc_recipes_GuidNote)
-- for more details.
createSeqEphemeralZNode :: ZHandle -> CBytes -> CBytes -> IO StringCompletion
createSeqEphemeralZNode zk prefixPath guid = do
let seqPath = prefixPath <> "/" <> guid <> "_"
catches (zooCreate zk seqPath Nothing zooOpenAclUnsafe ZooEphemeralSequential)
[ Handler (\(_ :: ZCONNECTIONLOSS ) -> retry)
, Handler (\(_ :: ZOPERATIONTIMEOUT) -> retry)
]
where
retry :: IO StringCompletion
retry = do
(StringsCompletion (StringVector children)) <- zooGetChildren zk prefixPath
case L.find (\child -> CB.unpack guid `L.isSubsequenceOf` CB.unpack child) children of
Just child -> return $ StringCompletion child
Nothing -> createSeqEphemeralZNode zk prefixPath guid
--------------------------------------------------------------------------------
newtype ZkRecipeException = ZkRecipeException String
deriving (Show, Eq)
instance Exception ZkRecipeException
| null | https://raw.githubusercontent.com/ZHaskell/zoovisitor/53053dbf358899906565a05dc2477fd1679b8aa9/src/ZooKeeper/Recipe/Utils.hs | haskell | * Types
------------------------------------------------------------------------------
exceptions so we only care about the sequence number part when we comparing
------------------------------------------------------------------------------
| Creates a sequential and ephemeral znode with specified prefix
[this](#sc_recipes_GuidNote)
for more details.
------------------------------------------------------------------------------ | module ZooKeeper.Recipe.Utils
SequenceNumWithGUID(..)
, mkSequenceNumWithGUID
, extractSeqNum
, ZkRecipeException (..)
* operations
, createSeqEphemeralZNode
) where
import Control.Exception
import qualified Data.List as L
import Z.Data.CBytes (CBytes)
import qualified Z.Data.CBytes as CB
import ZooKeeper
import ZooKeeper.Exception
import ZooKeeper.Types
| Represenets a name of a SEQUENCE|EPHEMERAL znode . It contains two parts ,
a GUID , and a sequence number . The GUID is used for handleing recoverable
two of them .
newtype SequenceNumWithGUID = SequenceNumWithGUID
{ unSequenceNumWithGUID :: CBytes
}
mkSequenceNumWithGUID :: CBytes -> SequenceNumWithGUID
mkSequenceNumWithGUID = SequenceNumWithGUID
instance Eq SequenceNumWithGUID where
(SequenceNumWithGUID s1) == (SequenceNumWithGUID s2) =
extractSeqNum s1 == extractSeqNum s2
instance Ord SequenceNumWithGUID where
(SequenceNumWithGUID s1) <= (SequenceNumWithGUID s2) =
extractSeqNum s1 <= extractSeqNum s2
instance Show SequenceNumWithGUID where
show (SequenceNumWithGUID s) = CB.unpack s
| Exrtact only the sequence number part from an ` SequenceNumWithGUID ` .
extractSeqNum :: CBytes -> CBytes
extractSeqNum = CB.pack . reverse . takeWhile (/= '_') . reverse . CB.unpack
and GUID . The created znode is as ` prefixPath / GUID - n_0000000001 ` .
Note that it uses a GUID to handle recoverable exceptions , see
createSeqEphemeralZNode :: ZHandle -> CBytes -> CBytes -> IO StringCompletion
createSeqEphemeralZNode zk prefixPath guid = do
let seqPath = prefixPath <> "/" <> guid <> "_"
catches (zooCreate zk seqPath Nothing zooOpenAclUnsafe ZooEphemeralSequential)
[ Handler (\(_ :: ZCONNECTIONLOSS ) -> retry)
, Handler (\(_ :: ZOPERATIONTIMEOUT) -> retry)
]
where
retry :: IO StringCompletion
retry = do
(StringsCompletion (StringVector children)) <- zooGetChildren zk prefixPath
case L.find (\child -> CB.unpack guid `L.isSubsequenceOf` CB.unpack child) children of
Just child -> return $ StringCompletion child
Nothing -> createSeqEphemeralZNode zk prefixPath guid
newtype ZkRecipeException = ZkRecipeException String
deriving (Show, Eq)
instance Exception ZkRecipeException
|
bb435afeef7f1073cae36c506601ea69e68510357722b09ab44432654cdaf20a | brainsickcc/bscc | HelpAndVersion.hs | Copyright © 2012
-- This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see </>.
module Test.Bscc.HelpAndVersion (helpAndVersionTests) where
import Bscc.HelpAndVersion (helpMessage, versionMessage)
import Data.List (isInfixOf)
import qualified Test.Tasty as T
import qualified Test.Tasty.HUnit as HU
helpAndVersionTests =
T.testGroup "Bscc.HelpAndVersion"
[
HU.testCase "help message" test_helpMessage,
HU.testCase "version message" test_versionMessage
]
test_helpMessage = do
HU.assertBool "" $ "" `isInfixOf` helpMessage
test_versionMessage = do
let hasCopyright = "Copyright" `isInfixOf` versionMessage
acksIain = "Iain Nicol" `isInfixOf` versionMessage
HU.assertBool "" $ hasCopyright && acksIain
| null | https://raw.githubusercontent.com/brainsickcc/bscc/f47a83f5c77d0ef483e957032715ac4007fcb44d/test/Test/Bscc/HelpAndVersion.hs | haskell | This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>. | Copyright © 2012
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU Affero General Public License
module Test.Bscc.HelpAndVersion (helpAndVersionTests) where
import Bscc.HelpAndVersion (helpMessage, versionMessage)
import Data.List (isInfixOf)
import qualified Test.Tasty as T
import qualified Test.Tasty.HUnit as HU
helpAndVersionTests =
T.testGroup "Bscc.HelpAndVersion"
[
HU.testCase "help message" test_helpMessage,
HU.testCase "version message" test_versionMessage
]
test_helpMessage = do
HU.assertBool "" $ "" `isInfixOf` helpMessage
test_versionMessage = do
let hasCopyright = "Copyright" `isInfixOf` versionMessage
acksIain = "Iain Nicol" `isInfixOf` versionMessage
HU.assertBool "" $ hasCopyright && acksIain
|
4c27f33bd1e05c7013e3b3436d98438ea1f076fcf922878ae7fc93aa150d8287 | higherkindness/mu-haskell | ExampleServer.hs | # language DataKinds #
{-# language OverloadedStrings #-}
{-# language TypeFamilies #-}
module Main where
import Mu.Adapter.ProtoBuf
import Mu.GRpc.Server
import Mu.Rpc.Examples
import Mu.Schema
type instance AnnotatedSchema ProtoBufAnnotation QuickstartSchema
= '[ 'AnnField "HelloRequest" "name" ('ProtoBufId 1 '[])
, 'AnnField "HelloResponse" "message" ('ProtoBufId 1 '[])
, 'AnnField "HiRequest" "number" ('ProtoBufId 1 '[]) ]
main :: IO ()
main = do
putStrLn "running quickstart application"
runGRpcApp msgProtoBuf 8080 quickstartServer
| null | https://raw.githubusercontent.com/higherkindness/mu-haskell/e41ba786f556cfac962e0f183b36bf9ae81d69e4/grpc/server/exe/ExampleServer.hs | haskell | # language OverloadedStrings #
# language TypeFamilies # | # language DataKinds #
module Main where
import Mu.Adapter.ProtoBuf
import Mu.GRpc.Server
import Mu.Rpc.Examples
import Mu.Schema
type instance AnnotatedSchema ProtoBufAnnotation QuickstartSchema
= '[ 'AnnField "HelloRequest" "name" ('ProtoBufId 1 '[])
, 'AnnField "HelloResponse" "message" ('ProtoBufId 1 '[])
, 'AnnField "HiRequest" "number" ('ProtoBufId 1 '[]) ]
main :: IO ()
main = do
putStrLn "running quickstart application"
runGRpcApp msgProtoBuf 8080 quickstartServer
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.