_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
2026a0c168dac8b49912d18e1ebb6b1ec09f3db2f51e6d3dc758cd55344da882 | erlang/otp | mnesia_evil_coverage_test.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2022. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
-module(mnesia_evil_coverage_test).
-author('').
-include("mnesia_test_lib.hrl").
-export([init_per_testcase/2, end_per_testcase/2,
init_per_group/2, end_per_group/2,
all/0, groups/0]).
-export([system_info/1, table_info/1, error_description/1,
db_node_lifecycle/1, evil_delete_db_node/1, start_and_stop/1,
checkpoint/1, table_lifecycle/1, storage_options/1,
add_copy_conflict/1, add_copy_when_going_down/1, add_copy_when_dst_going_down/1,
add_copy_with_down/1,
replica_management/1, clear_table_during_load/1,
schema_availability/1, local_content/1,
replica_location/1, user_properties/1, unsupp_user_props/1,
sorted_ets/1, index_cleanup/1,
change_table_access_mode/1, change_table_load_order/1,
set_master_nodes/1, offline_set_master_nodes/1,
dump_tables/1, dump_log/1, wait_for_tables/1, force_load_table/1,
snmp_open_table/1, snmp_close_table/1, snmp_get_next_index/1,
snmp_get_row/1, snmp_get_mnesia_key/1, snmp_update_counter/1,
snmp_order/1, subscribe_standard/1, subscribe_extended/1,
foldl/1, info/1, schema_0/1, schema_1/1, view_0/1, view_1/1, view_2/1,
lkill/1, kill/1,
record_name_dirty_access_ram/1,
record_name_dirty_access_disc/1,
record_name_dirty_access_disc_only/1,
record_name_dirty_access_xets/1]).
-export([info_check/8, index_size/1]).
-define(cleanup(N, Config),
mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
N, Config, ?FILE, ?LINE)).
%% Common Test callback: delegate all per-testcase setup to the shared
%% mnesia test library (node allocation, timeouts, etc.).
init_per_testcase(Func, Conf) ->
mnesia_test_lib:init_per_testcase(Func, Conf).
%% Common Test callback: delegate all per-testcase teardown to the shared
%% mnesia test library.
end_per_testcase(Func, Conf) ->
mnesia_test_lib:end_per_testcase(Func, Conf).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Common Test callback: the complete list of test cases and test groups
%% run by this suite.  The {Module, all} tuples at the end pull in the
%% companion suites (dirty access, transaction access and backup tests).
all() ->
[system_info, table_info, error_description,
db_node_lifecycle, evil_delete_db_node, start_and_stop,
checkpoint, table_lifecycle, storage_options,
add_copy_conflict,
add_copy_when_going_down, add_copy_when_dst_going_down, add_copy_with_down,
replica_management,
clear_table_during_load,
schema_availability, local_content,
{group, table_access_modifications}, replica_location,
{group, table_sync}, user_properties, unsupp_user_props,
{group, record_name}, {group, snmp_access},
{group, subscriptions}, {group, iteration},
{group, debug_support}, sorted_ets, index_cleanup,
{mnesia_dirty_access_test, all},
{mnesia_trans_access_test, all},
{mnesia_evil_backup, all}].
%% Common Test callback: definitions of the test groups referenced from
%% all/0.  Note that record_name nests the record_name_dirty_access group.
groups() ->
[{table_access_modifications, [],
[change_table_access_mode, change_table_load_order,
set_master_nodes, offline_set_master_nodes]},
{table_sync, [],
[dump_tables, dump_log, wait_for_tables,
force_load_table]},
{snmp_access, [],
[snmp_open_table, snmp_close_table, snmp_get_next_index,
snmp_get_row, snmp_get_mnesia_key, snmp_update_counter,
snmp_order]},
{subscriptions, [],
[subscribe_standard, subscribe_extended]},
{iteration, [], [foldl]},
{debug_support, [],
[info, schema_0, schema_1, view_0, view_1, view_2,
lkill, kill]},
{record_name, [], [{group, record_name_dirty_access}]},
{record_name_dirty_access, [],
[record_name_dirty_access_ram,
record_name_dirty_access_disc,
record_name_dirty_access_disc_only,
record_name_dirty_access_xets
]}].
%% Common Test callback: no per-group setup is required for any group in
%% this suite, so the configuration is passed through unchanged.
init_per_group(_Group, Conf) ->
    Conf.
%% Common Test callback: no per-group cleanup is required for any group in
%% this suite, so the configuration is returned as-is.
end_per_group(_Group, Conf) ->
    Conf.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Get meta info about Mnesia
%% Exercise mnesia:system_info/1 for every documented key and check that
%% each returns a value of the expected shape (atom/list/integer/tuple).
system_info(suite) -> [];
system_info(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(all, Config),
Ns = ?sort(Nodes),
?match(yes, mnesia:system_info(is_running)),
?match(Ns, ?sort(mnesia:system_info(db_nodes))),
?match(Ns, ?sort(mnesia:system_info(running_db_nodes))),
?match(A when is_atom(A), mnesia:system_info(debug)),
?match(L when is_list(L), mnesia:system_info(directory)),
?match(L when is_list(L), mnesia:system_info(log_version)),
?match({_, _}, mnesia:system_info(schema_version)),
?match(L when is_list(L), mnesia:system_info(tables)),
?match(L when is_list(L), mnesia:system_info(local_tables)),
?match(L when is_list(L), mnesia:system_info(held_locks)),
?match(L when is_list(L), mnesia:system_info(lock_queue)),
?match(L when is_list(L), mnesia:system_info(transactions)),
?match(I when is_integer(I), mnesia:system_info(transaction_failures)),
?match(I when is_integer(I), mnesia:system_info(transaction_commits)),
?match(I when is_integer(I), mnesia:system_info(transaction_restarts)),
?match(L when is_list(L), mnesia:system_info(checkpoints)),
?match(A when is_atom(A), mnesia:system_info(backup_module)),
?match(true, mnesia:system_info(auto_repair)),
?match({_, _}, mnesia:system_info(dump_log_interval)),
?match(A when is_atom(A), mnesia:system_info(dump_log_update_in_place)),
?match(I when is_integer(I), mnesia:system_info(transaction_log_writes)),
?match(I when is_integer(I), mnesia:system_info(send_compressed)),
?match(I when is_integer(I), mnesia:system_info(max_transfer_size)),
?match(L when is_list(L), mnesia:system_info(all)),
?match(L when is_list(L), mnesia:system_info(backend_types)),
%% An unknown key must abort with a badarg reason.
?match({'EXIT', {aborted, Reason }} when element(1, Reason) == badarg
, mnesia:system_info(ali_baba)),
?verify_mnesia(Nodes, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Get meta info about table
%% Exercise mnesia:table_info/2: create a bag table with mixed storage
%% types across three nodes, populate it, and verify the meta info both
%% locally and (via info_check/8) on every node.  Finally check that
%% table_info works for a table that is not yet loaded after a restart.
table_info(suite) -> [];
table_info(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Tab = table_info,
Type = bag,
ValPos = 3,
Attrs = [k, v],
Arity = length(Attrs) +1,
%% Diskless nodes cannot host disc_only_copies; fall back to ram_copies.
Schema =
case mnesia_test_lib:diskless(Config) of
true -> [{type, Type}, {attributes, Attrs}, {index, [ValPos]},
{ram_copies, [Node1, Node2]}, {ext_ets, [Node3]}];
false ->
[{type, Type}, {attributes, Attrs}, {index, [ValPos]},
{disc_only_copies, [Node1]}, {ram_copies, [Node2]},
{ext_ets, [Node3]}]
end,
?match({atomic, ok}, mnesia:create_table(Tab, Schema)),
Size = 10,
Keys = lists:seq(1, Size),
Records = [{Tab, A, 7} || A <- Keys],
lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
case mnesia_test_lib:diskless(Config) of
true ->
?match(Nodes, mnesia:table_info(Tab, ram_copies));
false ->
?match([Node3], mnesia:table_info(Tab, ext_ets)),
?match([Node2], mnesia:table_info(Tab, ram_copies)),
?match([Node1], mnesia:table_info(Tab, mnesia_test_lib:storage_type(disc_only_copies, Config)))
end,
Read = [Node1, Node2, Node3],
Write = ?sort([Node1, Node2, Node3]),
%% Run the same table_info checks on every node.
{[ok,ok,ok], []} = rpc:multicall(Nodes, ?MODULE, info_check,
[Tab, Read, Write, Size, Type, ValPos, Arity, Attrs]),
?match({atomic, Attrs}, mnesia:transaction(fun() -> mnesia:table_info(Tab, attributes) end)),
?match(L when is_list(L), mnesia:table_info(Tab, all)),
%% Table info when table not loaded
?match({atomic, ok},
mnesia:create_table(tab_info, Schema)),
?match(stopped, mnesia:stop()),
?match(stopped, rpc:call(Node2, mnesia, stop, [])),
?match(ok, mnesia:start()),
?match(ok, mnesia:wait_for_tables([tab_info], 5000)),
?match(0, mnesia:table_info(tab_info, size)),
?verify_mnesia([Node1, Node3], [Node2]).
%% Helper run via rpc:multicall on every node: assert that the node's
%% local view of Tab's meta info (replica locations, size, type, index,
%% arity, attributes, wild pattern) matches the expected values.
info_check(Tab, Read, Write, Size, Type, ValPos, Arity, Attrs) ->
?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read)),
?match(Write, ?sort(mnesia:table_info(Tab, where_to_write))),
?match(Mem when is_integer(Mem), mnesia:table_info(Tab, memory)),
?match(Size, mnesia:table_info(Tab, size)),
?match(Type, mnesia:table_info(Tab, type)),
?match([ValPos], mnesia:table_info(Tab, index)),
?match(Arity, mnesia:table_info(Tab, arity)),
?match(Attrs, mnesia:table_info(Tab, attributes)),
?match({Tab, '_', '_'}, mnesia:table_info(Tab, wild_pattern)),
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Check the error descriptions
%% Check that mnesia:error_description/1 yields something sensible for
%% every well-known error tag, for unknown terms, and for a realistic
%% nested error tuple.
error_description(suite) -> [];
error_description(Config) when is_list(Config) ->
?acquire_nodes(1, Config),
Errors = [nested_transaction, badarg, no_transaction, combine_error,
bad_index, already_exists, index_exists, no_exists, system_limit,
mnesia_down, not_a_db_node, bad_type, node_not_running,
truncated_binary_file, active, illegal
],
%% Unknown error terms are returned unchanged-ish rather than crashing.
?match(X when is_atom(X), mnesia:error_description({error, bad_error_msg})),
?match(X when is_tuple(X), mnesia:error_description({'EXIT', pid, bad})),
%% This is real error msg
?match(X when is_tuple(X), mnesia:error_description(
{error,
{"Cannot prepare checkpoint (bad reply)",
{{877,957351,758147},a@legolas},
{error,{node_not_running,a1@legolas}}}})),
%% Every tag must describe cleanly under all three wrapper classes.
check_errors(error, Errors),
check_errors(aborted, Errors),
check_errors('EXIT', Errors).
%% Assert that mnesia:error_description/1 produces a printable list for
%% every error tag when wrapped in the given class (error/aborted/'EXIT').
check_errors(Class, Tags) ->
    lists:foreach(
      fun(Tag) ->
              ?match(X when is_list(X), mnesia:error_description({Class, Tag}))
      end, Tags).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add and drop db nodes
%% Add and drop whole db nodes: delete/recreate the schema, change the
%% schema's storage type on individual nodes, and remove nodes (via
%% del_table_copy(schema, Node)) while checking which table replicas
%% survive on the remaining nodes.
db_node_lifecycle(suite) -> [];
db_node_lifecycle(Config) when is_list(Config) ->
[Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
Tab = db_node_lifecycle,
%% Who/1 returns all nodes holding any kind of replica of table T.
Who = fun(T) ->
L1 = mnesia:table_info(T, ram_copies),
L2 = mnesia:table_info(T, disc_copies),
L3 = mnesia:table_info(T, disc_only_copies),
L4 = mnesia:table_info(T, ext_ets),
L1 ++ L2 ++ L3 ++ L4
end,
SNs = ?sort(AllNodes),
Schema = [{name, Tab}, {ram_copies, [Node1, Node2]}],
?match({atomic, ok}, mnesia:create_table(Schema)),
?match([], mnesia_test_lib:stop_mnesia(AllNodes)),
?match(ok, mnesia:delete_schema(AllNodes)),
%% create_schema argument validation.
?match({error, _}, mnesia:create_schema(foo)),
?match({error, _}, mnesia:create_schema([foo])),
?match({error, _}, mnesia:create_schema([foo@bar])),
?match(ok, mnesia:start()),
%% Started without a schema on disc => disc operations must abort.
?match(false, mnesia:system_info(use_dir)),
?match([ram_copies, disc_copies, disc_only_copies], mnesia:system_info(backend_types)),
?match({atomic, ok}, mnesia:create_table(Tab, [])),
?match({aborted, {has_no_disc, Node1}}, mnesia:dump_tables([Tab])),
?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_copies)),
?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_only_copies)),
?match(stopped, mnesia:stop()),
?match(ok, mnesia:create_schema(AllNodes, ?BACKEND)),
?match([], mnesia_test_lib:start_mnesia(AllNodes)),
?match([SNs, SNs, SNs],
lists:map(fun lists:sort/1,
element(1, rpc:multicall(AllNodes, mnesia, table_info,
[schema, disc_copies])))),
%% Toggle the schema storage type on Node2 back and forth.
?match({aborted, {already_exists, schema, Node2, _}},
mnesia:change_table_copy_type(schema, Node2, disc_copies)),
?match({atomic, ok},
mnesia:change_table_copy_type(schema, Node2, ram_copies)),
?match({aborted, {already_exists, schema, Node2, _}},
mnesia:change_table_copy_type(schema, Node2, ram_copies)),
?match({atomic, ok},
mnesia:change_table_copy_type(schema, Node2, disc_copies)),
?match([SNs, SNs, SNs],
lists:map(fun lists:sort/1,
element(1, rpc:multicall(AllNodes, mnesia, table_info,
[schema, disc_copies])))),
%% Delete the DB
Tab2 = disk_tab,
Tab3 = not_local,
Tab4 = local,
Tab5 = remote,
Tab6 = ext1,
Tabs = [Schema,
[{name, Tab2}, {disc_copies, AllNodes}],
[{name, Tab3}, {ram_copies, [Node2, Node3]}],
[{name, Tab4}, {disc_only_copies, [Node1]}],
[{name, Tab5}, {disc_only_copies, [Node2]}],
[{name, Tab6}, {ext_ets, [Node1, Node2]}]
],
[?match({atomic, ok}, mnesia:create_table(T)) || T <- Tabs ],
%% A running node cannot be removed; the remover must be running itself.
?match({aborted, {active, _, Node2}},
mnesia:del_table_copy(schema, Node2)),
?match([], mnesia_test_lib:stop_mnesia([Node1])),
?match({aborted, {node_not_running, Node1}},
mnesia:del_table_copy(schema, Node2)),
?match([], mnesia_test_lib:start_mnesia([Node1],[Tab2,Tab4,Tab6])),
?match([], mnesia_test_lib:stop_mnesia([Node2])),
?match({atomic, ok}, mnesia:del_table_copy(schema, Node2)),
%% Check
RemNodes = AllNodes -- [Node2],
?match(RemNodes, mnesia:system_info(db_nodes)),
?match([Node1], Who(Tab)),
?match(RemNodes, Who(Tab2)),
?match([Node3], Who(Tab3)),
?match([Node1], Who(Tab4)),
%% Tab5 only lived on the removed node, so it is gone entirely.
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
?match([Node1], Who(Tab6)),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab2, Node3, ram_copies)),
?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node3, ram_copies)),
?match([], mnesia_test_lib:stop_mnesia([Node3])),
?match({atomic, ok}, mnesia:del_table_copy(schema, Node3)),
?match([Node1], mnesia:system_info(db_nodes)),
?match([Node1], Who(Tab)),
?match([Node1], Who(Tab2)),
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab3)),
?match([Node1], Who(Tab4)),
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
?verify_mnesia([Node1], []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Drop a db node when several disk resident nodes are down
%% Drop a db node while several disc-resident nodes are down and verify
%% that the remaining nodes agree on db_nodes and replica placement.
evil_delete_db_node(suite) -> [];
evil_delete_db_node(Config) when is_list(Config) ->
[Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
Tab = evil_delete_db_node,
?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies, AllNodes}])),
?match([], mnesia_test_lib:stop_mnesia([Node2, Node3])),
%% Remove Node2 from the schema while both Node2 and Node3 are down.
?match({atomic, ok}, mnesia:del_table_copy(schema, Node2)),
RemNodes = AllNodes -- [Node2],
?match(RemNodes, mnesia:system_info(db_nodes)),
?match(RemNodes, mnesia:table_info(Tab, disc_copies)),
?verify_mnesia([Node1], []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Start and stop the system
%% Start and stop mnesia on one node; stop/1 and start/0 must be
%% idempotent (calling them twice in a row is harmless).
start_and_stop(suite) -> [];
start_and_stop(Config) when is_list(Config) ->
[Node1 | _] = Nodes = ?acquire_nodes(all, Config),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?match(ok, rpc:call(Node1, mnesia, start, [])),
?match(ok, rpc:call(Node1, mnesia, start, [])),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?verify_mnesia(Nodes -- [Node1], [Node1]),
?match([], mnesia_test_lib:start_mnesia(Nodes)),
?verify_mnesia(Nodes, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Checkpoints and backup management
%% Run the checkpoint scenario on both a 2-node and a 3-node cluster.
checkpoint(suite) -> [];
checkpoint(Config) when is_list(Config) ->
checkpoint(2, Config),
checkpoint(3, Config),
ok.
%% Activate/deactivate checkpoints over tables of every storage type and
%% replica layout, and verify argument validation of the checkpoint API.
checkpoint(NodeConfig, Config) ->
[Node1 | _] = TabNodes = ?acquire_nodes(NodeConfig, Config),
%% Create local_checkpoint_<Type><N> tables: on the first node only,
%% on all nodes, and on the last node only.
CreateTab = fun(Type, N, Ns) ->
Tab0 = lists:concat(["local_checkpoint_", Type, N]),
Tab = list_to_atom(Tab0),
catch mnesia:delete_table(Tab),
?match({atomic, ok},
mnesia:create_table(Tab, [{Type, Ns}])),
Tab
end,
CreateTabs = fun(Type, Acc) ->
[CreateTab(Type, 1, [hd(TabNodes)]),
CreateTab(Type, 2, TabNodes),
CreateTab(Type, 3, [lists:last(TabNodes)])] ++
Acc
end,
Types = [ram_copies, disc_copies, disc_only_copies, ext_ets],
Tabs = lists:foldl(CreateTabs, [], Types),
Recs = ?sort([{T, N, N} || T <- Tabs, N <- lists:seq(1, 10)]),
lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs),
CpName = a_checkpoint_name,
%% {allow_remote, false} must fail since some tables are remote-only.
MinArgs = [{name, CpName}, {min, Tabs}, {allow_remote, false}],
?match({error, _}, rpc:call(Node1, mnesia, activate_checkpoint, [MinArgs])),
MaxArgs = [{name, CpName}, {max, Tabs}, {allow_remote, true}],
?match({ok, CpName, L} when is_list(L),
rpc:call(Node1, mnesia, activate_checkpoint, [MaxArgs])),
?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
Args = [{name, CpName}, {min, Tabs}, {allow_remote, true}],
?match({ok, CpName, L} when is_list(L),
rpc:call(Node1, mnesia, activate_checkpoint, [Args])),
%% Writes after activation must not disturb the checkpoint.
Recs2 = ?sort([{T, K, 0} || {T, K, _} <- Recs]),
lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs2),
?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
%% Error handling for bad checkpoint specifications.
?match({error, Reason1 } when element(1, Reason1) == no_exists,
mnesia:deactivate_checkpoint(CpName)),
?match({error, Reason2 } when element(1, Reason2) == badarg,
mnesia:activate_checkpoint(foo)),
?match({error, Reason3 } when element(1, Reason3) == badarg,
mnesia:activate_checkpoint([{foo, foo}])),
?match({error, Reason4 } when element(1, Reason4) == badarg,
mnesia:activate_checkpoint([{max, foo}])),
?match({error, Reason5 } when element(1, Reason5) == badarg,
mnesia:activate_checkpoint([{min, foo}])),
?match({error, _}, mnesia:activate_checkpoint([{min, [foo@bar]}])),
?match({error, Reason6 } when element(1, Reason6) == badarg,
mnesia:activate_checkpoint([{allow_remote, foo}])),
Fun = fun(Tab) -> ?match({atomic, ok}, mnesia:delete_table(Tab)) end,
lists:foreach(Fun, Tabs),
?verify_mnesia(TabNodes, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Create and delete tables
%% Get meta info about table
-define(vrl, mnesia_test_lib:verify_replica_location).
%% Create, delete, move and retype replicas of a table and verify on all
%% nodes (via mnesia_test_lib:verify_replica_location, the ?vrl macro)
%% that every node agrees about where the replicas live.
%% NOTE(review): the bare "Create three replicas" / "Delete one replica" /
%% "Move one replica" lines had lost their %% comment markers in this
%% copy, which breaks compilation; they are restored as comments here.
replica_location(suite) -> [];
replica_location(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    %% Create three replicas
    Check = fun(Tab, Schema) ->
                    ?match({atomic, ok}, mnesia:create_table([{name, Tab}|Schema])),
                    ?match([], ?vrl(Tab, [Node1], [Node2], [Node3], Nodes)),
                    %% Delete one replica
                    ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
                    ?match([], ?vrl(Tab, [Node1], [], [Node3], Nodes)),
                    %% Move one replica
                    ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
                    ?match([], ?vrl(Tab, [Node2], [], [Node3], Nodes)),
                    %% Change replica type
                    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, ram_copies)),
                    ?match([], ?vrl(Tab, [], [Node2], [Node3], Nodes))
            end,
    Check(replica_location, [{disc_only_copies, [Node1]},
                             {ram_copies, [Node2]}, {disc_copies, [Node3]}]),
    Check(ext_location, [{disc_only_copies, [Node1]},
                         {ext_ets, [Node2]}, {disc_copies, [Node3]}]),
    ?verify_mnesia(Nodes, []).
%% Create and delete tables: argument validation of create_table/1,
%% behavior inside transactions (nested schema ops abort), index option
%% validation, and collisions with pre-existing ets tables.
table_lifecycle(suite) -> [];
table_lifecycle(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
%% Option order in the property list must not matter.
?match({atomic, ok}, mnesia:create_table([{type, bag},
{ram_copies, [Node1]},
{attributes, [rajtan, tajtan]},
{name, order_of_args}])),
?match([], mnesia:dirty_read({order_of_args, 4711})),
?match({atomic, ok}, mnesia:create_table([{name, already_exists},
{ram_copies, [Node1]}])),
%% Invalid create_table specifications must abort with a telling reason.
?match({aborted, Reason23 } when element(1, Reason23) ==already_exists,
mnesia:create_table([{name, already_exists},
{ram_copies, [Node1]}])),
?match({aborted, Reason21 } when element(1, Reason21) == bad_type,
mnesia:create_table([{name, bad_node}, {ram_copies, ["foo"]}])),
?match({aborted, Reason2} when element(1, Reason2) == bad_type,
mnesia:create_table([{name, zero_arity}, {attributes, []}])),
?match({aborted, Reason3} when element(1, Reason3) == badarg,
mnesia:create_table([])),
?match({aborted, Reason4} when element(1, Reason4) == badarg,
mnesia:create_table(atom)),
?match({aborted, Reason5} when element(1, Reason5) == badarg,
mnesia:create_table({cstruct, table_name_as_atom})),
?match({aborted, Reason6 } when element(1, Reason6) == bad_type,
mnesia:create_table([{name, no_host}, {ram_copies, foo}])),
?match({aborted, Reason7 } when element(1, Reason7) == bad_type,
mnesia:create_table([{name, no_host}, {disc_only_copies, foo}])),
?match({aborted, Reason8} when element(1, Reason8) == bad_type,
mnesia:create_table([{name, no_host}, {disc_copies, foo}])),
%% Schema operations are not allowed inside another transaction.
CreateFun =
fun() -> ?match({aborted, nested_transaction},
mnesia:create_table([{name, nested_trans}])), ok
end,
?match({atomic, ok}, mnesia:transaction(CreateFun)),
?match({atomic, ok}, mnesia:create_table([{name, remote_tab},
{ram_copies, [Node2]}])),
?match({atomic, ok}, mnesia:create_table([{name, a_brand_new_tab},
{ram_copies, [Node1]}])),
?match([], mnesia:dirty_read({a_brand_new_tab, 4711})),
?match({atomic, ok}, mnesia:delete_table(a_brand_new_tab)),
%% Reads on, and re-deletion of, a deleted table must fail.
?match({'EXIT', {aborted, Reason31}} when element(1, Reason31) == no_exists,
mnesia:dirty_read({a_brand_new_tab, 4711})),
?match({aborted, Reason41} when element(1, Reason41) == no_exists,
mnesia:delete_table(a_brand_new_tab)),
?match({aborted, Reason9} when element(1, Reason9) == badarg,
mnesia:create_table([])),
?match({atomic, ok}, mnesia:create_table([{name, nested_del_trans},
{ram_copies, [Node1]}])),
DeleteFun = fun() -> ?match({aborted, nested_transaction},
mnesia:delete_table(nested_del_trans)), ok end,
?match({atomic, ok}, mnesia:transaction(DeleteFun)),
%% Index positions 1 (key) and 2 (record name position) are invalid;
%% only positions >= 3 may be indexed.
?match({aborted, Reason10} when element(1, Reason10) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, 2}])),
?match({aborted, Reason32} when element(1, Reason32) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [-1]}])),
?match({aborted, Reason33} when element(1, Reason33) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [0]}])),
?match({aborted, Reason34} when element(1, Reason34) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [1]}])),
?match({aborted, Reason35} when element(1, Reason35) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [2]}])),
?match({atomic, ok},
mnesia:create_table([{name, create_with_index}, {index, [3]},
{ram_copies, [Node1]}])),
%% A clashing plain ets table blocks creation until it is removed.
ets:new(ets_table, [named_table]),
?match({aborted, _}, mnesia:create_table(ets_table, [{ram_copies, Nodes}])),
?match({aborted, _}, mnesia:create_table(ets_table, [{ram_copies, [Node1]}])),
ets:delete(ets_table),
?match({atomic, ok}, mnesia:create_table(ets_table, [{ram_copies, [Node1]}])),
?match(Node1, rpc:call(Node1, mnesia_lib, val, [{ets_table,where_to_read}])),
?match(Node1, rpc:call(Node2, mnesia_lib, val, [{ets_table,where_to_read}])),
?match({atomic, ok}, mnesia:change_table_copy_type(ets_table, Node1, disc_only_copies)),
?match(Node1, rpc:call(Node2, mnesia_lib, val, [{ets_table,where_to_read}])),
?verify_mnesia(Nodes, []).
%% Validate the {storage_properties, ...} table option: bad property
%% lists abort, valid ets/dets properties are applied on the owning
%% nodes, and properties survive change_table_copy_type/3.
storage_options(suite) -> [];
storage_options(Config) when is_list(Config) ->
[N1,N2,N3] = Nodes = ?acquire_nodes(3, Config),
%% Malformed or conflicting storage properties must abort.
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,foobar}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,[foobar]}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,[duplicate_bag]}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{dets,[{type,bag}]}]}])),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [N1]},
{disc_only_copies, [N2]},
{storage_properties,
[{ets,[compressed]},
{dets, [{auto_save, 5000}]} ]}])),
?match(true, ets:info(a, compressed)),
?match(5000, rpc:call(N2, dets, info, [a, auto_save])),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([{a,1,1}], mnesia:dirty_read({a,1})),
mnesia:dump_log(),
%% where_to_commit must reflect each node's storage type cluster-wide.
W2C1 = [{N2, disc_only_copies}, {N1, ram_copies}],
?match(W2C1, lists:sort(rpc:call(N2, mnesia_lib, val, [{a, where_to_commit}]))),
?match(W2C1, lists:sort(rpc:call(N3, mnesia_lib, val, [{a, where_to_commit}]))),
?match({atomic,ok}, mnesia:change_table_copy_type(a, N1, disc_only_copies)),
W2C2 = [{N2, disc_only_copies}, {N1, disc_only_copies}],
?match(W2C2, lists:sort(rpc:call(N2, mnesia_lib, val, [{a, where_to_commit}]))),
?match(W2C2, lists:sort(rpc:call(N3, mnesia_lib, val, [{a, where_to_commit}]))),
%% After the type change the ets property is gone, the dets one applies.
?match(undefined, ets:info(a, compressed)),
?match(5000, dets:info(a, auto_save)),
?match({atomic,ok}, mnesia:change_table_copy_type(a, N1, disc_copies)),
?match(true, ets:info(a, compressed)),
?verify_mnesia(Nodes, []).
%% Regression: clear_table/1 issued while another node is loading the
%% table used to leave a bogus schema entry in the table itself.  Restart
%% Node2 repeatedly, racing clear_table against the load, and check that
%% only proper {cleartab,_,_} records ever appear in the ets table.
clear_table_during_load(suite) -> [];
clear_table_during_load(doc) ->
["Clear table caused during load caused a schema entry in the actual tab"];
clear_table_during_load(Config) when is_list(Config) ->
Nodes = [_, Node2] = ?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic,ok}, mnesia:create_table(cleartab, [{ram_copies, Nodes}])),
Tester = self(),
%% Large-ish records so that the table load takes measurable time.
Bin = <<"Testingasdasd", 0:32000>>,
Fill = fun() -> [mnesia:write({cleartab, N, Bin}) || N <- lists:seq(1, 3000)], ok end,
?match({atomic, ok}, mnesia:sync_transaction(Fill)),
%% Runs on Node2: restart mnesia, wait for the table, then verify that
%% the local ets copy contains nothing but cleartab records.
StopAndStart = fun() ->
stopped = mnesia:stop(),
Tester ! {self(), stopped},
receive start_node -> ok end,
ok = mnesia:start(),
ok = mnesia:wait_for_tables([cleartab], 2000),
lists:foreach(fun({cleartab,_,_}) -> ok;
(What) -> Tester ! {failed, What},
unlink(Tester),
exit(foo)
end,
ets:tab2list(cleartab)),
Tester ! {self(), ok},
normal
end,
%% Vary the delay before clear_table to hit different load phases.
Test = fun(N) ->
Pid = spawn_link(Node2, StopAndStart),
receive {Pid, stopped} -> ok end,
Pid ! start_node,
timer:sleep(N*10),
{atomic, ok} = mnesia:clear_table(cleartab),
receive
{Pid, ok} -> ok;
{failed, What} ->
io:format("Failed in ~p tries, with ~p~n",[N, What]),
exit({error, What});
{'EXIT', Pid, Reason} ->
exit({died, Reason})
end
end,
[Test(N) || N <- lists:seq(1, 10)],
?verify_mnesia(Nodes, []).
%% Whitebox regression for OTP-5065: add_table_copy/3 while the table
%% loader queue is blocked must not deadlock; it completes once the
%% controller is unblocked.
add_copy_conflict(suite) -> [];
add_copy_conflict(doc) ->
["Verify that OTP-5065 doesn't happen again, whitebox testing"];
add_copy_conflict(Config) when is_list(Config) ->
Nodes = [Node1, Node2] =
?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, Nodes}])),
?match({atomic, ok}, mnesia:create_table(b, [{ram_copies, Nodes}])),
?match({atomic, ok}, mnesia:create_table(test, [{ram_copies, [Node2]}])),
%% Restart with a single loader so tables queue up behind each other.
mnesia:stop(),
?match(ok,mnesia:start([{no_table_loaders, 1}])),
verify_ll_queue(10),
Self = self(),
Test = fun() ->
Res = mnesia:add_table_copy(test, Node1, ram_copies),
Self ! {test, Res}
end,
%% Create conflict with loader queue.
spawn_link(Test),
%% The add_table_copy must hang while the controller is blocked ...
?match_receive(timeout),
%% Conflict ok
mnesia_controller:unblock_controller(),
%% ... and succeed once it is unblocked.
?match_receive({test, {atomic,ok}}),
?match(ok, mnesia:wait_for_tables([a,b], 3000)),
?verify_mnesia(Nodes, []),
?cleanup(1, Config).
%% Whitebox helper: block the mnesia_controller and inspect its internal
%% state until something shows up in the late-load queue.  Retries up to
%% N times to accommodate slow machines that have not queued anything yet.
verify_ll_queue(0) ->
?error("Couldn't find anything in queue~n",[]);
verify_ll_queue(N) ->
?match(granted,mnesia_controller:block_controller()),
case mnesia_controller:get_info(1000) of
{info,{state,_,true,[],_Loader,[],[],[],_,_,_,_,_,_}} ->
%% Very slow SMP machines haven't loaded it yet..
mnesia_controller:unblock_controller(),
timer:sleep(10),
verify_ll_queue(N-1);
{info,{state,_,true,[],Loader,LL,[],[],_,_,_,_,_,_}} ->
io:format("~p~n", [{Loader,LL}]),
?match([_], LL); %% Verify that something is in the loader queue
Else ->
%% The matched tuple mirrors mnesia_controller's #state{} record.
?error("No match ~p maybe the internal format has changed~n",[Else])
end.
%% add_table_copy/3 must abort (not hang) when the node it is copying
%% from is killed while the copier waits for a lock.
add_copy_when_going_down(suite) -> [];
add_copy_when_going_down(doc) ->
["Tests abort when node we load from goes down"];
add_copy_when_going_down(Config) ->
[Node1, Node2] =
?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node1]}])),
%% Grab a write lock
Tester = self(),
WriteAndWait = fun() ->
mnesia:write({a,1,1}),
Tester ! {self(), got_lock},
receive continue -> ok
end
end,
Locker = spawn(fun() -> mnesia:transaction(WriteAndWait) end),
receive {Locker, got_lock} -> ok end,
%% Copy attempt from Node2 now queues behind the held write lock.
spawn_link(fun() -> Res = rpc:call(Node2, mnesia, add_table_copy,
[a, Node2, ram_copies]),
Tester ! {test, Res}
end),
%% We have a lock here we should get a timeout
?match_receive(timeout),
%% Killing the source node must make the pending copy abort.
mnesia_test_lib:kill_mnesia([Node1]),
?match_receive({test,{aborted,_}}),
?verify_mnesia([Node2], []).
%% Regression for erlang/otp#6013: the destination node of an
%% add_table_copy/3 is killed mid-copy.  Verify that all locks taken by
%% the copy are released afterwards so a later write lock on the table
%% can still be granted on the surviving node.
%% NOTE(review): the two "Wait for `mnesia_loader:send_more/6` ..." lines
%% had lost their %% comment markers in this copy, which breaks
%% compilation; they are restored as comments here.
add_copy_when_dst_going_down(suite) -> [];
add_copy_when_dst_going_down(doc) ->
    ["Table copy destination node goes down. Verify that the issue fixed in erlang/otp#6013 doesn't happen again, whitebox testing."];
add_copy_when_dst_going_down(Config) ->
    [Node1, Node2] = ?acquire_nodes(2, Config),
    ?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node1]}])),
    %% Fill the table so the copy phase has real work to do.
    lists:foreach(fun(I) ->
                          ok = mnesia:sync_dirty(fun() -> mnesia:write({a, I, I}) end)
                  end,
                  lists:seq(1, 100000)),
    ?match({ok, _}, mnesia:change_config(extra_db_nodes, [Node2])),
    %% Start table copy
    Tester = self(),
    spawn_link(fun() ->
                       mnesia:add_table_copy(a, Node2, ram_copies),
                       Tester ! add_table_copy_finished
               end),
    %% Wait for `mnesia_loader:send_more/6` has started
    %% Grab a write lock
    spawn_link(fun() ->
                       Fun = fun() ->
                                     ok = mnesia:write_lock_table(a),
                                     Tester ! {write_lock_acquired, self()},
                                     receive node2_mnesia_killed -> ok
                                     end,
                                     Tester ! write_lock_released,
                                     ok
                             end,
                       mnesia:transaction(Fun)
               end),
    receive {write_lock_acquired, Locker} -> ok
    end,
    %% Wait for `mnesia_loader:send_more/6` has finished
    ?match([], mnesia_test_lib:kill_mnesia([Node2])),
    Locker ! node2_mnesia_killed,
    receive write_lock_released -> ok
    end,
    receive add_table_copy_finished -> ok
    end,
    timer:sleep(1000), % Wait for `mnesia_loader:finish_copy/5` has acquired the read lock
    %% Grab a write lock
    ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write_lock_table(a) end, 10)),
    ?verify_mnesia([Node1], []).
%% add_table_copy/3 on a node while all other replica holders are down:
%% it must abort while any disc replica is unavailable, but be allowed
%% once only ram_copies replicas remain; repeat for each storage type.
add_copy_with_down(suite) -> [];
add_copy_with_down(Config) ->
%% Allow add_table_copy() with ram_copies even all other replicas are down
Nodes = [Node1, Node2, Node3] = ?acquire_nodes(3, Config),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node3]}, {disc_copies, [Node2]}])),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
%% Disc replica on Node2 is down => schema ops on the table abort.
?match({aborted, _}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match({aborted, _}, mnesia:del_table_copy(a, Node2)),
ok = rpc:call(Node3, mnesia, start, []),
?match({aborted, _}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match([], mnesia_test_lib:start_mnesia([Node2], [a])),
%% Once the table is ram-only, a down node no longer blocks the copy.
?match({atomic, ok}, mnesia:change_table_copy_type(a, Node2, ram_copies)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
%% Same scenario but adding a disc_copies replica.
?match({atomic, ok}, mnesia:del_table_copy(a, Node1)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, disc_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
%% Same scenario but adding a disc_only_copies replica.
?match({atomic, ok}, mnesia:del_table_copy(a, Node1)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, disc_only_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
?verify_mnesia(Nodes, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add, drop and move replicas, change storage types
%% Change table layout (only arity change supported)
-record(replica_management, {k, v}).
-record(new_replica_management, {k, v, extra}).
-define(SS(R), lists:sort(element(1,R))).
%% Walk one table through (almost) every storage-type combination on the
%% three nodes, verifying the replica layout with ?vrl and the table size
%% on every node after each schema operation. The "X Y Z" comments below
%% give the expected storage type on Node1/Node2/Node3:
%%   R = ram_copies, D = disc_copies, DO = disc_only_copies,
%%   ER = ext_ets, - = no replica.
replica_management(doc) ->
    "Add, drop and move replicas, change storage types.";
replica_management(suite) ->
    [];
replica_management(Config) when is_list(Config) ->
    %% add_table_copy/3, del_table_copy/2, move_table_copy/3,
    %% change_table_copy_type/3, transform_table/3
    Nodes = [Node1, Node2, Node3] = ?acquire_nodes(3, Config),
    Tab = replica_management,
    Attrs = record_info(fields, replica_management),
    %%
    %% Add, delete and change replicas
    %%
    ?match({atomic, ok},
           mnesia:create_table([{name, Tab}, {attributes, Attrs},
                                {ram_copies, [Node1]}, {ext_ets, [Node3]}])),
    [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <- lists:seq(1, 10)],
    ?match([], ?vrl(Tab, [], [Node1, Node3], [], Nodes)),
    %% R - -
    ?match({atomic, ok}, mnesia:dump_tables([Tab])),
    %% Mixing disc replicas into a ram table that was dumped is a combine_error.
    ?match({aborted, Reason50 } when element(1, Reason50) == combine_error,
           mnesia:add_table_copy(Tab, Node2, disc_copies)),
    ?match({aborted, Reason51 } when element(1, Reason51) == combine_error,
           mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
    ?match({atomic, ok}, mnesia:clear_table(Tab)),
    %% Take a write lock first so all replicas are in sync before the
    %% per-node size check.
    SyncedCheck = fun() ->
                          mnesia:lock({record,Tab,0}, write),
                          ?match([0,0,0], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size])))
                  end,
    mnesia:transaction(SyncedCheck),
    ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
    ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node3)),
    ?match([], ?vrl(Tab, [], [], [], Nodes)),
    %% - - -
    %% With no replicas left the table is gone; adding a copy must fail.
    ?match({aborted,Reason52} when element(1, Reason52) == no_exists,
           mnesia:add_table_copy(Tab, Node3, ram_copies)),
    ?match({atomic, ok}, mnesia:create_table([{name, Tab},
                                              {attributes, Attrs},
                                              {disc_copies, [Node1]}])),
    ?match([], ?vrl(Tab, [], [], [Node1], Nodes)),
    %% D - -
    [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <- lists:seq(1, 10)],
    ?match({aborted, Reason53} when element(1, Reason53) == badarg,
           mnesia:add_table_copy(Tab, Node2, bad_storage_type)),
    ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node2, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node2], [], [Node1], Nodes)),
    ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO -
    ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node3, ext_ets)),
    ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO R
    ?match({atomic, ok},
           mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node1, Node2], [Node3], [], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% DO DO R
    ?match({aborted, Reason54} when element(1, Reason54) == already_exists,
           mnesia:add_table_copy(Tab, Node3, ram_copies)),
    ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
    ?match([], ?vrl(Tab, [Node2], [Node3], [], Nodes)),
    %% - DO R
    ?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
    %% A clashing local named ets table must make add_table_copy abort.
    ?match(Tab, ets:new(Tab, [named_table])),
    ?match({aborted, _}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
    ?match(true, ets:delete(Tab)),
    ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
    ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO R
    ?match({atomic, ok},mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO DO
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, ext_ets)),
    ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO R
    ?match({atomic, ok},
           mnesia:change_table_copy_type(Tab, Node2, disc_copies)),
    ?match([], ?vrl(Tab, [], [Node3], [Node1,Node2], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D D R
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node1], [Node3], [Node2], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% DO D R
    %% A clashing local named ets table must also block a type change to ram.
    ?match(Tab, ets:new(Tab, [named_table])),
    ?match({aborted, _}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
    ?match(true, ets:delete(Tab)),
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
    ?match([], ?vrl(Tab, [], [Node3,Node1], [Node2], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% R D R
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
    ?match([], ?vrl(Tab, [], [Node3], [Node2,Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D D R
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO R
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
    ?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
    ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% D DO DO
    %% test clear
    ?match({atomic, ok}, mnesia:clear_table(Tab)),
    mnesia:transaction(SyncedCheck),
    %% rewrite for rest of testcase
    [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <- lists:seq(1, 10)],
    %% D DO DO
    ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
    ?match([], ?vrl(Tab, [Node3], [], [Node1], Nodes)),
    %% D - DO
    ?match({aborted, Reason55} when element(1, Reason55) == already_exists,
           mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
    %%
    %% Move replica
    %%
    ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
    ?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes)),
    ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% - D DO
    ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, ext_ets)),
    ?match([], ?vrl(Tab, [], [Node3], [Node2], Nodes)),
    ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% - D ER
    ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node3, Node1)),
    ?match([], ?vrl(Tab, [], [Node1], [Node2], Nodes)),
    ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
    %% ER D -
    %% Moving to a node that already holds a copy, or from a node that
    %% holds none, must abort.
    ?match({aborted, _}, mnesia:move_table_copy(Tab, Node1, Node2)),
    ?match({aborted, _}, mnesia:move_table_copy(Tab, Node3, Node2)),
    ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node3)),
    %% - D ER
    ?match([], mnesia_test_lib:stop_mnesia([Node3])),
    ?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:write({Tab, 43, sync_me}) end)),
    ?match([], ?vrl(Tab, [], [Node3], [Node2],Nodes -- [Node3])),
    %% - D ER
    %% Cannot move a replica away from a node that is down.
    ?match({aborted,Reason56} when element(1, Reason56) == not_active,
           mnesia:move_table_copy(Tab, Node3, Node1)),
    ?match([], ?vrl(Tab, [], [Node3], [Node2],Nodes -- [Node3])),
    %% - D ER
    ?match([], mnesia_test_lib:start_mnesia([Node3])),
    ?match([], ?vrl(Tab, [], [Node3], [Node2], Nodes)),
    %% - D ER
    %% The write done while Node3 was down must have been replicated.
    ?match([{Tab,43,sync_me}], mnesia:dirty_read({Tab,43})),
    %%
    %% Transformer
    %%
    NewAttrs = record_info(fields, new_replica_management),
    Transformer =
        fun(Rec) when is_record(Rec, replica_management) ->
                #new_replica_management{k = Rec#replica_management.k,
                                        v = Rec#replica_management.v,
                                        extra = Rec#replica_management.k * 2}
        end,
    ?match({atomic, ok}, mnesia:transform_table(Tab, fun(R) -> R end, Attrs)),
    ?match({atomic, ok}, mnesia:transform_table(Tab, Transformer, NewAttrs, new_replica_management)),
    ERlist = [#new_replica_management{k = K, v = K+2, extra = K*2} || K <- lists:seq(1, 10)],
    ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
    ?match(ERlist, ARlist),
    %% Bad attribute lists and a non-fun transformer are rejected.
    ?match({aborted, Reason56} when element(1, Reason56) == bad_type,
           mnesia:transform_table(Tab, Transformer, 0)),
    ?match({aborted, Reason57} when element(1, Reason57) == bad_type,
           mnesia:transform_table(Tab, Transformer, -1)),
    ?match({aborted, Reason58} when element(1, Reason58) == bad_type,
           mnesia:transform_table(Tab, Transformer, [])),
    ?match({aborted, Reason59} when element(1, Reason59) == bad_type,
           mnesia:transform_table(Tab, no_fun, NewAttrs)),
    ?match({aborted, Reason59} when element(1, Reason59) == bad_type,
           mnesia:transform_table(Tab, fun(X) -> X end, NewAttrs, {tuple})),
    %% OTP-3878
    %% transform_table with 'ignore' changes only the schema, not records.
    ?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
                                                NewAttrs ++ [dummy])),
    ?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
                                                NewAttrs ++ [dummy], last_rec)),
    %% ARlist is already bound: this re-match asserts records are unchanged.
    ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
    %% Writes with the old (too small) arity must now fail.
    ?match({'EXIT',{aborted,{bad_type,_}}},
           mnesia:dirty_write(Tab, #new_replica_management{})),
    ?match(ok, mnesia:dirty_write(Tab, {last_rec, k, v, e, dummy})),
    ?verify_mnesia(Nodes, []).
%% Verify that schema transactions (e.g. clear_table) still take effect
%% as intended while one of the replica nodes is down, and that the
%% restarted node catches up with the result.
schema_availability(doc) ->
    ["Test that schema succeeds (or fails) as intended when some db nodes are down."];
schema_availability(suite) ->
    [];
schema_availability(Config) when is_list(Config) ->
    [N1, _N2, N3] = Nodes = ?acquire_nodes(3, Config),
    Tab = schema_availability,
    Storage = mnesia_test_lib:storage_type(ram_copies, Config),
    TabDef = [{Storage, [N1, N3]}],
    ?match({atomic, ok}, mnesia:create_table(Tab, TabDef)),
    N = 10,
    %% Populate the table with N records inside one sync_dirty call.
    Fill = fun() ->
                   lists:foreach(fun(K) -> mnesia:write({Tab, K, K + 2}) end,
                                 lists:seq(1, N)),
                   ok
           end,
    ?match(ok, mnesia:sync_dirty(Fill)),
    %% Per-node sizes in node order: {[SizeN1, SizeN2, SizeN3], BadNodes}.
    Sizes = fun() -> rpc:multicall(Nodes, mnesia, table_info, [Tab, size]) end,
    ?match({[N,0,N], []}, Sizes()),
    %% Kill one replica node; its copy disappears until restart.
    ?match([], mnesia_test_lib:kill_mnesia([N3])),
    ?match({[N,0,0], []}, Sizes()),
    ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
    ?match({[N,0,N], []}, Sizes()),
    %% clear_table while N3 is down must be replayed on N3 at restart.
    ?match([], mnesia_test_lib:kill_mnesia([N3])),
    ?match({atomic, ok}, mnesia:clear_table(Tab)),
    ?match({[0,0,0], []}, Sizes()),
    ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
    ?match({[0,0,0], []}, Sizes()),
    ?verify_mnesia(Nodes, []).
%% Shorthand for the rpc:call/4 result seen when a local_content table
%% is written/read on a node that holds no copy of it.
-define(badrpc(Tab), {badrpc, {'EXIT', {aborted,{no_exists,Tab}}}}).

%% Each node has its own private content in a local_content table:
%% writes are never replicated, add_table_copy starts the new copy
%% empty, and clear_table only clears the local copy.
local_content(doc) ->
    ["Test local_content functionality, we want to see that correct"
     " properties gets propageted correctly between nodes"];
local_content(suite) -> [];
local_content(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    Tab1 = local1,
    Def1 = [{local_content, true}, {ram_copies, Nodes}],
    Tab2 = local2,
    Def2 = [{local_content, true}, {disc_copies, [Node1]}],
    Tab3 = local3,
    Def3 = [{local_content, true}, {disc_only_copies, [Node1]}],
    Tab4 = local4,
    Def4 = [{local_content, true}, {ram_copies, [Node1]}],
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
    ?match({atomic, ok}, mnesia:create_table(Tab4, Def4)),
    %% Every node writes its own name under key 1 in Tab1.
    ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab1, 1, Node1}])),
    ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab1, 1, Node2}])),
    ?match(ok, rpc:call(Node3, mnesia, dirty_write, [{Tab1, 1, Node3}])),
    ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab2, 1, Node1}])),
    ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab3, 1, Node1}])),
    ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab4, 1, Node1}])),
    %% Writing on a node without a replica fails with no_exists.
    ?match(?badrpc(Tab2), rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
    ?match(?badrpc(Tab3), rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
    ?match(?badrpc(Tab4), rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
    ?match({atomic, ok}, rpc:call(Node1, mnesia, add_table_copy, [Tab2, Node2, ram_copies])),
    ?match({atomic, ok}, rpc:call(Node2, mnesia, add_table_copy, [Tab3, Node2, disc_copies])),
    ?match({atomic, ok}, rpc:call(Node3, mnesia, add_table_copy, [Tab4, Node2, disc_only_copies])),
    %% A freshly added local_content copy starts out empty.
    ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
    ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
    ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
    ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
    ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
    ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
    %% Every node only sees its own writes.
    ?match([{Tab1, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab1, 1}])),
    ?match([{Tab2, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab2, 1}])),
    ?match([{Tab3, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab3, 1}])),
    ?match([{Tab4, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab4, 1}])),
    ?match([{Tab1, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab1, 1}])),
    ?match([{Tab2, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
    ?match([{Tab3, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
    ?match([{Tab4, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
    ?match([{Tab1, 1, Node3}], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
    ?match(?badrpc([_Tab2, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab2, 1}])),
    ?match(?badrpc([_Tab3, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab3, 1}])),
    ?match(?badrpc([_Tab4, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab4, 1}])),
    ?match({atomic, ok},
           mnesia:change_table_copy_type(schema, Node3, ram_copies)),
    ?match([], mnesia_test_lib:stop_mnesia([Node3])),
    %% Added for OTP-44306
    ?match(ok, rpc:call(Node3, mnesia, start, [[{schema, ?BACKEND}]])),
    ?match({ok, _}, mnesia:change_config(extra_db_nodes, [Node3])),
    mnesia_test_lib:sync_tables([Node3], [Tab1]),
    %% Node3's ram-only restart loses its local content.
    ?match([], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
    %% clear_table on a local_content table only clears the caller's copy.
    ?match({atomic, ok}, rpc:call(Node1, mnesia, clear_table, [Tab1])),
    %% Lock-synchronized size check over all three nodes, in node order.
    SyncedCheck = fun(Tab) ->
                          mnesia:lock({record,Tab,0}, write),
                          {OK, []} = rpc:multicall(Nodes, mnesia, table_info, [Tab, size]),
                          OK
                  end,
    ?match({atomic, [0,1,0]}, mnesia:transaction(SyncedCheck, [Tab1])),
    ?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab2])),
    ?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab2])),
    ?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab3])),
    ?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab3])),
    ?verify_mnesia(Nodes, []).
%% Verify read_only/read_write access modes: writes (transactions and
%% dirty) must abort on a read_only table on every replica node while
%% reads still work, and schema operations (delete_table/del_table_copy)
%% are also refused while the table is read_only.
change_table_access_mode(suite) -> [];
change_table_access_mode(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    Tab = test_access_mode_tab,
    %% One replica of each storage type, unless running diskless.
    Def = case mnesia_test_lib:diskless(Config) of
              true -> [{name, Tab}, {ram_copies, Nodes}];
              false -> [{name, Tab}, {ram_copies, [Node1]},
                        {disc_copies, [Node2]},
                        {disc_only_copies, [Node3]}]
          end,
    ?match({atomic, ok}, mnesia:create_table(Def)),
    Write = fun(What) -> mnesia:write({Tab, 1, What}) end,
    Read = fun() -> mnesia:read({Tab, 1}) end,
    ?match({atomic, ok}, mnesia:transaction(Write, [test_ok])),
    %% test read_only
    ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
    ?match({aborted, _}, mnesia:transaction(Write, [nok])),
    ?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, [nok]})),
    ?match({aborted, _}, rpc:call(Node2, mnesia, transaction, [Write, [nok]])),
    ?match({aborted, _}, rpc:call(Node3, mnesia, transaction, [Write, [nok]])),
    ?match({atomic, [{Tab, 1, test_ok}]}, mnesia:transaction(Read)),
    %% test read_write
    ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
    ?match({atomic, ok}, mnesia:transaction(Write, [test_ok1])),
    ?match({atomic, [{Tab, 1, test_ok1}]}, mnesia:transaction(Read)),
    ?match({atomic, ok}, rpc:call(Node2, mnesia, transaction, [Write, [test_ok2]])),
    ?match({atomic, [{Tab, 1, test_ok2}]}, mnesia:transaction(Read)),
    ?match({atomic, ok}, rpc:call(Node3, mnesia, transaction, [Write, [test_ok3]])),
    ?match({atomic, [{Tab, 1, test_ok3}]}, mnesia:transaction(Read)),
    ?match({atomic, ok}, mnesia:delete_table(Tab)),
    %% A bogus access mode is rejected at table creation ...
    Def4 = [{name, Tab}, {access_mode, read_only_bad}],
    ?match({aborted, {bad_type, _, _}}, mnesia:create_table(Def4)),
    %% ... while a table may be created read_only from the start.
    Def2 = [{name, Tab}, {access_mode, read_only}],
    ?match({atomic, ok}, mnesia:create_table(Def2)),
    ?match({aborted, _}, mnesia:transaction(Write, [nok])),
    ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
    ?match({atomic, ok}, mnesia:delete_table(Tab)),
    Def3 = [{name, Tab}, {mnesia_test_lib:storage_type(disc_copies, Config),
                          [Node1, Node2]},
            {access_mode, read_write}],
    ?match({atomic, ok}, mnesia:create_table(Def3)),
    ?match({atomic, ok}, mnesia:transaction(Write, [ok])),
    ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
    %% Schema changes are also refused on a read_only table.
    ?match({aborted, _}, mnesia:del_table_copy(Tab, Node2)),
    ?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
    ?match({aborted, _}, mnesia:delete_table(Tab)),
    ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
    ?match({aborted, {bad_type, _, _}},
           mnesia:change_table_access_mode(Tab, strange_atom)),
    ?match({atomic, ok}, mnesia:delete_table(Tab)),
    ?match({aborted, {no_exists, _}},
           mnesia:change_table_access_mode(err_tab, read_only)),
    ?match({aborted, {no_exists, _}},
           mnesia:change_table_access_mode([Tab], read_only)),
    ?verify_mnesia(Nodes, []).
%% Check mnesia:change_table_load_order/2: only integer priorities on
%% existing user tables are accepted; the schema table itself and
%% unknown tables must be refused.
change_table_load_order(suite) -> [];
change_table_load_order(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    Tab1 = load_order_tab1,
    Tab2 = load_order_tab2,
    Tab3 = load_order_tab3,
    %% One replica of each storage type, unless running diskless.
    TabDef = case mnesia_test_lib:diskless(Config) of
                 true ->
                     [{ram_copies, Nodes}];
                 false ->
                     [{ram_copies, [Node1]},
                      {disc_copies, [Node2]},
                      {disc_only_copies, [Node3]}]
             end,
    [?match({atomic, ok}, mnesia:create_table(T, TabDef)) ||
        T <- [Tab1, Tab2, Tab3]],
    %% Bad arguments abort: non-integer priority, unknown table names.
    ?match({aborted, _}, mnesia:change_table_load_order(Tab1, should_be_integer)),
    ?match({aborted, _}, mnesia:change_table_load_order(err_tab, 5)),
    ?match({aborted, _}, mnesia:change_table_load_order([err_tab], 5)),
    %% Integer priorities on existing tables succeed.
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab1, 5)),
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab2, 4)),
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab3, 73)),
    %% The schema table may not be given a load order.
    ?match({aborted, _}, mnesia:change_table_load_order(schema, -32)),
    ?verify_mnesia(Nodes, []).
%% Check mnesia:set_master_nodes/1,2 on a running system: master nodes
%% may only be picked among the nodes that hold a replica, and the
%% per-table settings are visible via mnesia:table_info(Tab, master_nodes).
set_master_nodes(suite) -> [];
set_master_nodes(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    Tab1 = master_node_tab1,
    Tab2 = master_node_tab2,
    Tab3 = master_node_tab3,
    Def1 = [{ram_copies, [Node1, Node2]}],
    Def2 = [{disc_copies, [Node2, Node3]}],
    Def3 = [{disc_only_copies, [Node3, Node1]}],
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
    %% Bad input: bogus node, unknown table, node without a replica.
    ?match({error, _}, mnesia:set_master_nodes(schema, [''])),
    ?match({error, _}, mnesia:set_master_nodes(badtab, [Node2, Node3])),
    ?match({error, _}, mnesia:set_master_nodes(Tab1, [Node3])),
    ?match([], mnesia:table_info(Tab1, master_nodes)),
    ?match(ok, mnesia:set_master_nodes(schema, [Node3, Node1])),
    ?match([Node3, Node1], mnesia:table_info(schema, master_nodes)),
    ?match(ok, mnesia:set_master_nodes(Tab1, [Node2])),
    ?match([Node2], mnesia:table_info(Tab1, master_nodes)),
    ?match(ok, mnesia:set_master_nodes(Tab1, [Node2, Node1])),
    ?match([Node2, Node1], mnesia:table_info(Tab1, master_nodes)),
    ?match(ok, mnesia:set_master_nodes(Tab2, [Node2])), % Should set where_to_read to Node2!
    ?match([Node2], mnesia:table_info(Tab2, master_nodes)),
    ?match(ok, mnesia:set_master_nodes(Tab3, [Node3])),
    ?match([Node3], mnesia:table_info(Tab3, master_nodes)),
    %% An empty list clears the setting for that table.
    ?match(ok, mnesia:set_master_nodes(Tab3, [])),
    ?match([], mnesia:table_info(Tab3, master_nodes)),
    %% set_master_nodes/1 applies to all tables, keeping for each table
    %% only the given nodes that actually hold a replica of it.
    ?match(ok, mnesia:set_master_nodes([Node1])),
    ?match([Node1], mnesia:table_info(schema, master_nodes)),
    ?match([Node1], mnesia:table_info(Tab1, master_nodes)),
    ?match([], mnesia:table_info(Tab2, master_nodes)),
    ?match([Node1], mnesia:table_info(Tab3, master_nodes)),
    ?match(ok, mnesia:set_master_nodes([Node1, Node2])),
    ?match([Node1, Node2], mnesia:table_info(schema, master_nodes)),
    ?match([Node1, Node2], mnesia:table_info(Tab1, master_nodes)),
    ?match([Node2], mnesia:table_info(Tab2, master_nodes)),
    ?match([Node1], mnesia:table_info(Tab3, master_nodes)),
    ?verify_mnesia(Nodes, []).
%% Check that master nodes can also be configured while mnesia is
%% stopped, and that the settings are picked up at the next start, as
%% reported by mnesia:system_info(master_node_tables).
offline_set_master_nodes(suite) -> [];
offline_set_master_nodes(Config) when is_list(Config) ->
    [Node] = Nodes = ?acquire_nodes(1, Config),
    Tab1 = offline_master_node_tab1,
    Tab2 = offline_master_node_tab2,
    Tab3 = offline_master_node_tab3,
    Tabs = ?sort([Tab1, Tab2, Tab3]),
    Def1 = [{ram_copies, [Node]}],
    Def2 = [{disc_copies, [Node]}],
    Def3 = [{disc_only_copies, [Node]}],
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
    ?match([], mnesia:system_info(master_node_tables)),
    %% Per-table settings made while mnesia is down ...
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes(Tab1, [Node])),
    ?match(ok, mnesia:set_master_nodes(Tab2, [Node])),
    ?match(ok, mnesia:set_master_nodes(Tab3, [Node])),
    %% ... are in effect after the next start.
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match(Tabs, ?sort(mnesia:system_info(master_node_tables))),
    %% Empty lists clear the settings again.
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes(Tab1, [])),
    ?match(ok, mnesia:set_master_nodes(Tab2, [])),
    ?match(ok, mnesia:set_master_nodes(Tab3, [])),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match([], mnesia:system_info(master_node_tables)),
    %% set_master_nodes/1 offline applies to all tables, incl. schema.
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes([Node])),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    AllTabs = ?sort([schema | Tabs]),
    ?match(AllTabs, ?sort(mnesia:system_info(master_node_tables))),
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes([])),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match([], mnesia:system_info(master_node_tables)),
    ?verify_mnesia(Nodes, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Synchronize table with log or disc
%%
%% Dump ram tables on disc
%% mnesia:dump_tables/1 persists a ram table's current contents, so a
%% later delete that is not dumped again is undone by a restart.
dump_tables(suite) -> [];
dump_tables(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab = dump_tables,
    Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node2]}],
    ?match({atomic, ok}, mnesia:create_table(Schema)),
    %% Dump 10 records
    Size = 10,
    Keys = lists:seq(1, Size),
    Records = [{Tab, A, 7} || A <- Keys],
    lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
    AllKeys = fun() -> ?sort(mnesia:all_keys(Tab)) end,
    ?match({atomic, Keys}, mnesia:transaction(AllKeys)),
    ?match({atomic, ok}, mnesia:dump_tables([Tab])),
    %% Delete one record
    ?match(ok, mnesia:dirty_delete({Tab, 5})),
    Keys2 = lists:delete(5, Keys),
    ?match({atomic, Keys2}, mnesia:transaction(AllKeys)),
    %% Check that all 10 is restored after a stop
    ?match([], mnesia_test_lib:stop_mnesia([Node1, Node2])),
    ?match([], mnesia_test_lib:start_mnesia([Node1, Node2])),
    ?match(ok, mnesia:wait_for_tables([Tab], infinity)),
    ?match({atomic, Keys}, mnesia:transaction(AllKeys)),
    %% Dumping an unknown table must abort with no_exists.
    ?match({aborted,Reason} when element(1, Reason) == no_exists,
           mnesia:dump_tables([foo])),
    ?verify_mnesia(Nodes, []).
%% Check mnesia:dump_log/0 on tables of all storage types, on both a
%% disc node and (after a schema type change) a ram-only node, and that
%% concurrent dumpers do not block each other.
dump_log(suite) -> [];
dump_log(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab = dump_log,
    Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1, Node2]}],
    ?match({atomic, ok}, mnesia:create_table(Schema)),
    Tab1 = dump_log1,
    Schema1 = [{name, Tab1}, {attributes, [k, v]}, {disc_copies, [Node1]}],
    ?match({atomic, ok}, mnesia:create_table(Schema1)),
    Tab2 = dump_log2,
    Schema2 = [{name, Tab2}, {attributes, [k, v]}, {disc_only_copies, [Node1]}],
    ?match({atomic, ok}, mnesia:create_table(Schema2)),
    ?match(ok, mnesia:dirty_write({Tab, 1, ok})),
    ?match(ok, mnesia:dirty_write({Tab1, 1, ok})),
    ?match(ok, mnesia:dirty_write({Tab2, 1, ok})),
    ?match(dumped, mnesia:dump_log()),
    ?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
    %% dump_log must also return 'dumped' on a node with a ram schema.
    ?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node2, ram_copies)),
    ?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
    %% Two concurrent dumpers (see dump_log/2 below) must both finish.
    Self = self(),
    spawn(fun() -> dump_log(100, Self) end),
    spawn(fun() -> dump_log(100, Self) end),
    ?match(ok, receive finished -> ok after 3000 -> timeout end),
    ?match(ok, receive finished -> ok after 3000 -> timeout end),
    ?verify_mnesia(Nodes, []).
%% Helper for dump_log/1: dump the transaction log Remaining times,
%% then send 'finished' to the Tester process.
dump_log(Remaining, Tester) when Remaining > 0 ->
    mnesia:dump_log(),
    dump_log(Remaining - 1, Tester);
dump_log(_Done, Tester) ->
    Tester ! finished.
%% Interface test of mnesia:wait_for_tables/2: loaded tables return ok
%% for any timeout, unknown tables time out, and a negative timeout is
%% rejected with an error tuple.
wait_for_tables(doc) ->
    ["Intf. test of wait_for_tables, see also force_load_table"];
wait_for_tables(suite) -> [];
wait_for_tables(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab = wf_tab,
    TabDef = [{name, Tab}, {ram_copies, [Node1, Node2]}],
    ?match({atomic, ok}, mnesia:create_table(TabDef)),
    %% Already-loaded tables and the empty list return at once.
    ?match(ok, mnesia:wait_for_tables([wf_tab], infinity)),
    ?match(ok, mnesia:wait_for_tables([], timer:seconds(5))),
    %% An unknown table makes the call time out and names the culprit.
    ?match({timeout, [bad_tab]}, mnesia:wait_for_tables([bad_tab], timer:seconds(5))),
    %% A zero timeout is still fine for a loaded table ...
    ?match(ok, mnesia:wait_for_tables([wf_tab], 0)),
    %% ... but a negative timeout is rejected outright.
    ?match({error,_}, mnesia:wait_for_tables([wf_tab], -1)),
    ?verify_mnesia(Nodes, []).
%% Check mnesia:force_load_table/1: after a restart where this node was
%% NOT the last to write, the table cannot be loaded (wait_for_tables
%% times out, reads exit) until the load is forced, after which the
%% possibly stale local copy is served.
force_load_table(suite) -> [];
force_load_table(Config) when is_list(Config) ->
    [Node1, Node2] = ?acquire_nodes(2, Config),
    Tab = wf_tab,
    Schema = [{name, Tab}, {disc_copies, [Node1, Node2]}],
    ?match({atomic, ok}, mnesia:create_table(Schema)),
    ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
    mnesia_test_lib:kill_mnesia([Node1]),
    %% Node2 updates the record while Node1 is down, so Node1's copy is stale.
    ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 1, test_nok}])),
    mnesia_test_lib:kill_mnesia([Node2]),
    %% timer:sleep(timer:seconds(5)),
    ?match(ok, mnesia:start()),
    %% Node1 must not load the table on its own: Node2 may know better.
    ?match({timeout, [Tab]}, mnesia:wait_for_tables([Tab], 5)),
    ?match({'EXIT', _}, mnesia:dirty_read({Tab, 1})),
    ?match(yes, mnesia:force_load_table(Tab)),
    %% The stale local value is served after the forced load.
    ?match([{Tab, 1, test_ok}], mnesia:dirty_read({Tab, 1})),
    ?match({error, _}, mnesia:force_load_table(error_tab)),
    ?verify_mnesia([Node1], [Node2]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Read, write and delete user properties on tables of each storage
%% type; properties are keyed on the first element of the property
%% tuple, and bad property values abort.
user_properties(doc) ->
    ["Test of reading, writing and deletion of user properties",
     "Plus initialization of user properties when a table is created",
     "Do also test mnesia:table_info(Tab, user_properties)"];
user_properties(suite) -> [];
user_properties(Config) when is_list(Config) ->
    [Node] = Nodes = ?acquire_nodes(1, Config),
    Tab1 = user_properties_1,
    Tab2 = user_properties_2,
    Tab3 = user_properties_3,
    Def1 = [{ram_copies, [Node]}, {user_properties, []}],
    Def2 = [{mnesia_test_lib:storage_type(disc_copies, Config), [Node]}],
    Def3 = [{mnesia_test_lib:storage_type(disc_only_copies, Config), [Node]},
            {user_properties, []}],
    PropKey = my_prop,
    Prop = {PropKey, some, elements},
    %% Same key as Prop but different arity - must replace, not coexist.
    Prop2 = {PropKey, some, other, elements},
    YourProp= {your_prop},
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
    ?match([], mnesia:table_info(Tab1, user_properties)),
    ?match([], mnesia:table_info(Tab2, user_properties)),
    ?match([], mnesia:table_info(Tab3, user_properties)),
    %% Reading an absent property exits with no_exists.
    ?match({'EXIT', {aborted, {no_exists, {Tab1, user_property, PropKey}}}},
           mnesia:read_table_property(Tab1, PropKey)),
    ?match({'EXIT', {aborted, {no_exists, {Tab2, user_property, PropKey}}}},
           mnesia:read_table_property(Tab2, PropKey)),
    ?match({'EXIT', {aborted, {no_exists, {Tab3, user_property, PropKey}}}},
           mnesia:read_table_property(Tab3, PropKey)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab1, YourProp)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab2, YourProp)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab3, YourProp)),
    ?match(Prop, mnesia:read_table_property(Tab1, PropKey)),
    ?match(Prop, mnesia:read_table_property(Tab2, PropKey)),
    ?match(Prop, mnesia:read_table_property(Tab3, PropKey)),
    %% Writing with the same key overwrites the old property.
    ?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop2)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop2)),
    ?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop2)),
    ?match(Prop2, mnesia:read_table_property(Tab1, PropKey)),
    ?match(Prop2, mnesia:read_table_property(Tab2, PropKey)),
    ?match(Prop2, mnesia:read_table_property(Tab3, PropKey)),
    ?match({atomic, ok}, mnesia:delete_table_property(Tab1, PropKey)),
    ?match({atomic, ok}, mnesia:delete_table_property(Tab2, PropKey)),
    ?match({atomic, ok}, mnesia:delete_table_property(Tab3, PropKey)),
    ?match([YourProp], mnesia:table_info(Tab1, user_properties)),
    ?match([YourProp], mnesia:table_info(Tab2, user_properties)),
    ?match([YourProp], mnesia:table_info(Tab3, user_properties)),
    %% Properties may also be set at table creation.
    Tab4 = user_properties_4,
    ?match({atomic, ok},
           mnesia:create_table(Tab4, [{user_properties, [Prop]}])),
    ?match([Prop], mnesia:table_info(Tab4, user_properties)),
    %% Some error cases
    %% A property must be a non-empty tuple.
    ?match({aborted, {bad_type, Tab1, {}}},
           mnesia:write_table_property(Tab1, {})),
    ?match({aborted, {bad_type, Tab1, ali}},
           mnesia:write_table_property(Tab1, ali)),
    Tab5 = user_properties_5,
    ?match({aborted, {bad_type, Tab5, {user_properties, {}}}},
           mnesia:create_table(Tab5, [{user_properties, {}}])),
    ?match({aborted, {bad_type, Tab5, {user_properties, ali}}},
           mnesia:create_table(Tab5, [{user_properties, ali}])),
    ?match({aborted, {bad_type, Tab5, {user_properties, [{}]}}},
           mnesia:create_table(Tab5, [{user_properties, [{}]}])),
    ?match({aborted, {bad_type, Tab5, {user_properties, [ali]}}},
           mnesia:create_table(Tab5, [{user_properties, [ali]}])),
    ?verify_mnesia(Nodes, []).
%% Write user properties from within a mnesia_schema:schema_transaction/1
%% fun (the low-level do_write_table_property/2 API); properties with the
%% same key on the SAME table must overwrite, on different tables they
%% must stay independent.
unsupp_user_props(doc) ->
    ["Simple test of adding user props in a schema_transaction"];
unsupp_user_props(suite) -> [];
unsupp_user_props(Config) when is_list(Config) ->
    [Node1] = ?acquire_nodes(1, Config),
    Tab1 = silly1,
    Tab2 = silly2,
    Storage = mnesia_test_lib:storage_type(ram_copies, Config),
    ?match({atomic, ok}, rpc:call(Node1, mnesia,
                                  create_table, [Tab1, [{Storage, [Node1]}]])),
    ?match({atomic, ok}, rpc:call(Node1, mnesia,
                                  create_table, [Tab2, [{Storage, [Node1]}]])),
    %% Same property key on three different tables inside one schema
    %% transaction; each table keeps its own value.
    F1 = fun() ->
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1}),
                 mnesia_schema:do_write_table_property(
                   silly2, {prop, propval2}), % same key as above
                 mnesia_schema:do_write_table_property(
                   schema, {prop, propval3}) % same key as above
         end,
    ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
                                  schema_transaction, [F1])),
    ?match([{prop,propval1}], rpc:call(Node1, mnesia,
                                       table_info, [silly1, user_properties])),
    ?match([{prop,propval2}], rpc:call(Node1, mnesia,
                                       table_info, [silly2, user_properties])),
    ?match([_,{prop,propval3}], rpc:call(Node1, mnesia,
                                         table_info, [schema, user_properties])),
    %% Same key written twice to the SAME table: last write wins.
    F2 = fun() ->
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1a}),
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1b}) % same key as above
         end,
    ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
                                  schema_transaction, [F2])),
    ?match([{prop,propval1b}], rpc:call(Node1, mnesia,
                                        table_info,
                                        [silly1, user_properties])),
    ?verify_mnesia([Node1], []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Check mnesia:snmp_open_table/2: opening the snmp mapping succeeds for
%% existing tables (with local or remote replicas) and aborts for an
%% unknown table.
snmp_open_table(suite) -> [];
snmp_open_table(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    %% Mixed replica types, unless running diskless.
    LocalDef = case mnesia_test_lib:diskless(Config) of
                   true ->
                       [{ram_copies, Nodes}];
                   false ->
                       [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
               end,
    Tab2 = ext_snmp_table,
    RemoteDef = [{Storage, [Node2]}],
    MissingTab = non_existing_tab,
    ?match({atomic, ok}, mnesia:create_table(Tab1, LocalDef)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, RemoteDef)),
    %% Opening works for both the local and the remote-only table ...
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
    %% ... but aborts for a table that does not exist.
    ?match({aborted, _}, mnesia:snmp_open_table(MissingTab, [{key, integer}])),
    ?verify_mnesia(Nodes, []).
%% Check mnesia:snmp_close_table/1, including that a table can be
%% transformed to a new (compound) key layout while its snmp mapping is
%% closed and then reopened with a matching key type.
snmp_close_table(suite) -> [];
snmp_close_table(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    Def1 =
        case mnesia_test_lib:diskless(Config) of
            true -> [{ram_copies, Nodes}];
            false ->
                [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
        end,
    Tab2 = ext_snmp_table,
    %% Tab2 has its snmp mapping set up already at creation.
    Def2 = [{snmp, [{key, integer}]}, {Storage, [Node2]}],
    ErrTab = non_existing_tab,
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:create_table(no_snmp_tab, [])),
    add_some_records(Tab1, Tab2, 3),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
    add_some_records(Tab1, Tab2, 5),
    ?match({atomic, ok}, mnesia:snmp_close_table(Tab1)),
    %% Rewrite each {K, V} record as {{K, K}, V, 43+V} (compound key).
    Transform = fun(Tab, Key) ->
                        [{T,K,V}] = mnesia:read(Tab, Key, write),
                        mnesia:delete(T,K, write),
                        mnesia:write({T, {K,K}, V, 43+V})
                end,
    ?match({atomic, ok}, mnesia:transform_table(Tab1, ignore, [key,val,new])),
    ?match({atomic, ok},
           mnesia:transaction(fun() ->
                                      mnesia:write_lock_table(Tab1),
                                      Keys = mnesia:select(Tab1, [{{'_','$1','_'}, [],
                                                                   ['$1']}]),
                                      [Transform(Tab1, Key) || Key <- Keys],
                                      ok
                              end)),
    ?match([{Tab1, {1, 1}, 1, 44}], mnesia:dirty_read(Tab1, {1, 1})),
    %% Reopen snmp with a key type that matches the new compound key.
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key,{integer,integer}}])),
    %% Same dance for the remote-only table.
    ?match({atomic, ok}, mnesia:snmp_close_table(Tab2)),
    ?match({atomic, ok}, mnesia:transform_table(Tab2, ignore, [key,val,new])),
    ?match({atomic, ok},
           mnesia:transaction(fun() ->
                                      mnesia:write_lock_table(Tab2),
                                      Keys = mnesia:select(Tab2, [{{'_','$1','_'}, [],
                                                                   ['$1']}]),
                                      [Transform(Tab2, Key) || Key <- Keys],
                                      ok
                              end)),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key,{integer,integer}}])),
    %% Should be aborted ????
    ?match({atomic, ok}, mnesia:snmp_close_table(no_snmp_tab)),
    ?match({aborted, _}, mnesia:snmp_close_table(ErrTab)),
    ?verify_mnesia(Nodes, []).
%% Verify mnesia:snmp_get_next_index/2 in dirty context, inside
%% transactions/activities, and that the SNMP index structures are
%% rebuilt correctly after restarting Mnesia on all nodes.
snmp_get_next_index(suite) -> [];
snmp_get_next_index(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    %% On diskless nodes fall back to ram_copies everywhere.
    Def1 =
        case mnesia_test_lib:diskless(Config) of
            true -> [{ram_copies, Nodes}];
            false ->
                [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
        end,
    Tab2 = ext_snmp_table,
    Def2 = [{Storage, [Node2]}],
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
    add_some_records(Tab1, Tab2, 1),
    Test =
        fun() ->
                %% Test local tables
                {success, Res11} = ?match({ok, _}, mnesia:snmp_get_next_index(Tab1,[])),
                {ok, Index11} = Res11,
                {success, _Res12} =
                    ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
                ?match({'EXIT',_}, mnesia:snmp_get_next_index(Tab1, endOfTable)),
                %% Test external table
                {success, Res21} =
                    ?match({ok, _}, mnesia:snmp_get_next_index(Tab2, [])),
                {ok, Index21} = Res21,
                {success, _Res22} =
                    ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
                {ok, Row} = mnesia:snmp_get_row(Tab1, Index11),
                ?match(ok, mnesia:dirty_delete(Tab1, hd(Index11))),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab1,[])),
                ok = mnesia:dirty_write(Row), %% Reset to coming tests
                %% Test of non existing table
                %% ?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
                ok
        end,
    %% Run the same checks in every access context.
    ?match(ok, Test()),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?match(ok, mnesia:sync_dirty(Test)),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    %%io:format("**********Before ~p~n", [mnesia_lib:val({Tab1,snmp})]),
    %%io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    %%io:format("**********After ~p~n", [mnesia_lib:val({Tab1,snmp})]),
    %%io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
    %% The SNMP index must survive a full restart.
    ?match(ok, Test()),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?match(ok, mnesia:sync_dirty(Test)),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    ?verify_mnesia(Nodes, []).
%% Populate the two SNMP test tables: Tab1 gets keys 1..N written with
%% dirty writes, Tab2 gets keys 20..20+N-1 written in one transaction.
%% A trailing sync_transaction per table re-writes the last record so
%% that all replicas are guaranteed to be in sync before returning.
%% Returns the sorted list of every record written.
add_some_records(LocalTab, ExtTab, N) ->
    LocalRecs = [{LocalTab, I, I} || I <- lists:seq(N, 1, -1)],
    ExtRecs = [{ExtTab, I, I} || I <- lists:seq(20 + N - 1, 20, -1)],
    [mnesia:dirty_write(Rec) || Rec <- LocalRecs],
    WriteAll = fun() ->
                       [mnesia:write(Rec) || Rec <- ExtRecs],
                       ok
               end,
    {atomic, ok} = mnesia:transaction(WriteAll),
    %% Sync things, so everything gets everywhere!
    Sync = fun(Rec) ->
                   {atomic, ok} =
                       mnesia:sync_transaction(fun() -> mnesia:write(Rec) end)
           end,
    Sync(lists:last(LocalRecs)),
    Sync(lists:last(ExtRecs)),
    ?sort(LocalRecs ++ ExtRecs).
%% Verify mnesia:snmp_get_row/2 for integer keys and composite
%% {fix_string, integer} keys, in dirty context, transactions and
%% activities, and after restarting Mnesia on all nodes.
snmp_get_row(suite) -> [];
snmp_get_row(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    %% On diskless nodes fall back to ram_copies everywhere.
    Def1 =
        case mnesia_test_lib:diskless(Config) of
            true -> [{ram_copies, Nodes}];
            false ->
                [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
        end,
    Tab2 = ext_snmp_table,
    Def2 = [{Storage, [Node2]}],
    Tab3 = snmp_table,
    Def3 = [{Storage, [Node1]},
            {attributes, [key, data1, data2]}],
    %% Setup/Clear let every access context start from a fresh state.
    Setup = fun() ->
                    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
                    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
                    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
                    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
                    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
                    ?match({atomic, ok}, mnesia:snmp_open_table(
                                           Tab3, [{key, {fix_string,integer}}])),
                    add_some_records(Tab1, Tab2, 1)
            end,
    Clear = fun() ->
                    ?match({atomic, ok}, mnesia:delete_table(Tab1)),
                    ?match({atomic, ok}, mnesia:delete_table(Tab2)),
                    ?match({atomic, ok}, mnesia:delete_table(Tab3))
            end,
    Test =
        fun() ->
                %% Test local tables
                {success, Res11} =
                    ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
                {ok, Index11} = Res11,
                ?match({ok, {Tab1,1,1}}, mnesia:snmp_get_row(Tab1, Index11)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
                ?match({'EXIT',_}, mnesia:snmp_get_row(Tab1, endOfTable)),
                ?match(undefined, mnesia:snmp_get_row(Tab1, [73])),
                %% Composite {fix_string, integer} key.
                Add = fun() -> mnesia:write({Tab3, {"f_string", 3}, data1, data2}) end,
                ?match({atomic, ok}, mnesia:transaction(Add)),
                {success, {ok, Index31}} = ?match({ok, RowIndex31} when is_list(RowIndex31),
                                                  mnesia:snmp_get_next_index(Tab3,[])),
                ?match({ok, Row31} when is_tuple(Row31),
                       mnesia:snmp_get_row(Tab3, Index31)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab3, Index31)),
                Del = fun() -> mnesia:delete({Tab3,{"f_string",3}}) end,
                ?match({atomic, ok}, mnesia:transaction(Del)),
                ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [3])),
                ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [73])),
                %% Test external table
                {success, Res21} = ?match({ok,[20]}, mnesia:snmp_get_next_index(Tab2, [])),
                {ok, Index21} = Res21,
                ?match({ok, Row2} when is_tuple(Row2), mnesia:snmp_get_row(Tab2, Index21)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
                %% Test of non existing table
                %% ?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
                ok
        end,
    Setup(),
    ?match(ok, Test()),
    Clear(), Setup(),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    Clear(), Setup(),
    ?match(ok, mnesia:sync_dirty(Test)),
    Clear(), Setup(),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    Clear(), Setup(),
    %% The checks must also hold after a full restart.
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    ?match(ok, Test()),
    Clear(), Setup(),
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?verify_mnesia(Nodes, []).
%% Verify mnesia:snmp_get_mnesia_key/2: mapping an SNMP row index back
%% to the Mnesia key for integer and {fix_string, integer} keys, that a
%% deleted key yields 'undefined', and that a non-list index exits.
snmp_get_mnesia_key(suite) -> [];
snmp_get_mnesia_key(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab1 = local_snmp_table,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
%% On diskless nodes fall back to ram_copies everywhere.
Def1 =
case mnesia_test_lib:diskless(Config) of
true -> [{ram_copies, Nodes}];
false ->
[{disc_copies, [Node1]}, {ram_copies, [Node2]}]
end,
Tab2 = ext_snmp_table,
Def2 = [{Storage, [Node2]}],
Tab3 = fix_string,
%% Setup/Clear let every access context start from a fresh state.
Setup = fun() ->
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(Tab3, Def1)),
?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
?match({atomic, ok}, mnesia:snmp_open_table(Tab3, [{key, {fix_string,integer}}])),
add_some_records(Tab1, Tab2, 1)
end,
Clear = fun() ->
?match({atomic, ok}, mnesia:delete_table(Tab1)),
?match({atomic, ok}, mnesia:delete_table(Tab2)),
?match({atomic, ok}, mnesia:delete_table(Tab3))
end,
Test =
fun() ->
%% Test local tables
{success, Res11} =
?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
{ok, Index11} = Res11,
?match({ok, 1}, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
%% Test external tables
{success, Res21} =
?match({ok, [20]}, mnesia:snmp_get_next_index(Tab2, [])),
{ok, Index21} = Res21,
?match({ok, 20}, mnesia:snmp_get_mnesia_key(Tab2, Index21)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab2, [97])),
%% A non-list row index is a usage error and must exit.
?match({'EXIT', _}, mnesia:snmp_get_mnesia_key(Tab2, 97)),
?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,1}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab1,73,7}) end)),
?match({ok, 73}, mnesia:snmp_get_mnesia_key(Tab1, [73])),
?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,73}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, [73])),
%% Composite {fix_string, integer} key round-trip.
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab3,{"S",5},7}) end)),
?match({ok,{"S",5}}, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:delete({Tab3,{"S",5}}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
ok
end,
%% Run the same checks in every access context.
Setup(),
?match(ok, Test()),
Clear(), Setup(),
?match({atomic,ok}, mnesia:transaction(Test)),
Clear(), Setup(),
?match(ok, mnesia:sync_dirty(Test)),
Clear(), Setup(),
?match(ok, mnesia:activity(transaction,Test,mnesia)),
?verify_mnesia(Nodes, []).
snmp_update_counter(doc) ->
["Verify that counters may be updated for tables with SNMP property"];
snmp_update_counter(suite) -> [];
snmp_update_counter(Config) when is_list(Config) ->
[Node1] = Nodes = ?acquire_nodes(1, Config),
Tab = snmp_update_counter,
Def = [{attributes, [key, value]},
{snmp, [{key, integer}]},
{ram_copies, [Node1]}
],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
Oid = {Tab, 1},
?match([], mnesia:dirty_read(Oid)),
?match(ok, mnesia:dirty_write({Tab, 1, 1})),
?match([{Tab, _Key, 1}], mnesia:dirty_read(Oid)),
%% dirty_update_counter returns the new value: 1 + 2 = 3.
?match(3, mnesia:dirty_update_counter(Oid, 2)),
?match([{Tab, _Key, 3}], mnesia:dirty_read(Oid)),
?verify_mnesia(Nodes, []).
snmp_order(doc) ->
["Verify that sort order is correct in transactions and dirty variants"];
snmp_order(suite) -> [];
snmp_order(Config) when is_list(Config) ->
[Node1] = Nodes = ?acquire_nodes(1, Config),
Tab = snmp_order,
%% Three-component integer SNMP key: rows must come back in
%% lexicographic index order from snmp_get_next_index.
Def = [{attributes, [key, value]},
{snmp, [{key, {integer, integer, integer}}]},
{ram_copies, [Node1]}
],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
Oid = {Tab, 1},
?match([], mnesia:dirty_read(Oid)),
%% A key that does not match the declared SNMP key type is rejected.
?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, 1})),
[mnesia:dirty_write({Tab, {A,B,2}, default}) ||
A <- lists:seq(1, 9, 2),
B <- lists:seq(2, 8, 2)],
%% Traversal from [], a full prefix and a partial prefix must all be sorted.
Test1 = fun() ->
Keys0 = get_keys(Tab, []),
?match(Keys0, lists:sort(Keys0)),
?match([[1,2,2]|_], Keys0),
Keys1 = get_keys(Tab, [5]),
?match(Keys1, lists:sort(Keys1)),
?match([[5,2,2]|_], Keys1),
Keys2 = get_keys(Tab, [7, 4]),
?match(Keys2, lists:sort(Keys2)),
?match([[7,4,2]|_], Keys2),
ok
end,
?match(ok, Test1()),
?match({atomic, ok},mnesia:transaction(Test1)),
?match(ok,mnesia:sync_dirty(Test1)),
%% Writes made inside the transaction must show up in order too.
Test2 = fun() ->
mnesia:write(Tab, {Tab,{0,0,2},updated}, write),
mnesia:write(Tab, {Tab,{5,3,2},updated}, write),
mnesia:write(Tab, {Tab,{10,10,2},updated}, write),
Keys0 = get_keys(Tab, []),
?match([[0,0,2],[1,2,2]|_], Keys0),
?match(Keys0, lists:sort(Keys0)),
Keys1 = get_keys(Tab, [5]),
?match([[5,2,2],[5,3,2]|_], Keys1),
?match(Keys1, lists:sort(Keys1)),
Keys2 = get_keys(Tab, [7,4]),
?match([[7,4,2]|_], Keys2),
?match(Keys2, lists:sort(Keys2)),
?match([10,10,2], lists:last(Keys0)),
?match([10,10,2], lists:last(Keys1)),
?match([10,10,2], lists:last(Keys2)),
?match([[10,10,2]], get_keys(Tab, [10])),
ok
end,
?match({atomic, ok},mnesia:transaction(Test2)),
?verify_mnesia(Nodes, []).
%% Collect, in traversal order, every SNMP row index of Tab that
%% follows Key (use [] to start from the beginning).
get_keys(Tab, Key) ->
    lists:reverse(get_keys_acc(Tab, Key, [])).

%% Tail-recursive worker accumulating indices in reverse.
get_keys_acc(Tab, Key, Acc) ->
    case mnesia:snmp_get_next_index(Tab, Key) of
        endOfTable ->
            Acc;
        {ok, Next} ->
            get_keys_acc(Tab, Next, [Next | Acc])
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-record(tab, {i, e1, e2}). % Simple test table
subscribe_extended(doc) ->
["Test the extended set of events, test with and without checkpoints. "];
subscribe_extended(suite) ->
[];
subscribe_extended(Config) when is_list(Config) ->
[N1, N2]=Nodes=?acquire_nodes(2, Config),
%% Three tables: a set (Tab1), a bag sharing Tab1's record name
%% (Tab2), and a plain table used for counter events (Tab3).
Tab1 = etab,
Storage = mnesia_test_lib:storage_type(ram_copies, Config),
Def1 = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
Tab2 = bag,
Def2 = [{Storage, [N1, N2]},
{type, bag},
{record_name, Tab1},
{attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
Tab3 = ctab,
Def3 = [{Storage, [N1, N2]}],
?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
?match({ok, N1}, mnesia:subscribe({table, Tab3, detailed})),
%% Mixing simple and detailed subscriptions on one table is an error.
?match({error, {already_exists, _}}, mnesia:subscribe({table, Tab1, simple})),
?match({error, {badarg, {table, Tab1, bad}}}, mnesia:subscribe({table, Tab1, bad})),
?match({ok, N1}, mnesia:subscribe(activity)),
test_ext_sub(Tab1, Tab2, Tab3),
?match({ok, N1}, mnesia:unsubscribe(activity)),
?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
%% clear_table is reported as a schema delete followed by a schema write.
?match({atomic, ok}, mnesia:clear_table(Tab1)),
?match({mnesia_table_event, {delete, schema, {schema, Tab1}, [{schema, Tab1, _}],_}}, recv_event()),
?match({mnesia_table_event, {write, schema, {schema, Tab1, _}, [], _}}, recv_event()),
?match({atomic, ok}, mnesia:clear_table(Tab2)),
?match({mnesia_table_event, {delete, schema, {schema, Tab2}, [{schema, Tab2, _}],_}},
recv_event()),
?match({mnesia_table_event, {write, schema, {schema, Tab2, _}, [], _}}, recv_event()),
?match({ok, N1}, mnesia:unsubscribe({table, Tab2, detailed})),
%% Re-run the whole event sequence with an active checkpoint.
{ok, _, _} = mnesia:activate_checkpoint([{name, testing},
{ram_overrides_dump, true},
{max, [Tab1, Tab2]}]),
?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
?match({ok, N1}, mnesia:subscribe(activity)),
test_ext_sub(Tab1, Tab2, Tab3),
?verify_mnesia(Nodes, []).
%% Drive writes/deletes against a set table (Tab1), a bag table (Tab2)
%% and a counter table (Tab3) and assert the exact sequence of detailed
%% table events (and activity-complete events for transactions) that
%% the subscriber must receive. Dirty operations are tagged {dirty, Pid},
%% transactional ones {tid, _, Pid}.
test_ext_sub(Tab1, Tab2, Tab3) ->
%% The basics
Rec1 = {Tab1, 1, 0, 0},
Rec2 = {Tab1, 1, 1, 0},
Rec3 = {Tab1, 2, 1, 0},
Rec4 = {Tab1, 2, 2, 0},
Write = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:write(Tab, Rec, write)
end)
end,
Delete = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:delete(Tab, Rec, write)
end)
end,
DelObj = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:delete_object(Tab, Rec, write)
end)
end,
S = self(),
D = {dirty, self()},
%% SET
?match(ok, mnesia:dirty_write(Rec1)),
?match({mnesia_table_event, {write, Tab1, Rec1, [], D}}, recv_event()),
?match(ok, mnesia:dirty_write(Rec3)),
?match({mnesia_table_event, {write, Tab1, Rec3, [], D}}, recv_event()),
%% Overwriting the same key reports the old record in the event.
?match(ok, mnesia:dirty_write(Rec1)),
?match({mnesia_table_event, {write, Tab1, Rec1, [Rec1], D}}, recv_event()),
?match({atomic, ok}, Write(Tab1, Rec2)),
?match({mnesia_table_event, {write, Tab1, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match(ok, mnesia:dirty_delete({Tab1, 2})),
?match({mnesia_table_event, {delete, Tab1, {Tab1, 2}, [Rec3], D}}, recv_event()),
?match({atomic, ok}, DelObj(Tab1, Rec2)),
?match({mnesia_table_event, {delete, Tab1, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab1, 1)),
?match({mnesia_table_event, {delete, Tab1, {Tab1, 1}, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({ok, _N1}, mnesia:unsubscribe({table, Tab1, detailed})),
%% BAG
?match({atomic, ok}, Write(Tab2, Rec1)),
?match({mnesia_table_event, {write, Tab2, Rec1, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec4)),
?match({mnesia_table_event, {write, Tab2, Rec4, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
%% In a bag, the event lists all existing records with the same key.
?match({atomic, ok}, Write(Tab2, Rec3)),
?match({mnesia_table_event, {write, Tab2, Rec3, [Rec4], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec2)),
?match({mnesia_table_event, {write, Tab2, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec1)),
?match({mnesia_table_event, {write, Tab2, Rec1, [Rec1, Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, DelObj(Tab2, Rec2)),
?match({mnesia_table_event, {delete, Tab2, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab2, 1)),
?match({mnesia_table_event, {delete, Tab2, {Tab2, 1}, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab2, 2)),
?match({mnesia_table_event, {delete, Tab2, {Tab2, 2}, [Rec4, Rec3], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
%% COUNTERS
Rec5 = {Tab3, counter, 0},
?match(ok, mnesia:dirty_write(Rec5)),
?match({mnesia_table_event, {write, Tab3, Rec5, [], D}}, recv_event()),
%% dirty_update_counter is reported as a write with the old value.
?match(1, mnesia:dirty_update_counter({Tab3, counter}, 1)),
?match({mnesia_table_event, {write, Tab3, {Tab3,counter,1}, [Rec5], D}}, recv_event()),
?match(ok, mnesia:dirty_delete({Tab3, counter})),
?match({mnesia_table_event, {delete, Tab3, {Tab3,counter},
[{Tab3,counter,1}], D}}, recv_event()),
ok.
subscribe_standard(doc) ->
["Tests system events and the original table events"];
subscribe_standard(suite) -> [];
subscribe_standard(Config) when is_list(Config)->
[N1, N2]=?acquire_nodes(2, Config),
Tab = tab,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
Def = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
%% Check system events
?match({error, {badarg, foo}}, mnesia:unsubscribe(foo)),
?match({error, badarg}, mnesia:unsubscribe({table, foo})),
mnesia:unsubscribe(activity),
?match({ok, N1}, mnesia:subscribe(system)),
?match({ok, N1}, mnesia:subscribe(activity)),
%% Killing a remote node must produce a mnesia_down, and only that.
?match([], mnesia_test_lib:kill_mnesia([N2])),
?match({mnesia_system_event, {mnesia_down, N2}}, recv_event()),
?match(timeout, recv_event()),
?match([], mnesia_test_lib:start_mnesia([N2], [Tab])),
?match({mnesia_activity_event, _}, recv_event()),
?match({mnesia_system_event,{mnesia_up, N2}}, recv_event()),
?match(true, lists:member(self(), mnesia:system_info(subscribers))),
%% Subscriptions do not survive a restart of the local node.
?match([], mnesia_test_lib:kill_mnesia([N1])),
timer:sleep(500),
mnesia_test_lib:flush(),
?match([], mnesia_test_lib:start_mnesia([N1], [Tab])),
?match(timeout, recv_event()),
?match({ok, N1}, mnesia:subscribe(system)),
?match({error, {already_exists, system}}, mnesia:subscribe(system)),
?match(stopped, mnesia:stop()),
?match({mnesia_system_event, {mnesia_down, N1}}, recv_event()),
?match({error, {node_not_running, N1}}, mnesia:subscribe(system)),
?match([], mnesia_test_lib:start_mnesia([N1, N2], [Tab])),
%% Check table events
?match({ok, N1}, mnesia:subscribe(activity)),
Old_Level = mnesia:set_debug_level(trace),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({atomic, ok},
mnesia:transaction(fun() -> mnesia:write(#tab{i=155}) end)),
Self = self(),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid, _, Self}}}, recv_event()),
?match({ok, N1}, mnesia:unsubscribe({table,Tab})),
?match({ok, N1}, mnesia:unsubscribe(activity)),
%% No events after unsubscribing.
?match({atomic, ok},
mnesia:transaction(fun() -> mnesia:write(#tab{i=255}) end)),
?match(timeout, recv_event()),
mnesia:set_debug_level(Old_Level),
%% Check deletion of replica
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({ok, N1}, mnesia:subscribe(activity)),
?match(ok, mnesia:dirty_write(#tab{i=355})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
%% Removing the local replica implicitly drops the table subscription.
?match({atomic, ok}, mnesia:del_table_copy(Tab, N1)),
?match({mnesia_activity_event, _}, recv_event()),
?match(ok, mnesia:dirty_write(#tab{i=455})),
?match(timeout, recv_event()),
%% Moving the replica back does not restore the subscription.
?match({atomic, ok}, mnesia:move_table_copy(Tab, N2, N1)),
?match({mnesia_activity_event, _}, recv_event()),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match(ok, mnesia:dirty_write(#tab{i=555})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
?match({atomic, ok}, mnesia:move_table_copy(Tab, N1, N2)),
?match({mnesia_activity_event, _}, recv_event()),
?match(ok, mnesia:dirty_write(#tab{i=655})),
?match(timeout, recv_event()),
?match({atomic, ok}, mnesia:add_table_copy(Tab, N1, ram_copies)),
?match({mnesia_activity_event, _}, recv_event()),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({error, {already_exists, {table,Tab, simple}}},
mnesia:subscribe({table,Tab})),
?match(ok, mnesia:dirty_write(#tab{i=755})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
%% Deleting the table drops the subscription as well.
?match({atomic, ok}, mnesia:delete_table(Tab)),
?match({mnesia_activity_event, _}, recv_event()),
?match(timeout, recv_event()),
mnesia_test_lib:kill_mnesia([N1]),
?verify_mnesia([N2], [N1]).
%% Wait up to one second for the next message in the mailbox.
%% Returns the message, or the atom 'timeout' if none arrives.
recv_event() ->
receive
Event -> Event
after 1000 ->
timeout
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
foldl(suite) ->
    [];
foldl(doc) ->
    [""];
%% Verify mnesia:foldl/3,4 over set, bag and ordered_set tables, in
%% transactions and dirty contexts, including the error cases:
%% unknown table, non-fun argument, exit from the fun, bad lock kind.
foldl(Config) when is_list(Config) ->
    Nodes = [_N1, N2] = ?acquire_nodes(2, Config),
    Tab1 = fold_local,
    Tab2 = fold_remote,
    Tab3 = fold_ordered,
    ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
    ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies, [N2]}, {type, bag}])),
    ?match({atomic, ok}, mnesia:create_table(Tab3, [{ram_copies, Nodes},
                                                    {type, ordered_set}])),
    Tab1Els = [{Tab1, N, N} || N <- lists:seq(1, 10)],
    Tab2Els = ?sort([{Tab2, 1, 2} | [{Tab2, N, N} || N <- lists:seq(1, 10)]]),
    Tab3Els = [{Tab3, N, N} || N <- lists:seq(1, 10)],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab1Els],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab2Els],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab3Els],
    %% Fold collects all records in traversal order.
    Fold = fun(Tab) ->
                   lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab))
           end,
    Fold2 = fun(Tab, Lock) ->
                    lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab, Lock))
            end,
    Exit = fun(Tab) ->
                   lists:reverse(mnesia:foldl(fun(_E, _A) -> exit(testing) end, [], Tab))
           end,
    %% Errors
    ?match({aborted, _}, mnesia:transaction(Fold, [error])),
    ?match({aborted, _}, mnesia:transaction(fun(Tab) -> mnesia:foldl(badfun,[],Tab) end,
                                            [Tab1])),
    ?match({aborted, testing}, mnesia:transaction(Exit, [Tab1])),
    ?match({aborted, _}, mnesia:transaction(Fold2, [Tab1, read_lock])),
    %% Success
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold, [Tab1]))),
    ?match({atomic, Tab2Els}, sort_res(mnesia:transaction(Fold, [Tab2]))),
    %% ordered_set already folds in key order; no sorting needed.
    ?match({atomic, Tab3Els}, mnesia:transaction(Fold, [Tab3])),
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, read]))),
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, write]))),
    ?match(Tab1Els, sort_res(mnesia:sync_dirty(Fold, [Tab1]))),
    ?match(Tab2Els, sort_res(mnesia:async_dirty(Fold, [Tab2]))),
    ?verify_mnesia(Nodes, []).
%% Normalise a result for comparison: sort the payload of an
%% {atomic, _} transaction result or a bare list; anything else is
%% passed through untouched.
sort_res(Result) ->
    case Result of
        {atomic, Payload} ->
            {atomic, ?sort(Payload)};
        Listy when is_list(Listy) ->
            ?sort(Listy);
        Other ->
            Other
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Smoke test: mnesia:info/0 prints system information and returns ok.
info(suite) -> [];
info(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia:info()),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia:schema/0 prints the whole schema and returns ok.
schema_0(suite) -> [];
schema_0(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia:schema()),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia:schema/1 prints the schema of one table.
schema_1(suite) -> [];
schema_1(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
Tab = schema_1,
?match({atomic, ok}, mnesia:create_table(Tab, [])),
?match(ok, mnesia:schema(Tab)),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia_lib:view/0 dumps the transaction log.
view_0(suite) -> [];
view_0(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia_lib:view()),
?verify_mnesia(Nodes, []).
%% mnesia_lib:view/1 must detect the file type from its contents:
%% a crash-dump core file, the LATEST.LOG transaction log (viewed to
%% eof after stop) and the schema.DAT dets file.
view_1(suite) -> [];
view_1(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
BinCore = mnesia_lib:mkcore({crashinfo, "Just testing..."}),
CoreFile = lists:concat(["MnesiaCore.", node(), ".view_1.", ?MODULE]),
?match(ok, file:write_file(CoreFile, BinCore)),
?match(ok, mnesia_lib:view(CoreFile)),
?match(ok, file:delete(CoreFile)),
%% Stop Mnesia so the log and dat files are closed and stable.
?match(stopped, mnesia:stop()),
Dir = mnesia:system_info(directory),
?match(eof, mnesia_lib:view(filename:join(Dir, "LATEST.LOG"))),
?match(ok, mnesia_lib:view(filename:join(Dir, "schema.DAT"))),
?verify_mnesia([], Nodes).
%% mnesia_lib:view/2 with an explicit file type (core/log/dat) must
%% work even when the file name gives no hint about the contents.
view_2(suite) -> [];
view_2(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
BinCore = mnesia_lib:mkcore({crashinfo, "More testing..."}),
File = lists:concat([?MODULE, "view_2.", node()]),
?match(ok, file:write_file(File, BinCore)),
?match(ok, mnesia_lib:view(File, core)),
?match(ok, file:delete(File)),
%% Stop Mnesia so the log and dat files are closed and stable.
?match(stopped, mnesia:stop()),
Dir = mnesia:system_info(directory),
?match(ok, file:rename(filename:join(Dir, "LATEST.LOG"), File)),
?match(eof, mnesia_lib:view(File, log)),
?match(ok, file:delete(File)),
?match(ok, file:rename(filename:join(Dir, "schema.DAT"), File)),
?match(ok, mnesia_lib:view(File, dat)),
?match(ok, file:delete(File)),
?verify_mnesia([], Nodes).
%% mnesia:lkill/0 kills Mnesia on the local node only; the other
%% node must keep running.
lkill(suite) -> [];
lkill(Config) when is_list(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
?match(ok, rpc:call(Node2, mnesia, lkill, [])),
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
?verify_mnesia([Node1], [Node2]).
%% mnesia:kill/0 kills Mnesia on all connected nodes; afterwards
%% neither node may be running.
kill(suite) -> [];
kill(Config) when is_list(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
?match({_, []}, rpc:call(Node2, mnesia, kill, [])),
?match(no, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
?verify_mnesia([], [Node1, Node2]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Storage-type dispatchers: run the record_name_dirty_access scenario
%% once per copy type (ram, disc, disc_only, external ets backend).
record_name_dirty_access_ram(suite) ->
[];
record_name_dirty_access_ram(Config) when is_list(Config) ->
record_name_dirty_access(ram_copies, Config).
record_name_dirty_access_disc(suite) ->
[];
record_name_dirty_access_disc(Config) when is_list(Config) ->
record_name_dirty_access(disc_copies, Config).
record_name_dirty_access_disc_only(suite) ->
[];
record_name_dirty_access_disc_only(Config) when is_list(Config) ->
record_name_dirty_access(disc_only_copies, Config).
record_name_dirty_access_xets(Config) when is_list(Config) ->
record_name_dirty_access(ext_ets, Config).
%% Exercise a table whose record_name differs from the table name
%% through the full dirty API, plus backup/restore, checkpoints,
%% subscriptions, counters and the mnesia_registry — for the given
%% storage type.
record_name_dirty_access(Storage, Config) ->
    [Node1, _Node2] = Nodes = ?acquire_nodes(2, Config),
    List = lists:concat([record_name_dirty_access_, Storage]),
    Tab = list_to_atom(List),
    RecName = some_record,
    Attr = val,
    TabDef = [{type, bag},
              {record_name, RecName},
              {index, [Attr]},
              {Storage, Nodes}],
    ?match({atomic, ok}, mnesia:create_table(Tab, TabDef)),
    ?match(RecName, mnesia:table_info(Tab, record_name)),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 20})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 21})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 22})),
    %% Backup test
    BupFile = List ++ ".BUP",
    CpName = cpname,
    CpArgs = [{name, CpName}, {min, [Tab]}, {ram_overrides_dump, true}],
    ?match({ok, CpName, _}, mnesia:activate_checkpoint(CpArgs)),
    ?match(ok, mnesia:backup_checkpoint(CpName, BupFile)),
    ?match(ok, mnesia:deactivate_checkpoint(CpName)),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 1, 10})),
    ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 3, 10})),
    Twos =?sort( [{RecName, 2, 20}, {RecName, 2, 21}, {RecName, 2, 22}]),
    ?match(Twos, ?sort(mnesia:dirty_read(Tab, 2))),
    ?match(ok, mnesia:dirty_delete_object(Tab, {RecName, 2, 21})),
    Tens = ?sort([{RecName, 1, 10}, {RecName, 3, 10}]),
    TenPat = {RecName, '_', 10},
    ?match(Tens, ?sort(mnesia:dirty_match_object(Tab, TenPat))),
    ?match(Tens, ?sort(mnesia:dirty_select(Tab, [{TenPat, [], ['$_']}]))),
    %% Subscription test: events carry the table name, not the record name.
    E = mnesia_table_event,
    ?match_receive({E, {write, {Tab, 3, 10}, _}}),
    ?match_receive({E, {delete_object, {Tab, 2, 21}, _}}),
    ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),
    %% Index reads must work after a restart of Node1.
    ?match([], mnesia_test_lib:stop_mnesia([Node1])),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab])),
    ?match(Tens, ?sort(mnesia:dirty_index_match_object(Tab, TenPat, Attr) )),
    ?match(Tens, ?sort(mnesia:dirty_index_read(Tab, 10, Attr))),
    ?match([1, 2, 3], ?sort(mnesia:dirty_all_keys(Tab))),
    ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
    ?match(ok, mnesia:dirty_delete(Tab, 2)),
    ?match([], mnesia:dirty_read(Tab, 2)),
    ?match_receive({E, {delete, {Tab, 2}, _}}),
    ?match([], mnesia_test_lib:flush()),
    ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),
    %% Restore test: the backup must reproduce the original contents.
    ?match({atomic, [Tab]}, mnesia:restore(BupFile, [{recreate_tables, [Tab]}])),
    ?match(RecName, mnesia:table_info(Tab, record_name)),
    ?match(Twos, ?sort(mnesia:dirty_match_object(Tab, mnesia:table_info(Tab, wild_pattern)))),
    ?match(Twos, ?sort(mnesia:dirty_select(Tab,
                                           [{mnesia:table_info(Tab, wild_pattern),
                                             [],['$_']}]))),
    %% Traverse backup test
    Fun = fun(Rec, {Good, Bad}) ->
                  ?verbose("BUP: ~p~n", [Rec]),
                  case Rec of
                      {T, K, V} when T == Tab ->
                          Good2 = Good ++ [{RecName, K, V}],
                          {[Rec], {?sort(Good2), Bad}};
                      {T, K} when T == Tab ->
                          Good2 = [G || G <- Good, element(2, G) /= K],
                          {[Rec], {?sort(Good2), Bad}};
                      _ when element(1, Rec) == schema ->
                          {[Rec], {Good, Bad}};
                      _ ->
                          Bad2 = Bad ++ [Rec],
                          {[Rec], {Good, ?sort(Bad2)}}
                  end
          end,
    ?match({ok, {Twos, []}}, mnesia:traverse_backup(BupFile, mnesia_backup,
                                                    dummy, read_only,
                                                    Fun, {[], []})),
    ?match(ok, file:delete(BupFile)),
    %% Update counter test
    CounterTab = list_to_atom(lists:concat([Tab, "_counter"])),
    CounterTabDef = [{record_name, some_counter}],
    C = my_counter,
    ?match({atomic, ok}, mnesia:create_table(CounterTab, CounterTabDef)),
    ?match(some_counter, mnesia:table_info(CounterTab, record_name)),
    %% Counters never go below zero.
    ?match(0, mnesia:dirty_update_counter(CounterTab, gurka, -10)),
    ?match(10, mnesia:dirty_update_counter(CounterTab, C, 10)),
    ?match(11, mnesia:dirty_update_counter(CounterTab, C, 1)),
    ?match(4711, mnesia:dirty_update_counter(CounterTab, C, 4700)),
    ?match([{some_counter, C, 4711}], mnesia:dirty_read(CounterTab, C)),
    ?match(0, mnesia:dirty_update_counter(CounterTab, C, -4747)),
    %% Registry tests
    RegTab = list_to_atom(lists:concat([Tab, "_registry"])),
    RegTabDef = [{record_name, some_reg}],
    ?match(ok, mnesia_registry:create_table(RegTab, RegTabDef)),
    ?match(some_reg, mnesia:table_info(RegTab, record_name)),
    {success, RegRecs} =
        ?match([_ | _], mnesia_registry_test:dump_registry(node(), RegTab)),
    R = ?sort(RegRecs),
    ?match(R, ?sort(mnesia_registry_test:restore_registry(node(), RegTab))),
    ?verify_mnesia(Nodes, []).
%% Verify that ordered_set tables keep their sort order across
%% replicas, restarts, dirty and transactional matches/selects, and
%% that index reads work on ordered_set tables.
sorted_ets(suite) ->
[];
sorted_ets(Config) when is_list(Config) ->
[N1, N2, N3] = All = ?acquire_nodes(3, Config),
Tab = sorted_tab,
Def = case mnesia_test_lib:diskless(Config) of
true -> [{name, Tab}, {type, ordered_set}, {ram_copies, All}];
false -> [{name, Tab}, {type, ordered_set},
{ram_copies, [N1]},
{disc_copies,[N2, N3]}]
end,
?match({atomic, ok}, mnesia:create_table(Def)),
%% ordered_set is not supported on disc_only_copies.
?match({aborted, _}, mnesia:create_table(fel, [{disc_only_copies, N1}])),
?match([ok | _],
[mnesia:dirty_write({Tab, {dirty, N}, N}) || N <- lists:seq(1, 10)]),
?match({atomic, _},
mnesia:sync_transaction(fun() ->
[mnesia:write({Tab, {trans, N}, N}) ||
N <- lists:seq(1, 10)]
end)),
%% Every replica must return the rows in key order.
List = mnesia:dirty_match_object({Tab, '_', '_'}),
?match(List, ?sort(List)),
?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
%% Order must survive a restart of all nodes.
mnesia_test_lib:stop_mnesia(All),
mnesia_test_lib:start_mnesia(All, [sorted_tab]),
List = mnesia:dirty_match_object({Tab, '_', '_'}),
?match(List, ?sort(List)),
?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_select, [Tab, [{{Tab, '_', '_'},[],['$_']}]])),
%% Uncommitted writes made inside the transaction must be merged in order.
TransMatch = fun() ->
mnesia:write({Tab, {trans, 0}, 0}),
mnesia:write({Tab, {trans, 11}, 11}),
mnesia:match_object({Tab, '_', '_'})
end,
TransSelect = fun() ->
mnesia:write({Tab, {trans, 0}, 0}),
mnesia:write({Tab, {trans, 11}, 11}),
mnesia:select(Tab, [{{Tab, '_', '_'},[],['$_']}])
end,
TList = mnesia:transaction(TransMatch),
STList = ?sort(TList),
?match(STList, TList),
?match(STList, rpc:call(N2, mnesia, transaction, [TransMatch])),
?match(STList, rpc:call(N3, mnesia, transaction, [TransMatch])),
TSel = mnesia:transaction(TransSelect),
?match(STList, TSel),
?match(STList, rpc:call(N2, mnesia, transaction, [TransSelect])),
?match(STList, rpc:call(N3, mnesia, transaction, [TransSelect])),
%% Index read on an ordered_set, including an in-transaction rewrite.
?match({atomic, ok}, mnesia:create_table(rec, [{type, ordered_set}])),
[ok = mnesia:dirty_write(R) || R <- [{rec,1,1}, {rec,2,1}]],
?match({atomic, ok}, mnesia:add_table_index(rec, 3)),
TestIt = fun() ->
ok = mnesia:write({rec,1,1}),
mnesia:index_read(rec, 1, 3)
end,
?match({atomic, [{rec,1,1}, {rec,2,1}]}, mnesia:transaction(TestIt)).
%% Verify that secondary indexes are cleaned up when records are
%% deleted: after writing and then deleting every record, both the
%% table size and the size of the backing index table (checked white
%% box via index_size/1 on each node) must be zero. Covers set, bag and
%% ordered_set tables over ram_copies and disc_only_copies replicas.
index_cleanup(Config) when is_list(Config) ->
    [N1, N2] = All = ?acquire_nodes(2, Config),
    ?match({atomic, ok}, mnesia:create_table(i_set, [{type, set}, {ram_copies, [N1]}, {index, [val]},
                                                     {disc_only_copies, [N2]}])),
    ?match({atomic, ok}, mnesia:create_table(i_bag, [{type, bag}, {ram_copies, [N1]}, {index, [val]},
                                                     {disc_only_copies, [N2]}])),
    ?match({atomic, ok}, mnesia:create_table(i_oset, [{type, ordered_set}, {ram_copies, [N1, N2]},
                                                      {index, [val]}])),
    Tabs = [i_set, i_bag, i_oset],
    %% Write keys 1..10 with indexed value N rem 5 (so each indexed
    %% value maps to two keys); returns the records sorted.
    Add = fun(Tab) ->
                  Write = fun(Table) ->
                                  Recs = [{Table, N, N rem 5} || N <- lists:seq(1,10)],
                                  [ok = mnesia:write(Rec) || Rec <- Recs],
                                  Recs
                          end,
                  {atomic, Recs} = mnesia:sync_transaction(Write, [Tab]),
                  lists:sort(Recs)
          end,
    %% Read every record back through the index (values 0..4) and
    %% return the flattened, sorted result for comparison with Add's.
    IRead = fun(Tab) ->
                    Read = fun(Table) ->
                                   [mnesia:index_read(Table, N, val) || N <- lists:seq(0,4)]
                           end,
                    {atomic, Recs} = mnesia:transaction(Read, [Tab]),
                    lists:sort(lists:flatten(Recs))
            end,
    Delete = fun(Rec) ->
                     Del = fun() -> mnesia:delete_object(Rec) end,
                     {atomic, ok} = mnesia:sync_transaction(Del),
                     ok
             end,
    Recs = [Add(Tab) || Tab <- Tabs],
    ?match(Recs, [IRead(Tab) || Tab <- Tabs]),
    [Delete(Rec) || Rec <- lists:flatten(Recs)],
    %% Tables must be empty...
    [?match({Tab,0}, {Tab,mnesia:table_info(Tab, size)}) || Tab <- Tabs],
    %% ...and so must the underlying index tables, on both nodes.
    [?match({Tab,Node,0, _}, rpc:call(Node, ?MODULE, index_size, [Tab]))
     || Node <- All, Tab <- Tabs],
    ?verify_mnesia(All, []).
%% White box testing: look up the backend table that implements the
%% (single) secondary index of Tab and return its element count taken
%% straight from ets/dets, together with the raw index info for
%% debugging. Runs on the node it is called on (used via rpc).
index_size(Tab) ->
    case mnesia:table_info(Tab, index_info) of
        {index, _, [{_, {ram, EtsTab}} = IxInfo]} ->
            {Tab, node(), ets:info(EtsTab, size), IxInfo};
        {index, _, [{_, {dets, DetsTab}} = IxInfo]} ->
            {Tab, node(), dets:info(DetsTab, size), IxInfo}
    end.
| null | https://raw.githubusercontent.com/erlang/otp/dd28c69518e8d9c7ed3c3a30cb8f9645e678bbc2/lib/mnesia/test/mnesia_evil_coverage_test.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Get meta info about table
Table info when table not loaded
Check the error descriptions
This is real error msg
Add and drop db nodes
Delete the DB
Check
Drop a db node when several disk resident nodes are down
Start and stop the system
Create and delete tables
Get meta info about table
Change replica type
Create conflict with loader queue.
Conflict ok
Very slow SMP machines haven't loaded it yet..
Verify that something is in the loader queue
Grab a write lock
We have a lock here we should get a timeout
Start table copy
Grab a write lock
Wait for `mnesia_loader:finish_copy/5` has acquired the read lock
Grab a write lock
Allow add_table_copy() with ram_copies even all other replicas are down
Add, drop and move replicas, change storage types
Change table layout (only arity change supported)
add_table_copy/3, del_table_copy/2, move_table_copy/3,
change_table_copy_type/3, transform_table/3
Add, delete and change replicas
R - -
- - -
D - -
D DO -
D DO R
DO DO R
- DO R
D DO R
D DO D0
D DO R
D D R
DO D R
R D R
D D R
D DO R
D DO DO
test clear
rewrite for rest of testcase
D DO DO
D - DO
Move replica
- D DO
- D ER
ER D -
- D ER
- D ER
- D ER
- D ER
Transformer
OTP-3878
test read_only
test read_write
Should set where_to_read to Node2!
Synchronize table with log or disc
Dump ram tables on disc
Dump 10 records
timer:sleep(timer:seconds(5)),
Some error cases
same key as above
same key as above
same key as above
Should be aborted ????
Test local tables
Test external table
Reset to coming tests
Test of non existing table
io:format("**********Before ~p~n", [mnesia_lib:val({Tab1,snmp})]),
io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
io:format("**********After ~p~n", [mnesia_lib:val({Tab1,snmp})]),
io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
Sync things, so everything gets everywhere!
Test local tables
Test external table
Test of non existing table
Test local tables
Test external tables
Simple test table
The basics
SET
BAG
COUNTERS
Check system events
Check table events
Check deletion of replica
Errors
Backup test
Subscription test
Restore test
Update counter test
Registry tests
White box testing | Copyright Ericsson AB 1996 - 2022 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mnesia_evil_coverage_test).
-author('').
-include("mnesia_test_lib.hrl").
-export([init_per_testcase/2, end_per_testcase/2,
init_per_group/2, end_per_group/2,
all/0, groups/0]).
-export([system_info/1, table_info/1, error_description/1,
db_node_lifecycle/1, evil_delete_db_node/1, start_and_stop/1,
checkpoint/1, table_lifecycle/1, storage_options/1,
add_copy_conflict/1, add_copy_when_going_down/1, add_copy_when_dst_going_down/1,
add_copy_with_down/1,
replica_management/1, clear_table_during_load/1,
schema_availability/1, local_content/1,
replica_location/1, user_properties/1, unsupp_user_props/1,
sorted_ets/1, index_cleanup/1,
change_table_access_mode/1, change_table_load_order/1,
set_master_nodes/1, offline_set_master_nodes/1,
dump_tables/1, dump_log/1, wait_for_tables/1, force_load_table/1,
snmp_open_table/1, snmp_close_table/1, snmp_get_next_index/1,
snmp_get_row/1, snmp_get_mnesia_key/1, snmp_update_counter/1,
snmp_order/1, subscribe_standard/1, subscribe_extended/1,
foldl/1, info/1, schema_0/1, schema_1/1, view_0/1, view_1/1, view_2/1,
lkill/1, kill/1,
record_name_dirty_access_ram/1,
record_name_dirty_access_disc/1,
record_name_dirty_access_disc_only/1,
record_name_dirty_access_xets/1]).
-export([info_check/8, index_size/1]).
-define(cleanup(N, Config),
mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
N, Config, ?FILE, ?LINE)).
init_per_testcase(Func, Conf) ->
mnesia_test_lib:init_per_testcase(Func, Conf).
end_per_testcase(Func, Conf) ->
mnesia_test_lib:end_per_testcase(Func, Conf).
all() ->
[system_info, table_info, error_description,
db_node_lifecycle, evil_delete_db_node, start_and_stop,
checkpoint, table_lifecycle, storage_options,
add_copy_conflict,
add_copy_when_going_down, add_copy_when_dst_going_down, add_copy_with_down,
replica_management,
clear_table_during_load,
schema_availability, local_content,
{group, table_access_modifications}, replica_location,
{group, table_sync}, user_properties, unsupp_user_props,
{group, record_name}, {group, snmp_access},
{group, subscriptions}, {group, iteration},
{group, debug_support}, sorted_ets, index_cleanup,
{mnesia_dirty_access_test, all},
{mnesia_trans_access_test, all},
{mnesia_evil_backup, all}].
groups() ->
[{table_access_modifications, [],
[change_table_access_mode, change_table_load_order,
set_master_nodes, offline_set_master_nodes]},
{table_sync, [],
[dump_tables, dump_log, wait_for_tables,
force_load_table]},
{snmp_access, [],
[snmp_open_table, snmp_close_table, snmp_get_next_index,
snmp_get_row, snmp_get_mnesia_key, snmp_update_counter,
snmp_order]},
{subscriptions, [],
[subscribe_standard, subscribe_extended]},
{iteration, [], [foldl]},
{debug_support, [],
[info, schema_0, schema_1, view_0, view_1, view_2,
lkill, kill]},
{record_name, [], [{group, record_name_dirty_access}]},
{record_name_dirty_access, [],
[record_name_dirty_access_ram,
record_name_dirty_access_disc,
record_name_dirty_access_disc_only,
record_name_dirty_access_xets
]}].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
Get meta info about
system_info(suite) -> [];
system_info(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(all, Config),
Ns = ?sort(Nodes),
?match(yes, mnesia:system_info(is_running)),
?match(Ns, ?sort(mnesia:system_info(db_nodes))),
?match(Ns, ?sort(mnesia:system_info(running_db_nodes))),
?match(A when is_atom(A), mnesia:system_info(debug)),
?match(L when is_list(L), mnesia:system_info(directory)),
?match(L when is_list(L), mnesia:system_info(log_version)),
?match({_, _}, mnesia:system_info(schema_version)),
?match(L when is_list(L), mnesia:system_info(tables)),
?match(L when is_list(L), mnesia:system_info(local_tables)),
?match(L when is_list(L), mnesia:system_info(held_locks)),
?match(L when is_list(L), mnesia:system_info(lock_queue)),
?match(L when is_list(L), mnesia:system_info(transactions)),
?match(I when is_integer(I), mnesia:system_info(transaction_failures)),
?match(I when is_integer(I), mnesia:system_info(transaction_commits)),
?match(I when is_integer(I), mnesia:system_info(transaction_restarts)),
?match(L when is_list(L), mnesia:system_info(checkpoints)),
?match(A when is_atom(A), mnesia:system_info(backup_module)),
?match(true, mnesia:system_info(auto_repair)),
?match({_, _}, mnesia:system_info(dump_log_interval)),
?match(A when is_atom(A), mnesia:system_info(dump_log_update_in_place)),
?match(I when is_integer(I), mnesia:system_info(transaction_log_writes)),
?match(I when is_integer(I), mnesia:system_info(send_compressed)),
?match(I when is_integer(I), mnesia:system_info(max_transfer_size)),
?match(L when is_list(L), mnesia:system_info(all)),
?match(L when is_list(L), mnesia:system_info(backend_types)),
?match({'EXIT', {aborted, Reason }} when element(1, Reason) == badarg
, mnesia:system_info(ali_baba)),
?verify_mnesia(Nodes, []).
table_info(suite) -> [];
table_info(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Tab = table_info,
Type = bag,
ValPos = 3,
Attrs = [k, v],
Arity = length(Attrs) +1,
Schema =
case mnesia_test_lib:diskless(Config) of
true -> [{type, Type}, {attributes, Attrs}, {index, [ValPos]},
{ram_copies, [Node1, Node2]}, {ext_ets, [Node3]}];
false ->
[{type, Type}, {attributes, Attrs}, {index, [ValPos]},
{disc_only_copies, [Node1]}, {ram_copies, [Node2]},
{ext_ets, [Node3]}]
end,
?match({atomic, ok}, mnesia:create_table(Tab, Schema)),
Size = 10,
Keys = lists:seq(1, Size),
Records = [{Tab, A, 7} || A <- Keys],
lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
case mnesia_test_lib:diskless(Config) of
true ->
?match(Nodes, mnesia:table_info(Tab, ram_copies));
false ->
?match([Node3], mnesia:table_info(Tab, ext_ets)),
?match([Node2], mnesia:table_info(Tab, ram_copies)),
?match([Node1], mnesia:table_info(Tab, mnesia_test_lib:storage_type(disc_only_copies, Config)))
end,
Read = [Node1, Node2, Node3],
Write = ?sort([Node1, Node2, Node3]),
{[ok,ok,ok], []} = rpc:multicall(Nodes, ?MODULE, info_check,
[Tab, Read, Write, Size, Type, ValPos, Arity, Attrs]),
?match({atomic, Attrs}, mnesia:transaction(fun() -> mnesia:table_info(Tab, attributes) end)),
?match(L when is_list(L), mnesia:table_info(Tab, all)),
?match({atomic, ok},
mnesia:create_table(tab_info, Schema)),
?match(stopped, mnesia:stop()),
?match(stopped, rpc:call(Node2, mnesia, stop, [])),
?match(ok, mnesia:start()),
?match(ok, mnesia:wait_for_tables([tab_info], 5000)),
?match(0, mnesia:table_info(tab_info, size)),
?verify_mnesia([Node1, Node3], [Node2]).
info_check(Tab, Read, Write, Size, Type, ValPos, Arity, Attrs) ->
?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read)),
?match(Write, ?sort(mnesia:table_info(Tab, where_to_write))),
?match(Mem when is_integer(Mem), mnesia:table_info(Tab, memory)),
?match(Size, mnesia:table_info(Tab, size)),
?match(Type, mnesia:table_info(Tab, type)),
?match([ValPos], mnesia:table_info(Tab, index)),
?match(Arity, mnesia:table_info(Tab, arity)),
?match(Attrs, mnesia:table_info(Tab, attributes)),
?match({Tab, '_', '_'}, mnesia:table_info(Tab, wild_pattern)),
ok.
error_description(suite) -> [];
error_description(Config) when is_list(Config) ->
?acquire_nodes(1, Config),
Errors = [nested_transaction, badarg, no_transaction, combine_error,
bad_index, already_exists, index_exists, no_exists, system_limit,
mnesia_down, not_a_db_node, bad_type, node_not_running,
truncated_binary_file, active, illegal
],
?match(X when is_atom(X), mnesia:error_description({error, bad_error_msg})),
?match(X when is_tuple(X), mnesia:error_description({'EXIT', pid, bad})),
?match(X when is_tuple(X), mnesia:error_description(
{error,
{"Cannot prepare checkpoint (bad reply)",
{{877,957351,758147},a@legolas},
{error,{node_not_running,a1@legolas}}}})),
check_errors(error, Errors),
check_errors(aborted, Errors),
check_errors('EXIT', Errors).
check_errors(_Err, []) -> ok;
check_errors(Err, [Desc|R]) ->
?match(X when is_list(X), mnesia:error_description({Err, Desc})),
check_errors(Err, R).
db_node_lifecycle(suite) -> [];
db_node_lifecycle(Config) when is_list(Config) ->
[Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
Tab = db_node_lifecycle,
Who = fun(T) ->
L1 = mnesia:table_info(T, ram_copies),
L2 = mnesia:table_info(T, disc_copies),
L3 = mnesia:table_info(T, disc_only_copies),
L4 = mnesia:table_info(T, ext_ets),
L1 ++ L2 ++ L3 ++ L4
end,
SNs = ?sort(AllNodes),
Schema = [{name, Tab}, {ram_copies, [Node1, Node2]}],
?match({atomic, ok}, mnesia:create_table(Schema)),
?match([], mnesia_test_lib:stop_mnesia(AllNodes)),
?match(ok, mnesia:delete_schema(AllNodes)),
?match({error, _}, mnesia:create_schema(foo)),
?match({error, _}, mnesia:create_schema([foo])),
?match({error, _}, mnesia:create_schema([foo@bar])),
?match(ok, mnesia:start()),
?match(false, mnesia:system_info(use_dir)),
?match([ram_copies, disc_copies, disc_only_copies], mnesia:system_info(backend_types)),
?match({atomic, ok}, mnesia:create_table(Tab, [])),
?match({aborted, {has_no_disc, Node1}}, mnesia:dump_tables([Tab])),
?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_copies)),
?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_only_copies)),
?match(stopped, mnesia:stop()),
?match(ok, mnesia:create_schema(AllNodes, ?BACKEND)),
?match([], mnesia_test_lib:start_mnesia(AllNodes)),
?match([SNs, SNs, SNs],
lists:map(fun lists:sort/1,
element(1, rpc:multicall(AllNodes, mnesia, table_info,
[schema, disc_copies])))),
?match({aborted, {already_exists, schema, Node2, _}},
mnesia:change_table_copy_type(schema, Node2, disc_copies)),
?match({atomic, ok},
mnesia:change_table_copy_type(schema, Node2, ram_copies)),
?match({aborted, {already_exists, schema, Node2, _}},
mnesia:change_table_copy_type(schema, Node2, ram_copies)),
?match({atomic, ok},
mnesia:change_table_copy_type(schema, Node2, disc_copies)),
?match([SNs, SNs, SNs],
lists:map(fun lists:sort/1,
element(1, rpc:multicall(AllNodes, mnesia, table_info,
[schema, disc_copies])))),
Tab2 = disk_tab,
Tab3 = not_local,
Tab4 = local,
Tab5 = remote,
Tab6 = ext1,
Tabs = [Schema,
[{name, Tab2}, {disc_copies, AllNodes}],
[{name, Tab3}, {ram_copies, [Node2, Node3]}],
[{name, Tab4}, {disc_only_copies, [Node1]}],
[{name, Tab5}, {disc_only_copies, [Node2]}],
[{name, Tab6}, {ext_ets, [Node1, Node2]}]
],
[?match({atomic, ok}, mnesia:create_table(T)) || T <- Tabs ],
?match({aborted, {active, _, Node2}},
mnesia:del_table_copy(schema, Node2)),
?match([], mnesia_test_lib:stop_mnesia([Node1])),
?match({aborted, {node_not_running, Node1}},
mnesia:del_table_copy(schema, Node2)),
?match([], mnesia_test_lib:start_mnesia([Node1],[Tab2,Tab4,Tab6])),
?match([], mnesia_test_lib:stop_mnesia([Node2])),
?match({atomic, ok}, mnesia:del_table_copy(schema, Node2)),
RemNodes = AllNodes -- [Node2],
?match(RemNodes, mnesia:system_info(db_nodes)),
?match([Node1], Who(Tab)),
?match(RemNodes, Who(Tab2)),
?match([Node3], Who(Tab3)),
?match([Node1], Who(Tab4)),
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
?match([Node1], Who(Tab6)),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab2, Node3, ram_copies)),
?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node3, ram_copies)),
?match([], mnesia_test_lib:stop_mnesia([Node3])),
?match({atomic, ok}, mnesia:del_table_copy(schema, Node3)),
?match([Node1], mnesia:system_info(db_nodes)),
?match([Node1], Who(Tab)),
?match([Node1], Who(Tab2)),
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab3)),
?match([Node1], Who(Tab4)),
?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
?verify_mnesia([Node1], []).
evil_delete_db_node(suite) -> [];
evil_delete_db_node(Config) when is_list(Config) ->
[Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
Tab = evil_delete_db_node,
?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies, AllNodes}])),
?match([], mnesia_test_lib:stop_mnesia([Node2, Node3])),
?match({atomic, ok}, mnesia:del_table_copy(schema, Node2)),
RemNodes = AllNodes -- [Node2],
?match(RemNodes, mnesia:system_info(db_nodes)),
?match(RemNodes, mnesia:table_info(Tab, disc_copies)),
?verify_mnesia([Node1], []).
start_and_stop(suite) -> [];
start_and_stop(Config) when is_list(Config) ->
[Node1 | _] = Nodes = ?acquire_nodes(all, Config),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?match(ok, rpc:call(Node1, mnesia, start, [])),
?match(ok, rpc:call(Node1, mnesia, start, [])),
?match(stopped, rpc:call(Node1, mnesia, stop, [])),
?verify_mnesia(Nodes -- [Node1], [Node1]),
?match([], mnesia_test_lib:start_mnesia(Nodes)),
?verify_mnesia(Nodes, []).
Checkpoints and backup management
checkpoint(suite) -> [];
checkpoint(Config) when is_list(Config) ->
checkpoint(2, Config),
checkpoint(3, Config),
ok.
checkpoint(NodeConfig, Config) ->
[Node1 | _] = TabNodes = ?acquire_nodes(NodeConfig, Config),
CreateTab = fun(Type, N, Ns) ->
Tab0 = lists:concat(["local_checkpoint_", Type, N]),
Tab = list_to_atom(Tab0),
catch mnesia:delete_table(Tab),
?match({atomic, ok},
mnesia:create_table(Tab, [{Type, Ns}])),
Tab
end,
CreateTabs = fun(Type, Acc) ->
[CreateTab(Type, 1, [hd(TabNodes)]),
CreateTab(Type, 2, TabNodes),
CreateTab(Type, 3, [lists:last(TabNodes)])] ++
Acc
end,
Types = [ram_copies, disc_copies, disc_only_copies, ext_ets],
Tabs = lists:foldl(CreateTabs, [], Types),
Recs = ?sort([{T, N, N} || T <- Tabs, N <- lists:seq(1, 10)]),
lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs),
CpName = a_checkpoint_name,
MinArgs = [{name, CpName}, {min, Tabs}, {allow_remote, false}],
?match({error, _}, rpc:call(Node1, mnesia, activate_checkpoint, [MinArgs])),
MaxArgs = [{name, CpName}, {max, Tabs}, {allow_remote, true}],
?match({ok, CpName, L} when is_list(L),
rpc:call(Node1, mnesia, activate_checkpoint, [MaxArgs])),
?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
Args = [{name, CpName}, {min, Tabs}, {allow_remote, true}],
?match({ok, CpName, L} when is_list(L),
rpc:call(Node1, mnesia, activate_checkpoint, [Args])),
Recs2 = ?sort([{T, K, 0} || {T, K, _} <- Recs]),
lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs2),
?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
?match({error, Reason1 } when element(1, Reason1) == no_exists,
mnesia:deactivate_checkpoint(CpName)),
?match({error, Reason2 } when element(1, Reason2) == badarg,
mnesia:activate_checkpoint(foo)),
?match({error, Reason3 } when element(1, Reason3) == badarg,
mnesia:activate_checkpoint([{foo, foo}])),
?match({error, Reason4 } when element(1, Reason4) == badarg,
mnesia:activate_checkpoint([{max, foo}])),
?match({error, Reason5 } when element(1, Reason5) == badarg,
mnesia:activate_checkpoint([{min, foo}])),
?match({error, _}, mnesia:activate_checkpoint([{min, [foo@bar]}])),
?match({error, Reason6 } when element(1, Reason6) == badarg,
mnesia:activate_checkpoint([{allow_remote, foo}])),
Fun = fun(Tab) -> ?match({atomic, ok}, mnesia:delete_table(Tab)) end,
lists:foreach(Fun, Tabs),
?verify_mnesia(TabNodes, []).
-define(vrl, mnesia_test_lib:verify_replica_location).
replica_location(suite) -> [];
replica_location(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Create three replicas
Check = fun(Tab, Schema) ->
?match({atomic, ok}, mnesia:create_table([{name, Tab}|Schema])),
?match([], ?vrl(Tab, [Node1], [Node2], [Node3], Nodes)),
Delete one replica
?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
?match([], ?vrl(Tab, [Node1], [], [Node3], Nodes)),
Move one replica
?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
?match([], ?vrl(Tab, [Node2], [], [Node3], Nodes)),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, ram_copies)),
?match([], ?vrl(Tab, [], [Node2], [Node3], Nodes))
end,
Check(replica_location, [{disc_only_copies, [Node1]},
{ram_copies, [Node2]}, {disc_copies, [Node3]}]),
Check(ext_location, [{disc_only_copies, [Node1]},
{ext_ets, [Node2]}, {disc_copies, [Node3]}]),
?verify_mnesia(Nodes, []).
table_lifecycle(suite) -> [];
table_lifecycle(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
?match({atomic, ok}, mnesia:create_table([{type, bag},
{ram_copies, [Node1]},
{attributes, [rajtan, tajtan]},
{name, order_of_args}])),
?match([], mnesia:dirty_read({order_of_args, 4711})),
?match({atomic, ok}, mnesia:create_table([{name, already_exists},
{ram_copies, [Node1]}])),
?match({aborted, Reason23 } when element(1, Reason23) ==already_exists,
mnesia:create_table([{name, already_exists},
{ram_copies, [Node1]}])),
?match({aborted, Reason21 } when element(1, Reason21) == bad_type,
mnesia:create_table([{name, bad_node}, {ram_copies, ["foo"]}])),
?match({aborted, Reason2} when element(1, Reason2) == bad_type,
mnesia:create_table([{name, zero_arity}, {attributes, []}])),
?match({aborted, Reason3} when element(1, Reason3) == badarg,
mnesia:create_table([])),
?match({aborted, Reason4} when element(1, Reason4) == badarg,
mnesia:create_table(atom)),
?match({aborted, Reason5} when element(1, Reason5) == badarg,
mnesia:create_table({cstruct, table_name_as_atom})),
?match({aborted, Reason6 } when element(1, Reason6) == bad_type,
mnesia:create_table([{name, no_host}, {ram_copies, foo}])),
?match({aborted, Reason7 } when element(1, Reason7) == bad_type,
mnesia:create_table([{name, no_host}, {disc_only_copies, foo}])),
?match({aborted, Reason8} when element(1, Reason8) == bad_type,
mnesia:create_table([{name, no_host}, {disc_copies, foo}])),
CreateFun =
fun() -> ?match({aborted, nested_transaction},
mnesia:create_table([{name, nested_trans}])), ok
end,
?match({atomic, ok}, mnesia:transaction(CreateFun)),
?match({atomic, ok}, mnesia:create_table([{name, remote_tab},
{ram_copies, [Node2]}])),
?match({atomic, ok}, mnesia:create_table([{name, a_brand_new_tab},
{ram_copies, [Node1]}])),
?match([], mnesia:dirty_read({a_brand_new_tab, 4711})),
?match({atomic, ok}, mnesia:delete_table(a_brand_new_tab)),
?match({'EXIT', {aborted, Reason31}} when element(1, Reason31) == no_exists,
mnesia:dirty_read({a_brand_new_tab, 4711})),
?match({aborted, Reason41} when element(1, Reason41) == no_exists,
mnesia:delete_table(a_brand_new_tab)),
?match({aborted, Reason9} when element(1, Reason9) == badarg,
mnesia:create_table([])),
?match({atomic, ok}, mnesia:create_table([{name, nested_del_trans},
{ram_copies, [Node1]}])),
DeleteFun = fun() -> ?match({aborted, nested_transaction},
mnesia:delete_table(nested_del_trans)), ok end,
?match({atomic, ok}, mnesia:transaction(DeleteFun)),
?match({aborted, Reason10} when element(1, Reason10) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, 2}])),
?match({aborted, Reason32} when element(1, Reason32) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [-1]}])),
?match({aborted, Reason33} when element(1, Reason33) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [0]}])),
?match({aborted, Reason34} when element(1, Reason34) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [1]}])),
?match({aborted, Reason35} when element(1, Reason35) == bad_type,
mnesia:create_table([{name, create_with_index}, {index, [2]}])),
?match({atomic, ok},
mnesia:create_table([{name, create_with_index}, {index, [3]},
{ram_copies, [Node1]}])),
ets:new(ets_table, [named_table]),
?match({aborted, _}, mnesia:create_table(ets_table, [{ram_copies, Nodes}])),
?match({aborted, _}, mnesia:create_table(ets_table, [{ram_copies, [Node1]}])),
ets:delete(ets_table),
?match({atomic, ok}, mnesia:create_table(ets_table, [{ram_copies, [Node1]}])),
?match(Node1, rpc:call(Node1, mnesia_lib, val, [{ets_table,where_to_read}])),
?match(Node1, rpc:call(Node2, mnesia_lib, val, [{ets_table,where_to_read}])),
?match({atomic, ok}, mnesia:change_table_copy_type(ets_table, Node1, disc_only_copies)),
?match(Node1, rpc:call(Node2, mnesia_lib, val, [{ets_table,where_to_read}])),
?verify_mnesia(Nodes, []).
storage_options(suite) -> [];
storage_options(Config) when is_list(Config) ->
[N1,N2,N3] = Nodes = ?acquire_nodes(3, Config),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,foobar}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,[foobar]}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{ets,[duplicate_bag]}]}])),
?match({aborted,_}, mnesia:create_table(a, [{storage_properties, [{dets,[{type,bag}]}]}])),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [N1]},
{disc_only_copies, [N2]},
{storage_properties,
[{ets,[compressed]},
{dets, [{auto_save, 5000}]} ]}])),
?match(true, ets:info(a, compressed)),
?match(5000, rpc:call(N2, dets, info, [a, auto_save])),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([{a,1,1}], mnesia:dirty_read({a,1})),
mnesia:dump_log(),
W2C1 = [{N2, disc_only_copies}, {N1, ram_copies}],
?match(W2C1, lists:sort(rpc:call(N2, mnesia_lib, val, [{a, where_to_commit}]))),
?match(W2C1, lists:sort(rpc:call(N3, mnesia_lib, val, [{a, where_to_commit}]))),
?match({atomic,ok}, mnesia:change_table_copy_type(a, N1, disc_only_copies)),
W2C2 = [{N2, disc_only_copies}, {N1, disc_only_copies}],
?match(W2C2, lists:sort(rpc:call(N2, mnesia_lib, val, [{a, where_to_commit}]))),
?match(W2C2, lists:sort(rpc:call(N3, mnesia_lib, val, [{a, where_to_commit}]))),
?match(undefined, ets:info(a, compressed)),
?match(5000, dets:info(a, auto_save)),
?match({atomic,ok}, mnesia:change_table_copy_type(a, N1, disc_copies)),
?match(true, ets:info(a, compressed)),
?verify_mnesia(Nodes, []).
clear_table_during_load(suite) -> [];
clear_table_during_load(doc) ->
["Clear table caused during load caused a schema entry in the actual tab"];
clear_table_during_load(Config) when is_list(Config) ->
Nodes = [_, Node2] = ?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic,ok}, mnesia:create_table(cleartab, [{ram_copies, Nodes}])),
Tester = self(),
Bin = <<"Testingasdasd", 0:32000>>,
Fill = fun() -> [mnesia:write({cleartab, N, Bin}) || N <- lists:seq(1, 3000)], ok end,
?match({atomic, ok}, mnesia:sync_transaction(Fill)),
StopAndStart = fun() ->
stopped = mnesia:stop(),
Tester ! {self(), stopped},
receive start_node -> ok end,
ok = mnesia:start(),
ok = mnesia:wait_for_tables([cleartab], 2000),
lists:foreach(fun({cleartab,_,_}) -> ok;
(What) -> Tester ! {failed, What},
unlink(Tester),
exit(foo)
end,
ets:tab2list(cleartab)),
Tester ! {self(), ok},
normal
end,
Test = fun(N) ->
Pid = spawn_link(Node2, StopAndStart),
receive {Pid, stopped} -> ok end,
Pid ! start_node,
timer:sleep(N*10),
{atomic, ok} = mnesia:clear_table(cleartab),
receive
{Pid, ok} -> ok;
{failed, What} ->
io:format("Failed in ~p tries, with ~p~n",[N, What]),
exit({error, What});
{'EXIT', Pid, Reason} ->
exit({died, Reason})
end
end,
[Test(N) || N <- lists:seq(1, 10)],
?verify_mnesia(Nodes, []).
add_copy_conflict(suite) -> [];
add_copy_conflict(doc) ->
["Verify that OTP-5065 doesn't happen again, whitebox testing"];
add_copy_conflict(Config) when is_list(Config) ->
Nodes = [Node1, Node2] =
?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, Nodes}])),
?match({atomic, ok}, mnesia:create_table(b, [{ram_copies, Nodes}])),
?match({atomic, ok}, mnesia:create_table(test, [{ram_copies, [Node2]}])),
mnesia:stop(),
?match(ok,mnesia:start([{no_table_loaders, 1}])),
verify_ll_queue(10),
Self = self(),
Test = fun() ->
Res = mnesia:add_table_copy(test, Node1, ram_copies),
Self ! {test, Res}
end,
spawn_link(Test),
?match_receive(timeout),
mnesia_controller:unblock_controller(),
?match_receive({test, {atomic,ok}}),
?match(ok, mnesia:wait_for_tables([a,b], 3000)),
?verify_mnesia(Nodes, []),
?cleanup(1, Config).
verify_ll_queue(0) ->
?error("Couldn't find anything in queue~n",[]);
verify_ll_queue(N) ->
?match(granted,mnesia_controller:block_controller()),
case mnesia_controller:get_info(1000) of
{info,{state,_,true,[],_Loader,[],[],[],_,_,_,_,_,_}} ->
mnesia_controller:unblock_controller(),
timer:sleep(10),
verify_ll_queue(N-1);
{info,{state,_,true,[],Loader,LL,[],[],_,_,_,_,_,_}} ->
io:format("~p~n", [{Loader,LL}]),
Else ->
?error("No match ~p maybe the internal format has changed~n",[Else])
end.
add_copy_when_going_down(suite) -> [];
add_copy_when_going_down(doc) ->
["Tests abort when node we load from goes down"];
add_copy_when_going_down(Config) ->
[Node1, Node2] =
?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node1]}])),
Tester = self(),
WriteAndWait = fun() ->
mnesia:write({a,1,1}),
Tester ! {self(), got_lock},
receive continue -> ok
end
end,
Locker = spawn(fun() -> mnesia:transaction(WriteAndWait) end),
receive {Locker, got_lock} -> ok end,
spawn_link(fun() -> Res = rpc:call(Node2, mnesia, add_table_copy,
[a, Node2, ram_copies]),
Tester ! {test, Res}
end),
?match_receive(timeout),
mnesia_test_lib:kill_mnesia([Node1]),
?match_receive({test,{aborted,_}}),
?verify_mnesia([Node2], []).
%% Regression test for erlang/otp#6013 (whitebox): the DESTINATION node of an
%% add_table_copy goes down during the copy; the schema operation must still
%% finish and no table lock may be leaked.
add_copy_when_dst_going_down(suite) -> [];
add_copy_when_dst_going_down(doc) ->
["Table copy destination node goes down. Verify that the issue fixed in erlang/otp#6013 doesn't happen again, whitebox testing."];
add_copy_when_dst_going_down(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node1]}])),
%% Make the table big enough that copying it to Node2 takes a while.
lists:foreach(fun(I) ->
ok = mnesia:sync_dirty(fun() -> mnesia:write({a, I, I}) end)
end,
lists:seq(1, 100000)),
?match({ok, _}, mnesia:change_config(extra_db_nodes, [Node2])),
Tester = self(),
spawn_link(fun() ->
mnesia:add_table_copy(a, Node2, ram_copies),
Tester ! add_table_copy_finished
end),
%% Wait for `mnesia_loader:send_more/6` has started
spawn_link(fun() ->
Fun = fun() ->
ok = mnesia:write_lock_table(a),
Tester ! {write_lock_acquired, self()},
receive node2_mnesia_killed -> ok
end,
Tester ! write_lock_released,
ok
end,
mnesia:transaction(Fun)
end),
receive {write_lock_acquired, Locker} -> ok
end,
%% Wait for `mnesia_loader:send_more/6` has finished
?match([], mnesia_test_lib:kill_mnesia([Node2])),
Locker ! node2_mnesia_killed,
receive write_lock_released -> ok
end,
receive add_table_copy_finished -> ok
end,
%% The table must be write-lockable again, i.e. no lock was leaked.
?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write_lock_table(a) end, 10)),
?verify_mnesia([Node1], []).
%% Exercise add_table_copy/del_table_copy/change_table_copy_type while some
%% replica nodes are stopped, for each storage type in turn. After each
%% successful add while nodes are down, restart them and verify that the
%% record written on Node1 is replicated everywhere.
add_copy_with_down(suite) -> [];
add_copy_with_down(Config) ->
Nodes = [Node1, Node2, Node3] = ?acquire_nodes(3, Config),
?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node3]}, {disc_copies, [Node2]}])),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
%% With every replica holder down, schema changes must abort.
?match({aborted, _}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match({aborted, _}, mnesia:del_table_copy(a, Node2)),
ok = rpc:call(Node3, mnesia, start, []),
%% Node3 is started but the table is not loaded there yet; still aborts.
?match({aborted, _}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match([], mnesia_test_lib:start_mnesia([Node2], [a])),
?match({atomic, ok}, mnesia:change_table_copy_type(a, Node2, ram_copies)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
%% ram_copies: add on Node1 succeeds even with the other nodes down.
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, ram_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
?match({atomic, ok}, mnesia:del_table_copy(a, Node1)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
%% Same cycle for disc_copies.
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, disc_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
?match({atomic, ok}, mnesia:del_table_copy(a, Node1)),
stopped = rpc:call(Node2, mnesia, stop, []),
stopped = rpc:call(Node3, mnesia, stop, []),
%% Same cycle for disc_only_copies.
?match({atomic, ok}, mnesia:add_table_copy(a, Node1, disc_only_copies)),
?match(ok, mnesia:dirty_write({a,1,1})),
?match([], mnesia_test_lib:start_mnesia([Node2,Node3], [a])),
?match([{a,1,1}], rpc:call(Node1, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node2, mnesia, dirty_read, [{a,1}])),
?match([{a,1,1}], rpc:call(Node3, mnesia, dirty_read, [{a,1}])),
?verify_mnesia(Nodes, []).
%% Record layouts used by replica_management/1; the second is the target
%% layout for the transform_table/4 calls near the end of the test.
-record(replica_management, {k, v}).
-record(new_replica_management, {k, v, extra}).
%% Sort the result list of an rpc:multicall ({ResL, BadNodes}) tuple.
-define(SS(R), lists:sort(element(1,R))).
replica_management(doc) ->
"Add, drop and move replicas, change storage types.";
replica_management(suite) ->
[];
replica_management(Config) when is_list(Config) ->
Nodes = [Node1, Node2, Node3] = ?acquire_nodes(3, Config),
Tab = replica_management,
Attrs = record_info(fields, replica_management),
%% Start with a mixed ram_copies/ext_ets table holding 10 records.
?match({atomic, ok},
mnesia:create_table([{name, Tab}, {attributes, Attrs},
{ram_copies, [Node1]}, {ext_ets, [Node3]}])),
[?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <-lists:seq(1, 10)],
?match([], ?vrl(Tab, [], [Node1, Node3], [], Nodes)),
%% dump_tables on a ram table combined with disc storage must abort.
?match({atomic, ok}, mnesia:dump_tables([Tab])),
?match({aborted, Reason50 } when element(1, Reason50) == combine_error,
mnesia:add_table_copy(Tab, Node2, disc_copies)),
?match({aborted, Reason51 } when element(1, Reason51) == combine_error,
mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
?match({atomic, ok}, mnesia:clear_table(Tab)),
%% SyncedCheck takes a write lock first so all replicas are in sync
%% before the sizes are compared.
SyncedCheck = fun() ->
mnesia:lock({record,Tab,0}, write),
?match([0,0,0], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size])))
end,
mnesia:transaction(SyncedCheck),
?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
%% Drop all replicas; the table then no longer exists.
?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
?match({atomic, ok}, mnesia:del_table_copy(Tab, Node3)),
?match([], ?vrl(Tab, [], [], [], Nodes)),
?match({aborted,Reason52} when element(1, Reason52) == no_exists,
mnesia:add_table_copy(Tab, Node3, ram_copies)),
%% Re-create as disc_copies on Node1 and grow the replica set again.
?match({atomic, ok}, mnesia:create_table([{name, Tab},
{attributes, Attrs},
{disc_copies, [Node1]}])),
?match([], ?vrl(Tab, [], [], [Node1], Nodes)),
[?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <-lists:seq(1, 10)],
?match({aborted, Reason53} when element(1, Reason53) == badarg,
mnesia:add_table_copy(Tab, Node2, bad_storage_type)),
?match({atomic, ok}, mnesia:add_table_copy(Tab, Node2, disc_only_copies)),
?match([], ?vrl(Tab, [Node2], [], [Node1], Nodes)),
?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:add_table_copy(Tab, Node3, ext_ets)),
?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok},
mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
?match([], ?vrl(Tab, [Node1, Node2], [Node3], [], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({aborted, Reason54} when element(1, Reason54) == already_exists,
mnesia:add_table_copy(Tab, Node3, ram_copies)),
?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
?match([], ?vrl(Tab, [Node2], [Node3], [], Nodes)),
?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
%% A clashing local ets table with the same name must make the add abort.
?match(Tab, ets:new(Tab, [named_table])),
?match({aborted, _}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
?match(true, ets:delete(Tab)),
?match({atomic, ok}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
%% Walk through storage-type conversions, checking placement and size
%% after each step.
?match({atomic, ok},mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, ext_ets)),
?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok},
mnesia:change_table_copy_type(Tab, Node2, disc_copies)),
?match([], ?vrl(Tab, [], [Node3], [Node1,Node2], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
?match([], ?vrl(Tab, [Node1], [Node3], [Node2], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
%% A clashing local ets table must also block a type change on this node.
?match(Tab, ets:new(Tab, [named_table])),
?match({aborted, _}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
?match(true, ets:delete(Tab)),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
?match([], ?vrl(Tab, [], [Node3,Node1], [Node2], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
?match([], ?vrl(Tab, [], [Node3], [Node2,Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, disc_only_copies)),
?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:clear_table(Tab)),
mnesia:transaction(SyncedCheck),
[?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <-lists:seq(1, 10)],
%% Move replicas between nodes.
?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
?match([], ?vrl(Tab, [Node3], [], [Node1], Nodes)),
?match({aborted, Reason55} when element(1, Reason55) == already_exists,
mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes)),
?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, ext_ets)),
?match([], ?vrl(Tab, [], [Node3], [Node2], Nodes)),
?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({atomic, ok}, mnesia:move_table_copy(Tab, Node3, Node1)),
?match([], ?vrl(Tab, [], [Node1], [Node2], Nodes)),
?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
?match({aborted, _}, mnesia:move_table_copy(Tab, Node1, Node2)),
?match({aborted, _}, mnesia:move_table_copy(Tab, Node3, Node2)),
?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node3)),
%% Moving a replica away from a stopped node must abort.
?match([], mnesia_test_lib:stop_mnesia([Node3])),
?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:write({Tab, 43, sync_me}) end)),
?match([], ?vrl(Tab, [], [Node3], [Node2],Nodes -- [Node3])),
?match({aborted,Reason56} when element(1, Reason56) == not_active,
mnesia:move_table_copy(Tab, Node3, Node1)),
?match([], ?vrl(Tab, [], [Node3], [Node2],Nodes -- [Node3])),
?match([], mnesia_test_lib:start_mnesia([Node3])),
?match([], ?vrl(Tab, [], [Node3], [Node2], Nodes)),
?match([{Tab,43,sync_me}], mnesia:dirty_read({Tab,43})),
%% Transform the table into the new record layout.
NewAttrs = record_info(fields, new_replica_management),
Transformer =
fun(Rec) when is_record(Rec, replica_management) ->
#new_replica_management{k = Rec#replica_management.k,
v = Rec#replica_management.v,
extra = Rec#replica_management.k * 2}
end,
?match({atomic, ok}, mnesia:transform_table(Tab, fun(R) -> R end, Attrs)),
?match({atomic, ok}, mnesia:transform_table(Tab, Transformer, NewAttrs, new_replica_management)),
ERlist = [#new_replica_management{k = K, v = K+2, extra = K*2} || K <- lists:seq(1, 10)],
ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
?match(ERlist, ARlist),
%% Bad transform arguments must abort with bad_type.
?match({aborted, Reason56} when element(1, Reason56) == bad_type,
mnesia:transform_table(Tab, Transformer, 0)),
?match({aborted, Reason57} when element(1, Reason57) == bad_type,
mnesia:transform_table(Tab, Transformer, -1)),
?match({aborted, Reason58} when element(1, Reason58) == bad_type,
mnesia:transform_table(Tab, Transformer, [])),
?match({aborted, Reason59} when element(1, Reason59) == bad_type,
mnesia:transform_table(Tab, no_fun, NewAttrs)),
?match({aborted, Reason59} when element(1, Reason59) == bad_type,
mnesia:transform_table(Tab, fun(X) -> X end, NewAttrs, {tuple})),
%% 'ignore' changes the schema only, not the stored records.
?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
NewAttrs ++ [dummy])),
?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
NewAttrs ++ [dummy], last_rec)),
ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
%% Writes must now respect the new record name and arity.
?match({'EXIT',{aborted,{bad_type,_}}},
mnesia:dirty_write(Tab, #new_replica_management{})),
?match(ok, mnesia:dirty_write(Tab, {last_rec, k, v, e, dummy})),
?verify_mnesia(Nodes, []).
%% Verify that schema operations succeed (or fail) as intended while some
%% of the db nodes are down, and that a restarted replica catches up.
schema_availability(doc) ->
    ["Test that schema succeeds (or fails) as intended when some db nodes are down."];
schema_availability(suite) ->
    [];
schema_availability(Config) when is_list(Config) ->
    [N1, _N2, N3] = Nodes = ?acquire_nodes(3, Config),
    Tab = schema_availability,
    StorageType = mnesia_test_lib:storage_type(ram_copies, Config),
    TabDef = [{StorageType, [N1, N3]}],
    ?match({atomic, ok}, mnesia:create_table(Tab, TabDef)),
    Count = 10,
    Fill = fun() ->
                   lists:foreach(fun(K) -> mnesia:write({Tab, K, K + 2}) end,
                                 lists:seq(1, Count)),
                   ok
           end,
    ?match(ok, mnesia:sync_dirty(Fill)),
    %% Both replica nodes hold every record; the middle node holds none.
    ?match({[Count,0,Count], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    ?match([], mnesia_test_lib:kill_mnesia([N3])),
    ?match({[Count,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    %% Restarting N3 reloads the full table there.
    ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
    ?match({[Count,0,Count], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    ?match([], mnesia_test_lib:kill_mnesia([N3])),
    %% clear_table is a schema transaction; it must succeed with N3 down
    %% and the emptiness must survive N3 coming back.
    ?match({atomic, ok}, mnesia:clear_table(Tab)),
    ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
    ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
    ?verify_mnesia(Nodes, []).
%% Expected remote failure when a node without a local_content replica
%% touches the table.
-define(badrpc(Tab), {badrpc, {'EXIT', {aborted,{no_exists,Tab}}}}).
local_content(doc) ->
["Test local_content functionality, we want to see that correct"
" properties gets propageted correctly between nodes"];
local_content(suite) -> [];
local_content(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Tab1 = local1,
Def1 = [{local_content, true}, {ram_copies, Nodes}],
Tab2 = local2,
Def2 = [{local_content, true}, {disc_copies, [Node1]}],
Tab3 = local3,
Def3 = [{local_content, true}, {disc_only_copies, [Node1]}],
Tab4 = local4,
Def4 = [{local_content, true}, {ram_copies, [Node1]}],
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
?match({atomic, ok}, mnesia:create_table(Tab4, Def4)),
%% Each node may write its own content into Tab1 (replica everywhere).
?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab1, 1, Node1}])),
?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab1, 1, Node2}])),
?match(ok, rpc:call(Node3, mnesia, dirty_write, [{Tab1, 1, Node3}])),
?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab2, 1, Node1}])),
?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab3, 1, Node1}])),
?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab4, 1, Node1}])),
%% Nodes without a replica get no_exists on access.
?match(?badrpc(Tab2), rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
?match(?badrpc(Tab3), rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
?match(?badrpc(Tab4), rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
%% Adding a replica of a local_content table starts it EMPTY on the
%% new node; existing content is not copied.
?match({atomic, ok}, rpc:call(Node1, mnesia, add_table_copy, [Tab2, Node2, ram_copies])),
?match({atomic, ok}, rpc:call(Node2, mnesia, add_table_copy, [Tab3, Node2, disc_copies])),
?match({atomic, ok}, rpc:call(Node3, mnesia, add_table_copy, [Tab4, Node2, disc_only_copies])),
?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
%% Every node sees only its own writes.
?match([{Tab1, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab1, 1}])),
?match([{Tab2, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab2, 1}])),
?match([{Tab3, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab3, 1}])),
?match([{Tab4, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab4, 1}])),
?match([{Tab1, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab1, 1}])),
?match([{Tab2, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
?match([{Tab3, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
?match([{Tab4, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
?match([{Tab1, 1, Node3}], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
?match(?badrpc([_Tab2, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab2, 1}])),
?match(?badrpc([_Tab3, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab3, 1}])),
?match(?badrpc([_Tab4, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab4, 1}])),
?match({atomic, ok},
mnesia:change_table_copy_type(schema, Node3, ram_copies)),
?match([], mnesia_test_lib:stop_mnesia([Node3])),
%% Added for OTP-44306
?match(ok, rpc:call(Node3, mnesia, start, [[{schema, ?BACKEND}]])),
?match({ok, _}, mnesia:change_config(extra_db_nodes, [Node3])),
mnesia_test_lib:sync_tables([Node3], [Tab1]),
%% After restart the local content on Node3 is gone.
?match([], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
%% clear_table only clears the node that issued it.
?match({atomic, ok}, rpc:call(Node1, mnesia, clear_table, [Tab1])),
SyncedCheck = fun(Tab) ->
mnesia:lock({record,Tab,0}, write),
{OK, []} = rpc:multicall(Nodes, mnesia, table_info, [Tab, size]),
OK
end,
?match({atomic, [0,1,0]}, mnesia:transaction(SyncedCheck, [Tab1])),
?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab2])),
?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab2])),
?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab3])),
?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab3])),
?verify_mnesia(Nodes, []).
%% Exercise mnesia:change_table_access_mode/2: read_only tables must
%% reject writes and schema changes on all nodes, and read_write must
%% restore normal operation.
change_table_access_mode(suite) -> [];
change_table_access_mode(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Tab = test_access_mode_tab,
Def = case mnesia_test_lib:diskless(Config) of
true -> [{name, Tab}, {ram_copies, Nodes}];
false -> [{name, Tab}, {ram_copies, [Node1]},
{disc_copies, [Node2]},
{disc_only_copies, [Node3]}]
end,
?match({atomic, ok}, mnesia:create_table(Def)),
Write = fun(What) -> mnesia:write({Tab, 1, What}) end,
Read = fun() -> mnesia:read({Tab, 1}) end,
?match({atomic, ok}, mnesia:transaction(Write, [test_ok])),
%% read_only: writes abort on every node, reads still work.
?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
?match({aborted, _}, mnesia:transaction(Write, [nok])),
?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, [nok]})),
?match({aborted, _}, rpc:call(Node2, mnesia, transaction, [Write, [nok]])),
?match({aborted, _}, rpc:call(Node3, mnesia, transaction, [Write, [nok]])),
?match({atomic, [{Tab, 1, test_ok}]}, mnesia:transaction(Read)),
%% read_write: writes work again from every node.
?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
?match({atomic, ok}, mnesia:transaction(Write, [test_ok1])),
?match({atomic, [{Tab, 1, test_ok1}]}, mnesia:transaction(Read)),
?match({atomic, ok}, rpc:call(Node2, mnesia, transaction, [Write, [test_ok2]])),
?match({atomic, [{Tab, 1, test_ok2}]}, mnesia:transaction(Read)),
?match({atomic, ok}, rpc:call(Node3, mnesia, transaction, [Write, [test_ok3]])),
?match({atomic, [{Tab, 1, test_ok3}]}, mnesia:transaction(Read)),
?match({atomic, ok}, mnesia:delete_table(Tab)),
%% Invalid access mode at creation is rejected.
Def4 = [{name, Tab}, {access_mode, read_only_bad}],
?match({aborted, {bad_type, _, _}}, mnesia:create_table(Def4)),
%% A table may be created read_only from the start.
Def2 = [{name, Tab}, {access_mode, read_only}],
?match({atomic, ok}, mnesia:create_table(Def2)),
?match({aborted, _}, mnesia:transaction(Write, [nok])),
?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
?match({atomic, ok}, mnesia:delete_table(Tab)),
Def3 = [{name, Tab}, {mnesia_test_lib:storage_type(disc_copies, Config),
[Node1, Node2]},
{access_mode, read_write}],
?match({atomic, ok}, mnesia:create_table(Def3)),
?match({atomic, ok}, mnesia:transaction(Write, [ok])),
%% read_only also blocks replica removal and table deletion.
?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
?match({aborted, _}, mnesia:del_table_copy(Tab, Node2)),
?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
?match({aborted, _}, mnesia:delete_table(Tab)),
?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
?match({aborted, {bad_type, _, _}},
mnesia:change_table_access_mode(Tab, strange_atom)),
?match({atomic, ok}, mnesia:delete_table(Tab)),
%% Unknown or malformed table names abort with no_exists.
?match({aborted, {no_exists, _}},
mnesia:change_table_access_mode(err_tab, read_only)),
?match({aborted, {no_exists, _}},
mnesia:change_table_access_mode([Tab], read_only)),
?verify_mnesia(Nodes, []).
%% Exercise mnesia:change_table_load_order/2: non-integer priorities and
%% unknown tables must abort, valid integer priorities are accepted, and
%% a negative priority (here on the schema table) is rejected.
change_table_load_order(suite) -> [];
change_table_load_order(Config) when is_list(Config) ->
    [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
    TabDef = case mnesia_test_lib:diskless(Config) of
                 true ->
                     [{ram_copies, Nodes}];
                 false ->
                     [{ram_copies, [Node1]},
                      {disc_copies, [Node2]},
                      {disc_only_copies, [Node3]}]
             end,
    [Tab1, Tab2, Tab3] = Tabs = [load_order_tab1, load_order_tab2, load_order_tab3],
    lists:foreach(fun(T) ->
                          ?match({atomic, ok}, mnesia:create_table(T, TabDef))
                  end, Tabs),
    %% Invalid arguments must abort.
    ?match({aborted, _}, mnesia:change_table_load_order(Tab1, should_be_integer)),
    ?match({aborted, _}, mnesia:change_table_load_order(err_tab, 5)),
    ?match({aborted, _}, mnesia:change_table_load_order([err_tab], 5)),
    %% Any non-negative integer priority is accepted.
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab1, 5)),
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab2, 4)),
    ?match({atomic, ok}, mnesia:change_table_load_order(Tab3, 73)),
    ?match({aborted, _}, mnesia:change_table_load_order(schema, -32)),
    ?verify_mnesia(Nodes, []).
%% Exercise mnesia:set_master_nodes/1,2 while mnesia is running: invalid
%% node lists are rejected, per-table and global settings are reflected
%% by table_info(_, master_nodes).
set_master_nodes(suite) -> [];
set_master_nodes(Config) when is_list(Config) ->
[Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
Tab1 = master_node_tab1,
Tab2 = master_node_tab2,
Tab3 = master_node_tab3,
Def1 = [{ram_copies, [Node1, Node2]}],
Def2 = [{disc_copies, [Node2, Node3]}],
Def3 = [{disc_only_copies, [Node3, Node1]}],
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
%% Unknown nodes, unknown tables and nodes without a replica are rejected.
?match({error, _}, mnesia:set_master_nodes(schema, [''])),
?match({error, _}, mnesia:set_master_nodes(badtab, [Node2, Node3])),
?match({error, _}, mnesia:set_master_nodes(Tab1, [Node3])),
?match([], mnesia:table_info(Tab1, master_nodes)),
%% Per-table settings stick and can be re-set.
?match(ok, mnesia:set_master_nodes(schema, [Node3, Node1])),
?match([Node3, Node1], mnesia:table_info(schema, master_nodes)),
?match(ok, mnesia:set_master_nodes(Tab1, [Node2])),
?match([Node2], mnesia:table_info(Tab1, master_nodes)),
?match(ok, mnesia:set_master_nodes(Tab1, [Node2, Node1])),
?match([Node2, Node1], mnesia:table_info(Tab1, master_nodes)),
?match([Node2], mnesia:table_info(Tab2, master_nodes)),
?match(ok, mnesia:set_master_nodes(Tab3, [Node3])),
?match([Node3], mnesia:table_info(Tab3, master_nodes)),
%% An empty list clears the setting for that table.
?match(ok, mnesia:set_master_nodes(Tab3, [])),
?match([], mnesia:table_info(Tab3, master_nodes)),
%% set_master_nodes/1 applies to all tables, but only keeps nodes that
%% actually hold a replica of each table.
?match(ok, mnesia:set_master_nodes([Node1])),
?match([Node1], mnesia:table_info(schema, master_nodes)),
?match([Node1], mnesia:table_info(Tab1, master_nodes)),
?match([], mnesia:table_info(Tab2, master_nodes)),
?match([Node1], mnesia:table_info(Tab3, master_nodes)),
?match(ok, mnesia:set_master_nodes([Node1, Node2])),
?match([Node1, Node2], mnesia:table_info(schema, master_nodes)),
?match([Node1, Node2], mnesia:table_info(Tab1, master_nodes)),
?match([Node2], mnesia:table_info(Tab2, master_nodes)),
?match([Node1], mnesia:table_info(Tab3, master_nodes)),
?verify_mnesia(Nodes, []).
%% Verify that master nodes can be configured while mnesia is stopped and
%% that the settings are picked up (and can be cleared) across restarts.
offline_set_master_nodes(suite) -> [];
offline_set_master_nodes(Config) when is_list(Config) ->
    [Node] = Nodes = ?acquire_nodes(1, Config),
    Tab1 = offline_master_node_tab1,
    Tab2 = offline_master_node_tab2,
    Tab3 = offline_master_node_tab3,
    Tabs = ?sort([Tab1, Tab2, Tab3]),
    ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, [Node]}])),
    ?match({atomic, ok}, mnesia:create_table(Tab2, [{disc_copies, [Node]}])),
    ?match({atomic, ok}, mnesia:create_table(Tab3, [{disc_only_copies, [Node]}])),
    ?match([], mnesia:system_info(master_node_tables)),
    %% Per-table master nodes, set while mnesia is down, show up after restart.
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    lists:foreach(fun(T) -> ?match(ok, mnesia:set_master_nodes(T, [Node])) end,
                  [Tab1, Tab2, Tab3]),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match(Tabs, ?sort(mnesia:system_info(master_node_tables))),
    %% Clearing the per-table settings while down also sticks.
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    lists:foreach(fun(T) -> ?match(ok, mnesia:set_master_nodes(T, [])) end,
                  [Tab1, Tab2, Tab3]),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match([], mnesia:system_info(master_node_tables)),
    %% Global master nodes cover every table, including schema.
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes([Node])),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    AllTabs = ?sort([schema | Tabs]),
    ?match(AllTabs, ?sort(mnesia:system_info(master_node_tables))),
    ?match([], mnesia_test_lib:stop_mnesia([Node])),
    ?match(ok, mnesia:set_master_nodes([])),
    ?match([], mnesia_test_lib:start_mnesia([Node])),
    ?match([], mnesia:system_info(master_node_tables)),
    ?verify_mnesia(Nodes, []).
%% Verify that mnesia:dump_tables/1 persists a ram_copies table to disc so
%% that later (undumped) updates are lost on restart, and that dumping a
%% non-existing table aborts.
dump_tables(suite) -> [];
dump_tables(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab = dump_tables,
Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node2]}],
?match({atomic, ok}, mnesia:create_table(Schema)),
Size = 10,
Keys = lists:seq(1, Size),
Records = [{Tab, A, 7} || A <- Keys],
lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
AllKeys = fun() -> ?sort(mnesia:all_keys(Tab)) end,
?match({atomic, Keys}, mnesia:transaction(AllKeys)),
?match({atomic, ok}, mnesia:dump_tables([Tab])),
%% Delete one record
?match(ok, mnesia:dirty_delete({Tab, 5})),
Keys2 = lists:delete(5, Keys),
?match({atomic, Keys2}, mnesia:transaction(AllKeys)),
%% Check that all 10 is restored after a stop
?match([], mnesia_test_lib:stop_mnesia([Node1, Node2])),
?match([], mnesia_test_lib:start_mnesia([Node1, Node2])),
?match(ok, mnesia:wait_for_tables([Tab], infinity)),
?match({atomic, Keys}, mnesia:transaction(AllKeys)),
?match({aborted,Reason} when element(1, Reason) == no_exists,
mnesia:dump_tables([foo])),
?verify_mnesia(Nodes, []).
%% Exercise mnesia:dump_log/0 with all storage types present, on a remote
%% node, after a schema type change, and concurrently from two processes.
dump_log(suite) -> [];
dump_log(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab = dump_log,
Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1, Node2]}],
?match({atomic, ok}, mnesia:create_table(Schema)),
Tab1 = dump_log1,
Schema1 = [{name, Tab1}, {attributes, [k, v]}, {disc_copies, [Node1]}],
?match({atomic, ok}, mnesia:create_table(Schema1)),
Tab2 = dump_log2,
Schema2 = [{name, Tab2}, {attributes, [k, v]}, {disc_only_copies, [Node1]}],
?match({atomic, ok}, mnesia:create_table(Schema2)),
?match(ok, mnesia:dirty_write({Tab, 1, ok})),
?match(ok, mnesia:dirty_write({Tab1, 1, ok})),
?match(ok, mnesia:dirty_write({Tab2, 1, ok})),
?match(dumped, mnesia:dump_log()),
?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
%% dump_log must still work on a node with a ram_copies schema.
?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node2, ram_copies)),
?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
%% Two concurrent dumpers (see dump_log/2 below) must both finish.
Self = self(),
spawn(fun() -> dump_log(100, Self) end),
spawn(fun() -> dump_log(100, Self) end),
?match(ok, receive finished -> ok after 3000 -> timeout end),
?match(ok, receive finished -> ok after 3000 -> timeout end),
?verify_mnesia(Nodes, []).
%% Dump the transaction log Count times, then notify Tester with 'finished'.
dump_log(Count, Tester) ->
    case Count > 0 of
        true ->
            mnesia:dump_log(),
            dump_log(Count - 1, Tester);
        false ->
            Tester ! finished
    end.
%% Interface test of mnesia:wait_for_tables/2; see also force_load_table.
wait_for_tables(doc) ->
    ["Intf. test of wait_for_tables, see also force_load_table"];
wait_for_tables(suite) -> [];
wait_for_tables(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab = wf_tab,
    TabDef = [{name, Tab}, {ram_copies, [Node1, Node2]}],
    ?match({atomic, ok}, mnesia:create_table(TabDef)),
    %% A loaded table is waited for instantly, even with a zero timeout.
    ?match(ok, mnesia:wait_for_tables([Tab], infinity)),
    ?match(ok, mnesia:wait_for_tables([], timer:seconds(5))),
    %% Unknown tables time out; a negative timeout is an error.
    ?match({timeout, [bad_tab]}, mnesia:wait_for_tables([bad_tab], timer:seconds(5))),
    ?match(ok, mnesia:wait_for_tables([Tab], 0)),
    ?match({error, _}, mnesia:wait_for_tables([Tab], -1)),
    ?verify_mnesia(Nodes, []).
%% Verify mnesia:force_load_table/1: when the last node to write (Node2)
%% is down, the table is not loaded automatically; force-loading makes
%% this node's (possibly stale) copy available.
force_load_table(suite) -> [];
force_load_table(Config) when is_list(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
Tab = wf_tab,
Schema = [{name, Tab}, {disc_copies, [Node1, Node2]}],
?match({atomic, ok}, mnesia:create_table(Schema)),
?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
mnesia_test_lib:kill_mnesia([Node1]),
%% Node2 writes while Node1 is down, then dies too; Node1 restarts
%% with a copy it knows may be stale.
?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 1, test_nok}])),
mnesia_test_lib:kill_mnesia([Node2]),
?match(ok, mnesia:start()),
?match({timeout, [Tab]}, mnesia:wait_for_tables([Tab], 5)),
?match({'EXIT', _}, mnesia:dirty_read({Tab, 1})),
%% force_load_table makes the local (old) copy readable.
?match(yes, mnesia:force_load_table(Tab)),
?match([{Tab, 1, test_ok}], mnesia:dirty_read({Tab, 1})),
?match({error, _}, mnesia:force_load_table(error_tab)),
?verify_mnesia([Node1], [Node2]).
%% Reading, writing and deleting user properties on tables of each
%% storage type; initialization via the user_properties create option;
%% rejection of malformed properties.
user_properties(doc) ->
["Test of reading, writing and deletion of user properties",
"Plus initialization of user properties when a table is created",
"Do also test mnesia:table_info(Tab, user_properties)"];
user_properties(suite) -> [];
user_properties(Config) when is_list(Config) ->
[Node] = Nodes = ?acquire_nodes(1, Config),
Tab1 = user_properties_1,
Tab2 = user_properties_2,
Tab3 = user_properties_3,
Def1 = [{ram_copies, [Node]}, {user_properties, []}],
Def2 = [{mnesia_test_lib:storage_type(disc_copies, Config), [Node]}],
Def3 = [{mnesia_test_lib:storage_type(disc_only_copies, Config), [Node]},
{user_properties, []}],
%% Properties are keyed on the first element of the tuple.
PropKey = my_prop,
Prop = {PropKey, some, elements},
Prop2 = {PropKey, some, other, elements},
YourProp= {your_prop},
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
?match([], mnesia:table_info(Tab1, user_properties)),
?match([], mnesia:table_info(Tab2, user_properties)),
?match([], mnesia:table_info(Tab3, user_properties)),
%% Reading a property that was never written exits with no_exists.
?match({'EXIT', {aborted, {no_exists, {Tab1, user_property, PropKey}}}},
mnesia:read_table_property(Tab1, PropKey)),
?match({'EXIT', {aborted, {no_exists, {Tab2, user_property, PropKey}}}},
mnesia:read_table_property(Tab2, PropKey)),
?match({'EXIT', {aborted, {no_exists, {Tab3, user_property, PropKey}}}},
mnesia:read_table_property(Tab3, PropKey)),
?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop)),
?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop)),
?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop)),
?match({atomic, ok}, mnesia:write_table_property(Tab1, YourProp)),
?match({atomic, ok}, mnesia:write_table_property(Tab2, YourProp)),
?match({atomic, ok}, mnesia:write_table_property(Tab3, YourProp)),
?match(Prop, mnesia:read_table_property(Tab1, PropKey)),
?match(Prop, mnesia:read_table_property(Tab2, PropKey)),
?match(Prop, mnesia:read_table_property(Tab3, PropKey)),
%% Writing with the same key replaces the old property.
?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop2)),
?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop2)),
?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop2)),
?match(Prop2, mnesia:read_table_property(Tab1, PropKey)),
?match(Prop2, mnesia:read_table_property(Tab2, PropKey)),
?match(Prop2, mnesia:read_table_property(Tab3, PropKey)),
?match({atomic, ok}, mnesia:delete_table_property(Tab1, PropKey)),
?match({atomic, ok}, mnesia:delete_table_property(Tab2, PropKey)),
?match({atomic, ok}, mnesia:delete_table_property(Tab3, PropKey)),
?match([YourProp], mnesia:table_info(Tab1, user_properties)),
?match([YourProp], mnesia:table_info(Tab2, user_properties)),
?match([YourProp], mnesia:table_info(Tab3, user_properties)),
%% Properties supplied at create_table are installed directly.
Tab4 = user_properties_4,
?match({atomic, ok},
mnesia:create_table(Tab4, [{user_properties, [Prop]}])),
?match([Prop], mnesia:table_info(Tab4, user_properties)),
%% Properties must be non-empty tuples; anything else is bad_type.
?match({aborted, {bad_type, Tab1, {}}},
mnesia:write_table_property(Tab1, {})),
?match({aborted, {bad_type, Tab1, ali}},
mnesia:write_table_property(Tab1, ali)),
Tab5 = user_properties_5,
?match({aborted, {bad_type, Tab5, {user_properties, {}}}},
mnesia:create_table(Tab5, [{user_properties, {}}])),
?match({aborted, {bad_type, Tab5, {user_properties, ali}}},
mnesia:create_table(Tab5, [{user_properties, ali}])),
?match({aborted, {bad_type, Tab5, {user_properties, [{}]}}},
mnesia:create_table(Tab5, [{user_properties, [{}]}])),
?match({aborted, {bad_type, Tab5, {user_properties, [ali]}}},
mnesia:create_table(Tab5, [{user_properties, [ali]}])),
?verify_mnesia(Nodes, []).
%% Simple test of adding user properties from within a schema transaction
%% via the internal mnesia_schema:do_write_table_property/2 API.
%%
%% FIX: the bodies of F1 and F2 were truncated (the second argument line of
%% several do_write_table_property/2 calls was missing), which made the
%% module uncompilable. Restored the calls so they are consistent with the
%% assertions below: F1 writes {prop,propval1|2|3} to silly1/silly2/schema,
%% F2 overwrites silly1's property twice so propval1b wins.
unsupp_user_props(doc) ->
    ["Simple test of adding user props in a schema_transaction"];
unsupp_user_props(suite) -> [];
unsupp_user_props(Config) when is_list(Config) ->
    [Node1] = ?acquire_nodes(1, Config),
    Tab1 = silly1,
    Tab2 = silly2,
    Storage = mnesia_test_lib:storage_type(ram_copies, Config),
    ?match({atomic, ok}, rpc:call(Node1, mnesia,
                                  create_table, [Tab1, [{Storage, [Node1]}]])),
    ?match({atomic, ok}, rpc:call(Node1, mnesia,
                                  create_table, [Tab2, [{Storage, [Node1]}]])),
    %% Write one property on each table (and on schema) in one
    %% schema transaction.
    F1 = fun() ->
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1}),
                 mnesia_schema:do_write_table_property(
                   silly2, {prop, propval2}),
                 mnesia_schema:do_write_table_property(
                   schema, {prop, propval3})
         end,
    ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
                                  schema_transaction, [F1])),
    ?match([{prop,propval1}], rpc:call(Node1, mnesia,
                                       table_info, [silly1, user_properties])),
    ?match([{prop,propval2}], rpc:call(Node1, mnesia,
                                       table_info, [silly2, user_properties])),
    ?match([_,{prop,propval3}], rpc:call(Node1, mnesia,
                                         table_info, [schema, user_properties])),
    %% Writing the same key twice in one transaction: the last write wins.
    F2 = fun() ->
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1a}),
                 mnesia_schema:do_write_table_property(
                   silly1, {prop, propval1b})
         end,
    ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
                                  schema_transaction, [F2])),
    ?match([{prop,propval1b}], rpc:call(Node1, mnesia,
                                        table_info,
                                        [silly1, user_properties])),
    ?verify_mnesia([Node1], []).
%% Verify mnesia:snmp_open_table/2: succeeds for existing tables on
%% both local and remote replicas, and aborts for a non-existing table.
snmp_open_table(suite) -> [];
snmp_open_table(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab1 = local_snmp_table,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
Def1 =
%% On diskless nodes fall back to ram copies everywhere.
case mnesia_test_lib:diskless(Config) of
true -> [{ram_copies, Nodes}];
false ->
[{disc_copies, [Node1]}, {ram_copies, [Node2]}]
end,
Tab2 = ext_snmp_table,
Def2 = [{Storage, [Node2]}],
ErrTab = non_existing_tab,
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
%% Opening snmp on a table that does not exist must abort.
?match({aborted, _}, mnesia:snmp_open_table(ErrTab, [{key, integer}])),
?verify_mnesia(Nodes, []).
%% Verify mnesia:snmp_close_table/1, including closing a table whose
%% snmp property was given at create time, closing after records were
%% added, reopening after a transform that changes the key shape, and
%% the error case for a non-existing table.
snmp_close_table(suite) -> [];
snmp_close_table(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab1 = local_snmp_table,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
Def1 =
%% On diskless nodes fall back to ram copies everywhere.
case mnesia_test_lib:diskless(Config) of
true -> [{ram_copies, Nodes}];
false ->
[{disc_copies, [Node1]}, {ram_copies, [Node2]}]
end,
Tab2 = ext_snmp_table,
%% Tab2 gets its snmp property already in the table definition.
Def2 = [{snmp, [{key, integer}]}, {Storage, [Node2]}],
ErrTab = non_existing_tab,
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(no_snmp_tab, [])),
add_some_records(Tab1, Tab2, 3),
?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
add_some_records(Tab1, Tab2, 5),
?match({atomic, ok}, mnesia:snmp_close_table(Tab1)),
%% Rewrite each record {T, K, V} as {T, {K, K}, V, 43+V} so the key
%% becomes a 2-tuple, matching the new snmp key type opened below.
Transform = fun(Tab, Key) ->
[{T,K,V}] = mnesia:read(Tab, Key, write),
mnesia:delete(T,K, write),
mnesia:write({T, {K,K}, V, 43+V})
end,
?match({atomic, ok}, mnesia:transform_table(Tab1, ignore, [key,val,new])),
?match({atomic, ok},
mnesia:transaction(fun() ->
mnesia:write_lock_table(Tab1),
Keys = mnesia:select(Tab1, [{{'_','$1','_'}, [],
['$1']}]),
[Transform(Tab1, Key) || Key <- Keys],
ok
end)),
?match([{Tab1, {1, 1}, 1, 44}], mnesia:dirty_read(Tab1, {1, 1})),
?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key,{integer,integer}}])),
%% Same close/transform/reopen cycle for the remote table.
?match({atomic, ok}, mnesia:snmp_close_table(Tab2)),
?match({atomic, ok}, mnesia:transform_table(Tab2, ignore, [key,val,new])),
?match({atomic, ok},
mnesia:transaction(fun() ->
mnesia:write_lock_table(Tab2),
Keys = mnesia:select(Tab2, [{{'_','$1','_'}, [],
['$1']}]),
[Transform(Tab2, Key) || Key <- Keys],
ok
end)),
?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key,{integer,integer}}])),
?match({atomic, ok}, mnesia:snmp_close_table(no_snmp_tab)),
%% Closing a non-existing table must abort.
?match({aborted, _}, mnesia:snmp_close_table(ErrTab)),
?verify_mnesia(Nodes, []).
%% Verify mnesia:snmp_get_next_index/2 in every activity context
%% (plain dirty, transaction, sync_dirty, activity/3), both before and
%% after a restart of mnesia on all nodes.
snmp_get_next_index(suite) -> [];
snmp_get_next_index(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    Def1 =
        %% On diskless nodes fall back to ram copies everywhere.
        case mnesia_test_lib:diskless(Config) of
            true -> [{ram_copies, Nodes}];
            false ->
                [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
        end,
    Tab2 = ext_snmp_table,
    Def2 = [{Storage, [Node2]}],
    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
    %% One record per table (keys 1 and 20, see add_some_records/3).
    add_some_records(Tab1, Tab2, 1),
    Test =
        fun() ->
                {success, Res11} = ?match({ok, _}, mnesia:snmp_get_next_index(Tab1,[])),
                {ok, Index11} = Res11,
                {success, _Res12} =
                    ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
                %% A non-list index argument must exit.
                ?match({'EXIT',_}, mnesia:snmp_get_next_index(Tab1, endOfTable)),
                {success, Res21} =
                    ?match({ok, _}, mnesia:snmp_get_next_index(Tab2, [])),
                {ok, Index21} = Res21,
                {success, _Res22} =
                    ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
                %% Deleting the only row empties the snmp index again.
                {ok, _Row} = mnesia:snmp_get_row(Tab1, Index11),
                ?match(ok, mnesia:dirty_delete(Tab1, hd(Index11))),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab1,[])),
                %% ?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
                ok
        end,
    ?match(ok, Test()),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?match(ok, mnesia:sync_dirty(Test)),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    ?match(ok, Test()),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?match(ok, mnesia:sync_dirty(Test)),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    ?verify_mnesia(Nodes, []).
%% Populate Tab1 with keys 1..N and Tab2 with keys 20..20+N-1, each
%% value equal to its key.  Tab1 is filled with dirty writes, Tab2
%% inside a transaction; the last record of each table is then
%% rewritten in a sync_transaction so all replicas are in sync before
%% returning.  Returns the sorted list of all written records.
add_some_records(Tab1, Tab2, N) ->
    LocalRecs = [{Tab1, I, I} || I <- lists:reverse(lists:seq(1, N))],
    RemoteRecs = [{Tab2, I, I} || I <- lists:reverse(lists:seq(20, 20 + N - 1))],
    [mnesia:dirty_write(R) || R <- LocalRecs],
    WriteRemote = fun() -> [mnesia:write(R) || R <- RemoteRecs], ok end,
    {atomic, ok} = mnesia:transaction(WriteRemote),
    %% Re-write the last record of each table synchronously to make
    %% sure all previous writes have been applied everywhere.
    Sync = fun(Rec) ->
                   {atomic, ok} =
                       mnesia:sync_transaction(fun() -> mnesia:write(Rec) end)
           end,
    Sync(lists:last(LocalRecs)),
    Sync(lists:last(RemoteRecs)),
    ?sort(LocalRecs ++ RemoteRecs).
%% Verify mnesia:snmp_get_row/2 for integer and {fix_string,integer}
%% keys, in every activity context, including after a restart, plus the
%% error cases (bad index shape, missing rows).
snmp_get_row(suite) -> [];
snmp_get_row(Config) when is_list(Config) ->
    [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
    Tab1 = local_snmp_table,
    Storage = mnesia_test_lib:storage_type(disc_copies, Config),
    Def1 =
        %% On diskless nodes fall back to ram copies everywhere.
        case mnesia_test_lib:diskless(Config) of
            true -> [{ram_copies, Nodes}];
            false ->
                [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
        end,
    Tab2 = ext_snmp_table,
    Def2 = [{Storage, [Node2]}],
    Tab3 = snmp_table,
    Def3 = [{Storage, [Node1]},
            {attributes, [key, data1, data2]}],
    %% Setup/Clear let the same Test fun run repeatedly from a clean state.
    Setup = fun() ->
                    ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
                    ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
                    ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
                    ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
                    ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
                    ?match({atomic, ok}, mnesia:snmp_open_table(
                                           Tab3, [{key, {fix_string,integer}}])),
                    add_some_records(Tab1, Tab2, 1)
            end,
    Clear = fun() ->
                    ?match({atomic, ok}, mnesia:delete_table(Tab1)),
                    ?match({atomic, ok}, mnesia:delete_table(Tab2)),
                    ?match({atomic, ok}, mnesia:delete_table(Tab3))
            end,
    Test =
        fun() ->
                {success, Res11} =
                    ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
                {ok, Index11} = Res11,
                ?match({ok, {Tab1,1,1}}, mnesia:snmp_get_row(Tab1, Index11)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
                %% A non-list index argument must exit.
                ?match({'EXIT',_}, mnesia:snmp_get_row(Tab1, endOfTable)),
                ?match(undefined, mnesia:snmp_get_row(Tab1, [73])),
                %% Composite {fix_string,integer} key.
                Add = fun() -> mnesia:write({Tab3, {"f_string", 3}, data1, data2}) end,
                ?match({atomic, ok}, mnesia:transaction(Add)),
                {success, {ok, Index31}} = ?match({ok, RowIndex31} when is_list(RowIndex31),
                                                  mnesia:snmp_get_next_index(Tab3,[])),
                ?match({ok, Row31} when is_tuple(Row31),
                       mnesia:snmp_get_row(Tab3, Index31)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab3, Index31)),
                Del = fun() -> mnesia:delete({Tab3,{"f_string",3}}) end,
                ?match({atomic, ok}, mnesia:transaction(Del)),
                ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [3])),
                ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [73])),
                {success, Res21} = ?match({ok,[20]}, mnesia:snmp_get_next_index(Tab2, [])),
                {ok, Index21} = Res21,
                ?match({ok, Row2} when is_tuple(Row2), mnesia:snmp_get_row(Tab2, Index21)),
                ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
                %% ?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
                ok
        end,
    Setup(),
    ?match(ok, Test()),
    Clear(), Setup(),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    Clear(), Setup(),
    ?match(ok, mnesia:sync_dirty(Test)),
    Clear(), Setup(),
    ?match(ok, mnesia:activity(transaction,Test,mnesia)),
    Clear(), Setup(),
    %% Same checks after a full restart of mnesia on all nodes.
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    ?match(ok, Test()),
    Clear(), Setup(),
    ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
    ?match({atomic,ok}, mnesia:transaction(Test)),
    ?verify_mnesia(Nodes, []).
%% Verify mnesia:snmp_get_mnesia_key/2: maps an snmp row index back to
%% the mnesia key, returns undefined for missing rows, exits on a
%% malformed index, and tracks writes/deletes, for both integer and
%% {fix_string,integer} keys, in every activity context.
snmp_get_mnesia_key(suite) -> [];
snmp_get_mnesia_key(Config) when is_list(Config) ->
[Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
Tab1 = local_snmp_table,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
Def1 =
%% On diskless nodes fall back to ram copies everywhere.
case mnesia_test_lib:diskless(Config) of
true -> [{ram_copies, Nodes}];
false ->
[{disc_copies, [Node1]}, {ram_copies, [Node2]}]
end,
Tab2 = ext_snmp_table,
Def2 = [{Storage, [Node2]}],
Tab3 = fix_string,
%% Setup/Clear let the same Test fun run repeatedly from a clean state.
Setup = fun() ->
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
?match({atomic, ok}, mnesia:create_table(Tab3, Def1)),
?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
?match({atomic, ok}, mnesia:snmp_open_table(Tab3, [{key, {fix_string,integer}}])),
add_some_records(Tab1, Tab2, 1)
end,
Clear = fun() ->
?match({atomic, ok}, mnesia:delete_table(Tab1)),
?match({atomic, ok}, mnesia:delete_table(Tab2)),
?match({atomic, ok}, mnesia:delete_table(Tab3))
end,
Test =
fun() ->
{success, Res11} =
?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
{ok, Index11} = Res11,
?match({ok, 1}, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
{success, Res21} =
?match({ok, [20]}, mnesia:snmp_get_next_index(Tab2, [])),
{ok, Index21} = Res21,
?match({ok, 20}, mnesia:snmp_get_mnesia_key(Tab2, Index21)),
%% Missing row -> undefined; non-list index -> exit.
?match(undefined, mnesia:snmp_get_mnesia_key(Tab2, [97])),
?match({'EXIT', _}, mnesia:snmp_get_mnesia_key(Tab2, 97)),
?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,1}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab1,73,7}) end)),
?match({ok, 73}, mnesia:snmp_get_mnesia_key(Tab1, [73])),
?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,73}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, [73])),
%% {fix_string,integer} key: snmp index is the string chars ++ [int].
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab3,{"S",5},7}) end)),
?match({ok,{"S",5}}, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
?match({atomic,ok},mnesia:transaction(fun() -> mnesia:delete({Tab3,{"S",5}}) end)),
?match(undefined, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
ok
end,
Setup(),
?match(ok, Test()),
Clear(), Setup(),
?match({atomic,ok}, mnesia:transaction(Test)),
Clear(), Setup(),
?match(ok, mnesia:sync_dirty(Test)),
Clear(), Setup(),
?match(ok, mnesia:activity(transaction,Test,mnesia)),
?verify_mnesia(Nodes, []).
%% Verify mnesia:dirty_update_counter/2 on a table that has the snmp
%% property set at creation time.
snmp_update_counter(doc) ->
["Verify that counters may be updated for tables with SNMP property"];
snmp_update_counter(suite) -> [];
snmp_update_counter(Config) when is_list(Config) ->
[Node1] = Nodes = ?acquire_nodes(1, Config),
Tab = snmp_update_counter,
Def = [{attributes, [key, value]},
{snmp, [{key, integer}]},
{ram_copies, [Node1]}
],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
Oid = {Tab, 1},
?match([], mnesia:dirty_read(Oid)),
?match(ok, mnesia:dirty_write({Tab, 1, 1})),
?match([{Tab, _Key, 1}], mnesia:dirty_read(Oid)),
%% 1 + 2 = 3, and the stored record reflects the new counter value.
?match(3, mnesia:dirty_update_counter(Oid, 2)),
?match([{Tab, _Key, 3}], mnesia:dirty_read(Oid)),
?verify_mnesia(Nodes, []).
%% Verify that snmp_get_next_index/2 walks a composite
%% {integer,integer,integer} key space in sorted order, both for
%% dirty/sync_dirty access and inside transactions (where not yet
%% committed writes must be visible in the right order too).
snmp_order(doc) ->
["Verify that sort order is correct in transactions and dirty variants"];
snmp_order(suite) -> [];
snmp_order(Config) when is_list(Config) ->
[Node1] = Nodes = ?acquire_nodes(1, Config),
Tab = snmp_order,
Def = [{attributes, [key, value]},
{snmp, [{key, {integer, integer, integer}}]},
{ram_copies, [Node1]}
],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
Oid = {Tab, 1},
?match([], mnesia:dirty_read(Oid)),
%% A plain integer key does not match the 3-tuple snmp key type.
?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, 1})),
[mnesia:dirty_write({Tab, {A,B,2}, default}) ||
A <- lists:seq(1, 9, 2),
B <- lists:seq(2, 8, 2)],
Test1 = fun() ->
Keys0 = get_keys(Tab, []),
?match(Keys0, lists:sort(Keys0)),
?match([[1,2,2]|_], Keys0),
%% Partial indices restrict the walk to a key prefix.
Keys1 = get_keys(Tab, [5]),
?match(Keys1, lists:sort(Keys1)),
?match([[5,2,2]|_], Keys1),
Keys2 = get_keys(Tab, [7, 4]),
?match(Keys2, lists:sort(Keys2)),
?match([[7,4,2]|_], Keys2),
ok
end,
?match(ok, Test1()),
?match({atomic, ok},mnesia:transaction(Test1)),
?match(ok,mnesia:sync_dirty(Test1)),
%% Writes made inside the transaction must show up in order as well.
Test2 = fun() ->
mnesia:write(Tab, {Tab,{0,0,2},updated}, write),
mnesia:write(Tab, {Tab,{5,3,2},updated}, write),
mnesia:write(Tab, {Tab,{10,10,2},updated}, write),
Keys0 = get_keys(Tab, []),
?match([[0,0,2],[1,2,2]|_], Keys0),
?match(Keys0, lists:sort(Keys0)),
Keys1 = get_keys(Tab, [5]),
?match([[5,2,2],[5,3,2]|_], Keys1),
?match(Keys1, lists:sort(Keys1)),
Keys2 = get_keys(Tab, [7,4]),
?match([[7,4,2]|_], Keys2),
?match(Keys2, lists:sort(Keys2)),
?match([10,10,2], lists:last(Keys0)),
?match([10,10,2], lists:last(Keys1)),
?match([10,10,2], lists:last(Keys2)),
?match([[10,10,2]], get_keys(Tab, [10])),
ok
end,
?match({atomic, ok},mnesia:transaction(Test2)),
?verify_mnesia(Nodes, []).
%% Walk the snmp index of Tab, returning every index that follows Prev
%% in table order (empty list when Prev has no successor).
get_keys(Tab, Prev) ->
    case mnesia:snmp_get_next_index(Tab, Prev) of
        {ok, Index} ->
            [Index | get_keys(Tab, Index)];
        endOfTable ->
            []
    end.
%% Exercise the detailed (extended) table event subscriptions for a
%% set table, a bag table and a counter table, with and without an
%% active checkpoint, plus clear_table schema events and activity
%% events.
subscribe_extended(doc) ->
["Test the extended set of events, test with and without checkpoints. "];
subscribe_extended(suite) ->
[];
subscribe_extended(Config) when is_list(Config) ->
[N1, N2]=Nodes=?acquire_nodes(2, Config),
Tab1 = etab,
Storage = mnesia_test_lib:storage_type(ram_copies, Config),
Def1 = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
Tab2 = bag,
Def2 = [{Storage, [N1, N2]},
{type, bag},
{record_name, Tab1},
{attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
Tab3 = ctab,
Def3 = [{Storage, [N1, N2]}],
?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
?match({ok, N1}, mnesia:subscribe({table, Tab3, detailed})),
%% Cannot mix simple and detailed subscriptions on the same table,
%% and an unknown subscription level is a badarg.
?match({error, {already_exists, _}}, mnesia:subscribe({table, Tab1, simple})),
?match({error, {badarg, {table, Tab1, bad}}}, mnesia:subscribe({table, Tab1, bad})),
?match({ok, N1}, mnesia:subscribe(activity)),
test_ext_sub(Tab1, Tab2, Tab3),
?match({ok, N1}, mnesia:unsubscribe(activity)),
?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
%% clear_table shows up as a schema delete followed by a schema write.
?match({atomic, ok}, mnesia:clear_table(Tab1)),
?match({mnesia_table_event, {delete, schema, {schema, Tab1}, [{schema, Tab1, _}],_}}, recv_event()),
?match({mnesia_table_event, {write, schema, {schema, Tab1, _}, [], _}}, recv_event()),
?match({atomic, ok}, mnesia:clear_table(Tab2)),
?match({mnesia_table_event, {delete, schema, {schema, Tab2}, [{schema, Tab2, _}],_}},
recv_event()),
?match({mnesia_table_event, {write, schema, {schema, Tab2, _}, [], _}}, recv_event()),
?match({ok, N1}, mnesia:unsubscribe({table, Tab2, detailed})),
%% Repeat the whole event sequence with a checkpoint active.
{ok, _, _} = mnesia:activate_checkpoint([{name, testing},
{ram_overrides_dump, true},
{max, [Tab1, Tab2]}]),
?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
?match({ok, N1}, mnesia:subscribe(activity)),
test_ext_sub(Tab1, Tab2, Tab3),
?verify_mnesia(Nodes, []).
%% Drive writes/deletes against a set table (Tab1), a bag table (Tab2)
%% and a counter table (Tab3) and assert the exact sequence of detailed
%% mnesia_table_event and mnesia_activity_event messages received.
%% Detailed events carry the old record(s) and an activity id: {dirty,
%% Pid} for dirty operations, {tid, _, Pid} for transactions.
test_ext_sub(Tab1, Tab2, Tab3) ->
Rec1 = {Tab1, 1, 0, 0},
Rec2 = {Tab1, 1, 1, 0},
Rec3 = {Tab1, 2, 1, 0},
Rec4 = {Tab1, 2, 2, 0},
Write = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:write(Tab, Rec, write)
end)
end,
Delete = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:delete(Tab, Rec, write)
end)
end,
DelObj = fun(Tab, Rec) ->
mnesia:transaction(fun() -> mnesia:delete_object(Tab, Rec, write)
end)
end,
S = self(),
D = {dirty, self()},
%% Set table: old record list is [] on first write, [Old] after.
?match(ok, mnesia:dirty_write(Rec1)),
?match({mnesia_table_event, {write, Tab1, Rec1, [], D}}, recv_event()),
?match(ok, mnesia:dirty_write(Rec3)),
?match({mnesia_table_event, {write, Tab1, Rec3, [], D}}, recv_event()),
?match(ok, mnesia:dirty_write(Rec1)),
?match({mnesia_table_event, {write, Tab1, Rec1, [Rec1], D}}, recv_event()),
?match({atomic, ok}, Write(Tab1, Rec2)),
?match({mnesia_table_event, {write, Tab1, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match(ok, mnesia:dirty_delete({Tab1, 2})),
?match({mnesia_table_event, {delete, Tab1, {Tab1, 2}, [Rec3], D}}, recv_event()),
?match({atomic, ok}, DelObj(Tab1, Rec2)),
?match({mnesia_table_event, {delete, Tab1, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab1, 1)),
?match({mnesia_table_event, {delete, Tab1, {Tab1, 1}, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({ok, _N1}, mnesia:unsubscribe({table, Tab1, detailed})),
%% Bag table: events carry every object stored under the key.
?match({atomic, ok}, Write(Tab2, Rec1)),
?match({mnesia_table_event, {write, Tab2, Rec1, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec4)),
?match({mnesia_table_event, {write, Tab2, Rec4, [], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec3)),
?match({mnesia_table_event, {write, Tab2, Rec3, [Rec4], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec2)),
?match({mnesia_table_event, {write, Tab2, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Write(Tab2, Rec1)),
?match({mnesia_table_event, {write, Tab2, Rec1, [Rec1, Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, DelObj(Tab2, Rec2)),
?match({mnesia_table_event, {delete, Tab2, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab2, 1)),
?match({mnesia_table_event, {delete, Tab2, {Tab2, 1}, [Rec1], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
?match({atomic, ok}, Delete(Tab2, 2)),
?match({mnesia_table_event, {delete, Tab2, {Tab2, 2}, [Rec4, Rec3], {tid,_,S}}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
%% Counter updates generate ordinary write events with the old value.
Rec5 = {Tab3, counter, 0},
?match(ok, mnesia:dirty_write(Rec5)),
?match({mnesia_table_event, {write, Tab3, Rec5, [], D}}, recv_event()),
?match(1, mnesia:dirty_update_counter({Tab3, counter}, 1)),
?match({mnesia_table_event, {write, Tab3, {Tab3,counter,1}, [Rec5], D}}, recv_event()),
?match(ok, mnesia:dirty_delete({Tab3, counter})),
?match({mnesia_table_event, {delete, Tab3, {Tab3,counter},
[{Tab3,counter,1}], D}}, recv_event()),
ok.
%% Exercise system events (mnesia_up/mnesia_down), activity events and
%% the original simple table events, including subscription survival
%% across node restarts, table copy moves/additions/deletions and
%% table deletion.
subscribe_standard(doc) ->
["Tests system events and the original table events"];
subscribe_standard(suite) -> [];
subscribe_standard(Config) when is_list(Config)->
[N1, N2]=?acquire_nodes(2, Config),
Tab = tab,
Storage = mnesia_test_lib:storage_type(disc_copies, Config),
Def = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
?match({atomic, ok}, mnesia:create_table(Tab, Def)),
%% Bad unsubscribe arguments.
?match({error, {badarg, foo}}, mnesia:unsubscribe(foo)),
?match({error, badarg}, mnesia:unsubscribe({table, foo})),
mnesia:unsubscribe(activity),
?match({ok, N1}, mnesia:subscribe(system)),
?match({ok, N1}, mnesia:subscribe(activity)),
%% Remote node going down/up generates system events locally.
?match([], mnesia_test_lib:kill_mnesia([N2])),
?match({mnesia_system_event, {mnesia_down, N2}}, recv_event()),
?match(timeout, recv_event()),
?match([], mnesia_test_lib:start_mnesia([N2], [Tab])),
?match({mnesia_activity_event, _}, recv_event()),
?match({mnesia_system_event,{mnesia_up, N2}}, recv_event()),
?match(true, lists:member(self(), mnesia:system_info(subscribers))),
%% Killing the local node drops the subscription silently.
?match([], mnesia_test_lib:kill_mnesia([N1])),
timer:sleep(500),
mnesia_test_lib:flush(),
?match([], mnesia_test_lib:start_mnesia([N1], [Tab])),
?match(timeout, recv_event()),
?match({ok, N1}, mnesia:subscribe(system)),
?match({error, {already_exists, system}}, mnesia:subscribe(system)),
%% A clean stop delivers mnesia_down before subscriptions are gone.
?match(stopped, mnesia:stop()),
?match({mnesia_system_event, {mnesia_down, N1}}, recv_event()),
?match({error, {node_not_running, N1}}, mnesia:subscribe(system)),
?match([], mnesia_test_lib:start_mnesia([N1, N2], [Tab])),
%% Simple table events while debug tracing is enabled.
?match({ok, N1}, mnesia:subscribe(activity)),
Old_Level = mnesia:set_debug_level(trace),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({atomic, ok},
mnesia:transaction(fun() -> mnesia:write(#tab{i=155}) end)),
Self = self(),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
?match({mnesia_activity_event, {complete, {tid, _, Self}}}, recv_event()),
?match({ok, N1}, mnesia:unsubscribe({table,Tab})),
?match({ok, N1}, mnesia:unsubscribe(activity)),
?match({atomic, ok},
mnesia:transaction(fun() -> mnesia:write(#tab{i=255}) end)),
?match(timeout, recv_event()),
mnesia:set_debug_level(Old_Level),
%% Deleting the local replica silently kills the table subscription.
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({ok, N1}, mnesia:subscribe(activity)),
?match(ok, mnesia:dirty_write(#tab{i=355})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
?match({atomic, ok}, mnesia:del_table_copy(Tab, N1)),
?match({mnesia_activity_event, _}, recv_event()),
?match(ok, mnesia:dirty_write(#tab{i=455})),
?match(timeout, recv_event()),
%% Moving the replica back does not resurrect the subscription.
?match({atomic, ok}, mnesia:move_table_copy(Tab, N2, N1)),
?match({mnesia_activity_event, _}, recv_event()),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match(ok, mnesia:dirty_write(#tab{i=555})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
%% Moving the replica away kills the subscription again.
?match({atomic, ok}, mnesia:move_table_copy(Tab, N1, N2)),
?match({mnesia_activity_event, _}, recv_event()),
?match(ok, mnesia:dirty_write(#tab{i=655})),
?match(timeout, recv_event()),
?match({atomic, ok}, mnesia:add_table_copy(Tab, N1, ram_copies)),
?match({mnesia_activity_event, _}, recv_event()),
?match({ok, N1}, mnesia:subscribe({table,Tab})),
?match({error, {already_exists, {table,Tab, simple}}},
mnesia:subscribe({table,Tab})),
?match(ok, mnesia:dirty_write(#tab{i=755})),
?match({mnesia_table_event, {write, _, _}}, recv_event()),
%% Deleting the table also removes the subscription.
?match({atomic, ok}, mnesia:delete_table(Tab)),
?match({mnesia_activity_event, _}, recv_event()),
?match(timeout, recv_event()),
mnesia_test_lib:kill_mnesia([N1]),
?verify_mnesia([N2], [N1]).
%% Pop the next message from the mailbox, or return the atom timeout
%% if nothing arrives within one second.
recv_event() ->
    receive
        Msg ->
            Msg
    after 1000 ->
            timeout
    end.
%% Verify mnesia:foldl/3 and foldl/4 over set, bag and ordered_set
%% tables, in transactions and in both dirty activity variants, plus
%% the error cases: unknown table, non-fun argument, exit from the fun
%% and an invalid lock kind.
foldl(suite) ->
    [];
foldl(doc) ->
    [""];
foldl(Config) when is_list(Config) ->
    Nodes = [_N1, N2] = ?acquire_nodes(2, Config),
    Tab1 = fold_local,
    Tab2 = fold_remote,
    Tab3 = fold_ordered,
    ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
    ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies, [N2]}, {type, bag}])),
    ?match({atomic, ok}, mnesia:create_table(Tab3, [{ram_copies, Nodes},
                                                    {type, ordered_set}])),
    Tab1Els = [{Tab1, N, N} || N <- lists:seq(1, 10)],
    %% The bag table gets an extra object under key 1.
    Tab2Els = ?sort([{Tab2, 1, 2} | [{Tab2, N, N} || N <- lists:seq(1, 10)]]),
    Tab3Els = [{Tab3, N, N} || N <- lists:seq(1, 10)],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab1Els],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab2Els],
    [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab3Els],
    %% Accumulate every record in traversal order.
    Fold = fun(Tab) ->
                   lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab))
           end,
    Fold2 = fun(Tab, Lock) ->
                   lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab, Lock))
           end,
    Exit = fun(Tab) ->
                   lists:reverse(mnesia:foldl(fun(_E, _A) -> exit(testing) end, [], Tab))
           end,
    %% Error cases: bad table, bad fun, exit from fun, bad lock kind.
    ?match({aborted, _}, mnesia:transaction(Fold, [error])),
    ?match({aborted, _}, mnesia:transaction(fun(Tab) -> mnesia:foldl(badfun,[],Tab) end,
                                            [Tab1])),
    ?match({aborted, testing}, mnesia:transaction(Exit, [Tab1])),
    ?match({aborted, _}, mnesia:transaction(Fold2, [Tab1, read_lock])),
    %% Success
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold, [Tab1]))),
    ?match({atomic, Tab2Els}, sort_res(mnesia:transaction(Fold, [Tab2]))),
    %% ordered_set is already traversed in key order; no sort needed.
    ?match({atomic, Tab3Els}, mnesia:transaction(Fold, [Tab3])),
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, read]))),
    ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, write]))),
    ?match(Tab1Els, sort_res(mnesia:sync_dirty(Fold, [Tab1]))),
    ?match(Tab2Els, sort_res(mnesia:async_dirty(Fold, [Tab2]))),
    ?verify_mnesia(Nodes, []).
%% Normalise a result for order-insensitive comparison: sort the list
%% inside an {atomic, List} result or a bare list; pass anything else
%% through untouched.
sort_res(Res) ->
    case Res of
        {atomic, List} ->
            {atomic, ?sort(List)};
        List when is_list(List) ->
            ?sort(List);
        _ ->
            Res
    end.
%% Smoke test: mnesia:info/0 runs without crashing.
info(suite) -> [];
info(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia:info()),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia:schema/0 prints the whole schema without crashing.
schema_0(suite) -> [];
schema_0(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia:schema()),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia:schema/1 prints the schema of one table.
schema_1(suite) -> [];
schema_1(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
Tab = schema_1,
?match({atomic, ok}, mnesia:create_table(Tab, [])),
?match(ok, mnesia:schema(Tab)),
?verify_mnesia(Nodes, []).
%% Smoke test: mnesia_lib:view/0 runs without crashing.
view_0(suite) -> [];
view_0(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
?match(ok, mnesia_lib:view()),
?verify_mnesia(Nodes, []).
%% Verify mnesia_lib:view/1 on a generated core file, on the
%% transaction log and on the schema file (the latter two require
%% mnesia to be stopped first).
view_1(suite) -> [];
view_1(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
BinCore = mnesia_lib:mkcore({crashinfo, "Just testing..."}),
CoreFile = lists:concat(["MnesiaCore.", node(), ".view_1.", ?MODULE]),
?match(ok, file:write_file(CoreFile, BinCore)),
?match(ok, mnesia_lib:view(CoreFile)),
?match(ok, file:delete(CoreFile)),
?match(stopped, mnesia:stop()),
Dir = mnesia:system_info(directory),
%% Viewing a log/dat file picks the viewer from the file name.
?match(eof, mnesia_lib:view(filename:join(Dir, "LATEST.LOG"))),
?match(ok, mnesia_lib:view(filename:join(Dir, "schema.DAT"))),
?verify_mnesia([], Nodes).
%% Verify mnesia_lib:view/2 where the file type (core/log/dat) is
%% given explicitly instead of being derived from the file name.
view_2(suite) -> [];
view_2(Config) when is_list(Config) ->
Nodes = ?acquire_nodes(1, Config),
BinCore = mnesia_lib:mkcore({crashinfo, "More testing..."}),
File = lists:concat([?MODULE, "view_2.", node()]),
?match(ok, file:write_file(File, BinCore)),
?match(ok, mnesia_lib:view(File, core)),
?match(ok, file:delete(File)),
?match(stopped, mnesia:stop()),
Dir = mnesia:system_info(directory),
%% Rename the real files so the type cannot come from the name.
?match(ok, file:rename(filename:join(Dir, "LATEST.LOG"), File)),
?match(eof, mnesia_lib:view(File, log)),
?match(ok, file:delete(File)),
?match(ok, file:rename(filename:join(Dir, "schema.DAT"), File)),
?match(ok, mnesia_lib:view(File, dat)),
?match(ok, file:delete(File)),
?verify_mnesia([], Nodes).
%% Verify that mnesia:lkill/0 stops mnesia on the local node only.
lkill(suite) -> [];
lkill(Config) when is_list(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
?match(ok, rpc:call(Node2, mnesia, lkill, [])),
%% Node1 must be unaffected.
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
?verify_mnesia([Node1], [Node2]).
%% Verify that mnesia:kill/0 (invoked on one node) stops mnesia on all
%% connected db nodes.
kill(suite) -> [];
kill(Config) when is_list(Config) ->
[Node1, Node2] = ?acquire_nodes(2, Config),
?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
?match({_, []}, rpc:call(Node2, mnesia, kill, [])),
%% Both nodes go down, not just the one the call was made on.
?match(no, rpc:call(Node1, mnesia, system_info, [is_running])),
?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
?verify_mnesia([], [Node1, Node2]).
%% Storage-type-specific entry points that all delegate to
%% record_name_dirty_access/2.
record_name_dirty_access_ram(suite) ->
[];
record_name_dirty_access_ram(Config) when is_list(Config) ->
record_name_dirty_access(ram_copies, Config).
record_name_dirty_access_disc(suite) ->
[];
record_name_dirty_access_disc(Config) when is_list(Config) ->
record_name_dirty_access(disc_copies, Config).
record_name_dirty_access_disc_only(suite) ->
[];
record_name_dirty_access_disc_only(Config) when is_list(Config) ->
record_name_dirty_access(disc_only_copies, Config).
record_name_dirty_access_xets(Config) when is_list(Config) ->
record_name_dirty_access(ext_ets, Config).
%% Exercise dirty access functions on a bag table whose record_name
%% differs from the table name, for the given storage type: writes,
%% reads, match/select, index access, subscriptions, checkpoint
%% backup/restore, backup traversal, dirty counters and the registry.
record_name_dirty_access(Storage, Config) ->
    [Node1, _Node2] = Nodes = ?acquire_nodes(2, Config),
    List = lists:concat([record_name_dirty_access_, Storage]),
    Tab = list_to_atom(List),
    RecName = some_record,
    Attr = val,
    TabDef = [{type, bag},
              {record_name, RecName},
              {index, [Attr]},
              {Storage, Nodes}],
    ?match({atomic, ok}, mnesia:create_table(Tab, TabDef)),
    ?match(RecName, mnesia:table_info(Tab, record_name)),

    %% Three objects under key 2 (bag semantics).
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 20})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 21})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 22})),

    %% Take a checkpoint backup of the current (key 2 only) contents.
    BupFile = List ++ ".BUP",
    CpName = cpname,
    CpArgs = [{name, CpName}, {min, [Tab]}, {ram_overrides_dump, true}],
    ?match({ok, CpName, _}, mnesia:activate_checkpoint(CpArgs)),
    ?match(ok, mnesia:backup_checkpoint(CpName, BupFile)),
    ?match(ok, mnesia:deactivate_checkpoint(CpName)),

    ?match(ok, mnesia:dirty_write(Tab, {RecName, 1, 10})),
    ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
    ?match(ok, mnesia:dirty_write(Tab, {RecName, 3, 10})),
    Twos =?sort( [{RecName, 2, 20}, {RecName, 2, 21}, {RecName, 2, 22}]),
    ?match(Twos, ?sort(mnesia:dirty_read(Tab, 2))),
    ?match(ok, mnesia:dirty_delete_object(Tab, {RecName, 2, 21})),

    Tens = ?sort([{RecName, 1, 10}, {RecName, 3, 10}]),
    TenPat = {RecName, '_', 10},
    ?match(Tens, ?sort(mnesia:dirty_match_object(Tab, TenPat))),
    ?match(Tens, ?sort(mnesia:dirty_select(Tab, [{TenPat, [], ['$_']}]))),

    %% Simple table events report records tagged with the table name.
    E = mnesia_table_event,
    ?match_receive({E, {write, {Tab, 3, 10}, _}}),
    ?match_receive({E, {delete_object, {Tab, 2, 21}, _}}),
    ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),

    %% Index access still works after a local restart.
    ?match([], mnesia_test_lib:stop_mnesia([Node1])),
    ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab])),
    ?match(Tens, ?sort(mnesia:dirty_index_match_object(Tab, TenPat, Attr) )),
    ?match(Tens, ?sort(mnesia:dirty_index_read(Tab, 10, Attr))),
    ?match([1, 2, 3], ?sort(mnesia:dirty_all_keys(Tab))),

    ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
    ?match(ok, mnesia:dirty_delete(Tab, 2)),
    ?match([], mnesia:dirty_read(Tab, 2)),
    ?match_receive({E, {delete, {Tab, 2}, _}}),
    ?match([], mnesia_test_lib:flush()),
    ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),

    %% Restoring the backup recreates the table with only the key 2
    %% objects and preserves the record_name.
    ?match({atomic, [Tab]}, mnesia:restore(BupFile, [{recreate_tables, [Tab]}])),
    ?match(RecName, mnesia:table_info(Tab, record_name)),
    ?match(Twos, ?sort(mnesia:dirty_match_object(Tab, mnesia:table_info(Tab, wild_pattern)))),
    ?match(Twos, ?sort(mnesia:dirty_select(Tab,
                                           [{mnesia:table_info(Tab, wild_pattern),
                                             [],['$_']}]))),

    %% Traverse backup test
    Fun = fun(Rec, {Good, Bad}) ->
                  ?verbose("BUP: ~p~n", [Rec]),
                  case Rec of
                      {T, K, V} when T == Tab ->
                          Good2 = Good ++ [{RecName, K, V}],
                          {[Rec], {?sort(Good2), Bad}};
                      {T, K} when T == Tab ->
                          Good2 = [G || G <- Good, element(2, G) /= K],
                          {[Rec], {?sort(Good2), Bad}};
                      _ when element(1, Rec) == schema ->
                          {[Rec], {Good, Bad}};
                      _ ->
                          Bad2 = Bad ++ [Rec],
                          {[Rec], {Good, ?sort(Bad2)}}
                  end
          end,
    ?match({ok, {Twos, []}}, mnesia:traverse_backup(BupFile, mnesia_backup,
                                                    dummy, read_only,
                                                    Fun, {[], []})),
    ?match(ok, file:delete(BupFile)),

    %% Counters: a counter is clamped at zero on underflow.
    CounterTab = list_to_atom(lists:concat([Tab, "_counter"])),
    CounterTabDef = [{record_name, some_counter}],
    C = my_counter,
    ?match({atomic, ok}, mnesia:create_table(CounterTab, CounterTabDef)),
    ?match(some_counter, mnesia:table_info(CounterTab, record_name)),
    ?match(0, mnesia:dirty_update_counter(CounterTab, gurka, -10)),
    ?match(10, mnesia:dirty_update_counter(CounterTab, C, 10)),
    ?match(11, mnesia:dirty_update_counter(CounterTab, C, 1)),
    ?match(4711, mnesia:dirty_update_counter(CounterTab, C, 4700)),
    ?match([{some_counter, C, 4711}], mnesia:dirty_read(CounterTab, C)),
    ?match(0, mnesia:dirty_update_counter(CounterTab, C, -4747)),

    %% Registry tables also honour record_name.
    RegTab = list_to_atom(lists:concat([Tab, "_registry"])),
    RegTabDef = [{record_name, some_reg}],
    ?match(ok, mnesia_registry:create_table(RegTab, RegTabDef)),
    ?match(some_reg, mnesia:table_info(RegTab, record_name)),
    {success, RegRecs} =
        ?match([_ | _], mnesia_registry_test:dump_registry(node(), RegTab)),
    R = ?sort(RegRecs),
    ?match(R, ?sort(mnesia_registry_test:restore_registry(node(), RegTab))),
    ?verify_mnesia(Nodes, []).
%% Verify that ordered_set tables return records in key order from
%% dirty and transactional match_object/select on all replicas, also
%% after a restart, and that index reads work on ordered_set tables.
sorted_ets(suite) ->
[];
sorted_ets(Config) when is_list(Config) ->
[N1, N2, N3] = All = ?acquire_nodes(3, Config),
Tab = sorted_tab,
Def = case mnesia_test_lib:diskless(Config) of
true -> [{name, Tab}, {type, ordered_set}, {ram_copies, All}];
false -> [{name, Tab}, {type, ordered_set},
{ram_copies, [N1]},
{disc_copies,[N2, N3]}]
end,
?match({atomic, ok}, mnesia:create_table(Def)),
%% ordered_set is not supported for disc_only_copies.
?match({aborted, _}, mnesia:create_table(fel, [{disc_only_copies, N1}])),
?match([ok | _],
[mnesia:dirty_write({Tab, {dirty, N}, N}) || N <- lists:seq(1, 10)]),
?match({atomic, _},
mnesia:sync_transaction(fun() ->
[mnesia:write({Tab, {trans, N}, N}) ||
N <- lists:seq(1, 10)]
end)),
%% Dirty match must come back sorted on every replica.
List = mnesia:dirty_match_object({Tab, '_', '_'}),
?match(List, ?sort(List)),
?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
%% Same result after a full restart.
mnesia_test_lib:stop_mnesia(All),
mnesia_test_lib:start_mnesia(All, [sorted_tab]),
List = mnesia:dirty_match_object({Tab, '_', '_'}),
?match(List, ?sort(List)),
?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
?match(List, rpc:call(N3, mnesia, dirty_select, [Tab, [{{Tab, '_', '_'},[],['$_']}]])),
%% Transactional match/select must include not-yet-committed writes,
%% still in sorted order.
TransMatch = fun() ->
mnesia:write({Tab, {trans, 0}, 0}),
mnesia:write({Tab, {trans, 11}, 11}),
mnesia:match_object({Tab, '_', '_'})
end,
TransSelect = fun() ->
mnesia:write({Tab, {trans, 0}, 0}),
mnesia:write({Tab, {trans, 11}, 11}),
mnesia:select(Tab, [{{Tab, '_', '_'},[],['$_']}])
end,
TList = mnesia:transaction(TransMatch),
STList = ?sort(TList),
?match(STList, TList),
?match(STList, rpc:call(N2, mnesia, transaction, [TransMatch])),
?match(STList, rpc:call(N3, mnesia, transaction, [TransMatch])),
TSel = mnesia:transaction(TransSelect),
?match(STList, TSel),
?match(STList, rpc:call(N2, mnesia, transaction, [TransSelect])),
?match(STList, rpc:call(N3, mnesia, transaction, [TransSelect])),
%% Index read on an ordered_set table.
?match({atomic, ok}, mnesia:create_table(rec, [{type, ordered_set}])),
[ok = mnesia:dirty_write(R) || R <- [{rec,1,1}, {rec,2,1}]],
?match({atomic, ok}, mnesia:add_table_index(rec, 3)),
TestIt = fun() ->
ok = mnesia:write({rec,1,1}),
mnesia:index_read(rec, 1, 3)
end,
?match({atomic, [{rec,1,1}, {rec,2,1}]}, mnesia:transaction(TestIt)).
index_cleanup(Config) when is_list(Config) ->
[N1, N2] = All = ?acquire_nodes(2, Config),
?match({atomic, ok}, mnesia:create_table(i_set, [{type, set}, {ram_copies, [N1]}, {index, [val]},
{disc_only_copies, [N2]}])),
?match({atomic, ok}, mnesia:create_table(i_bag, [{type, bag}, {ram_copies, [N1]}, {index, [val]},
{disc_only_copies, [N2]}])),
?match({atomic, ok}, mnesia:create_table(i_oset, [{type, ordered_set}, {ram_copies, [N1, N2]},
{index, [val]}])),
Tabs = [i_set, i_bag, i_oset],
Add = fun(Tab) ->
Write = fun(Table) ->
Recs = [{Table, N, N rem 5} || N <- lists:seq(1,10)],
[ok = mnesia:write(Rec) || Rec <- Recs],
Recs
end,
{atomic, Recs} = mnesia:sync_transaction(Write, [Tab]),
lists:sort(Recs)
end,
IRead = fun(Tab) ->
Read = fun(Table) ->
[mnesia:index_read(Table, N, val) || N <- lists:seq(0,4)]
end,
{atomic, Recs} = mnesia:transaction(Read, [Tab]),
lists:sort(lists:flatten(Recs))
end,
Delete = fun(Rec) ->
Del = fun() -> mnesia:delete_object(Rec) end,
{atomic, ok} = mnesia:sync_transaction(Del),
ok
end,
Recs = [Add(Tab) || Tab <- Tabs],
?match(Recs, [IRead(Tab) || Tab <- Tabs]),
[Delete(Rec) || Rec <- lists:flatten(Recs)],
[?match({Tab,0}, {Tab,mnesia:table_info(Tab, size)}) || Tab <- Tabs],
[?match({Tab,Node,0, _}, rpc:call(Node, ?MODULE, index_size, [Tab]))
|| Node <- All, Tab <- Tabs],
?verify_mnesia(All, []).
index_size(Tab) ->
case mnesia:table_info(Tab, index_info) of
{index, _, [{_, {ram, Ref}}=Dbg]} -> {Tab, node(), ets:info(Ref, size), Dbg};
{index, _, [{_, {dets, Ref}}=Dbg]} -> {Tab, node(), dets:info(Ref, size), Dbg}
end.
|
4c90c1a8317ff8d40f2a7bd138d03c70c5feb0488c73698b4cc97a8aebd4ea63 | herd/herdtools7 | equivSpec.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2020 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
module Make(S:Sem.Semantics) = struct
let dbg = false
module Instance = struct
type t = S.A.inst_instance_id
let compare = S.A.inst_instance_compare
end
module InstMap = MyMap.Make(Instance)
module InstSet = MySet.Make(Instance)
Utilities on instance graphs coded as neighbours maps
let succs i m = InstMap.safe_find InstSet.empty i m
let add_succ i1 i2 m = InstMap.add i1 (InstSet.add i2 (succs i1 m)) m
let subrel m1 m2 =
try
InstMap.iter
(fun i is ->
let js = succs i m2 in
if not (InstSet.subset is js) then raise Exit)
m1 ;
true
with Exit -> false
open Printf
let pp_instance i = sprintf "{P%i:%02i}" i.S.A.proc i.S.A.program_order_index
let pp_rel chan m =
InstMap.iter
(fun i js ->
fprintf chan "%s ->" (pp_instance i) ;
InstSet.iter
(fun j -> fprintf chan " %s" (pp_instance j))
js ;
fprintf chan "\n")
m
(* fold f over pairs of distinct elements *)
let rec fold_pairs f xs k = match xs with
| [] -> k
| x::xs ->
List.fold_left
(fun k y -> f x y (f y x k))
(fold_pairs f xs k) xs
let build rf evts =
let open S in
(* Build a map from (instruction) instances to events of that instance *)
let m =
E.EventSet.fold
(fun e m -> match e.E.iiid with
| E.IdInit|E.IdSpurious -> m
| E.IdSome i ->
InstMap.add i
(E.EventSet.add e (InstMap.safe_find E.EventSet.empty i m)) m)
evts InstMap.empty in
let is = InstMap.fold (fun i _ k -> i::k) m [] in
Utilities
let inst2evts i = try InstMap.find i m with Not_found -> assert false in
(* lift rf to instances *)
let rf_rel =
E.EventRel.fold
(fun (w,r) m -> match r.E.iiid with
| E.IdInit|E.IdSpurious -> assert false
| E.IdSome ir -> match w.E.iiid with
| E.IdInit|E.IdSpurious -> m
| E.IdSome iw -> add_succ ir iw m)
rf InstMap.empty in
if dbg then eprintf "RF-REG:\n%a\n" pp_rel rf_rel ;
let same_instr i1 i2 = i1.A.inst == i2.A.inst in
let matches m is js =
let ok i j = InstSet.mem i (succs j m) in
InstSet.for_all
(fun i -> InstSet.exists (ok i) js)
is in
let step m =
InstMap.fold
(fun i js k ->
let rf_is = succs i rf_rel in
InstSet.fold
(fun j k ->
let rf_js = succs j rf_rel in
if matches m rf_is rf_js && matches m rf_js rf_is
then add_succ i j k
else k)
js k)
m InstMap.empty in
let rec fix m =
if dbg then eprintf "**FIX\n%a\n" pp_rel m ;
let next = step m in
if subrel m next then m
else fix next in
if dbg then eprintf "Instances: %s\n"
(String.concat " " (List.map pp_instance is)) ;
let m0 =
fold_pairs
(fun i j k ->
if same_instr i j then add_succ i j k
else k)
is InstMap.empty in
let equiv = fix m0 in
let equiv =
InstMap.fold
(fun i js k ->
let evts_i = inst2evts i in
InstSet.fold
(fun j k ->
E.EventRel.cartesian evts_i (inst2evts j)::k)
js k)
equiv [] in
E.EventRel.unions equiv
end
| null | https://raw.githubusercontent.com/herd/herdtools7/5bed6439c96fce799c32343fb78704a71b7b6bdd/herd/equivSpec.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
fold f over pairs of distinct elements
Build a map from (instruction) instances to events of that instance
lift rf to instances | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2020 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
module Make(S:Sem.Semantics) = struct
let dbg = false
module Instance = struct
type t = S.A.inst_instance_id
let compare = S.A.inst_instance_compare
end
module InstMap = MyMap.Make(Instance)
module InstSet = MySet.Make(Instance)
Utilities on instance graphs coded as neighbours maps
let succs i m = InstMap.safe_find InstSet.empty i m
let add_succ i1 i2 m = InstMap.add i1 (InstSet.add i2 (succs i1 m)) m
let subrel m1 m2 =
try
InstMap.iter
(fun i is ->
let js = succs i m2 in
if not (InstSet.subset is js) then raise Exit)
m1 ;
true
with Exit -> false
open Printf
let pp_instance i = sprintf "{P%i:%02i}" i.S.A.proc i.S.A.program_order_index
let pp_rel chan m =
InstMap.iter
(fun i js ->
fprintf chan "%s ->" (pp_instance i) ;
InstSet.iter
(fun j -> fprintf chan " %s" (pp_instance j))
js ;
fprintf chan "\n")
m
let rec fold_pairs f xs k = match xs with
| [] -> k
| x::xs ->
List.fold_left
(fun k y -> f x y (f y x k))
(fold_pairs f xs k) xs
let build rf evts =
let open S in
let m =
E.EventSet.fold
(fun e m -> match e.E.iiid with
| E.IdInit|E.IdSpurious -> m
| E.IdSome i ->
InstMap.add i
(E.EventSet.add e (InstMap.safe_find E.EventSet.empty i m)) m)
evts InstMap.empty in
let is = InstMap.fold (fun i _ k -> i::k) m [] in
Utilities
let inst2evts i = try InstMap.find i m with Not_found -> assert false in
let rf_rel =
E.EventRel.fold
(fun (w,r) m -> match r.E.iiid with
| E.IdInit|E.IdSpurious -> assert false
| E.IdSome ir -> match w.E.iiid with
| E.IdInit|E.IdSpurious -> m
| E.IdSome iw -> add_succ ir iw m)
rf InstMap.empty in
if dbg then eprintf "RF-REG:\n%a\n" pp_rel rf_rel ;
let same_instr i1 i2 = i1.A.inst == i2.A.inst in
let matches m is js =
let ok i j = InstSet.mem i (succs j m) in
InstSet.for_all
(fun i -> InstSet.exists (ok i) js)
is in
let step m =
InstMap.fold
(fun i js k ->
let rf_is = succs i rf_rel in
InstSet.fold
(fun j k ->
let rf_js = succs j rf_rel in
if matches m rf_is rf_js && matches m rf_js rf_is
then add_succ i j k
else k)
js k)
m InstMap.empty in
let rec fix m =
if dbg then eprintf "**FIX\n%a\n" pp_rel m ;
let next = step m in
if subrel m next then m
else fix next in
if dbg then eprintf "Instances: %s\n"
(String.concat " " (List.map pp_instance is)) ;
let m0 =
fold_pairs
(fun i j k ->
if same_instr i j then add_succ i j k
else k)
is InstMap.empty in
let equiv = fix m0 in
let equiv =
InstMap.fold
(fun i js k ->
let evts_i = inst2evts i in
InstSet.fold
(fun j k ->
E.EventRel.cartesian evts_i (inst2evts j)::k)
js k)
equiv [] in
E.EventRel.unions equiv
end
|
3df89a3cca366f3f40f00763b5db771a3343bc5dcfca5a0177280ae9fc432928 | swtwsk/vinci-lang | ErrM.hs | -- BNF Converter: Error Monad
Copyright ( C ) 2004 Author :
module Parser.ErrM where
the Error monad : like Maybe type with error msgs
import Control.Monad (MonadPlus(..), liftM)
import Control.Applicative (Alternative(..))
data Err a = Ok a | Bad String
deriving (Read, Show, Eq, Ord)
instance Monad Err where
return = Ok
Ok a >>= f = f a
Bad s >>= _ = Bad s
instance Applicative Err where
pure = Ok
(Bad s) <*> _ = Bad s
(Ok f) <*> o = fmap f o
instance Functor Err where
fmap = liftM
instance MonadPlus Err where
mzero = Bad "Err.mzero"
mplus (Bad _) y = y
mplus x _ = x
instance Alternative Err where
empty = mzero
(<|>) = mplus
| null | https://raw.githubusercontent.com/swtwsk/vinci-lang/9c7e01953e0b1cf135af7188e0c71fe6195bdfa1/src/Parser/ErrM.hs | haskell | BNF Converter: Error Monad | Copyright ( C ) 2004 Author :
module Parser.ErrM where
the Error monad : like Maybe type with error msgs
import Control.Monad (MonadPlus(..), liftM)
import Control.Applicative (Alternative(..))
data Err a = Ok a | Bad String
deriving (Read, Show, Eq, Ord)
instance Monad Err where
return = Ok
Ok a >>= f = f a
Bad s >>= _ = Bad s
instance Applicative Err where
pure = Ok
(Bad s) <*> _ = Bad s
(Ok f) <*> o = fmap f o
instance Functor Err where
fmap = liftM
instance MonadPlus Err where
mzero = Bad "Err.mzero"
mplus (Bad _) y = y
mplus x _ = x
instance Alternative Err where
empty = mzero
(<|>) = mplus
|
3d6b5a2251fadee378bff80b2c2ff6d19a1828a5520f7a510ea459fc6790efe1 | dselsam/arc | ClassifyIndices.hs | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
Released under Apache 2.0 license as described in the file LICENSE .
Authors : , , .
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE StrictData #-}
module Solver.Tactics.ClassifyIndices where
import Util.Imports
import Solver.SolveM
import Solver.Goal
import qualified Synth.Ex as Ex
import qualified Data.Maybe as Maybe
import Synth.Ex (Ex(..), ForTrain, ForTest)
import Lib.Grid (Grid)
import qualified Lib.Dims as Dims
import qualified Data.Set as Set
import Lib.Dims (Dims (Dims))
import qualified Lib.Grid as Grid
import Solver.TacticResult (TacticResult)
import qualified Solver.TacticResult as TacticResult
import Lib.Color (Color)
import Data.List
import qualified Util.Int as Int
import qualified Util.List as List
import qualified Lib.Index as Index
import qualified Data.Map as Map
import Lib.Index (Index (Index))
import Synth1.Arith
import Search.SearchT
import Lib.Blank
import qualified Lib.Parse as Parse
import Lib.Shape (Shape)
import qualified Lib.Shape as Shape
import Solver.Parse
import Solver.Tactics.GuessDims
import qualified Synth.Spec as Spec
import qualified Lib.Shape as Shape
import Search.DFS
import Synth.Basic
import Solver.Parse
import Synth.Core
import Synth1.Basic
import Synth.Int2Bool
import Synth.Bool2Bool
import Synth.Sequence
import Solver.Tactics.GuessDims
import Solver.Synth1Context (ctxInts, ctxColors)
-- TODO: this is missing ones that the Lean version got.
-- I ported very very rushed, likely stupid mistakes.
-- TODO:
- when dims are the same , skip if - then - else and require the mask to make progress on the DIFFS
classifyIndices :: StdGoal -> SolveM TacticResult
classifyIndices goal@(Goal inputs outputs ctx) = choice "classifyIndices" [
("idx2idx", do
testDims <- synthDims goal
idx2idx <- enumIdx2Idx
idx2color <- enumIdx2Color
let f input idx = idx2color input (idx2idx idx)
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> Grid.get output idx == idx2color input (idx2idx idx)
pure $ TacticResult.Guess . flip map (zip (Ex.test inputs) testDims) $ \(input, outDims) ->
Grid.fromFunc outDims $ \idx -> idx2color input (idx2idx idx)),
("if2color", do
-- uncomment below if we want to only do this choice if the dims are NOT the same
--guard $ flip any (zip (Ex.train inputs) outputs) $ \(ig, og) -> not (Grid.sameDims ig og)
testDims <- synthDims goal
idx2bool <- enumIdx2Bool
guard $ flip Ex.all (Ex.zip inputs $ Ex (map Grid.dims outputs) testDims) $ \(input, outDims) ->
Dims.any outDims $ \idx -> idx2bool outDims idx
guard $ flip Ex.all (Ex.zip inputs $ Ex (map Grid.dims outputs) testDims) $ \(input, outDims) ->
Dims.any outDims $ \idx -> not $ idx2bool outDims idx
true2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> idx2bool (Grid.dims output) idx <= (true2color input idx == Grid.get output idx)
false2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> (not $ idx2bool (Grid.dims output) idx) <= (false2color input idx == Grid.get output idx)
pure $ TacticResult.Guess . flip map (zip (Ex.test inputs) testDims) $ \(input, outDims) ->
Grid.fromFunc outDims $ \idx ->
if idx2bool outDims idx
then true2color input idx
else false2color input idx)
( " if2colorSameDims " , do
-- should only do this choice if the dims ARE the same
guard $ flip all ( zip ( Ex.train inputs ) outputs ) $ \(ig , og ) - > Grid.sameDims ig og
idx2bool < - enumIdx2Bool
guard $ flip Ex.all inputs $ \ig - > Dims.any ( Grid.dims ig ) $ \idx - > idx2bool ( Grid.dims ig ) idx
idx2color < - enumIdx2Color
guard $ flip all ( zip ( Ex.train inputs ) outputs ) $ \(input , output ) - >
Dims.all ( Grid.dims output ) $ \idx - > idx2bool ( Grid.dims output ) idx < = ( idx2color input idx = = Grid.get output idx )
let = flip Ex.map inputs $ \ig - > Grid.fromFunc ( Grid.dims ig ) $ \idx - >
if idx2bool ( Grid.dims ig ) idx then idx2color ig idx else ( Grid.get ig idx )
-- guard that we are doing something new on a majority of the inputs
-- we could be stricter with this if necessary
-- this is especially necessary because currently classify indices does n't make any greedy
-- decisions -- it just picks the first one
guard $ flip List.majority ( zip ( Ex.toBigList inputs ) ( Ex.toBigList ) ) $ \(ig , newIg ) - >
ig /= newIg
if flip all ( zip ( Ex.train newInputs ) outputs ) ( \(ig , og ) - > ig = = og ) then
pure $ TacticResult . Guess ( Ex.test newInputs )
else
pure $ TacticResult . Decompose ( Goal newInputs outputs ctx ) pure )
-- should only do this choice if the dims ARE the same
guard $ flip all (zip (Ex.train inputs) outputs) $ \(ig, og) -> Grid.sameDims ig og
idx2bool <- enumIdx2Bool
guard $ flip Ex.all inputs $ \ig -> Dims.any (Grid.dims ig) $ \idx -> idx2bool (Grid.dims ig) idx
idx2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> idx2bool (Grid.dims output) idx <= (idx2color input idx == Grid.get output idx)
let newInputs = flip Ex.map inputs $ \ig -> Grid.fromFunc (Grid.dims ig) $ \idx ->
if idx2bool (Grid.dims ig) idx then idx2color ig idx else (Grid.get ig idx)
-- guard that we are doing something new on a majority of the inputs
-- we could be stricter with this if necessary
-- this is especially necessary because currently classify indices doesn't make any greedy
-- decisions -- it just picks the first one
guard $ flip List.majority (zip (Ex.toBigList inputs) (Ex.toBigList newInputs)) $ \(ig, newIg) ->
ig /= newIg
if flip all (zip (Ex.train newInputs) outputs) (\(ig, og) -> ig == og) then
pure $ TacticResult.Guess (Ex.test newInputs)
else
pure $ TacticResult.Decompose (Goal newInputs outputs ctx) pure)-}
]
enumIdx2Int :: SolveM (Index -> Int)
enumIdx2Int = oneOf "enumIdx2Int" [
("row", Index.row),
("col", Index.col),
("sum", \(Index r c) -> r + c),
("diff", \(Index r c) -> r - c),
("max", \(Index r c) -> max r c),
("min", \(Index r c) -> min r c)
]
enumInt2Bool :: SolveM (Int -> Bool)
enumInt2Bool = do
phi <- oneOf "enumInt2Bool.core" [
("isZero", (==0)),
("isNegOne", (==(-1))),
("isOne", (==1)),
("isTwo", (==2)),
("isEven", ((==0) . (`mod` 2))),
("isOdd" , ((==1) . (`mod` 2))),
("is0m3" , ((==0) . (`mod` 3))),
("is1m3" , ((==1) . (`mod` 3))),
("is2m3" , ((==2) . (`mod` 3))),
("isGt0" , (>0)),
("isGt1" , (>1))
]
neg <- oneOf "enumInt2Bool.neg" [("no", id), ("yes", not)]
pure $ neg . phi
enumIdx2Color :: SolveM (Grid Color -> Index -> Color)
enumIdx2Color = choice "enumIdx2Color" [
("blank", pure $ \_ _ -> blank),
("keep", pure $ \input (Index x y) -> let Dims dx dy = Grid.dims input in Grid.get input (Index (x `mod` dx) (y `mod` dy))),
("constUpdate", do
c <- enumVals
pure $ \input (Index x y) ->
let Dims dx dy = Grid.dims input in
if nonBlank (Grid.get input (Index (x `mod` dx) (y `mod` dy)))
then c
else blank),
("const", do
c <- enumVals
pure $ \input (Index x y) -> c)
]
enumIdx2Bool :: SolveM (Dims -> Index -> Bool)
enumIdx2Bool = choice "enumIdx2Bool" [
("idx2int2bool", do
idx2int <- enumIdx2Int
int2bool <- enumInt2Bool
pure $ \_ idx -> int2bool $ idx2int idx),
("idx2bool", oneOf "idx2bool" [
("middleRow", \(Dims m n) (Index i j) -> i == m `div` 2)
, ("middleCol", \(Dims m n) (Index i j) -> j == n `div` 2)
, ("upperLeft", \(Dims m n) (Index i j) -> i == 0 && j == 0)
, ("lowerRight", \(Dims m n) (Index i j) -> i == m-1 && j == n-1)
, ("onEdge", \ds idx -> Dims.onEdge ds idx)
])
]
enumIdx2Idx :: SolveM (Index -> Index)
enumIdx2Idx = oneOf "enumIdx2Idx" [
("id", id),
("transpose", Index.transpose),
("max", \(Index i j) -> Index (max i j) (max i j)),
("min", \(Index i j) -> Index (min i j) (min i j))
]
| null | https://raw.githubusercontent.com/dselsam/arc/7e68a7ed9508bf26926b0f68336db05505f4e765/src/Solver/Tactics/ClassifyIndices.hs | haskell | # LANGUAGE StrictData #
TODO: this is missing ones that the Lean version got.
I ported very very rushed, likely stupid mistakes.
TODO:
uncomment below if we want to only do this choice if the dims are NOT the same
guard $ flip any (zip (Ex.train inputs) outputs) $ \(ig, og) -> not (Grid.sameDims ig og)
should only do this choice if the dims ARE the same
guard that we are doing something new on a majority of the inputs
we could be stricter with this if necessary
this is especially necessary because currently classify indices does n't make any greedy
decisions -- it just picks the first one
should only do this choice if the dims ARE the same
guard that we are doing something new on a majority of the inputs
we could be stricter with this if necessary
this is especially necessary because currently classify indices doesn't make any greedy
decisions -- it just picks the first one | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
Released under Apache 2.0 license as described in the file LICENSE .
Authors : , , .
# LANGUAGE ScopedTypeVariables #
module Solver.Tactics.ClassifyIndices where
import Util.Imports
import Solver.SolveM
import Solver.Goal
import qualified Synth.Ex as Ex
import qualified Data.Maybe as Maybe
import Synth.Ex (Ex(..), ForTrain, ForTest)
import Lib.Grid (Grid)
import qualified Lib.Dims as Dims
import qualified Data.Set as Set
import Lib.Dims (Dims (Dims))
import qualified Lib.Grid as Grid
import Solver.TacticResult (TacticResult)
import qualified Solver.TacticResult as TacticResult
import Lib.Color (Color)
import Data.List
import qualified Util.Int as Int
import qualified Util.List as List
import qualified Lib.Index as Index
import qualified Data.Map as Map
import Lib.Index (Index (Index))
import Synth1.Arith
import Search.SearchT
import Lib.Blank
import qualified Lib.Parse as Parse
import Lib.Shape (Shape)
import qualified Lib.Shape as Shape
import Solver.Parse
import Solver.Tactics.GuessDims
import qualified Synth.Spec as Spec
import qualified Lib.Shape as Shape
import Search.DFS
import Synth.Basic
import Solver.Parse
import Synth.Core
import Synth1.Basic
import Synth.Int2Bool
import Synth.Bool2Bool
import Synth.Sequence
import Solver.Tactics.GuessDims
import Solver.Synth1Context (ctxInts, ctxColors)
- when dims are the same , skip if - then - else and require the mask to make progress on the DIFFS
classifyIndices :: StdGoal -> SolveM TacticResult
classifyIndices goal@(Goal inputs outputs ctx) = choice "classifyIndices" [
("idx2idx", do
testDims <- synthDims goal
idx2idx <- enumIdx2Idx
idx2color <- enumIdx2Color
let f input idx = idx2color input (idx2idx idx)
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> Grid.get output idx == idx2color input (idx2idx idx)
pure $ TacticResult.Guess . flip map (zip (Ex.test inputs) testDims) $ \(input, outDims) ->
Grid.fromFunc outDims $ \idx -> idx2color input (idx2idx idx)),
("if2color", do
testDims <- synthDims goal
idx2bool <- enumIdx2Bool
guard $ flip Ex.all (Ex.zip inputs $ Ex (map Grid.dims outputs) testDims) $ \(input, outDims) ->
Dims.any outDims $ \idx -> idx2bool outDims idx
guard $ flip Ex.all (Ex.zip inputs $ Ex (map Grid.dims outputs) testDims) $ \(input, outDims) ->
Dims.any outDims $ \idx -> not $ idx2bool outDims idx
true2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> idx2bool (Grid.dims output) idx <= (true2color input idx == Grid.get output idx)
false2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> (not $ idx2bool (Grid.dims output) idx) <= (false2color input idx == Grid.get output idx)
pure $ TacticResult.Guess . flip map (zip (Ex.test inputs) testDims) $ \(input, outDims) ->
Grid.fromFunc outDims $ \idx ->
if idx2bool outDims idx
then true2color input idx
else false2color input idx)
( " if2colorSameDims " , do
guard $ flip all ( zip ( Ex.train inputs ) outputs ) $ \(ig , og ) - > Grid.sameDims ig og
idx2bool < - enumIdx2Bool
guard $ flip Ex.all inputs $ \ig - > Dims.any ( Grid.dims ig ) $ \idx - > idx2bool ( Grid.dims ig ) idx
idx2color < - enumIdx2Color
guard $ flip all ( zip ( Ex.train inputs ) outputs ) $ \(input , output ) - >
Dims.all ( Grid.dims output ) $ \idx - > idx2bool ( Grid.dims output ) idx < = ( idx2color input idx = = Grid.get output idx )
let = flip Ex.map inputs $ \ig - > Grid.fromFunc ( Grid.dims ig ) $ \idx - >
if idx2bool ( Grid.dims ig ) idx then idx2color ig idx else ( Grid.get ig idx )
guard $ flip List.majority ( zip ( Ex.toBigList inputs ) ( Ex.toBigList ) ) $ \(ig , newIg ) - >
ig /= newIg
if flip all ( zip ( Ex.train newInputs ) outputs ) ( \(ig , og ) - > ig = = og ) then
pure $ TacticResult . Guess ( Ex.test newInputs )
else
pure $ TacticResult . Decompose ( Goal newInputs outputs ctx ) pure )
guard $ flip all (zip (Ex.train inputs) outputs) $ \(ig, og) -> Grid.sameDims ig og
idx2bool <- enumIdx2Bool
guard $ flip Ex.all inputs $ \ig -> Dims.any (Grid.dims ig) $ \idx -> idx2bool (Grid.dims ig) idx
idx2color <- enumIdx2Color
guard $ flip all (zip (Ex.train inputs) outputs) $ \(input, output) ->
Dims.all (Grid.dims output) $ \idx -> idx2bool (Grid.dims output) idx <= (idx2color input idx == Grid.get output idx)
let newInputs = flip Ex.map inputs $ \ig -> Grid.fromFunc (Grid.dims ig) $ \idx ->
if idx2bool (Grid.dims ig) idx then idx2color ig idx else (Grid.get ig idx)
guard $ flip List.majority (zip (Ex.toBigList inputs) (Ex.toBigList newInputs)) $ \(ig, newIg) ->
ig /= newIg
if flip all (zip (Ex.train newInputs) outputs) (\(ig, og) -> ig == og) then
pure $ TacticResult.Guess (Ex.test newInputs)
else
pure $ TacticResult.Decompose (Goal newInputs outputs ctx) pure)-}
]
enumIdx2Int :: SolveM (Index -> Int)
enumIdx2Int = oneOf "enumIdx2Int" [
("row", Index.row),
("col", Index.col),
("sum", \(Index r c) -> r + c),
("diff", \(Index r c) -> r - c),
("max", \(Index r c) -> max r c),
("min", \(Index r c) -> min r c)
]
enumInt2Bool :: SolveM (Int -> Bool)
enumInt2Bool = do
phi <- oneOf "enumInt2Bool.core" [
("isZero", (==0)),
("isNegOne", (==(-1))),
("isOne", (==1)),
("isTwo", (==2)),
("isEven", ((==0) . (`mod` 2))),
("isOdd" , ((==1) . (`mod` 2))),
("is0m3" , ((==0) . (`mod` 3))),
("is1m3" , ((==1) . (`mod` 3))),
("is2m3" , ((==2) . (`mod` 3))),
("isGt0" , (>0)),
("isGt1" , (>1))
]
neg <- oneOf "enumInt2Bool.neg" [("no", id), ("yes", not)]
pure $ neg . phi
enumIdx2Color :: SolveM (Grid Color -> Index -> Color)
enumIdx2Color = choice "enumIdx2Color" [
("blank", pure $ \_ _ -> blank),
("keep", pure $ \input (Index x y) -> let Dims dx dy = Grid.dims input in Grid.get input (Index (x `mod` dx) (y `mod` dy))),
("constUpdate", do
c <- enumVals
pure $ \input (Index x y) ->
let Dims dx dy = Grid.dims input in
if nonBlank (Grid.get input (Index (x `mod` dx) (y `mod` dy)))
then c
else blank),
("const", do
c <- enumVals
pure $ \input (Index x y) -> c)
]
enumIdx2Bool :: SolveM (Dims -> Index -> Bool)
enumIdx2Bool = choice "enumIdx2Bool" [
("idx2int2bool", do
idx2int <- enumIdx2Int
int2bool <- enumInt2Bool
pure $ \_ idx -> int2bool $ idx2int idx),
("idx2bool", oneOf "idx2bool" [
("middleRow", \(Dims m n) (Index i j) -> i == m `div` 2)
, ("middleCol", \(Dims m n) (Index i j) -> j == n `div` 2)
, ("upperLeft", \(Dims m n) (Index i j) -> i == 0 && j == 0)
, ("lowerRight", \(Dims m n) (Index i j) -> i == m-1 && j == n-1)
, ("onEdge", \ds idx -> Dims.onEdge ds idx)
])
]
enumIdx2Idx :: SolveM (Index -> Index)
enumIdx2Idx = oneOf "enumIdx2Idx" [
("id", id),
("transpose", Index.transpose),
("max", \(Index i j) -> Index (max i j) (max i j)),
("min", \(Index i j) -> Index (min i j) (min i j))
]
|
b627abec0510a4fb15fa32961089108a6c7c6b7579db10891ef08f976ec707d5 | michaelballantyne/syntax-spec | statecharts-smaller.rkt | #lang racket
(require "../main.rkt" (for-syntax syntax/parse))
(syntax-spec
(binding-class statechart-name)
(binding-class state-name)
(binding-class var)
(binding-class data-name)
(nonterminal/two-pass state-body
(initial n:state-name)
#:binding {n}
e:event
(state n:state-name
sb:state-body ...)
#:binding [(export n) {(recursive sb)}]
(use scn:statechart-name #:as sn:state-name
e:event ...))
(nonterminal event
(on (evt:id arg:var ...)
ab:action ...+)
#:binding {(bind arg) ab})
(nonterminal action
(-> s:state-name)
(set n:data-name e:racket-expr)
(emit (name:id arg:racket-expr ...))
(let* (b:binding-group ...) body:action ...)
#:binding (nest b body))
(nonterminal/nesting binding-group (tail)
[v:var e:racket-expr]
#:binding {(bind v) tail})
#;(host-interface/definition
(define-statechart n:statechart-name
sb:state-body)
#:binding [(export n) {(recursive sb)}])
(host-interface/expression
(machine st:statechart-name)
#''TODO))
( machine , any ) - > ( machine , ( listof any ) )
(define (machine-step m event)
'TODO)
; ... accessors ...
| null | https://raw.githubusercontent.com/michaelballantyne/syntax-spec/1eca406e83468601ce7507de25fb036f6ff4d0ff/design/statecharts-smaller.rkt | racket | (host-interface/definition
... accessors ... | #lang racket
(require "../main.rkt" (for-syntax syntax/parse))
(syntax-spec
(binding-class statechart-name)
(binding-class state-name)
(binding-class var)
(binding-class data-name)
(nonterminal/two-pass state-body
(initial n:state-name)
#:binding {n}
e:event
(state n:state-name
sb:state-body ...)
#:binding [(export n) {(recursive sb)}]
(use scn:statechart-name #:as sn:state-name
e:event ...))
(nonterminal event
(on (evt:id arg:var ...)
ab:action ...+)
#:binding {(bind arg) ab})
(nonterminal action
(-> s:state-name)
(set n:data-name e:racket-expr)
(emit (name:id arg:racket-expr ...))
(let* (b:binding-group ...) body:action ...)
#:binding (nest b body))
(nonterminal/nesting binding-group (tail)
[v:var e:racket-expr]
#:binding {(bind v) tail})
(define-statechart n:statechart-name
sb:state-body)
#:binding [(export n) {(recursive sb)}])
(host-interface/expression
(machine st:statechart-name)
#''TODO))
( machine , any ) - > ( machine , ( listof any ) )
(define (machine-step m event)
'TODO)
|
dd687aad3ae1fcf0c1af0b07c0fad39c16ab1845c37a67e4860217d039fd6cc4 | typelead/eta | tc094.hs | module ShouldSucceed where
From a bug report by .
foo = bar
where bar = \_ -> (truncate boing, truncate boing)
boing = 0
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/tests/suite/typecheck/compile/tc094.hs | haskell | module ShouldSucceed where
From a bug report by .
foo = bar
where bar = \_ -> (truncate boing, truncate boing)
boing = 0
| |
6d99c2704f0d71394af43a3483f2779425d70b60bd4e324e7e6cb83d0d6c94d2 | gmr/pgsql-listen-exchange | pgsql_listen_worker.erl | %%==============================================================================
@author < >
2014 - 2020 AWeber Communications
%% @end
%%==============================================================================
%% @doc gen_server process for listening to casts and calls from
pgsql_listen_exchange and epgsql
%% @end
-module(pgsql_listen_worker).
-behaviour(gen_server).
-export([
start_link/0,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-include("pgsql_listen.hrl").
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
init([]) ->
State = #pgsql_listen_state{
amqp = dict:new(),
channels = dict:new(),
pgsql = dict:new()
},
{ok, maybe_connect(rabbit_exchange:list(), State)}.
code_change(_, State, _) ->
{ok, State}.
handle_call({add_binding, X, B}, _From, State) ->
case pgsql_listen_lib:add_binding(X, B, State) of
{ok, NewState} ->
{reply, ok, NewState};
{error, Error} ->
{reply, {error, Error}, {state, State}}
end;
handle_call({create, X}, _From, State) ->
case pgsql_listen_lib:start_exchange(X, State) of
{ok, NewState} ->
{reply, ok, NewState};
{error, Error} ->
{reply, {error, Error}, {state, State}}
end;
handle_call({delete, X, Bs}, _From, State) ->
case pgsql_listen_lib:remove_bindings(X, Bs, State) of
{ok, NState1} ->
case pgsql_listen_lib:stop_exchange(X, NState1) of
{ok, NState2} ->
{reply, ok, NState2};
{error, Error} ->
{reply, {error, Error}, NState1}
end;
{error, Error} ->
{reply, {error, Error}, State}
end;
handle_call({remove_bindings, X, Bs}, _From, State) ->
case pgsql_listen_lib:remove_bindings(X, Bs, State) of
{ok, NState} -> {reply, ok, NState};
{error, Error} -> {reply, {error, Error}, {state, State}}
end;
handle_call({validate, X}, _From, State) ->
case pgsql_listen_lib:validate_pgsql_connection(X) of
ok -> {reply, ok, State};
{error, Error} -> {reply, {error, Error}, State}
end;
handle_call(_Msg, _From, State) ->
{noreply, unknown_command, State}.
handle_cast(Cast, State) ->
rabbit_log:error("pgsql_listen_worker unknown_cast: ~p", [Cast]),
{noreply, State}.
handle_info({epgsql, Conn, {notice, Error}}, State) ->
rabbit_log:error("pgsql_listen_worker postgres error: ~p (~p)", [Conn, Error]),
{noreply, State};
handle_info({epgsql, Conn, {notification, Channel, _, Payload}}, State) ->
{noreply, pgsql_listen_lib:publish_notification(Conn, Channel, Payload, State)};
handle_info(Message, State) ->
rabbit_log:error("pgsql_listen_worker unknown_info: ~p", [Message]),
{noreply, State}.
terminate(_, _) ->
ok.
% -------------------------
maybe_connect([X = #exchange{name = Name, type = 'x-pgsql-listen'} | Tail], State) ->
case pgsql_listen_lib:start_exchange(X, State) of
{ok, NewState} ->
maybe_connect(Tail, add_bindings(X, rabbit_binding:list_for_source(Name), NewState));
{error, Error} ->
rabbit_log:error(
"pgsql_listen_exchange startup error for ~p: ~p",
[Name, Error]
),
maybe_connect(Tail, State)
end;
maybe_connect([_X | Tail], State) ->
maybe_connect(Tail, State);
maybe_connect([], State) ->
State.
add_bindings(X = #exchange{name = Name}, [H | T], State) ->
case pgsql_listen_lib:add_binding(X, H, State) of
{ok, NewState} ->
add_bindings(X, T, NewState);
{error, Error} ->
rabbit_log:error(
"pgsql_listen_exchange error adding binding ~p: ~p",
[Name, Error]
),
add_bindings(X, T, State)
end;
add_bindings(_X, [], State) ->
State.
| null | https://raw.githubusercontent.com/gmr/pgsql-listen-exchange/033ea5ecc6e02b740d616812ba10d142915149ee/src/pgsql_listen_worker.erl | erlang | ==============================================================================
@end
==============================================================================
@doc gen_server process for listening to casts and calls from
@end
------------------------- | @author < >
2014 - 2020 AWeber Communications
pgsql_listen_exchange and epgsql
-module(pgsql_listen_worker).
-behaviour(gen_server).
-export([
start_link/0,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-include("pgsql_listen.hrl").
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
init([]) ->
State = #pgsql_listen_state{
amqp = dict:new(),
channels = dict:new(),
pgsql = dict:new()
},
{ok, maybe_connect(rabbit_exchange:list(), State)}.
code_change(_, State, _) ->
{ok, State}.
handle_call({add_binding, X, B}, _From, State) ->
case pgsql_listen_lib:add_binding(X, B, State) of
{ok, NewState} ->
{reply, ok, NewState};
{error, Error} ->
{reply, {error, Error}, {state, State}}
end;
handle_call({create, X}, _From, State) ->
case pgsql_listen_lib:start_exchange(X, State) of
{ok, NewState} ->
{reply, ok, NewState};
{error, Error} ->
{reply, {error, Error}, {state, State}}
end;
handle_call({delete, X, Bs}, _From, State) ->
case pgsql_listen_lib:remove_bindings(X, Bs, State) of
{ok, NState1} ->
case pgsql_listen_lib:stop_exchange(X, NState1) of
{ok, NState2} ->
{reply, ok, NState2};
{error, Error} ->
{reply, {error, Error}, NState1}
end;
{error, Error} ->
{reply, {error, Error}, State}
end;
handle_call({remove_bindings, X, Bs}, _From, State) ->
case pgsql_listen_lib:remove_bindings(X, Bs, State) of
{ok, NState} -> {reply, ok, NState};
{error, Error} -> {reply, {error, Error}, {state, State}}
end;
handle_call({validate, X}, _From, State) ->
case pgsql_listen_lib:validate_pgsql_connection(X) of
ok -> {reply, ok, State};
{error, Error} -> {reply, {error, Error}, State}
end;
handle_call(_Msg, _From, State) ->
{noreply, unknown_command, State}.
handle_cast(Cast, State) ->
rabbit_log:error("pgsql_listen_worker unknown_cast: ~p", [Cast]),
{noreply, State}.
handle_info({epgsql, Conn, {notice, Error}}, State) ->
rabbit_log:error("pgsql_listen_worker postgres error: ~p (~p)", [Conn, Error]),
{noreply, State};
handle_info({epgsql, Conn, {notification, Channel, _, Payload}}, State) ->
{noreply, pgsql_listen_lib:publish_notification(Conn, Channel, Payload, State)};
handle_info(Message, State) ->
rabbit_log:error("pgsql_listen_worker unknown_info: ~p", [Message]),
{noreply, State}.
terminate(_, _) ->
ok.
maybe_connect([X = #exchange{name = Name, type = 'x-pgsql-listen'} | Tail], State) ->
case pgsql_listen_lib:start_exchange(X, State) of
{ok, NewState} ->
maybe_connect(Tail, add_bindings(X, rabbit_binding:list_for_source(Name), NewState));
{error, Error} ->
rabbit_log:error(
"pgsql_listen_exchange startup error for ~p: ~p",
[Name, Error]
),
maybe_connect(Tail, State)
end;
maybe_connect([_X | Tail], State) ->
maybe_connect(Tail, State);
maybe_connect([], State) ->
State.
add_bindings(X = #exchange{name = Name}, [H | T], State) ->
case pgsql_listen_lib:add_binding(X, H, State) of
{ok, NewState} ->
add_bindings(X, T, NewState);
{error, Error} ->
rabbit_log:error(
"pgsql_listen_exchange error adding binding ~p: ~p",
[Name, Error]
),
add_bindings(X, T, State)
end;
add_bindings(_X, [], State) ->
State.
|
678d0d3eee034633fabc8e05125fcd8616b7b45f208dea4ba356449b687f3baa | lucasvreis/organon | JSON.hs | module Site.Org.JSON
( ToJSON (..),
FromJSON (..),
genericToEncoding,
genericToJSON,
genericParseJSON,
customOptions,
)
where
import Data.Aeson
customOptions :: Options
customOptions =
defaultOptions
{ fieldLabelModifier = camelTo2 '-',
constructorTagModifier = camelTo2 '-',
sumEncoding = TaggedObject "kind" "options"
}
| null | https://raw.githubusercontent.com/lucasvreis/organon/08d20ee0e3f28519a6a31b54cfab9cfc701ae004/src/ema-org/Site/Org/JSON.hs | haskell | module Site.Org.JSON
( ToJSON (..),
FromJSON (..),
genericToEncoding,
genericToJSON,
genericParseJSON,
customOptions,
)
where
import Data.Aeson
customOptions :: Options
customOptions =
defaultOptions
{ fieldLabelModifier = camelTo2 '-',
constructorTagModifier = camelTo2 '-',
sumEncoding = TaggedObject "kind" "options"
}
| |
551e5e1e976db2b2f59c0f375353a94f0fff02731447b3b00456d0e7e904acb9 | wdebeaum/step | y.lisp | ;;;;
;;;; W::Y
;;;;
(define-words :pos W::value :boost-word t
:words (
(W::Y
(senses((LF-PARENT ONT::letter-symbol)
(TEMPL value-templ) (PREFERENCE 0.92)
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/y.lisp | lisp |
W::Y
|
(define-words :pos W::value :boost-word t
:words (
(W::Y
(senses((LF-PARENT ONT::letter-symbol)
(TEMPL value-templ) (PREFERENCE 0.92)
)
)
)
))
|
265032cff63dc54c52d8a66727dd86c3441580dd28f8f2cd0640a445af82146b | progman1/genprintlib | input_handling.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
OCaml port by and
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(**************************** Input control ****************************)
open Unix
open Primitives
(*** Actives files. ***)
(* List of the actives files. *)
let active_files =
ref ([] : (file_descr * ((io_channel -> unit) * io_channel)) list)
(* Add a file to the list of actives files. *)
let add_file file controller =
active_files := (file.io_fd, (controller, file))::!active_files
(* Remove a file from the list of actives files. *)
let remove_file file =
active_files := List.remove_assoc file.io_fd !active_files
(* Change the controller for the given file. *)
let change_controller file controller =
remove_file file; add_file file controller
(* Return the controller currently attached to the given file. *)
let current_controller file =
fst (List.assoc file.io_fd !active_files)
(* Execute a function with `controller' attached to `file'. *)
# # # controller file funct
let execute_with_other_controller controller file funct =
let old_controller = current_controller file in
change_controller file controller;
try
let result = funct () in
change_controller file old_controller;
result
with
x ->
change_controller file old_controller;
raise x
(*** The "Main Loop" ***)
let continue_main_loop =
ref true
let exit_main_loop _ =
continue_main_loop := false
(* Handle active files until `continue_main_loop' is false. *)
let main_loop () =
let old_state = !continue_main_loop in
try
continue_main_loop := true;
while !continue_main_loop do
try
let (input, _, _) =
select (List.map fst !active_files) [] [] (-1.)
in
List.iter
(function fd ->
let (funct, iochan) = (List.assoc fd !active_files) in
funct iochan)
input
with
Unix_error (EINTR, _, _) -> ()
done;
continue_main_loop := old_state
with
x ->
continue_main_loop := old_state;
raise x
(*** Managing user inputs ***)
(* Are we in interactive mode ? *)
let interactif = ref true
let current_prompt = ref ""
(* Where the user input come from. *)
let user_channel = ref std_io
let read_user_input buffer length =
main_loop ();
input !user_channel.io_in buffer 0 length
(* Stop reading user input. *)
let stop_user_input () =
remove_file !user_channel
(* Resume reading user input. *)
let resume_user_input () =
if not (List.mem_assoc !user_channel.io_fd !active_files) then begin
if !interactif && !Parameters.prompt then begin
print_string !current_prompt;
flush Stdlib.stdout
end;
add_file !user_channel exit_main_loop
end
| null | https://raw.githubusercontent.com/progman1/genprintlib/acc1e5cc46b9ce6191d0306f51337581c93ffe94/debugger/4.10.0/input_handling.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
*************************** Input control ***************************
** Actives files. **
List of the actives files.
Add a file to the list of actives files.
Remove a file from the list of actives files.
Change the controller for the given file.
Return the controller currently attached to the given file.
Execute a function with `controller' attached to `file'.
** The "Main Loop" **
Handle active files until `continue_main_loop' is false.
** Managing user inputs **
Are we in interactive mode ?
Where the user input come from.
Stop reading user input.
Resume reading user input. | , projet Cristal , INRIA Rocquencourt
OCaml port by and
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Unix
open Primitives
let active_files =
ref ([] : (file_descr * ((io_channel -> unit) * io_channel)) list)
let add_file file controller =
active_files := (file.io_fd, (controller, file))::!active_files
let remove_file file =
active_files := List.remove_assoc file.io_fd !active_files
let change_controller file controller =
remove_file file; add_file file controller
let current_controller file =
fst (List.assoc file.io_fd !active_files)
# # # controller file funct
let execute_with_other_controller controller file funct =
let old_controller = current_controller file in
change_controller file controller;
try
let result = funct () in
change_controller file old_controller;
result
with
x ->
change_controller file old_controller;
raise x
let continue_main_loop =
ref true
let exit_main_loop _ =
continue_main_loop := false
let main_loop () =
let old_state = !continue_main_loop in
try
continue_main_loop := true;
while !continue_main_loop do
try
let (input, _, _) =
select (List.map fst !active_files) [] [] (-1.)
in
List.iter
(function fd ->
let (funct, iochan) = (List.assoc fd !active_files) in
funct iochan)
input
with
Unix_error (EINTR, _, _) -> ()
done;
continue_main_loop := old_state
with
x ->
continue_main_loop := old_state;
raise x
let interactif = ref true
let current_prompt = ref ""
let user_channel = ref std_io
let read_user_input buffer length =
main_loop ();
input !user_channel.io_in buffer 0 length
let stop_user_input () =
remove_file !user_channel
let resume_user_input () =
if not (List.mem_assoc !user_channel.io_fd !active_files) then begin
if !interactif && !Parameters.prompt then begin
print_string !current_prompt;
flush Stdlib.stdout
end;
add_file !user_channel exit_main_loop
end
|
5d02713bfdd86a0063966f6d7072e4a5fff64aa379834201d2941568c9f3eddc | SamB/coq | coq_config.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id$ i*)
val local : bool (* local use (no installation) *)
val bindir : string (* where the binaries are installed *)
val coqlib : string (* where the std library is installed *)
val coqtop : string (* where are the sources *)
val camldir : string (* base directory of OCaml binaries *)
val camllib : string (* for Dynlink *)
exact name of : either " " ou " camlp5 "
val camlp4lib : string (* where is the library of Camlp4 *)
val best : string (* byte/opt *)
val arch : string (* architecture *)
val osdeplibs : string (* OS dependant link options for ocamlc *)
(* val defined : string list (* options for lib/ocamlpp *) *)
version number of Coq
val versionsi : string (* version number of Coq\_SearchIsos *)
val date : string (* release date *)
val compile_date : string (* compile date *)
val theories_dirs : string list
val contrib_dirs : string list
" " under Unix , " .exe " under MS - windows
to ( de)activate functions specific to with Coqide
| null | https://raw.githubusercontent.com/SamB/coq/8f84aba9ae83a4dc43ea6e804227ae8cae8086b1/config/coq_config.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i $Id$ i
local use (no installation)
where the binaries are installed
where the std library is installed
where are the sources
base directory of OCaml binaries
for Dynlink
where is the library of Camlp4
byte/opt
architecture
OS dependant link options for ocamlc
val defined : string list (* options for lib/ocamlpp
version number of Coq\_SearchIsos
release date
compile date | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
exact name of : either " " ou " camlp5 "
version number of Coq
val theories_dirs : string list
val contrib_dirs : string list
" " under Unix , " .exe " under MS - windows
to ( de)activate functions specific to with Coqide
|
5b28f74e91e9b39dd2c135b657b7bd6f4bf75ed804993c08ac9a262a943aa46f | didierverna/tfm | package.lisp | package.lisp --- TFM package definition
Copyright ( C ) 2019 , 2021
Author : < >
This file is part of TFM .
;; Permission to use, copy, modify, and distribute this software for any
;; purpose with or without fee is hereby granted, provided that the above
;; copyright notice and this permission notice appear in all copies.
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
;; WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
;; MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
;; ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
;; OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
;;; Commentary:
;;; Code:
(in-package :cl-user)
(defpackage :net.didierverna.tfm
(:documentation "The TeX Font Metrics package.")
(:use :cl :net.didierverna.tfm.setup)
(:export
;; From the :net.didierverna.tfm.setup package:
:*copyright-years*
:*release-major-level*
:*release-minor-level*
:*release-status*
:*release-status-level*
:*release-name*
:version
From package.lisp ( this file ):
:nickname-package
;; From src/util.lisp:
:tfm :tfm-warning :tfm-error :tfm-compliance :tfm-usage
:tfm-compliance-warning :tfm-compliance-error
:tfm-usage-warning :tfm-usage-error
:u16-overflow :value
:fix-word-overflow :value :set-to-zero
:invalid-string-length :value :padding
:invalid-bcpl-string :value
:read-maximum-length :keep-string :fix-string :discard-string
;; From src/character.lisp:
:character-metrics
:code :font :width :height :depth :italic-correction :next-character
:extensiblep :not-extensible :value
:top-character :middle-character :bottom-character :repeated-character
;; From src/font.lisp:
:ligature :composite :delete-before :delete-after :pass-over
:font :name :file :checksum :frozen :design-size :original-design-size
:encoding :family :7bits-safe :face-number :weight :slope :expansion
:face-code
:slant :interword-space :interword-stretch :interword-shrink :ex :em
:extra-space :parameters :min-code :max-code :character-count
:boundary-character
:invalid-character-code :value
:get-character
:different-fonts :character1 :character2
:ligature :kerning
:freeze :unfreeze
:math-symbols-font
:num1 :num2 :num3 :denom1 :denom2 :sup1 :sup2 :sup3 :sub1 :sub2
:supdrop :subdrop :delim1 :delim2 :axis-height
:math-extension-font :default-rule-thickness
:big-op-spacing1 :big-op-spacing2 :big-op-spacing3 :big-op-spacing4
:big-op-spacing5
;; From src/file.lisp:
:invalid-design-size :value :set-to-ten
:name
:invalid-table-index :value :largest
:invalid-ligature-opcode :value
:abort-lig/kern-program :discard-ligature :discard-kerning
:abort-lig/kern-program
:invalid-char-info :value
:invalid-table-start :value
:no-boundary-character
:character-list-cycle :value
:ligature-cycle :value :characters
:set-to-zero :abort-lig/kern-program
:discard-next-character :discard-extension-recipe
:discard-next-character :discard-ligature
:declared-size :actual-size
:file-underflow :file-overflow
:invalid-header-length :value
:invalid-character-range :bc :ec
:invalid-section-lengths :lf :lh :nc :nw :nh :nd :ni :nl :nk :ne :np
:invalid-table-length :value :smallest :largest
:extended-tfm :value :file
:load-font :cancel-loading))
(in-package :net.didierverna.tfm)
(defun nickname-package (&optional (nickname :tfm))
"Add NICKNAME (:TFM by default) to the :NET.DIDIERVERNA.TFM package."
(rename-package :net.didierverna.tfm
(package-name :net.didierverna.tfm)
(adjoin nickname (package-nicknames :net.didierverna.tfm)
:test #'string-equal)))
;;; package.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/tfm/30ef2114392063043d34f900a641a8510069db68/core/package.lisp | lisp | Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Commentary:
Code:
From the :net.didierverna.tfm.setup package:
From src/util.lisp:
From src/character.lisp:
From src/font.lisp:
From src/file.lisp:
package.lisp ends here | package.lisp --- TFM package definition
Copyright ( C ) 2019 , 2021
Author : < >
This file is part of TFM .
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(in-package :cl-user)
(defpackage :net.didierverna.tfm
(:documentation "The TeX Font Metrics package.")
(:use :cl :net.didierverna.tfm.setup)
(:export
:*copyright-years*
:*release-major-level*
:*release-minor-level*
:*release-status*
:*release-status-level*
:*release-name*
:version
From package.lisp ( this file ):
:nickname-package
:tfm :tfm-warning :tfm-error :tfm-compliance :tfm-usage
:tfm-compliance-warning :tfm-compliance-error
:tfm-usage-warning :tfm-usage-error
:u16-overflow :value
:fix-word-overflow :value :set-to-zero
:invalid-string-length :value :padding
:invalid-bcpl-string :value
:read-maximum-length :keep-string :fix-string :discard-string
:character-metrics
:code :font :width :height :depth :italic-correction :next-character
:extensiblep :not-extensible :value
:top-character :middle-character :bottom-character :repeated-character
:ligature :composite :delete-before :delete-after :pass-over
:font :name :file :checksum :frozen :design-size :original-design-size
:encoding :family :7bits-safe :face-number :weight :slope :expansion
:face-code
:slant :interword-space :interword-stretch :interword-shrink :ex :em
:extra-space :parameters :min-code :max-code :character-count
:boundary-character
:invalid-character-code :value
:get-character
:different-fonts :character1 :character2
:ligature :kerning
:freeze :unfreeze
:math-symbols-font
:num1 :num2 :num3 :denom1 :denom2 :sup1 :sup2 :sup3 :sub1 :sub2
:supdrop :subdrop :delim1 :delim2 :axis-height
:math-extension-font :default-rule-thickness
:big-op-spacing1 :big-op-spacing2 :big-op-spacing3 :big-op-spacing4
:big-op-spacing5
:invalid-design-size :value :set-to-ten
:name
:invalid-table-index :value :largest
:invalid-ligature-opcode :value
:abort-lig/kern-program :discard-ligature :discard-kerning
:abort-lig/kern-program
:invalid-char-info :value
:invalid-table-start :value
:no-boundary-character
:character-list-cycle :value
:ligature-cycle :value :characters
:set-to-zero :abort-lig/kern-program
:discard-next-character :discard-extension-recipe
:discard-next-character :discard-ligature
:declared-size :actual-size
:file-underflow :file-overflow
:invalid-header-length :value
:invalid-character-range :bc :ec
:invalid-section-lengths :lf :lh :nc :nw :nh :nd :ni :nl :nk :ne :np
:invalid-table-length :value :smallest :largest
:extended-tfm :value :file
:load-font :cancel-loading))
(in-package :net.didierverna.tfm)
(defun nickname-package (&optional (nickname :tfm))
"Add NICKNAME (:TFM by default) to the :NET.DIDIERVERNA.TFM package."
(rename-package :net.didierverna.tfm
(package-name :net.didierverna.tfm)
(adjoin nickname (package-nicknames :net.didierverna.tfm)
:test #'string-equal)))
|
860818bbe3d4f938fa34b4382223dcf88259b145855140b40dfcceb37be3c682 | workframers/garamond | util.clj | (ns garamond.util)
(defn exit
"Abort garamond and exit with the specified exit code. If a message is passed, print it to stderr
before exiting. Note that this function just throws exceptions which are handled in garamond.main/-main."
([]
(exit 0))
([code]
(exit code nil))
([code message]
(throw (ex-info "Exit condition" {:code code :message message}))))
| null | https://raw.githubusercontent.com/workframers/garamond/a5077bc99d5f6e303735505068d5363d3b2cc00c/src/garamond/util.clj | clojure | (ns garamond.util)
(defn exit
"Abort garamond and exit with the specified exit code. If a message is passed, print it to stderr
before exiting. Note that this function just throws exceptions which are handled in garamond.main/-main."
([]
(exit 0))
([code]
(exit code nil))
([code message]
(throw (ex-info "Exit condition" {:code code :message message}))))
| |
2eaa7545690079f16f10c8b04c49f8051c4b74747d9d3043168c49f14d7bcdbe | mirage/shared-memory-ring | client.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (C) Citrix Systems Inc.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Lwt
let ( | > ) a b = b a
let i d x = x
let alloc_page ( ) =
Bigarray.Array1.create Bigarray.char Bigarray.c_layout 4096
let one_request_response ( ) =
let page = alloc_page ( ) in
let sring = Ring . ) ~idx_size:1 ~name:"test " in
let front = Ring.Rpc.Front.init sring in
let back = Ring.Rpc.Back.init sring in
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool false ( Ring . Rpc . Back.more_to_do back ) ;
let client = Lwt_ring.Front.init front in
let server = Lwt_ring.Back.init back in
let i d = ( ) in
let must_notify = ref false in
let request_th = Lwt_ring . Front.push_request_and_wait client ( fun ( ) - > must_notify : = true ) ( fun _ - > i d ) in
assert_equal ~msg:"must_notify " ~printer : string_of_bool true ( ! must_notify ) ;
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool true ( Ring . Rpc . Back.more_to_do back ) ;
let finished = ref false in
Ring . Rpc . back ( fun _ - > finished : = true ) ;
assert_equal ~msg:"ack_requests " ~printer : string_of_bool true ( ! finished ) ;
Lwt_ring . Back.push_response server ( fun ( ) - > ( ) ) ( fun _ - > ( ) ) ;
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
let replied = ref false in
Lwt_ring.Front.poll client ( fun _ - > replied : = true ; i d , ( ) ) ;
assert_equal ~msg:"poll " ~printer : string_of_bool true ( ! replied ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool false ( Ring . Rpc . Back.more_to_do back ) ;
lwt ( ) = Lwt.choose [ Lwt_unix.sleep 5 . ; request_th ] in
assert_equal ~msg:"is_sleeping " ~printer : string_of_bool false ( Lwt.is_sleeping request_th ) ;
return ( )
let one_request_response ( ) = Lwt_main.run ( one_request_response ( ) )
let _ =
let verbose = ref false in
Arg.parse [
" -verbose " , Arg . Unit ( fun _ - > verbose : = true ) , " Run in verbose mode " ;
] ( fun x - > Printf.fprintf stderr " Ignoring argument : % s " x )
" Test shared memory ring code " ;
let suite = " ring " > : : :
[
" one_request_response " > : : one_request_response
] in
run_test_tt ~verbose:!verbose suite
open Lwt
let ( |> ) a b = b a
let id x = x
let alloc_page () =
Bigarray.Array1.create Bigarray.char Bigarray.c_layout 4096
let one_request_response () =
let page = alloc_page () in
let sring = Ring.Rpc.of_buf ~buf:(Cstruct.of_bigarray page) ~idx_size:1 ~name:"test" in
let front = Ring.Rpc.Front.init sring in
let back = Ring.Rpc.Back.init sring in
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool false (Ring.Rpc.Back.more_to_do back);
let client = Lwt_ring.Front.init front in
let server = Lwt_ring.Back.init back in
let id = () in
let must_notify = ref false in
let request_th = Lwt_ring.Front.push_request_and_wait client (fun () -> must_notify := true) (fun _ -> id) in
assert_equal ~msg:"must_notify" ~printer:string_of_bool true (!must_notify);
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool true (Ring.Rpc.Back.more_to_do back);
let finished = ref false in
Ring.Rpc.Back.ack_requests back (fun _ -> finished := true);
assert_equal ~msg:"ack_requests" ~printer:string_of_bool true (!finished);
Lwt_ring.Back.push_response server (fun () -> ()) (fun _ -> ());
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
let replied = ref false in
Lwt_ring.Front.poll client (fun _ -> replied := true; id, ());
assert_equal ~msg:"poll" ~printer:string_of_bool true (!replied);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool false (Ring.Rpc.Back.more_to_do back);
lwt () = Lwt.choose [ Lwt_unix.sleep 5.; request_th ] in
assert_equal ~msg:"is_sleeping" ~printer:string_of_bool false (Lwt.is_sleeping request_th);
return ()
let one_request_response () = Lwt_main.run (one_request_response ())
let _ =
let verbose = ref false in
Arg.parse [
"-verbose", Arg.Unit (fun _ -> verbose := true), "Run in verbose mode";
] (fun x -> Printf.fprintf stderr "Ignoring argument: %s" x)
"Test shared memory ring code";
let suite = "ring" >:::
[
"one_request_response" >:: one_request_response
] in
run_test_tt ~verbose:!verbose suite
*)
let () = print_string "hello\n"
| null | https://raw.githubusercontent.com/mirage/shared-memory-ring/34b981b4db5c020b2967a0397c41f642d7b07df1/examples/client.ml | ocaml |
* Copyright ( C ) Citrix Systems Inc.
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (C) Citrix Systems Inc.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Lwt
let ( | > ) a b = b a
let i d x = x
let alloc_page ( ) =
Bigarray.Array1.create Bigarray.char Bigarray.c_layout 4096
let one_request_response ( ) =
let page = alloc_page ( ) in
let sring = Ring . ) ~idx_size:1 ~name:"test " in
let front = Ring.Rpc.Front.init sring in
let back = Ring.Rpc.Back.init sring in
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool false ( Ring . Rpc . Back.more_to_do back ) ;
let client = Lwt_ring.Front.init front in
let server = Lwt_ring.Back.init back in
let i d = ( ) in
let must_notify = ref false in
let request_th = Lwt_ring . Front.push_request_and_wait client ( fun ( ) - > must_notify : = true ) ( fun _ - > i d ) in
assert_equal ~msg:"must_notify " ~printer : string_of_bool true ( ! must_notify ) ;
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool true ( Ring . Rpc . Back.more_to_do back ) ;
let finished = ref false in
Ring . Rpc . back ( fun _ - > finished : = true ) ;
assert_equal ~msg:"ack_requests " ~printer : string_of_bool true ( ! finished ) ;
Lwt_ring . Back.push_response server ( fun ( ) - > ( ) ) ( fun _ - > ( ) ) ;
Printf.fprintf stdout " % s\n% ! " ( Ring . Rpc . back ) ;
let replied = ref false in
Lwt_ring.Front.poll client ( fun _ - > replied : = true ; i d , ( ) ) ;
assert_equal ~msg:"poll " ~printer : string_of_bool true ( ! replied ) ;
assert_equal ~msg:"more_to_do " ~printer : string_of_bool false ( Ring . Rpc . Back.more_to_do back ) ;
lwt ( ) = Lwt.choose [ Lwt_unix.sleep 5 . ; request_th ] in
assert_equal ~msg:"is_sleeping " ~printer : string_of_bool false ( Lwt.is_sleeping request_th ) ;
return ( )
let one_request_response ( ) = Lwt_main.run ( one_request_response ( ) )
let _ =
let verbose = ref false in
Arg.parse [
" -verbose " , Arg . Unit ( fun _ - > verbose : = true ) , " Run in verbose mode " ;
] ( fun x - > Printf.fprintf stderr " Ignoring argument : % s " x )
" Test shared memory ring code " ;
let suite = " ring " > : : :
[
" one_request_response " > : : one_request_response
] in
run_test_tt ~verbose:!verbose suite
open Lwt
let ( |> ) a b = b a
let id x = x
let alloc_page () =
Bigarray.Array1.create Bigarray.char Bigarray.c_layout 4096
let one_request_response () =
let page = alloc_page () in
let sring = Ring.Rpc.of_buf ~buf:(Cstruct.of_bigarray page) ~idx_size:1 ~name:"test" in
let front = Ring.Rpc.Front.init sring in
let back = Ring.Rpc.Back.init sring in
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool false (Ring.Rpc.Back.more_to_do back);
let client = Lwt_ring.Front.init front in
let server = Lwt_ring.Back.init back in
let id = () in
let must_notify = ref false in
let request_th = Lwt_ring.Front.push_request_and_wait client (fun () -> must_notify := true) (fun _ -> id) in
assert_equal ~msg:"must_notify" ~printer:string_of_bool true (!must_notify);
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool true (Ring.Rpc.Back.more_to_do back);
let finished = ref false in
Ring.Rpc.Back.ack_requests back (fun _ -> finished := true);
assert_equal ~msg:"ack_requests" ~printer:string_of_bool true (!finished);
Lwt_ring.Back.push_response server (fun () -> ()) (fun _ -> ());
Printf.fprintf stdout "%s\n%!" (Ring.Rpc.Back.to_string back);
let replied = ref false in
Lwt_ring.Front.poll client (fun _ -> replied := true; id, ());
assert_equal ~msg:"poll" ~printer:string_of_bool true (!replied);
assert_equal ~msg:"more_to_do" ~printer:string_of_bool false (Ring.Rpc.Back.more_to_do back);
lwt () = Lwt.choose [ Lwt_unix.sleep 5.; request_th ] in
assert_equal ~msg:"is_sleeping" ~printer:string_of_bool false (Lwt.is_sleeping request_th);
return ()
let one_request_response () = Lwt_main.run (one_request_response ())
let _ =
let verbose = ref false in
Arg.parse [
"-verbose", Arg.Unit (fun _ -> verbose := true), "Run in verbose mode";
] (fun x -> Printf.fprintf stderr "Ignoring argument: %s" x)
"Test shared memory ring code";
let suite = "ring" >:::
[
"one_request_response" >:: one_request_response
] in
run_test_tt ~verbose:!verbose suite
*)
let () = print_string "hello\n"
| |
3c5d50f47f4774ea6aa0a036633a200c899070a6257d1d994e9db8336a8ff03e | gdamore/tree-sitter-d | empty.scm | ==================
Empty file
==================
---
(source_file)
==================
End of file
==================
__EOF__
module is.not.included;
---
(source_file (end_file))
| null | https://raw.githubusercontent.com/gdamore/tree-sitter-d/d7b3cc1f9f1f59518457696a8547e0e1bb1a64b4/test/corpus/empty.scm | scheme | ==================
Empty file
==================
---
(source_file)
==================
End of file
==================
__EOF__
---
(source_file (end_file))
| |
f45b22bc3805cd348c156cd003e7f504886f028840f79f99e46673d6d595c30e | brendanhay/gogol | Patch.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . ShoppingContent . Content . Regions . Patch
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
Updates a region definition in your Merchant Center account .
--
/See:/ < API for Shopping Reference > for @content.regions.patch@.
module Gogol.ShoppingContent.Content.Regions.Patch
( -- * Resource
ContentRegionsPatchResource,
-- ** Constructing a Request
ContentRegionsPatch (..),
newContentRegionsPatch,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.regions.patch@ method which the
-- 'ContentRegionsPatch' request conforms to.
type ContentRegionsPatchResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Int64
Core.:> "regions"
Core.:> Core.Capture "regionId" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "updateMask" Core.FieldMask
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.ReqBody '[Core.JSON] Region
Core.:> Core.Patch '[Core.JSON] Region
| Updates a region definition in your Merchant Center account .
--
-- /See:/ 'newContentRegionsPatch' smart constructor.
data ContentRegionsPatch = ContentRegionsPatch
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
-- | Required. The id of the merchant for which to update region definition.
merchantId :: Core.Int64,
-- | Multipart request metadata.
payload :: Region,
-- | Required. The id of the region to update.
regionId :: Core.Text,
-- | Optional. The comma-separated field mask indicating the fields to update. Example: @\"displayName,postalCodeArea.regionCode\"@.
updateMask :: (Core.Maybe Core.FieldMask),
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
-- | Creates a value of 'ContentRegionsPatch' with the minimum fields required to make a request.
newContentRegionsPatch ::
-- | Required. The id of the merchant for which to update region definition. See 'merchantId'.
Core.Int64 ->
-- | Multipart request metadata. See 'payload'.
Region ->
-- | Required. The id of the region to update. See 'regionId'.
Core.Text ->
ContentRegionsPatch
newContentRegionsPatch merchantId payload regionId =
ContentRegionsPatch
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
merchantId = merchantId,
payload = payload,
regionId = regionId,
updateMask = Core.Nothing,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest ContentRegionsPatch where
type Rs ContentRegionsPatch = Region
type
Scopes ContentRegionsPatch =
'[Content'FullControl]
requestClient ContentRegionsPatch {..} =
go
merchantId
regionId
xgafv
accessToken
callback
updateMask
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
payload
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy ContentRegionsPatchResource
)
Core.mempty
| null | https://raw.githubusercontent.com/brendanhay/gogol/77394c4e0f5bd729e6fe27119701c45f9d5e1e9a/lib/services/gogol-shopping-content/gen/Gogol/ShoppingContent/Content/Regions/Patch.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
* Resource
** Constructing a Request
'ContentRegionsPatch' request conforms to.
/See:/ 'newContentRegionsPatch' smart constructor.
| V1 error format.
| OAuth access token.
| Required. The id of the merchant for which to update region definition.
| Multipart request metadata.
| Required. The id of the region to update.
| Optional. The comma-separated field mask indicating the fields to update. Example: @\"displayName,postalCodeArea.regionCode\"@.
| Upload protocol for media (e.g. \"raw\", \"multipart\").
| Creates a value of 'ContentRegionsPatch' with the minimum fields required to make a request.
| Required. The id of the merchant for which to update region definition. See 'merchantId'.
| Multipart request metadata. See 'payload'.
| Required. The id of the region to update. See 'regionId'. | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Module : . ShoppingContent . Content . Regions . Patch
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
Updates a region definition in your Merchant Center account .
/See:/ < API for Shopping Reference > for @content.regions.patch@.
module Gogol.ShoppingContent.Content.Regions.Patch
ContentRegionsPatchResource,
ContentRegionsPatch (..),
newContentRegionsPatch,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.regions.patch@ method which the
type ContentRegionsPatchResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Int64
Core.:> "regions"
Core.:> Core.Capture "regionId" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "updateMask" Core.FieldMask
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.ReqBody '[Core.JSON] Region
Core.:> Core.Patch '[Core.JSON] Region
| Updates a region definition in your Merchant Center account .
data ContentRegionsPatch = ContentRegionsPatch
xgafv :: (Core.Maybe Xgafv),
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
merchantId :: Core.Int64,
payload :: Region,
regionId :: Core.Text,
updateMask :: (Core.Maybe Core.FieldMask),
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
newContentRegionsPatch ::
Core.Int64 ->
Region ->
Core.Text ->
ContentRegionsPatch
newContentRegionsPatch merchantId payload regionId =
ContentRegionsPatch
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
merchantId = merchantId,
payload = payload,
regionId = regionId,
updateMask = Core.Nothing,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest ContentRegionsPatch where
type Rs ContentRegionsPatch = Region
type
Scopes ContentRegionsPatch =
'[Content'FullControl]
requestClient ContentRegionsPatch {..} =
go
merchantId
regionId
xgafv
accessToken
callback
updateMask
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
payload
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy ContentRegionsPatchResource
)
Core.mempty
|
115fbdc893e3f192e23c291ebcb00100d45b5c9f7417d08782d85de3124621f8 | edicl/cl-unicode | tests.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : CL - UNICODE - TEST ; Base : 10 -*-
$ Header : /usr / local / cvsrep / cl - unicode / test / tests.lisp , v 1.18 2012 - 05 - 04 21:17:49 edi Exp $
Copyright ( c ) 2008 - 2012 , Dr. . All rights reserved .
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :cl-unicode-test)
(defvar *this-file* (load-time-value
(or #.*compile-file-pathname* *load-pathname*))
"The location of this source file. Needed to find the data files.")
(defmacro do-tests ((name &optional show-progress-p) &body body)
"Helper macro which repeatedly executes BODY until the code in body
calls the function DONE. It is assumed that each invocation of BODY
will be the execution of one test which returns NIL in case of success
and a list of strings describing errors otherwise.
The macro prints a simple progress indicator \(one dots for ten tests)
to *STANDARD-OUTPUT* unless SHOW-PROGRESS-P is NIL and returns a true
value iff all tests succeeded. Errors in BODY are caught and reported
\(and counted as failures)."
`(let ((successp t)
(testcount 1))
(block test-block
(flet ((done ()
(return-from test-block successp)))
(format t "~&Test: ~A~%" ,name)
(loop
(when (and ,show-progress-p (zerop (mod testcount 10)))
(format t ".")
(when (zerop (mod testcount 100))
(terpri))
(force-output))
(let ((errors
(handler-case
(progn ,@body)
(error (msg)
(list (format nil "~&got an unexpected error: ~A" msg))))))
(setq successp (and successp (null errors)))
(when errors
(format t "~&~4@A:~{~& ~A~}~%" testcount errors))
(incf testcount)))))
successp))
(defun simple-tests (&key (file-name
(make-pathname :name "simple"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them using EVAL. Reads all forms with the alternative character
syntax enabled. It is assumed that each FORM specifies a test which
returns a true value iff it succeeds. Prints each test form to
*STANDARD-OUTPUT* if VERBOSE is true and shows a simple progress
indicator otherwise. Returns a true value iff all tests succeeded."
(enable-alternative-character-syntax)
(unwind-protect
(with-open-file (stream file-name)
(let ((*package* (find-package :cl-unicode-test))
(*try-unicode1-names-p* t)
(*try-abbreviations-p* t)
(*scripts-to-try* '("Hebrew"))
(*try-hex-notation-p* t)
(*try-lisp-names-p* t))
(do-tests ((format nil "Simple tests from file ~S" (file-namestring file-name))
(not verbose))
(let ((form (or (read stream nil) (done))))
(when verbose
(format t "~&~S" form))
(cond ((eval form) nil)
(t (list (format nil "~S returned NIL" form))))))))
(disable-alternative-character-syntax)))
(defun property-tests (&key (file-name
(make-pathname :name "properties"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them as a test for a property. The forms must be lists \(C S B)
where C is a code point \(an integer), S is a string denoting the
property, and B is boolean denoting whether the character has the
property or not. Tests are performed using HAS-PROPERTY. Prints each
test to *STANDARD-OUTPUT* if VERBOSE is true and shows a simple
progress indicator otherwise. Returns a true value iff all tests
succeeded."
(with-open-file (stream file-name)
(do-tests ((format nil "Properties from file ~S" (file-namestring file-name))
(not verbose))
(let ((input-line (or (read stream nil) (done))))
(destructuring-bind (char-code property-name expected-result)
input-line
(when verbose
(format t "~&~A: #x~X" property-name char-code))
(let* ((char (and (< char-code char-code-limit) (code-char char-code)))
(result-1 (has-property char-code property-name))
(result-2 (and char (has-property char property-name)))
errors)
(unless (eq expected-result (not (not result-1)))
(push (format nil "code point #x~X should ~:[not ~;~]have property \"~A\""
char-code expected-result property-name)
errors))
(when char
(unless (eq expected-result (not (not result-2)))
(push (format nil "\(code-char #x~X) should ~:[not ~;~]have property \"~A\""
char-code expected-result property-name)
errors)))
errors))))))
(defun normalization-tests (&key (file-name
(make-pathname :name "normalization-forms"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them as a test for a property. The forms must be lists \(C S B)
where C is a code point \(an integer), S is a string denoting the
property, and B is boolean denoting whether the character has the
property or not. Tests are performed using HAS-PROPERTY. Prints each
test to *STANDARD-OUTPUT* if VERBOSE is true and shows a simple
progress indicator otherwise. Returns a true value iff all tests
succeeded."
(with-open-file (stream file-name)
(do-tests ((format nil "Normalization forms from file ~S" (file-namestring file-name))
(not verbose))
(let ((input-line (or (read stream nil) (done))))
(destructuring-bind (source nfc nfd nfkc nfkd)
input-line
(when verbose
(format t "~&~A: " source))
(remove-if #'null
(mapcar #'(lambda (name expected result)
(unless (equal expected result)
(format nil "~A~A should be ~A, got ~A"
name source expected result)))
'("NFC" "NFD" "NFKC" "NFKD")
(list nfc nfd nfkc nfkd)
(list (cl-unicode:normalization-form-c source)
(cl-unicode:normalization-form-d source)
(cl-unicode:normalization-form-k-c source)
(cl-unicode:normalization-form-k-d source)))))))))
(defun run-all-tests (&key verbose)
"Runs all tests for CL-UNICODE and returns a true value iff all
tests succeeded. VERBOSE is interpreted by the individual test suites
above."
(let ((successp t))
(macrolet ((run-test-suite (&body body)
`(unless (progn ,@body)
(setq successp nil))))
;; run the automatically generated tests for derived properties
(run-test-suite (property-tests :verbose verbose
:file-name (make-pathname :name "derived-properties"
:type nil :version nil
:defaults *this-file*)))
(run-test-suite (property-tests :verbose verbose))
(run-test-suite (simple-tests :verbose verbose))
(run-test-suite (normalization-tests :verbose verbose)))
(format t "~2&~:[Some tests failed~;All tests passed~]." successp)
successp))
| null | https://raw.githubusercontent.com/edicl/cl-unicode/2790a6b8912be1cb051437f463400b4a7198748a/test/tests.lisp | lisp | Syntax : COMMON - LISP ; Package : CL - UNICODE - TEST ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
run the automatically generated tests for derived properties | $ Header : /usr / local / cvsrep / cl - unicode / test / tests.lisp , v 1.18 2012 - 05 - 04 21:17:49 edi Exp $
Copyright ( c ) 2008 - 2012 , Dr. . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :cl-unicode-test)
(defvar *this-file* (load-time-value
(or #.*compile-file-pathname* *load-pathname*))
"The location of this source file. Needed to find the data files.")
(defmacro do-tests ((name &optional show-progress-p) &body body)
"Helper macro which repeatedly executes BODY until the code in body
calls the function DONE. It is assumed that each invocation of BODY
will be the execution of one test which returns NIL in case of success
and a list of strings describing errors otherwise.
The macro prints a simple progress indicator \(one dots for ten tests)
to *STANDARD-OUTPUT* unless SHOW-PROGRESS-P is NIL and returns a true
value iff all tests succeeded. Errors in BODY are caught and reported
\(and counted as failures)."
`(let ((successp t)
(testcount 1))
(block test-block
(flet ((done ()
(return-from test-block successp)))
(format t "~&Test: ~A~%" ,name)
(loop
(when (and ,show-progress-p (zerop (mod testcount 10)))
(format t ".")
(when (zerop (mod testcount 100))
(terpri))
(force-output))
(let ((errors
(handler-case
(progn ,@body)
(error (msg)
(list (format nil "~&got an unexpected error: ~A" msg))))))
(setq successp (and successp (null errors)))
(when errors
(format t "~&~4@A:~{~& ~A~}~%" testcount errors))
(incf testcount)))))
successp))
(defun simple-tests (&key (file-name
(make-pathname :name "simple"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them using EVAL. Reads all forms with the alternative character
syntax enabled. It is assumed that each FORM specifies a test which
returns a true value iff it succeeds. Prints each test form to
*STANDARD-OUTPUT* if VERBOSE is true and shows a simple progress
indicator otherwise. Returns a true value iff all tests succeeded."
(enable-alternative-character-syntax)
(unwind-protect
(with-open-file (stream file-name)
(let ((*package* (find-package :cl-unicode-test))
(*try-unicode1-names-p* t)
(*try-abbreviations-p* t)
(*scripts-to-try* '("Hebrew"))
(*try-hex-notation-p* t)
(*try-lisp-names-p* t))
(do-tests ((format nil "Simple tests from file ~S" (file-namestring file-name))
(not verbose))
(let ((form (or (read stream nil) (done))))
(when verbose
(format t "~&~S" form))
(cond ((eval form) nil)
(t (list (format nil "~S returned NIL" form))))))))
(disable-alternative-character-syntax)))
(defun property-tests (&key (file-name
(make-pathname :name "properties"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them as a test for a property. The forms must be lists \(C S B)
where C is a code point \(an integer), S is a string denoting the
property, and B is boolean denoting whether the character has the
property or not. Tests are performed using HAS-PROPERTY. Prints each
test to *STANDARD-OUTPUT* if VERBOSE is true and shows a simple
progress indicator otherwise. Returns a true value iff all tests
succeeded."
(with-open-file (stream file-name)
(do-tests ((format nil "Properties from file ~S" (file-namestring file-name))
(not verbose))
(let ((input-line (or (read stream nil) (done))))
(destructuring-bind (char-code property-name expected-result)
input-line
(when verbose
(format t "~&~A: #x~X" property-name char-code))
(let* ((char (and (< char-code char-code-limit) (code-char char-code)))
(result-1 (has-property char-code property-name))
(result-2 (and char (has-property char property-name)))
errors)
(unless (eq expected-result (not (not result-1)))
(push (format nil "code point #x~X should ~:[not ~;~]have property \"~A\""
char-code expected-result property-name)
errors))
(when char
(unless (eq expected-result (not (not result-2)))
(push (format nil "\(code-char #x~X) should ~:[not ~;~]have property \"~A\""
char-code expected-result property-name)
errors)))
errors))))))
(defun normalization-tests (&key (file-name
(make-pathname :name "normalization-forms"
:type nil :version nil
:defaults *this-file*))
verbose)
"Loops through all the forms in the file FILE-NAME and executes each
of them as a test for a property. The forms must be lists \(C S B)
where C is a code point \(an integer), S is a string denoting the
property, and B is boolean denoting whether the character has the
property or not. Tests are performed using HAS-PROPERTY. Prints each
test to *STANDARD-OUTPUT* if VERBOSE is true and shows a simple
progress indicator otherwise. Returns a true value iff all tests
succeeded."
(with-open-file (stream file-name)
(do-tests ((format nil "Normalization forms from file ~S" (file-namestring file-name))
(not verbose))
(let ((input-line (or (read stream nil) (done))))
(destructuring-bind (source nfc nfd nfkc nfkd)
input-line
(when verbose
(format t "~&~A: " source))
(remove-if #'null
(mapcar #'(lambda (name expected result)
(unless (equal expected result)
(format nil "~A~A should be ~A, got ~A"
name source expected result)))
'("NFC" "NFD" "NFKC" "NFKD")
(list nfc nfd nfkc nfkd)
(list (cl-unicode:normalization-form-c source)
(cl-unicode:normalization-form-d source)
(cl-unicode:normalization-form-k-c source)
(cl-unicode:normalization-form-k-d source)))))))))
(defun run-all-tests (&key verbose)
"Runs all tests for CL-UNICODE and returns a true value iff all
tests succeeded. VERBOSE is interpreted by the individual test suites
above."
(let ((successp t))
(macrolet ((run-test-suite (&body body)
`(unless (progn ,@body)
(setq successp nil))))
(run-test-suite (property-tests :verbose verbose
:file-name (make-pathname :name "derived-properties"
:type nil :version nil
:defaults *this-file*)))
(run-test-suite (property-tests :verbose verbose))
(run-test-suite (simple-tests :verbose verbose))
(run-test-suite (normalization-tests :verbose verbose)))
(format t "~2&~:[Some tests failed~;All tests passed~]." successp)
successp))
|
89b5eda4340407bb70763db4a37f56d28382331a451262861b61efb5b24d5482 | ludat/conferer | Conferer.hs | -- |
Copyright : ( c ) 2019
-- License: MPL-2.0
Maintainer : < >
-- Stability: stable
-- Portability: portable
--
-- Public and stable API for the most basic usage of this library
module Conferer
(
-- * How to use this doc
-- | This doc is mostly for reference, so you probably won't learn how to
-- use conferer by reading it. For more detailed and guided documentation
-- the best place is the webpage: <>
-- * Creating a Config
mkConfig
, mkConfig'
-- * Getting values from a config
-- | These functions allow you to get any type that implements 'FromConfig'
, fetch
, fetch'
, fetchKey
, fetchFromConfig
, safeFetchKey
, unsafeFetchKey
, DefaultConfig(..)
, FromConfig
-- * Some useful types
, Config
, Key
) where
import Data.Text (Text)
import Data.Typeable (Typeable)
import Conferer.Config.Internal
import Conferer.Config.Internal.Types
import Conferer.FromConfig.Internal
import Conferer.Key
import qualified Conferer.Source.Env as Env
import qualified Conferer.Source.CLIArgs as Cli
import qualified Conferer.Source.PropertiesFile as PropertiesFile
import Conferer.Config (Defaults)
import Conferer.FromConfig.Internal.Types
import Control.Exception
import System.Exit (exitFailure)
-- | Use the 'FromConfig' instance to get a value of type @a@ from the config
-- using some default fallback. The most common use for this is creating a custom
-- record and using this function to fetch it at initialization time.
--
-- This function throws only parsing exceptions when the values are present
-- but malformed somehow (@"abc"@ as an Int) but that depends on the 'FromConfig'
-- implementation for the type.
fetch :: forall a. (FromConfig a, Typeable a, DefaultConfig a) => Config -> IO a
fetch c = fetch' c configDef
-- | Same as 'fetch' but it accepts the default as a parameter instead of using
-- the default from 'configDef'
fetch' :: forall a. (FromConfig a, Typeable a) => Config -> a -> IO a
fetch' c a = do
asTopLevel $ fetchFromRootConfigWithDefault c a
| Given an IO action , it runs and if it throws a Conferer related exception
-- it pretty prints the error and exits the program with failure.
asTopLevel :: IO a -> IO a
asTopLevel action =
action
`catch` (\(e :: MissingRequiredKey) -> do
putStrLn $ displayException e
exitFailure)
`catch` (\(e :: ConfigParsingError) -> do
putStrLn $ displayException e
exitFailure)
-- | Same as 'fetch'' but you can specify a 'Key' instead of the root key which allows
-- you to fetch smaller values when you need them instead of a big one at
-- initialization time.
fetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> a -> IO a
fetchKey = fetchFromConfigWithDefault
-- | Same as 'fetchKey' but it returns a 'Nothing' when the value isn't present
safeFetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> IO (Maybe a)
safeFetchKey c k = fetchFromConfig k c
-- | Same as 'fetchKey' but it throws when the value isn't present.
unsafeFetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> IO a
unsafeFetchKey c k = fetchFromConfig k c
-- | Create a 'Config' which reads from command line arguments, env vars and
-- property files that depend on the environment (@config/development.properties@)
-- by default
mkConfig :: Text -> IO Config
mkConfig appName =
pure emptyConfig
>>= addSource (Cli.fromConfig)
>>= addSource (Env.fromConfig appName)
>>= addSource (PropertiesFile.fromConfig "config.file")
-- | Create a 'Config' with the given defaults and source creators.
-- The sources will take precedence by the order they have in the list (earlier in
the list means it 's tried first ) .
-- If the requested key is not found in any source it'll be looked up in the defaults.
mkConfig' :: Defaults -> [SourceCreator] -> IO Config
mkConfig' defaults sources = addSources sources . addDefaults defaults $ emptyConfig
| null | https://raw.githubusercontent.com/ludat/conferer/13af5727c3b34779e87a1bdb4b0f247581ce4bc4/packages/conferer/src/Conferer.hs | haskell | |
License: MPL-2.0
Stability: stable
Portability: portable
Public and stable API for the most basic usage of this library
* How to use this doc
| This doc is mostly for reference, so you probably won't learn how to
use conferer by reading it. For more detailed and guided documentation
the best place is the webpage: <>
* Creating a Config
* Getting values from a config
| These functions allow you to get any type that implements 'FromConfig'
* Some useful types
| Use the 'FromConfig' instance to get a value of type @a@ from the config
using some default fallback. The most common use for this is creating a custom
record and using this function to fetch it at initialization time.
This function throws only parsing exceptions when the values are present
but malformed somehow (@"abc"@ as an Int) but that depends on the 'FromConfig'
implementation for the type.
| Same as 'fetch' but it accepts the default as a parameter instead of using
the default from 'configDef'
it pretty prints the error and exits the program with failure.
| Same as 'fetch'' but you can specify a 'Key' instead of the root key which allows
you to fetch smaller values when you need them instead of a big one at
initialization time.
| Same as 'fetchKey' but it returns a 'Nothing' when the value isn't present
| Same as 'fetchKey' but it throws when the value isn't present.
| Create a 'Config' which reads from command line arguments, env vars and
property files that depend on the environment (@config/development.properties@)
by default
| Create a 'Config' with the given defaults and source creators.
The sources will take precedence by the order they have in the list (earlier in
If the requested key is not found in any source it'll be looked up in the defaults. | Copyright : ( c ) 2019
Maintainer : < >
module Conferer
(
mkConfig
, mkConfig'
, fetch
, fetch'
, fetchKey
, fetchFromConfig
, safeFetchKey
, unsafeFetchKey
, DefaultConfig(..)
, FromConfig
, Config
, Key
) where
import Data.Text (Text)
import Data.Typeable (Typeable)
import Conferer.Config.Internal
import Conferer.Config.Internal.Types
import Conferer.FromConfig.Internal
import Conferer.Key
import qualified Conferer.Source.Env as Env
import qualified Conferer.Source.CLIArgs as Cli
import qualified Conferer.Source.PropertiesFile as PropertiesFile
import Conferer.Config (Defaults)
import Conferer.FromConfig.Internal.Types
import Control.Exception
import System.Exit (exitFailure)
fetch :: forall a. (FromConfig a, Typeable a, DefaultConfig a) => Config -> IO a
fetch c = fetch' c configDef
fetch' :: forall a. (FromConfig a, Typeable a) => Config -> a -> IO a
fetch' c a = do
asTopLevel $ fetchFromRootConfigWithDefault c a
| Given an IO action , it runs and if it throws a Conferer related exception
asTopLevel :: IO a -> IO a
asTopLevel action =
action
`catch` (\(e :: MissingRequiredKey) -> do
putStrLn $ displayException e
exitFailure)
`catch` (\(e :: ConfigParsingError) -> do
putStrLn $ displayException e
exitFailure)
fetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> a -> IO a
fetchKey = fetchFromConfigWithDefault
safeFetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> IO (Maybe a)
safeFetchKey c k = fetchFromConfig k c
unsafeFetchKey :: forall a. (FromConfig a, Typeable a) => Config -> Key -> IO a
unsafeFetchKey c k = fetchFromConfig k c
mkConfig :: Text -> IO Config
mkConfig appName =
pure emptyConfig
>>= addSource (Cli.fromConfig)
>>= addSource (Env.fromConfig appName)
>>= addSource (PropertiesFile.fromConfig "config.file")
the list means it 's tried first ) .
mkConfig' :: Defaults -> [SourceCreator] -> IO Config
mkConfig' defaults sources = addSources sources . addDefaults defaults $ emptyConfig
|
505fef936b3b8b9450a6825836c73f74a75f5571e6dfaab50e37d5f38c04dd60 | aeolus-project/zephyrus | myocamlbuild.ml |
open Ocamlbuild_plugin;;
Options.use_ocamlfind := true ;;
let _ = dispatch begin function
| After_rules ->
Disable Warning 24 : bad source file name
flag ["ocaml"; "compile"] & S[A"-w"; A"-24"];
(* optimization to ocaml code *)
flag ["ocaml"; "compile"] & S[A"-ccopt"; A"-O9"];
flag ["ocaml"; "pkg_threads"; "compile"] (S[A "-thread"]);
flag ["ocaml"; "pkg_threads"; "link"] (S[A "-thread"]);
rule "atdgen: .atd -> _t.ml*, _j.ml*"
~prods:["%_t.ml";"%_t.mli";"%_j.ml";"%_j.mli";"%_v.ml";"%_v.mli";]
~dep:"%.atd"
(begin fun env build ->
let atdgen = "atdgen" in
Seq [
Cmd (S [A atdgen; A "-t"; P (env "%.atd")]);
Cmd (S [A atdgen; A "-j"; A "-j-std"; P (env "%.atd")]);
Cmd (S [A atdgen; A "-v"; P (env "%.atd")]);
]
end)
| _ -> ()
end
| null | https://raw.githubusercontent.com/aeolus-project/zephyrus/0b52de4038bbab724e6a9628430165a7f09f77ae/myocamlbuild.ml | ocaml | optimization to ocaml code |
open Ocamlbuild_plugin;;
Options.use_ocamlfind := true ;;
let _ = dispatch begin function
| After_rules ->
Disable Warning 24 : bad source file name
flag ["ocaml"; "compile"] & S[A"-w"; A"-24"];
flag ["ocaml"; "compile"] & S[A"-ccopt"; A"-O9"];
flag ["ocaml"; "pkg_threads"; "compile"] (S[A "-thread"]);
flag ["ocaml"; "pkg_threads"; "link"] (S[A "-thread"]);
rule "atdgen: .atd -> _t.ml*, _j.ml*"
~prods:["%_t.ml";"%_t.mli";"%_j.ml";"%_j.mli";"%_v.ml";"%_v.mli";]
~dep:"%.atd"
(begin fun env build ->
let atdgen = "atdgen" in
Seq [
Cmd (S [A atdgen; A "-t"; P (env "%.atd")]);
Cmd (S [A atdgen; A "-j"; A "-j-std"; P (env "%.atd")]);
Cmd (S [A atdgen; A "-v"; P (env "%.atd")]);
]
end)
| _ -> ()
end
|
0aee2758fa9decdaa9281c251581727d5dbe4d0a52050eb8b044536d60805543 | triclops200/quickapp | slime.lisp | (cl:load "quickapp.asd")
(ql:quickload "quickapp")
| null | https://raw.githubusercontent.com/triclops200/quickapp/20e004d90b971201cb7df3020a05ccebc38763eb/slime.lisp | lisp | (cl:load "quickapp.asd")
(ql:quickload "quickapp")
| |
fc803033a19d29f15b23f7c3d049cef176fd6cc0f91badc52c4322d3cf0bbba8 | haskellari/some | Newtype.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RoleAnnotations #
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Trustworthy #-}
#if __GLASGOW_HASKELL__ >= 810
# LANGUAGE StandaloneKindSignatures #
#endif
module Data.Some.Newtype (
Some(Some),
mkSome,
withSome,
withSomeM,
mapSome,
foldSome,
traverseSome,
) where
import Control.Applicative (Applicative (..))
import Control.DeepSeq (NFData (..))
import Data.Monoid (Monoid (..))
import Data.Semigroup (Semigroup (..))
import GHC.Exts (Any)
import Unsafe.Coerce (unsafeCoerce)
#if __GLASGOW_HASKELL__ >= 810
import Data.Kind (Type)
#endif
import Data.GADT.Compare
import Data.GADT.DeepSeq
import Data.GADT.Show
-- $setup
-- >>> :set -XKindSignatures -XGADTs
> > > import Data .
| Existential . This is type is useful to hide GADTs ' parameters .
--
> > > data Tag : : * - > * where TagInt : : Tag Int ; TagBool : : Tag Bool
> > > instance GShow Tag where gshowsPrec _ TagInt = showString " TagInt " ; gshowsPrec _ TagBool = showString " TagBool "
> > > classify s = case s of " TagInt " - > [ ] ; " TagBool " - > [ mkGReadResult TagBool ] ; _ - > [ ]
> > > instance where greadsPrec _ s = [ ( r , rest ) | ( con , rest ) < - lex s , r < - classify con ]
--
You can either use @PatternSynonyms@ ( available with GHC > = 8.0 )
--
> > > let x = Some TagInt
-- >>> x
Some TagInt
--
> > > case x of { Some TagInt - > " I " ; Some TagBool - > " B " } : : String
-- "I"
--
-- or you can use functions
--
-- >>> let y = mkSome TagBool
-- >>> y
Some TagBool
--
-- >>> withSome y $ \y' -> case y' of { TagInt -> "I"; TagBool -> "B" } :: String
-- "B"
--
-- The implementation of 'mapSome' is /safe/.
--
> > > let f : : Tag a - > Tag a ; f TagInt = TagInt ; f TagBool = TagBool
-- >>> mapSome f y
Some TagBool
--
-- but you can also use:
--
-- >>> withSome y (mkSome . f)
Some TagBool
--
> > > read " Some TagBool " : : Some Tag
Some TagBool
--
> > > read " mkSome TagInt " : : Some Tag
Some TagInt
--
#if __GLASGOW_HASKELL__ >= 810
type Some :: (k -> Type) -> Type
#endif
newtype Some tag = UnsafeSome (tag Any)
type role Some representational
{-# COMPLETE Some #-}
pattern Some :: tag a -> Some tag
pattern Some x <- UnsafeSome x
where Some x = UnsafeSome ((unsafeCoerce :: tag a -> tag Any) x)
-- | Constructor.
mkSome :: tag a -> Some tag
mkSome = \x -> UnsafeSome (unsafeCoerce x)
-- | Eliminator.
withSome :: Some tag -> (forall a. tag a -> b) -> b
withSome (UnsafeSome thing) some = some (unsafeCoerce thing)
| Monadic ' withSome ' .
--
@since 1.0.1
withSomeM :: Monad m => m (Some tag) -> (forall a. tag a -> m r) -> m r
withSomeM m k = m >>= \s -> withSome s k
-- | @'flip' 'withSome'@
foldSome :: (forall a. tag a -> b) -> Some tag -> b
foldSome some (UnsafeSome thing) = some (unsafeCoerce thing)
-- | Map over argument.
mapSome :: (forall t. f t -> g t) -> Some f -> Some g
mapSome f (UnsafeSome x) = UnsafeSome (unsafeCoerce f x)
-- | Traverse over argument.
traverseSome :: Functor m => (forall a. f a -> m (g a)) -> Some f -> m (Some g)
traverseSome f x = withSome x $ \x' -> fmap mkSome (f x')
instance GShow tag => Show (Some tag) where
showsPrec p some = withSome some $ \thing -> showParen (p > 10)
( showString "Some "
. gshowsPrec 11 thing
)
instance GRead f => Read (Some f) where
readsPrec p = readParen (p>10) $ \s ->
[ (getGReadResult withTag mkSome, rest')
| (con, rest) <- lex s
, con == "Some" || con == "mkSome"
, (withTag, rest') <- greadsPrec 11 rest
]
instance GEq tag => Eq (Some tag) where
x == y =
withSome x $ \x' ->
withSome y $ \y' -> defaultEq x' y'
instance GCompare tag => Ord (Some tag) where
compare x y =
withSome x $ \x' ->
withSome y $ \y' -> defaultCompare x' y'
instance GNFData tag => NFData (Some tag) where
rnf x = withSome x grnf
instance Control.Applicative.Applicative m => Data.Semigroup.Semigroup (Some m) where
m <> n =
withSome m $ \m' ->
withSome n $ \n' ->
mkSome (m' *> n')
instance Applicative m => Data.Monoid.Monoid (Some m) where
mempty = mkSome (pure ())
mappend = (<>)
| null | https://raw.githubusercontent.com/haskellari/some/00e42322da777cba81a1afdbb701fe8bbe263f58/src/Data/Some/Newtype.hs | haskell | # LANGUAGE CPP #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
# LANGUAGE PolyKinds #
# LANGUAGE Trustworthy #
$setup
>>> :set -XKindSignatures -XGADTs
>>> x
"I"
or you can use functions
>>> let y = mkSome TagBool
>>> y
>>> withSome y $ \y' -> case y' of { TagInt -> "I"; TagBool -> "B" } :: String
"B"
The implementation of 'mapSome' is /safe/.
>>> mapSome f y
but you can also use:
>>> withSome y (mkSome . f)
# COMPLETE Some #
| Constructor.
| Eliminator.
| @'flip' 'withSome'@
| Map over argument.
| Traverse over argument. | # LANGUAGE PatternSynonyms #
# LANGUAGE RoleAnnotations #
#if __GLASGOW_HASKELL__ >= 810
# LANGUAGE StandaloneKindSignatures #
#endif
module Data.Some.Newtype (
Some(Some),
mkSome,
withSome,
withSomeM,
mapSome,
foldSome,
traverseSome,
) where
import Control.Applicative (Applicative (..))
import Control.DeepSeq (NFData (..))
import Data.Monoid (Monoid (..))
import Data.Semigroup (Semigroup (..))
import GHC.Exts (Any)
import Unsafe.Coerce (unsafeCoerce)
#if __GLASGOW_HASKELL__ >= 810
import Data.Kind (Type)
#endif
import Data.GADT.Compare
import Data.GADT.DeepSeq
import Data.GADT.Show
> > > import Data .
| Existential . This is type is useful to hide GADTs ' parameters .
> > > data Tag : : * - > * where TagInt : : Tag Int ; TagBool : : Tag Bool
> > > instance GShow Tag where gshowsPrec _ TagInt = showString " TagInt " ; gshowsPrec _ TagBool = showString " TagBool "
> > > classify s = case s of " TagInt " - > [ ] ; " TagBool " - > [ mkGReadResult TagBool ] ; _ - > [ ]
> > > instance where greadsPrec _ s = [ ( r , rest ) | ( con , rest ) < - lex s , r < - classify con ]
You can either use @PatternSynonyms@ ( available with GHC > = 8.0 )
> > > let x = Some TagInt
Some TagInt
> > > case x of { Some TagInt - > " I " ; Some TagBool - > " B " } : : String
Some TagBool
> > > let f : : Tag a - > Tag a ; f TagInt = TagInt ; f TagBool = TagBool
Some TagBool
Some TagBool
> > > read " Some TagBool " : : Some Tag
Some TagBool
> > > read " mkSome TagInt " : : Some Tag
Some TagInt
#if __GLASGOW_HASKELL__ >= 810
type Some :: (k -> Type) -> Type
#endif
newtype Some tag = UnsafeSome (tag Any)
type role Some representational
pattern Some :: tag a -> Some tag
pattern Some x <- UnsafeSome x
where Some x = UnsafeSome ((unsafeCoerce :: tag a -> tag Any) x)
mkSome :: tag a -> Some tag
mkSome = \x -> UnsafeSome (unsafeCoerce x)
withSome :: Some tag -> (forall a. tag a -> b) -> b
withSome (UnsafeSome thing) some = some (unsafeCoerce thing)
| Monadic ' withSome ' .
@since 1.0.1
withSomeM :: Monad m => m (Some tag) -> (forall a. tag a -> m r) -> m r
withSomeM m k = m >>= \s -> withSome s k
foldSome :: (forall a. tag a -> b) -> Some tag -> b
foldSome some (UnsafeSome thing) = some (unsafeCoerce thing)
mapSome :: (forall t. f t -> g t) -> Some f -> Some g
mapSome f (UnsafeSome x) = UnsafeSome (unsafeCoerce f x)
traverseSome :: Functor m => (forall a. f a -> m (g a)) -> Some f -> m (Some g)
traverseSome f x = withSome x $ \x' -> fmap mkSome (f x')
instance GShow tag => Show (Some tag) where
showsPrec p some = withSome some $ \thing -> showParen (p > 10)
( showString "Some "
. gshowsPrec 11 thing
)
instance GRead f => Read (Some f) where
readsPrec p = readParen (p>10) $ \s ->
[ (getGReadResult withTag mkSome, rest')
| (con, rest) <- lex s
, con == "Some" || con == "mkSome"
, (withTag, rest') <- greadsPrec 11 rest
]
instance GEq tag => Eq (Some tag) where
x == y =
withSome x $ \x' ->
withSome y $ \y' -> defaultEq x' y'
instance GCompare tag => Ord (Some tag) where
compare x y =
withSome x $ \x' ->
withSome y $ \y' -> defaultCompare x' y'
instance GNFData tag => NFData (Some tag) where
rnf x = withSome x grnf
instance Control.Applicative.Applicative m => Data.Semigroup.Semigroup (Some m) where
m <> n =
withSome m $ \m' ->
withSome n $ \n' ->
mkSome (m' *> n')
instance Applicative m => Data.Monoid.Monoid (Some m) where
mempty = mkSome (pure ())
mappend = (<>)
|
b0addfe1edc0e942f4dc040dd4498f566355b2242e7dbb1212236677ae5a3246 | clojurians-org/haskell-example | CopyObject.hs | --
MinIO Haskell SDK , ( C ) 2017 MinIO , Inc.
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
module Network.Minio.CopyObject where
import qualified Data.List as List
import Lib.Prelude
import Network.Minio.Data
import Network.Minio.Errors
import Network.Minio.S3API
import Network.Minio.Utils
-- | Copy an object using single or multipart copy strategy.
copyObjectInternal :: Bucket -> Object -> SourceInfo
-> Minio ETag
copyObjectInternal b' o srcInfo = do
let sBucket = srcBucket srcInfo
sObject = srcObject srcInfo
-- get source object size with a head request
oi <- headObject sBucket sObject []
let srcSize = oiSize oi
-- check that byte offsets are valid if specified in cps
let rangeMay = srcRange srcInfo
range = maybe (0, srcSize) identity rangeMay
startOffset = fst range
endOffset = snd range
when (isJust rangeMay &&
or [startOffset < 0, endOffset < startOffset,
endOffset >= fromIntegral srcSize]) $
throwIO $ MErrVInvalidSrcObjByteRange range
1 . If sz > 64MiB ( minPartSize ) use multipart copy , OR
2 . If startOffset /= 0 use multipart copy
let destSize = (\(a, b) -> b - a + 1 ) $
maybe (0, srcSize - 1) identity rangeMay
if destSize > minPartSize || (endOffset - startOffset + 1 /= srcSize)
then multiPartCopyObject b' o srcInfo srcSize
else fst <$> copyObjectSingle b' o srcInfo{srcRange = Nothing} []
-- | Given the input byte range of the source object, compute the
-- splits for a multipart copy object procedure. Minimum part size
-- used is minPartSize.
selectCopyRanges :: (Int64, Int64) -> [(PartNumber, (Int64, Int64))]
selectCopyRanges (st, end) = zip pns $
map (\(x, y) -> (st + x, st + x + y - 1)) $ zip startOffsets partSizes
where
size = end - st + 1
(pns, startOffsets, partSizes) = List.unzip3 $ selectPartSizes size
-- | Perform a multipart copy object action. Since we cannot verify
-- existing parts based on the source object, there is no resuming
-- copy action support.
multiPartCopyObject :: Bucket -> Object -> SourceInfo -> Int64
-> Minio ETag
multiPartCopyObject b o cps srcSize = do
uid <- newMultipartUpload b o []
let byteRange = maybe (0, fromIntegral $ srcSize - 1) identity $ srcRange cps
partRanges = selectCopyRanges byteRange
partSources = map (\(x, (start, end)) -> (x, cps {srcRange = Just (start, end) }))
partRanges
dstInfo = defaultDestinationInfo { dstBucket = b, dstObject = o}
copiedParts <- limitedMapConcurrently 10
(\(pn, cps') -> do
(etag, _) <- copyObjectPart dstInfo cps' uid pn []
return (pn, etag)
)
partSources
completeMultipartUpload b o uid copiedParts
| null | https://raw.githubusercontent.com/clojurians-org/haskell-example/c96b021bdef52a121e04ea203c8c3e458770a25a/minio-migration/src/Network/Minio/CopyObject.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| Copy an object using single or multipart copy strategy.
get source object size with a head request
check that byte offsets are valid if specified in cps
| Given the input byte range of the source object, compute the
splits for a multipart copy object procedure. Minimum part size
used is minPartSize.
| Perform a multipart copy object action. Since we cannot verify
existing parts based on the source object, there is no resuming
copy action support. | MinIO Haskell SDK , ( C ) 2017 MinIO , Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module Network.Minio.CopyObject where
import qualified Data.List as List
import Lib.Prelude
import Network.Minio.Data
import Network.Minio.Errors
import Network.Minio.S3API
import Network.Minio.Utils
copyObjectInternal :: Bucket -> Object -> SourceInfo
-> Minio ETag
copyObjectInternal b' o srcInfo = do
let sBucket = srcBucket srcInfo
sObject = srcObject srcInfo
oi <- headObject sBucket sObject []
let srcSize = oiSize oi
let rangeMay = srcRange srcInfo
range = maybe (0, srcSize) identity rangeMay
startOffset = fst range
endOffset = snd range
when (isJust rangeMay &&
or [startOffset < 0, endOffset < startOffset,
endOffset >= fromIntegral srcSize]) $
throwIO $ MErrVInvalidSrcObjByteRange range
1 . If sz > 64MiB ( minPartSize ) use multipart copy , OR
2 . If startOffset /= 0 use multipart copy
let destSize = (\(a, b) -> b - a + 1 ) $
maybe (0, srcSize - 1) identity rangeMay
if destSize > minPartSize || (endOffset - startOffset + 1 /= srcSize)
then multiPartCopyObject b' o srcInfo srcSize
else fst <$> copyObjectSingle b' o srcInfo{srcRange = Nothing} []
selectCopyRanges :: (Int64, Int64) -> [(PartNumber, (Int64, Int64))]
selectCopyRanges (st, end) = zip pns $
map (\(x, y) -> (st + x, st + x + y - 1)) $ zip startOffsets partSizes
where
size = end - st + 1
(pns, startOffsets, partSizes) = List.unzip3 $ selectPartSizes size
multiPartCopyObject :: Bucket -> Object -> SourceInfo -> Int64
-> Minio ETag
multiPartCopyObject b o cps srcSize = do
uid <- newMultipartUpload b o []
let byteRange = maybe (0, fromIntegral $ srcSize - 1) identity $ srcRange cps
partRanges = selectCopyRanges byteRange
partSources = map (\(x, (start, end)) -> (x, cps {srcRange = Just (start, end) }))
partRanges
dstInfo = defaultDestinationInfo { dstBucket = b, dstObject = o}
copiedParts <- limitedMapConcurrently 10
(\(pn, cps') -> do
(etag, _) <- copyObjectPart dstInfo cps' uid pn []
return (pn, etag)
)
partSources
completeMultipartUpload b o uid copiedParts
|
d71a4e4d69539d923a90c97c0a505992902ca5101f5c66549f97a5b7381c8b00 | marcoheisig/Typo | predicates.lisp | (in-package #:typo.vm)
(defmacro define-predicate-fnrecord (predicate type-specifier)
`(define-fnrecord ,predicate (object)
(:properties :foldable :movable)
(:specializer
(let ((ntype (wrapper-ntype object)))
(ntype-subtypecase ntype
((not ,type-specifier) (wrap nil))
(,type-specifier (wrap-default (true-ntype)))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))))
(define-predicate-fnrecord arrayp array)
(define-predicate-fnrecord bit-vector-p bit-vector)
(define-predicate-fnrecord characterp character)
(define-predicate-fnrecord compiled-function-p compiled-function)
(define-predicate-fnrecord complexp complex)
(define-predicate-fnrecord consp cons)
(define-predicate-fnrecord floatp float)
(define-predicate-fnrecord functionp function)
(define-predicate-fnrecord hash-table-p hash-table)
(define-predicate-fnrecord integerp integer)
(define-predicate-fnrecord keywordp keyword)
(define-predicate-fnrecord listp list)
(define-predicate-fnrecord numberp number)
(define-predicate-fnrecord packagep package)
(define-predicate-fnrecord random-state-p random-state)
(define-predicate-fnrecord rationalp rational)
(define-predicate-fnrecord realp real)
(define-predicate-fnrecord streamp stream)
The remaining rules can not be handled by DEFINE - PREDICATE - FNRECORD ,
;;; because the domain of these functions is limited to certain numbers.
(define-fnrecord minusp (real)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype real)
((not real) (abort-specialization))
((real * (0)) (wrap-default (true-ntype)))
((real 0 *) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord plusp (real)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype real)
((not real) (abort-specialization))
((real (0) *) (wrap-default (true-ntype)))
((real * 0) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord zerop (number)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype number)
((not real) (abort-specialization))
(zero (wrap-default (true-ntype)))
((not zero) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord evenp (integer)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype integer)
((not real) (abort-specialization))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord oddp (integer)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype integer)
((not real) (abort-specialization))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
| null | https://raw.githubusercontent.com/marcoheisig/Typo/303e21c38b1773f7d6f87eeb7f03617c286c4a44/code/vm/predicates.lisp | lisp | because the domain of these functions is limited to certain numbers. | (in-package #:typo.vm)
(defmacro define-predicate-fnrecord (predicate type-specifier)
`(define-fnrecord ,predicate (object)
(:properties :foldable :movable)
(:specializer
(let ((ntype (wrapper-ntype object)))
(ntype-subtypecase ntype
((not ,type-specifier) (wrap nil))
(,type-specifier (wrap-default (true-ntype)))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))))
(define-predicate-fnrecord arrayp array)
(define-predicate-fnrecord bit-vector-p bit-vector)
(define-predicate-fnrecord characterp character)
(define-predicate-fnrecord compiled-function-p compiled-function)
(define-predicate-fnrecord complexp complex)
(define-predicate-fnrecord consp cons)
(define-predicate-fnrecord floatp float)
(define-predicate-fnrecord functionp function)
(define-predicate-fnrecord hash-table-p hash-table)
(define-predicate-fnrecord integerp integer)
(define-predicate-fnrecord keywordp keyword)
(define-predicate-fnrecord listp list)
(define-predicate-fnrecord numberp number)
(define-predicate-fnrecord packagep package)
(define-predicate-fnrecord random-state-p random-state)
(define-predicate-fnrecord rationalp rational)
(define-predicate-fnrecord realp real)
(define-predicate-fnrecord streamp stream)
The remaining rules can not be handled by DEFINE - PREDICATE - FNRECORD ,
(define-fnrecord minusp (real)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype real)
((not real) (abort-specialization))
((real * (0)) (wrap-default (true-ntype)))
((real 0 *) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord plusp (real)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype real)
((not real) (abort-specialization))
((real (0) *) (wrap-default (true-ntype)))
((real * 0) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord zerop (number)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype number)
((not real) (abort-specialization))
(zero (wrap-default (true-ntype)))
((not zero) (wrap nil))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord evenp (integer)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype integer)
((not real) (abort-specialization))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
(define-fnrecord oddp (integer)
(:properties :foldable :movable)
(:specializer
(ntype-subtypecase (wrapper-ntype integer)
((not real) (abort-specialization))
(t (wrap-default (type-specifier-ntype 'generalized-boolean))))))
|
f3b107061afdf968fc200df5c0ba30368c4b71a57662f7148e75699f5c9345de | garrigue/lablgtk | searchpos.mli | (**************************************************************************)
(* Lablgtk - Applications *)
(* *)
(* * You are free to do anything you want with this code as long *)
(* as it is for personal use. *)
(* *)
(* * Redistribution can only be "as is". Binary distribution *)
(* and bug fixes are allowed, but you cannot extensively *)
(* modify the code without asking the authors. *)
(* *)
(* The authors may choose to remove any of the above *)
(* restrictions on a per request basis. *)
(* *)
(* Authors: *)
< >
< >
< >
< >
< >
< >
(* *)
(**************************************************************************)
$ Id$
val top_widgets : GWindow.window list ref
type module_widgets =
{ mw_frame: GPack.box;
mw_title: GMisc.label option;
mw_buttons: GPack.box; }
val add_shown_module : Path.t -> widgets:module_widgets -> unit
val find_shown_module : Path.t -> module_widgets
val is_shown_module : Path.t -> bool
val default_frame : module_widgets option ref
val set_path : (Path.t -> sign:Types.signature -> unit) ref
val view_defined_ref : (Longident.t -> env:Env.t -> unit) ref
val editor_ref :
(?file:string -> ?pos:int -> ?opendialog:bool -> unit -> unit) ref
val view_signature :
?title:string ->
?path:Path.t -> ?env:Env.t -> ?detach:bool -> Types.signature -> unit
val view_signature_item :
Types.signature -> path:Path.t -> env:Env.t -> unit
val view_module_id : Longident.t -> env:Env.t -> unit
val view_type_id : Longident.t -> env:Env.t -> unit
val view_class_id : Longident.t -> env:Env.t -> unit
val view_cltype_id : Longident.t -> env:Env.t -> unit
val view_modtype_id : Longident.t -> env:Env.t -> unit
val view_type_decl : Path.t -> env:Env.t -> unit
type skind = [`Type|`Class|`Module|`Modtype]
val search_pos_signature :
Parsetree.signature -> pos:int -> env:Env.t ->
((skind * Longident.t) * Env.t * Location.t) list
val view_decl : Longident.t -> kind:skind -> env:Env.t -> unit
val view_decl_menu :
Longident.t -> kind:skind -> env:Env.t -> GMenu.menu
type fkind = [
`Exp of
[`Expr|`Pat|`Const|`Val of Path.t|`Var of Path.t|`New of Path.t]
* Types.type_expr
| `Class of Path.t * Types.class_type
| `Module of Path.t * Types.module_type
]
val search_pos_structure :
pos:int -> Typedtree.structure_item list ->
(fkind * Env.t * Location.t) list
val view_type : fkind -> env:Env.t -> unit
val view_type_menu : fkind -> env:Env.t -> GMenu.menu
val parent_path : Path.t -> Path.t option
val string_of_path : Path.t -> string
val string_of_longident : Longident.t -> string
val lines_to_chars : int -> text:string -> int
| null | https://raw.githubusercontent.com/garrigue/lablgtk/504fac1257e900e6044c638025a4d6c5a321284c/applications/browser/searchpos.mli | ocaml | ************************************************************************
Lablgtk - Applications
* You are free to do anything you want with this code as long
as it is for personal use.
* Redistribution can only be "as is". Binary distribution
and bug fixes are allowed, but you cannot extensively
modify the code without asking the authors.
The authors may choose to remove any of the above
restrictions on a per request basis.
Authors:
************************************************************************ | < >
< >
< >
< >
< >
< >
$ Id$
val top_widgets : GWindow.window list ref
type module_widgets =
{ mw_frame: GPack.box;
mw_title: GMisc.label option;
mw_buttons: GPack.box; }
val add_shown_module : Path.t -> widgets:module_widgets -> unit
val find_shown_module : Path.t -> module_widgets
val is_shown_module : Path.t -> bool
val default_frame : module_widgets option ref
val set_path : (Path.t -> sign:Types.signature -> unit) ref
val view_defined_ref : (Longident.t -> env:Env.t -> unit) ref
val editor_ref :
(?file:string -> ?pos:int -> ?opendialog:bool -> unit -> unit) ref
val view_signature :
?title:string ->
?path:Path.t -> ?env:Env.t -> ?detach:bool -> Types.signature -> unit
val view_signature_item :
Types.signature -> path:Path.t -> env:Env.t -> unit
val view_module_id : Longident.t -> env:Env.t -> unit
val view_type_id : Longident.t -> env:Env.t -> unit
val view_class_id : Longident.t -> env:Env.t -> unit
val view_cltype_id : Longident.t -> env:Env.t -> unit
val view_modtype_id : Longident.t -> env:Env.t -> unit
val view_type_decl : Path.t -> env:Env.t -> unit
type skind = [`Type|`Class|`Module|`Modtype]
val search_pos_signature :
Parsetree.signature -> pos:int -> env:Env.t ->
((skind * Longident.t) * Env.t * Location.t) list
val view_decl : Longident.t -> kind:skind -> env:Env.t -> unit
val view_decl_menu :
Longident.t -> kind:skind -> env:Env.t -> GMenu.menu
type fkind = [
`Exp of
[`Expr|`Pat|`Const|`Val of Path.t|`Var of Path.t|`New of Path.t]
* Types.type_expr
| `Class of Path.t * Types.class_type
| `Module of Path.t * Types.module_type
]
val search_pos_structure :
pos:int -> Typedtree.structure_item list ->
(fkind * Env.t * Location.t) list
val view_type : fkind -> env:Env.t -> unit
val view_type_menu : fkind -> env:Env.t -> GMenu.menu
val parent_path : Path.t -> Path.t option
val string_of_path : Path.t -> string
val string_of_longident : Longident.t -> string
val lines_to_chars : int -> text:string -> int
|
9313da63f1186634a93fa692f271a597e992ec29fcf7ebd243f37523a6028cb6 | masateruk/micro-caml | mybool.ml | type b = True | False
let () =
let x = True in
match x with
| True -> print_int 1
| False -> print_int 0
| null | https://raw.githubusercontent.com/masateruk/micro-caml/0c0bd066b87cf54ce33709355c422993a85a86a1/test/mybool.ml | ocaml | type b = True | False
let () =
let x = True in
match x with
| True -> print_int 1
| False -> print_int 0
| |
a11e0bbf1fd0f6696ce16c7e91892b6ba59d00466ad457c8509c27f0024f2cfb | kadena-io/chainweb-node | ModuleCacheOnRestart.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Chainweb.Test.Pact.ModuleCacheOnRestart (tests) where
import Control.Concurrent.MVar.Strict
import Control.DeepSeq (NFData)
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import Data.List (intercalate)
import qualified Data.Text.IO as T
import GHC.Generics
import Test.Tasty.HUnit
import Test.Tasty
import System.LogLevel
-- pact imports
import Pact.Types.Runtime (mdModule)
import Pact.Types.Term
chainweb imports
import Chainweb.BlockHeader
import Chainweb.BlockHeader.Genesis
import Chainweb.ChainId
import Chainweb.Logger
import Chainweb.Miner.Pact
import Chainweb.Pact.Backend.Types
import Chainweb.Pact.PactService
import Chainweb.Pact.Types
import Chainweb.Payload
import Chainweb.Payload.PayloadStore
import Chainweb.Time
import Chainweb.Test.Cut
import Chainweb.Test.Cut.TestBlockDb
import Chainweb.Test.Utils
import Chainweb.Test.Pact.Utils
import Chainweb.Utils (T2(..))
import Chainweb.Version
import Chainweb.WebBlockHeaderDB
import Chainweb.Storage.Table.RocksDB
testVer :: ChainwebVersion
testVer = FastTimedCPM singleton
testChainId :: ChainId
testChainId = unsafeChainId 0
type RewindPoint = (BlockHeader, PayloadWithOutputs)
data RewindData = RewindData
{ afterV4 :: RewindPoint
, beforeV4 :: RewindPoint
, v3Cache :: HM.HashMap ModuleName (Maybe ModuleHash)
} deriving Generic
instance NFData RewindData
tests :: RocksDb -> ScheduledTest
tests rdb =
ScheduledTest label $
withMVarResource mempty $ \iom ->
withEmptyMVarResource $ \rewindDataM ->
withTestBlockDbTest testVer rdb $ \bdbio ->
withTempSQLiteResource $ \ioSqlEnv ->
testGroup label
[ testCase "testInitial" $ withPact' bdbio ioSqlEnv iom testInitial
, after AllSucceed "testInitial" $
testCase "testRestart1" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart1" $
-- wow, Tasty thinks there's a "loop" if the following test is called "testCoinbase"!!
testCase "testDoUpgrades" $ withPact' bdbio ioSqlEnv iom (testCoinbase bdbio)
, after AllSucceed "testDoUpgrades" $
testCase "testRestart2" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart2" $
testCase "testV3" $ withPact' bdbio ioSqlEnv iom (testV3 bdbio rewindDataM)
, after AllSucceed "testV3" $
testCase "testRestart3"$ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart3" $
testCase "testV4" $ withPact' bdbio ioSqlEnv iom (testV4 bdbio rewindDataM)
, after AllSucceed "testV4" $
testCase "testRestart4" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart4" $
testCase "testRewindAfterFork" $ withPact' bdbio ioSqlEnv iom (testRewindAfterFork bdbio rewindDataM)
, after AllSucceed "testRewindAfterFork" $
testCase "testRewindBeforeFork" $ withPact' bdbio ioSqlEnv iom (testRewindBeforeFork bdbio rewindDataM)
, after AllSucceed "testRewindBeforeFork" $
testCase "testCw217CoinOnly" $ withPact' bdbio ioSqlEnv iom $
testCw217CoinOnly bdbio rewindDataM
, after AllSucceed "testCw217CoinOnly" $
testCase "testRestartCw217" $
withPact' bdbio ioSqlEnv iom testRestart
]
where
label = "Chainweb.Test.Pact.ModuleCacheOnRestart"
type CacheTest tbl =
(PactServiceM tbl ()
,IO (MVar ModuleInitCache) -> ModuleInitCache -> Assertion)
-- | Do genesis load, snapshot cache.
testInitial :: CanReadablePayloadCas tbl => CacheTest tbl
testInitial = (initPayloadState,snapshotCache)
-- | Do restart load, test results of 'initialPayloadState' against snapshotted cache.
testRestart :: CanReadablePayloadCas tbl => CacheTest tbl
testRestart = (initPayloadState,checkLoadedCache)
where
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
(justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes initCache)
-- | Run coinbase to do upgrade to v2, snapshot cache.
testCoinbase :: CanReadablePayloadCas tbl => IO TestBlockDb -> CacheTest tbl
testCoinbase iobdb = (initPayloadState >> doCoinbase,snapshotCache)
where
doCoinbase = do
bdb <- liftIO $ iobdb
pwo <- execNewBlock mempty (ParentHeader genblock) noMiner
liftIO $ addTestBlockDb bdb (Nonce 0) (offsetBlockTime second) testChainId pwo
nextH <- liftIO $ getParentTestBlockDb bdb testChainId
void $ execValidateBlock mempty nextH (payloadWithOutputsToPayloadData pwo)
testV3 :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testV3 iobdb rewindM = (go,grabAndSnapshotCache)
where
go = do
initPayloadState
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
hpwo <- doNextCoinbase iobdb
liftIO (rewindM >>= \rewind -> putMVar rewind $ RewindData hpwo hpwo mempty)
grabAndSnapshotCache ioa initCache = do
rewindM >>= \rewind -> modifyMVar_ rewind $ \old -> pure $ old { v3Cache = justModuleHashes initCache }
snapshotCache ioa initCache
testV4 :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testV4 iobdb rewindM = (go,snapshotCache)
where
go = do
initPayloadState
-- at the upgrade/fork point
void $ doNextCoinbase iobdb
-- just after the upgrade/fork point
afterV4' <- doNextCoinbase iobdb
rewind <- liftIO rewindM
liftIO $ modifyMVar_ rewind $ \old -> pure $ old { afterV4 = afterV4' }
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
testRewindAfterFork :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testRewindAfterFork iobdb rewindM = (go, checkLoadedCache)
where
go = do
initPayloadState
liftIO rewindM >>= liftIO . readMVar >>= rewindToBlock . afterV4
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
case M.lookup 6 initCache of
Nothing -> assertFailure "Cache not found at height 6"
Just c -> (justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes' c)
testRewindBeforeFork :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testRewindBeforeFork iobdb rewindM = (go, checkLoadedCache)
where
go = do
initPayloadState
liftIO rewindM >>= liftIO . readMVar >>= rewindToBlock . beforeV4
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
case (M.lookup 5 initCache, M.lookup 4 initCache) of
(Just c, Just d) -> do
(justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes' c)
v3c <- rewindM >>= \rewind -> fmap v3Cache (readMVar rewind)
assertNoCacheMismatch v3c (justModuleHashes' d)
_ -> assertFailure "Failed to lookup either block 4 or 5."
testCw217CoinOnly
:: CanReadablePayloadCas cas
=> IO TestBlockDb
-> IO (MVar RewindData)
-> CacheTest cas
testCw217CoinOnly iobdb _rewindM = (go, go')
where
go = do
initPayloadState
void $ doNextCoinbaseN_ 9 iobdb
go' ioa initCache = do
snapshotCache ioa initCache
case M.lookup 20 initCache of
Just a -> assertEqual "module init cache contains only coin" ["coin"] $ HM.keys a
Nothing -> assertFailure "failed to lookup block at 20"
assertNoCacheMismatch
:: HM.HashMap ModuleName (Maybe ModuleHash)
-> HM.HashMap ModuleName (Maybe ModuleHash)
-> Assertion
assertNoCacheMismatch c1 c2 = assertBool msg $ c1 == c2
where
showCache = intercalate "\n" . map show . HM.toList
msg = mconcat
[
"Module cache mismatch, found: \n"
, showCache c1
, "\n expected: \n"
, showCache c2
]
rewindToBlock :: CanReadablePayloadCas tbl => RewindPoint -> PactServiceM tbl ()
rewindToBlock (rewindHeader, pwo) = void $ execValidateBlock mempty rewindHeader (payloadWithOutputsToPayloadData pwo)
doNextCoinbase :: CanReadablePayloadCas tbl => IO TestBlockDb -> PactServiceM tbl (BlockHeader, PayloadWithOutputs)
doNextCoinbase iobdb = do
bdb <- liftIO iobdb
prevH <- liftIO $ getParentTestBlockDb bdb testChainId
pwo <- execNewBlock mempty (ParentHeader prevH) noMiner
liftIO $ addTestBlockDb bdb (Nonce 0) (offsetBlockTime second) testChainId pwo
nextH <- liftIO $ getParentTestBlockDb bdb testChainId
valPWO <- execValidateBlock mempty nextH (payloadWithOutputsToPayloadData pwo)
return (nextH, valPWO)
doNextCoinbaseN_
:: CanReadablePayloadCas cas
=> Int
-> IO TestBlockDb
-> PactServiceM cas (BlockHeader, PayloadWithOutputs)
doNextCoinbaseN_ n iobdb = fmap last $ forM [1..n] $ \_ ->
doNextCoinbase iobdb
-- | Interfaces can't be upgraded, but modules can, so verify hash in that case.
justModuleHashes :: ModuleInitCache -> HM.HashMap ModuleName (Maybe ModuleHash)
justModuleHashes = justModuleHashes' . snd . last . M.toList
justModuleHashes' :: ModuleCache -> HM.HashMap ModuleName (Maybe ModuleHash)
justModuleHashes' = HM.map $ \v -> preview (_1 . mdModule . _MDModule . mHash) v
genblock :: BlockHeader
genblock = genesisBlockHeader testVer testChainId
initPayloadState :: CanReadablePayloadCas tbl => PactServiceM tbl ()
initPayloadState = initialPayloadState dummyLogger mempty testVer testChainId
snapshotCache :: IO (MVar ModuleInitCache) -> ModuleInitCache -> IO ()
snapshotCache iomcache initCache = do
mcache <- iomcache
modifyMVar_ mcache (const (pure initCache))
withPact'
:: IO TestBlockDb
-> IO SQLiteEnv
-> IO (MVar ModuleInitCache)
-> CacheTest RocksDbTable
-> Assertion
withPact' bdbio ioSqlEnv r (ps, cacheTest) = do
bdb <- bdbio
bhdb <- getWebBlockHeaderDb (_bdbWebBlockHeaderDb bdb) testChainId
let pdb = _bdbPayloadDb bdb
sqlEnv <- ioSqlEnv
T2 _ pstate <- runPactService'
testVer testChainId logger bhdb pdb sqlEnv defaultPactServiceConfig ps
cacheTest r (_psInitCache pstate)
where
logger = genericLogger Quiet T.putStrLn
| null | https://raw.githubusercontent.com/kadena-io/chainweb-node/aff594a05096341d01ae50a9f37056b2519025d2/test/Chainweb/Test/Pact/ModuleCacheOnRestart.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
pact imports
wow, Tasty thinks there's a "loop" if the following test is called "testCoinbase"!!
| Do genesis load, snapshot cache.
| Do restart load, test results of 'initialPayloadState' against snapshotted cache.
| Run coinbase to do upgrade to v2, snapshot cache.
at the upgrade/fork point
just after the upgrade/fork point
| Interfaces can't be upgraded, but modules can, so verify hash in that case. | # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Chainweb.Test.Pact.ModuleCacheOnRestart (tests) where
import Control.Concurrent.MVar.Strict
import Control.DeepSeq (NFData)
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import Data.List (intercalate)
import qualified Data.Text.IO as T
import GHC.Generics
import Test.Tasty.HUnit
import Test.Tasty
import System.LogLevel
import Pact.Types.Runtime (mdModule)
import Pact.Types.Term
chainweb imports
import Chainweb.BlockHeader
import Chainweb.BlockHeader.Genesis
import Chainweb.ChainId
import Chainweb.Logger
import Chainweb.Miner.Pact
import Chainweb.Pact.Backend.Types
import Chainweb.Pact.PactService
import Chainweb.Pact.Types
import Chainweb.Payload
import Chainweb.Payload.PayloadStore
import Chainweb.Time
import Chainweb.Test.Cut
import Chainweb.Test.Cut.TestBlockDb
import Chainweb.Test.Utils
import Chainweb.Test.Pact.Utils
import Chainweb.Utils (T2(..))
import Chainweb.Version
import Chainweb.WebBlockHeaderDB
import Chainweb.Storage.Table.RocksDB
testVer :: ChainwebVersion
testVer = FastTimedCPM singleton
testChainId :: ChainId
testChainId = unsafeChainId 0
type RewindPoint = (BlockHeader, PayloadWithOutputs)
data RewindData = RewindData
{ afterV4 :: RewindPoint
, beforeV4 :: RewindPoint
, v3Cache :: HM.HashMap ModuleName (Maybe ModuleHash)
} deriving Generic
instance NFData RewindData
tests :: RocksDb -> ScheduledTest
tests rdb =
ScheduledTest label $
withMVarResource mempty $ \iom ->
withEmptyMVarResource $ \rewindDataM ->
withTestBlockDbTest testVer rdb $ \bdbio ->
withTempSQLiteResource $ \ioSqlEnv ->
testGroup label
[ testCase "testInitial" $ withPact' bdbio ioSqlEnv iom testInitial
, after AllSucceed "testInitial" $
testCase "testRestart1" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart1" $
testCase "testDoUpgrades" $ withPact' bdbio ioSqlEnv iom (testCoinbase bdbio)
, after AllSucceed "testDoUpgrades" $
testCase "testRestart2" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart2" $
testCase "testV3" $ withPact' bdbio ioSqlEnv iom (testV3 bdbio rewindDataM)
, after AllSucceed "testV3" $
testCase "testRestart3"$ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart3" $
testCase "testV4" $ withPact' bdbio ioSqlEnv iom (testV4 bdbio rewindDataM)
, after AllSucceed "testV4" $
testCase "testRestart4" $ withPact' bdbio ioSqlEnv iom testRestart
, after AllSucceed "testRestart4" $
testCase "testRewindAfterFork" $ withPact' bdbio ioSqlEnv iom (testRewindAfterFork bdbio rewindDataM)
, after AllSucceed "testRewindAfterFork" $
testCase "testRewindBeforeFork" $ withPact' bdbio ioSqlEnv iom (testRewindBeforeFork bdbio rewindDataM)
, after AllSucceed "testRewindBeforeFork" $
testCase "testCw217CoinOnly" $ withPact' bdbio ioSqlEnv iom $
testCw217CoinOnly bdbio rewindDataM
, after AllSucceed "testCw217CoinOnly" $
testCase "testRestartCw217" $
withPact' bdbio ioSqlEnv iom testRestart
]
where
label = "Chainweb.Test.Pact.ModuleCacheOnRestart"
type CacheTest tbl =
(PactServiceM tbl ()
,IO (MVar ModuleInitCache) -> ModuleInitCache -> Assertion)
testInitial :: CanReadablePayloadCas tbl => CacheTest tbl
testInitial = (initPayloadState,snapshotCache)
testRestart :: CanReadablePayloadCas tbl => CacheTest tbl
testRestart = (initPayloadState,checkLoadedCache)
where
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
(justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes initCache)
testCoinbase :: CanReadablePayloadCas tbl => IO TestBlockDb -> CacheTest tbl
testCoinbase iobdb = (initPayloadState >> doCoinbase,snapshotCache)
where
doCoinbase = do
bdb <- liftIO $ iobdb
pwo <- execNewBlock mempty (ParentHeader genblock) noMiner
liftIO $ addTestBlockDb bdb (Nonce 0) (offsetBlockTime second) testChainId pwo
nextH <- liftIO $ getParentTestBlockDb bdb testChainId
void $ execValidateBlock mempty nextH (payloadWithOutputsToPayloadData pwo)
testV3 :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testV3 iobdb rewindM = (go,grabAndSnapshotCache)
where
go = do
initPayloadState
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
hpwo <- doNextCoinbase iobdb
liftIO (rewindM >>= \rewind -> putMVar rewind $ RewindData hpwo hpwo mempty)
grabAndSnapshotCache ioa initCache = do
rewindM >>= \rewind -> modifyMVar_ rewind $ \old -> pure $ old { v3Cache = justModuleHashes initCache }
snapshotCache ioa initCache
testV4 :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testV4 iobdb rewindM = (go,snapshotCache)
where
go = do
initPayloadState
void $ doNextCoinbase iobdb
afterV4' <- doNextCoinbase iobdb
rewind <- liftIO rewindM
liftIO $ modifyMVar_ rewind $ \old -> pure $ old { afterV4 = afterV4' }
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
testRewindAfterFork :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testRewindAfterFork iobdb rewindM = (go, checkLoadedCache)
where
go = do
initPayloadState
liftIO rewindM >>= liftIO . readMVar >>= rewindToBlock . afterV4
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
case M.lookup 6 initCache of
Nothing -> assertFailure "Cache not found at height 6"
Just c -> (justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes' c)
testRewindBeforeFork :: CanReadablePayloadCas tbl => IO TestBlockDb -> IO (MVar RewindData) -> CacheTest tbl
testRewindBeforeFork iobdb rewindM = (go, checkLoadedCache)
where
go = do
initPayloadState
liftIO rewindM >>= liftIO . readMVar >>= rewindToBlock . beforeV4
void $ doNextCoinbase iobdb
void $ doNextCoinbase iobdb
checkLoadedCache ioa initCache = do
a <- ioa >>= readMVar
case (M.lookup 5 initCache, M.lookup 4 initCache) of
(Just c, Just d) -> do
(justModuleHashes a) `assertNoCacheMismatch` (justModuleHashes' c)
v3c <- rewindM >>= \rewind -> fmap v3Cache (readMVar rewind)
assertNoCacheMismatch v3c (justModuleHashes' d)
_ -> assertFailure "Failed to lookup either block 4 or 5."
testCw217CoinOnly
:: CanReadablePayloadCas cas
=> IO TestBlockDb
-> IO (MVar RewindData)
-> CacheTest cas
testCw217CoinOnly iobdb _rewindM = (go, go')
where
go = do
initPayloadState
void $ doNextCoinbaseN_ 9 iobdb
go' ioa initCache = do
snapshotCache ioa initCache
case M.lookup 20 initCache of
Just a -> assertEqual "module init cache contains only coin" ["coin"] $ HM.keys a
Nothing -> assertFailure "failed to lookup block at 20"
assertNoCacheMismatch
:: HM.HashMap ModuleName (Maybe ModuleHash)
-> HM.HashMap ModuleName (Maybe ModuleHash)
-> Assertion
assertNoCacheMismatch c1 c2 = assertBool msg $ c1 == c2
where
showCache = intercalate "\n" . map show . HM.toList
msg = mconcat
[
"Module cache mismatch, found: \n"
, showCache c1
, "\n expected: \n"
, showCache c2
]
rewindToBlock :: CanReadablePayloadCas tbl => RewindPoint -> PactServiceM tbl ()
rewindToBlock (rewindHeader, pwo) = void $ execValidateBlock mempty rewindHeader (payloadWithOutputsToPayloadData pwo)
doNextCoinbase :: CanReadablePayloadCas tbl => IO TestBlockDb -> PactServiceM tbl (BlockHeader, PayloadWithOutputs)
doNextCoinbase iobdb = do
bdb <- liftIO iobdb
prevH <- liftIO $ getParentTestBlockDb bdb testChainId
pwo <- execNewBlock mempty (ParentHeader prevH) noMiner
liftIO $ addTestBlockDb bdb (Nonce 0) (offsetBlockTime second) testChainId pwo
nextH <- liftIO $ getParentTestBlockDb bdb testChainId
valPWO <- execValidateBlock mempty nextH (payloadWithOutputsToPayloadData pwo)
return (nextH, valPWO)
doNextCoinbaseN_
:: CanReadablePayloadCas cas
=> Int
-> IO TestBlockDb
-> PactServiceM cas (BlockHeader, PayloadWithOutputs)
doNextCoinbaseN_ n iobdb = fmap last $ forM [1..n] $ \_ ->
doNextCoinbase iobdb
justModuleHashes :: ModuleInitCache -> HM.HashMap ModuleName (Maybe ModuleHash)
justModuleHashes = justModuleHashes' . snd . last . M.toList
justModuleHashes' :: ModuleCache -> HM.HashMap ModuleName (Maybe ModuleHash)
justModuleHashes' = HM.map $ \v -> preview (_1 . mdModule . _MDModule . mHash) v
genblock :: BlockHeader
genblock = genesisBlockHeader testVer testChainId
initPayloadState :: CanReadablePayloadCas tbl => PactServiceM tbl ()
initPayloadState = initialPayloadState dummyLogger mempty testVer testChainId
snapshotCache :: IO (MVar ModuleInitCache) -> ModuleInitCache -> IO ()
snapshotCache iomcache initCache = do
mcache <- iomcache
modifyMVar_ mcache (const (pure initCache))
withPact'
:: IO TestBlockDb
-> IO SQLiteEnv
-> IO (MVar ModuleInitCache)
-> CacheTest RocksDbTable
-> Assertion
withPact' bdbio ioSqlEnv r (ps, cacheTest) = do
bdb <- bdbio
bhdb <- getWebBlockHeaderDb (_bdbWebBlockHeaderDb bdb) testChainId
let pdb = _bdbPayloadDb bdb
sqlEnv <- ioSqlEnv
T2 _ pstate <- runPactService'
testVer testChainId logger bhdb pdb sqlEnv defaultPactServiceConfig ps
cacheTest r (_psInitCache pstate)
where
logger = genericLogger Quiet T.putStrLn
|
321dd423d6454a6675aae9bf28511bb2cc046c9c020c2db333e55654e99af691 | facebook/infer | livenessTests.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module TestInterpreter =
AnalyzerTester.MakeBackwardExceptional
(Liveness.PreAnalysisTransferFunctions (ProcCfg.Backward (ProcCfg.Exceptional)))
let tests =
let open OUnit2 in
let open AnalyzerTester.StructuredSil in
let assert_empty = invariant "normal:{ }" in
let fun_ptr_typ = Typ.mk (Tptr (Typ.mk Tfun, Pk_pointer)) in
let closure_exp captured_pvars =
let mk_captured_var str =
(Exp.Var (ident_of_str str), pvar_of_str str, dummy_typ, CapturedVar.ByReference)
in
let captured_vars = List.map ~f:mk_captured_var captured_pvars in
let closure = {Exp.name= dummy_procname; captured_vars} in
Exp.Closure closure
in
let unknown_cond =
do n't want to use AnalyzerTest.unknown_exp because we 'll treat it as a live var !
Exp.zero
in
let test_list =
[ ("basic_live", [invariant "normal:{ b }"; id_assign_var "a" "b"])
; ( "basic_live_then_dead"
, [assert_empty; var_assign_int "b" 1; invariant "normal:{ b }"; id_assign_var "a" "b"] )
; ( "iterative_live"
, [ invariant "normal:{ b, f, d }"
; id_assign_var "e" "f"
; invariant "normal:{ b, d }"
; id_assign_var "c" "d"
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "live_kill_live"
, [ invariant "normal:{ b }"
; id_assign_var "c" "b"
; assert_empty
; var_assign_int "b" 1
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ("basic_live_load", [invariant "normal:{ y$0 }"; id_assign_id "x" "y"])
; ( "basic_live_then_kill_load"
, [ invariant "normal:{ z$0 }"
; id_assign_id "y" "z"
; invariant "normal:{ y$0 }"
; id_assign_id "x" "y" ] )
; ( "set_id"
, (* this is *x = y, which is a read of both x and y *)
[invariant "normal:{ x$0, y$0 }"; id_set_id "x" "y"] )
; ( "if_exp_live"
, [assert_empty; var_assign_int "x" 1; invariant "normal:{ x }"; If (var_of_str "x", [], [])]
)
; ( "while_exp_live"
, [assert_empty; var_assign_int "x" 1; invariant "normal:{ x }"; While (var_of_str "x", [])]
)
; ("call_params_live", [invariant "normal:{ b, a, c }"; call_unknown ["a"; "b"; "c"]])
; ( "dead_after_call_with_retval"
, [ assert_empty
; call_unknown ~return:("y", Typ.mk (Tint IInt)) []
; invariant "normal:{ y$0 }"
; id_assign_id "x" "y" ] )
; ( "closure_captured_live"
, [ invariant "normal:{ b$0, c$0 }"
; var_assign_exp ~rhs_typ:fun_ptr_typ "a" (closure_exp ["b"; "c"]) ] )
; ( "if_conservative_live1"
, [invariant "normal:{ b }"; If (unknown_cond, [id_assign_var "a" "b"], [])] )
; ( "if_conservative_live2"
, [ invariant "normal:{ b, d }"
; If (unknown_cond, [id_assign_var "a" "b"], [id_assign_var "c" "d"]) ] )
; ( "if_conservative_kill"
, [ invariant "normal:{ b }"
; If (unknown_cond, [var_assign_int "b" 1], [])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "if_conservative_kill_live"
, [ invariant "normal:{ b, d }"
; If (unknown_cond, [var_assign_int "b" 1], [id_assign_var "c" "d"])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "if_precise1"
, [ assert_empty
; If
( unknown_cond
, [var_assign_int "b" 1; invariant "normal:{ b }"; id_assign_var "a" "b"]
, [var_assign_int "d" 1; invariant "normal:{ d }"; id_assign_var "c" "d"] ) ] )
; ( "if_precise2"
, [ assert_empty
; If (unknown_cond, [var_assign_int "b" 2], [var_assign_int "b" 1])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ("loop_as_if1", [invariant "normal:{ b }"; While (unknown_cond, [id_assign_var "a" "b"])])
; ( "loop_as_if2"
, [ invariant "normal:{ b }"
; While (unknown_cond, [var_assign_int "b" 1])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "loop_before_after"
, [ invariant "normal:{ b, d }"
; While (unknown_cond, [id_assign_var "b" "d"])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "java_exceptions"
, [ invariant "normal:{ b, a, c }"
; Try
( Java
, [ id_assign_var "x" "c"
; invariant "normal:{ b, a }"
; id_assign_var "a" "b"
; invariant "normal:{ }" ]
, [invariant "normal:{ a }"; id_assign_var "x" "a"]
, [] ) ] )
; ( "java_exceptions_empty_try"
, [ invariant "normal:{ a }"
; Try
(Java, [], [invariant "normal:{ b, a }"; id_assign_var "x" "b"], [id_assign_var "x" "a"])
] )
; ( "c_exceptions"
, [ invariant "normal:{ b, c }"
; Try
( Cpp {try_id= 0}
, [ id_assign_var "x" "c"
(* a should be live here but the C++ exception system is not in synch yet with the
new abstract interpreter framework for exceptional edges *)
; invariant "normal:{ b }"
; id_assign_var "a" "b"
; invariant "normal:{ }" ]
, [invariant "normal:{ a }"; id_assign_var "x" "a"]
, [] ) ] )
; ( "c_exceptions_empty_try"
, [ invariant "normal:{ a }"
; Try
( Cpp {try_id= 0}
, []
, [invariant "normal:{ b, a }"; id_assign_var "x" "b"]
, [id_assign_var "x" "a"] ) ] ) ]
|> TestInterpreter.create_tests
(fun {proc_name} -> Procdesc.load_exn proc_name)
~initial:Liveness.ExtendedDomain.bottom
in
"liveness_test_suite" >::: test_list
| null | https://raw.githubusercontent.com/facebook/infer/28e867254f6dc8c2a26d749575b472ca0ae27a0f/infer/src/unit/livenessTests.ml | ocaml | this is *x = y, which is a read of both x and y
a should be live here but the C++ exception system is not in synch yet with the
new abstract interpreter framework for exceptional edges |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module TestInterpreter =
AnalyzerTester.MakeBackwardExceptional
(Liveness.PreAnalysisTransferFunctions (ProcCfg.Backward (ProcCfg.Exceptional)))
let tests =
let open OUnit2 in
let open AnalyzerTester.StructuredSil in
let assert_empty = invariant "normal:{ }" in
let fun_ptr_typ = Typ.mk (Tptr (Typ.mk Tfun, Pk_pointer)) in
let closure_exp captured_pvars =
let mk_captured_var str =
(Exp.Var (ident_of_str str), pvar_of_str str, dummy_typ, CapturedVar.ByReference)
in
let captured_vars = List.map ~f:mk_captured_var captured_pvars in
let closure = {Exp.name= dummy_procname; captured_vars} in
Exp.Closure closure
in
let unknown_cond =
do n't want to use AnalyzerTest.unknown_exp because we 'll treat it as a live var !
Exp.zero
in
let test_list =
[ ("basic_live", [invariant "normal:{ b }"; id_assign_var "a" "b"])
; ( "basic_live_then_dead"
, [assert_empty; var_assign_int "b" 1; invariant "normal:{ b }"; id_assign_var "a" "b"] )
; ( "iterative_live"
, [ invariant "normal:{ b, f, d }"
; id_assign_var "e" "f"
; invariant "normal:{ b, d }"
; id_assign_var "c" "d"
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "live_kill_live"
, [ invariant "normal:{ b }"
; id_assign_var "c" "b"
; assert_empty
; var_assign_int "b" 1
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ("basic_live_load", [invariant "normal:{ y$0 }"; id_assign_id "x" "y"])
; ( "basic_live_then_kill_load"
, [ invariant "normal:{ z$0 }"
; id_assign_id "y" "z"
; invariant "normal:{ y$0 }"
; id_assign_id "x" "y" ] )
; ( "set_id"
[invariant "normal:{ x$0, y$0 }"; id_set_id "x" "y"] )
; ( "if_exp_live"
, [assert_empty; var_assign_int "x" 1; invariant "normal:{ x }"; If (var_of_str "x", [], [])]
)
; ( "while_exp_live"
, [assert_empty; var_assign_int "x" 1; invariant "normal:{ x }"; While (var_of_str "x", [])]
)
; ("call_params_live", [invariant "normal:{ b, a, c }"; call_unknown ["a"; "b"; "c"]])
; ( "dead_after_call_with_retval"
, [ assert_empty
; call_unknown ~return:("y", Typ.mk (Tint IInt)) []
; invariant "normal:{ y$0 }"
; id_assign_id "x" "y" ] )
; ( "closure_captured_live"
, [ invariant "normal:{ b$0, c$0 }"
; var_assign_exp ~rhs_typ:fun_ptr_typ "a" (closure_exp ["b"; "c"]) ] )
; ( "if_conservative_live1"
, [invariant "normal:{ b }"; If (unknown_cond, [id_assign_var "a" "b"], [])] )
; ( "if_conservative_live2"
, [ invariant "normal:{ b, d }"
; If (unknown_cond, [id_assign_var "a" "b"], [id_assign_var "c" "d"]) ] )
; ( "if_conservative_kill"
, [ invariant "normal:{ b }"
; If (unknown_cond, [var_assign_int "b" 1], [])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "if_conservative_kill_live"
, [ invariant "normal:{ b, d }"
; If (unknown_cond, [var_assign_int "b" 1], [id_assign_var "c" "d"])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "if_precise1"
, [ assert_empty
; If
( unknown_cond
, [var_assign_int "b" 1; invariant "normal:{ b }"; id_assign_var "a" "b"]
, [var_assign_int "d" 1; invariant "normal:{ d }"; id_assign_var "c" "d"] ) ] )
; ( "if_precise2"
, [ assert_empty
; If (unknown_cond, [var_assign_int "b" 2], [var_assign_int "b" 1])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ("loop_as_if1", [invariant "normal:{ b }"; While (unknown_cond, [id_assign_var "a" "b"])])
; ( "loop_as_if2"
, [ invariant "normal:{ b }"
; While (unknown_cond, [var_assign_int "b" 1])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "loop_before_after"
, [ invariant "normal:{ b, d }"
; While (unknown_cond, [id_assign_var "b" "d"])
; invariant "normal:{ b }"
; id_assign_var "a" "b" ] )
; ( "java_exceptions"
, [ invariant "normal:{ b, a, c }"
; Try
( Java
, [ id_assign_var "x" "c"
; invariant "normal:{ b, a }"
; id_assign_var "a" "b"
; invariant "normal:{ }" ]
, [invariant "normal:{ a }"; id_assign_var "x" "a"]
, [] ) ] )
; ( "java_exceptions_empty_try"
, [ invariant "normal:{ a }"
; Try
(Java, [], [invariant "normal:{ b, a }"; id_assign_var "x" "b"], [id_assign_var "x" "a"])
] )
; ( "c_exceptions"
, [ invariant "normal:{ b, c }"
; Try
( Cpp {try_id= 0}
, [ id_assign_var "x" "c"
; invariant "normal:{ b }"
; id_assign_var "a" "b"
; invariant "normal:{ }" ]
, [invariant "normal:{ a }"; id_assign_var "x" "a"]
, [] ) ] )
; ( "c_exceptions_empty_try"
, [ invariant "normal:{ a }"
; Try
( Cpp {try_id= 0}
, []
, [invariant "normal:{ b, a }"; id_assign_var "x" "b"]
, [id_assign_var "x" "a"] ) ] ) ]
|> TestInterpreter.create_tests
(fun {proc_name} -> Procdesc.load_exn proc_name)
~initial:Liveness.ExtendedDomain.bottom
in
"liveness_test_suite" >::: test_list
|
a780a4cf4b4a4abe6e99fac8d5aca31a13e19cd5fbec6691dd95e697841c1b08 | moby/vpnkit | mux.ml | open Lwt.Infix
let src =
let src = Logs.Src.create "mux" ~doc:"Mirage TCP/IP <-> socket proxy" in
Logs.Src.set_level src (Some Logs.Info);
src
module Log = (val Logs.src_log src : Logs.LOG)
module DontCareAboutStats = struct
let get_stats_counters _ = Mirage_net.Stats.create ()
let reset_stats_counters _ = ()
end
module ObviouslyCommon = struct
type error = [Mirage_net.Net.error | `Unknown of string]
let pp_error ppf = function
| #Mirage_net.Net.error as e -> Mirage_net.Net.pp_error ppf e
| `Unknown s -> Fmt.pf ppf "unknown: %s" s
end
module Make (Netif: Mirage_net.S) = struct
include DontCareAboutStats
include ObviouslyCommon
type rule = Ipaddr.V4.t
module RuleMap = Map.Make(Ipaddr.V4)
type callback = Cstruct.t -> unit Lwt.t
type port = {
callback: callback;
mutable last_active_time: float;
}
type t = {
netif: Netif.t;
mutable rules: port RuleMap.t;
mutable default_callback: callback;
}
let lift_error: ('a, Netif.error) result -> ('a, error) result = function
| Ok x -> Ok x
| Error (#Mirage_net.Net.error as e) -> Error e
| Error e -> Fmt.kstr (fun s -> Error (`Unknown s)) "%a" Netif.pp_error e
let filesystem t =
let xs =
RuleMap.fold
(fun ip t acc ->
Fmt.str "%a last_active_time = %.1f" Ipaddr.V4.pp ip
t.last_active_time
:: acc
) t.rules []
in
Vfs.File.ro_of_string (String.concat "\n" xs)
let remove t rule =
Log.debug (fun f ->
f "removing switch port for %s" (Ipaddr.V4.to_string rule));
t.rules <- RuleMap.remove rule t.rules
let callback t buf =
(* Does the packet match any of our rules? *)
let open Frame in
match parse [ buf ] with
| Ok (Ethernet { payload = Ipv4 { dst; _ }; _ }) ->
if RuleMap.mem dst t.rules then begin
let port = RuleMap.find dst t.rules in
port.last_active_time <- Unix.gettimeofday ();
port.callback buf
end else begin
Log.debug (fun f ->
f "using default callback for packet for %a" Ipaddr.V4.pp dst);
t.default_callback buf
end
| _ ->
Log.debug (fun f -> f "using default callback for non-IPv4 frame");
t.default_callback buf
let connect netif =
let rules = RuleMap.empty in
let default_callback = fun _ -> Lwt.return_unit in
let t = { netif; rules; default_callback } in
Lwt.async
(fun () ->
Netif.listen netif ~header_size:Ethernet.Packet.sizeof_ethernet @@ callback t >>= function
| Ok () -> Lwt.return_unit
| Error _e ->
Log.err (fun f -> f "Mux.connect calling Netif.listen: failed");
Lwt.return_unit
);
Lwt.return (Ok t)
let write t ~size fill = Netif.write t.netif ~size fill >|= lift_error
let listen t ~header_size:_ callback = t.default_callback <- callback; Lwt.return (Ok ())
let disconnect t = Netif.disconnect t.netif
let mac t = Netif.mac t.netif
let mtu t = Netif.mtu t.netif
module Port = struct
include DontCareAboutStats
include ObviouslyCommon
type _t = {
switch: t;
netif: Netif.t;
rule: rule;
}
let write t ~size fill = Netif.write t.netif ~size fill >|= lift_error
let listen t ~header_size:_ callback =
Log.debug (fun f ->
f "activating switch port for %s" (Ipaddr.V4.to_string t.rule));
let last_active_time = Unix.gettimeofday () in
let port = { callback; last_active_time } in
t.switch.rules <- RuleMap.add t.rule port t.switch.rules;
Lwt.return (Ok ())
let disconnect t =
Log.debug (fun f ->
f "deactivating switch port for %s" (Ipaddr.V4.to_string t.rule));
t.switch.rules <- RuleMap.remove t.rule t.switch.rules;
Lwt.return_unit
let mac t = Netif.mac t.netif
let mtu t = Netif.mtu t.netif
type t = _t
end
let port t rule = { Port.switch = t; netif = t.netif; rule }
end
| null | https://raw.githubusercontent.com/moby/vpnkit/6dda85cda59e36875fcc6324205aaf0c7056ff0a/src/hostnet/mux.ml | ocaml | Does the packet match any of our rules? | open Lwt.Infix
let src =
let src = Logs.Src.create "mux" ~doc:"Mirage TCP/IP <-> socket proxy" in
Logs.Src.set_level src (Some Logs.Info);
src
module Log = (val Logs.src_log src : Logs.LOG)
module DontCareAboutStats = struct
let get_stats_counters _ = Mirage_net.Stats.create ()
let reset_stats_counters _ = ()
end
module ObviouslyCommon = struct
type error = [Mirage_net.Net.error | `Unknown of string]
let pp_error ppf = function
| #Mirage_net.Net.error as e -> Mirage_net.Net.pp_error ppf e
| `Unknown s -> Fmt.pf ppf "unknown: %s" s
end
module Make (Netif: Mirage_net.S) = struct
include DontCareAboutStats
include ObviouslyCommon
type rule = Ipaddr.V4.t
module RuleMap = Map.Make(Ipaddr.V4)
type callback = Cstruct.t -> unit Lwt.t
type port = {
callback: callback;
mutable last_active_time: float;
}
type t = {
netif: Netif.t;
mutable rules: port RuleMap.t;
mutable default_callback: callback;
}
let lift_error: ('a, Netif.error) result -> ('a, error) result = function
| Ok x -> Ok x
| Error (#Mirage_net.Net.error as e) -> Error e
| Error e -> Fmt.kstr (fun s -> Error (`Unknown s)) "%a" Netif.pp_error e
let filesystem t =
let xs =
RuleMap.fold
(fun ip t acc ->
Fmt.str "%a last_active_time = %.1f" Ipaddr.V4.pp ip
t.last_active_time
:: acc
) t.rules []
in
Vfs.File.ro_of_string (String.concat "\n" xs)
let remove t rule =
Log.debug (fun f ->
f "removing switch port for %s" (Ipaddr.V4.to_string rule));
t.rules <- RuleMap.remove rule t.rules
let callback t buf =
let open Frame in
match parse [ buf ] with
| Ok (Ethernet { payload = Ipv4 { dst; _ }; _ }) ->
if RuleMap.mem dst t.rules then begin
let port = RuleMap.find dst t.rules in
port.last_active_time <- Unix.gettimeofday ();
port.callback buf
end else begin
Log.debug (fun f ->
f "using default callback for packet for %a" Ipaddr.V4.pp dst);
t.default_callback buf
end
| _ ->
Log.debug (fun f -> f "using default callback for non-IPv4 frame");
t.default_callback buf
let connect netif =
let rules = RuleMap.empty in
let default_callback = fun _ -> Lwt.return_unit in
let t = { netif; rules; default_callback } in
Lwt.async
(fun () ->
Netif.listen netif ~header_size:Ethernet.Packet.sizeof_ethernet @@ callback t >>= function
| Ok () -> Lwt.return_unit
| Error _e ->
Log.err (fun f -> f "Mux.connect calling Netif.listen: failed");
Lwt.return_unit
);
Lwt.return (Ok t)
let write t ~size fill = Netif.write t.netif ~size fill >|= lift_error
let listen t ~header_size:_ callback = t.default_callback <- callback; Lwt.return (Ok ())
let disconnect t = Netif.disconnect t.netif
let mac t = Netif.mac t.netif
let mtu t = Netif.mtu t.netif
module Port = struct
include DontCareAboutStats
include ObviouslyCommon
type _t = {
switch: t;
netif: Netif.t;
rule: rule;
}
let write t ~size fill = Netif.write t.netif ~size fill >|= lift_error
let listen t ~header_size:_ callback =
Log.debug (fun f ->
f "activating switch port for %s" (Ipaddr.V4.to_string t.rule));
let last_active_time = Unix.gettimeofday () in
let port = { callback; last_active_time } in
t.switch.rules <- RuleMap.add t.rule port t.switch.rules;
Lwt.return (Ok ())
let disconnect t =
Log.debug (fun f ->
f "deactivating switch port for %s" (Ipaddr.V4.to_string t.rule));
t.switch.rules <- RuleMap.remove t.rule t.switch.rules;
Lwt.return_unit
let mac t = Netif.mac t.netif
let mtu t = Netif.mtu t.netif
type t = _t
end
let port t rule = { Port.switch = t; netif = t.netif; rule }
end
|
f3b33877f1b4cc2ac79f2bff5252a362af9c1a06d0c91626e0c972606dfe49be | composewell/unicode-data | Main.hs | module Main where
import Test.Hspec
import qualified Unicode.Char.General.NamesSpec as Names
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Unicode.Char.General.Names" Names.spec
| null | https://raw.githubusercontent.com/composewell/unicode-data/3ba60c1a254e37b019c36feda9d1e82a889aec19/unicode-data-names/test/Main.hs | haskell | module Main where
import Test.Hspec
import qualified Unicode.Char.General.NamesSpec as Names
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Unicode.Char.General.Names" Names.spec
| |
13910927848a953c6f2fd0214a584f3e67ffda68afa765926458de7a1b1e1a55 | MailOnline/s-metric | bulk.clj | (ns s-metric.bulk
(:require [s-metric.protocols :as p]
[clojure.set :as s]
[s-metric.levenshtein]
[clojure.core.reducers :as r])
(:import [s_metric.levenshtein LevenshteinDistance]))
(defn reducef [n]
(fn [topn score]
(let [top (conj topn score)
lowest (first top)]
(if (> (count top) n)
(s/difference top #{lowest})
top))))
(defn combinef [n]
(fn [top1 top2]
(let [top (s/union top1 top2)
lowest (take n top)]
(s/difference top (into (sorted-set) lowest)))))
(defn top-scores
"Applies match-% to a collection of targets returning the top <= n highest scores as
[score xs] pairs in an ordered set."
([s xs]
(top-scores s xs 20))
([s xs n]
(top-scores s xs n (LevenshteinDistance.)))
([s xs n metrics]
(let [xs (->> xs
(into [])
(r/map #(-> [(p/match-% metrics s %) %])))]
(r/fold (r/monoid (combinef n) sorted-set) (reducef n) xs))))
(defn mispelled [s xs]
"Just return the highest of the top-scores"
(last (top-scores s xs)))
| null | https://raw.githubusercontent.com/MailOnline/s-metric/991c84f3cc1c8c3ac55981f7eb1788385927fba0/src/s_metric/bulk.clj | clojure | (ns s-metric.bulk
(:require [s-metric.protocols :as p]
[clojure.set :as s]
[s-metric.levenshtein]
[clojure.core.reducers :as r])
(:import [s_metric.levenshtein LevenshteinDistance]))
(defn reducef [n]
(fn [topn score]
(let [top (conj topn score)
lowest (first top)]
(if (> (count top) n)
(s/difference top #{lowest})
top))))
(defn combinef [n]
(fn [top1 top2]
(let [top (s/union top1 top2)
lowest (take n top)]
(s/difference top (into (sorted-set) lowest)))))
(defn top-scores
"Applies match-% to a collection of targets returning the top <= n highest scores as
[score xs] pairs in an ordered set."
([s xs]
(top-scores s xs 20))
([s xs n]
(top-scores s xs n (LevenshteinDistance.)))
([s xs n metrics]
(let [xs (->> xs
(into [])
(r/map #(-> [(p/match-% metrics s %) %])))]
(r/fold (r/monoid (combinef n) sorted-set) (reducef n) xs))))
(defn mispelled [s xs]
"Just return the highest of the top-scores"
(last (top-scores s xs)))
| |
446d1352d15ebd2eb9049392a415981f924e78ecb8c611d439e483ac55266cd1 | danieljharvey/nix-mate | Spec.hs | import Data.Either
import NixMate.Actions.Config (defaultConfig, loadConfig)
import NixMate.Actions.Docker
import NixMate.Actions.Tags (parseTags)
import Test.Hspec
main :: IO ()
main = hspec $ do
describe "tmux-mate" $ do
it "Parses tags" $ do
resp <- readFile "./test/data/git-tags.txt"
let tags = parseTags resp
length tags `shouldBe` 6
it "Parses config" $ do
cfg <- loadConfig "./test/data/nix-mate.json"
cfg `shouldSatisfy` isRight
it "Generates Docker derivation" $ do
docker <- readFile "./test/data/docker-derivation.nix"
let generated = createDocker defaultConfig
generated `shouldBe` docker
| null | https://raw.githubusercontent.com/danieljharvey/nix-mate/0197b8517319a4ad4c9cb489579a6ed36ad9aec1/test/Spec.hs | haskell | import Data.Either
import NixMate.Actions.Config (defaultConfig, loadConfig)
import NixMate.Actions.Docker
import NixMate.Actions.Tags (parseTags)
import Test.Hspec
main :: IO ()
main = hspec $ do
describe "tmux-mate" $ do
it "Parses tags" $ do
resp <- readFile "./test/data/git-tags.txt"
let tags = parseTags resp
length tags `shouldBe` 6
it "Parses config" $ do
cfg <- loadConfig "./test/data/nix-mate.json"
cfg `shouldSatisfy` isRight
it "Generates Docker derivation" $ do
docker <- readFile "./test/data/docker-derivation.nix"
let generated = createDocker defaultConfig
generated `shouldBe` docker
| |
ac84a3696ffd3d6e06eb517220db9b8f45a23924acaa0e64a430b0986a5e9a39 | jlongster/genetic-canvas | vectors.scm |
;; Vectors
(define-structure vec2
x y)
(define-structure vec3
x y z)
(define (vec2-equal? v1 v2)
(and (eq? (vec2-x v1) (vec2-x v2))
(eq? (vec2-y v1) (vec2-y v2))))
(define (vec2-op v1 v2 op)
(make-vec2 (op (vec2-x v1) (vec2-x v2))
(op (vec2-y v1) (vec2-y v2))))
(define (vec2-add v1 v2)
(vec2-op v1 v2 +))
(define (vec2-sub v1 v2)
(vec2-op v1 v2 -))
(define (vec2-component-mul v1 v2)
(vec2-op v1 v2 *))
(define (vec2-scalar-mul v1 f)
(make-vec2 (* (vec2-x v1) f)
(* (vec2-y v1) f)))
(define (vec2-length v1)
(sqrt (+ (* (vec2-x v1) (vec2-x v1))
(* (vec2-y v1) (vec2-y v1)))))
(define (vec3-equal? v1 v2)
(and (eq? (vec3-x v1) (vec3-x v2))
(eq? (vec3-y v1) (vec3-y v2))
(eq? (vec3-z v1) (vec3-z v2))))
(define (vec3-op v1 v2 op)
(make-vec3 (op (vec3-x v1) (vec3-x v2))
(op (vec3-y v1) (vec3-y v2))
(op (vec3-z v1) (vec3-z v2))))
(define (vec3-add v1 v2)
(vec3-op v1 v2 +))
(define (vec3-sub v1 v2)
(vec3-op v1 v2 -))
(define (vec3-component-mul v1 v2)
(vec3-op v1 v2 *))
(define (vec3-scalar-mul v1 f)
(make-vec3 (* (vec3-x v1) f)
(* (vec3-y v1) f)
(* (vec3-z v1) f)))
(define (vec3-length v1)
(flsqrt (vec3-dot v1 v1)))
(define (vec3-unit v1)
(vec3-scalar-mul v1 (/ (vec3-length v1))))
(define (vec3-dot v1 v2)
(+ (* (vec3-x v1) (vec3-x v2))
(* (vec3-y v1) (vec3-y v2))
(* (vec3-z v1) (vec3-z v2))))
(define (vec3-cross v1 v2)
(let ((v1-x (vec3-x v1)) (v2-x (vec3-x v2))
(v1-y (vec3-y v1)) (v2-y (vec3-y v2))
(v1-z (vec3-z v1)) (v2-z (vec3-z v2)))
(make-vec3 (- (* v1-y v2-z)
(* v1-z v2-y))
(- (* v1-z v2-x)
(* v1-x v2-z))
(- (* v1-x v2-y)
(* v1-y v2-x)))))
(define (random-vec2 #!optional scale)
(make-vec2 (* (- (* (random-real) 2.) 1.) (or scale 1.0))
(* (- (* (random-real) 2.) 1.) (or scale 1.0))))
(define (random-vec3 #!optional scale)
(make-vec3 (* (random-real) (or scale 1.0))
(* (random-real) (or scale 1.0))
(* (random-real) (or scale 1.0))))
| null | https://raw.githubusercontent.com/jlongster/genetic-canvas/2592e48ddbd168a819ca6ca330a6f8af6ffc37aa/lib/vectors.scm | scheme | Vectors |
(define-structure vec2
x y)
(define-structure vec3
x y z)
(define (vec2-equal? v1 v2)
(and (eq? (vec2-x v1) (vec2-x v2))
(eq? (vec2-y v1) (vec2-y v2))))
(define (vec2-op v1 v2 op)
(make-vec2 (op (vec2-x v1) (vec2-x v2))
(op (vec2-y v1) (vec2-y v2))))
(define (vec2-add v1 v2)
(vec2-op v1 v2 +))
(define (vec2-sub v1 v2)
(vec2-op v1 v2 -))
(define (vec2-component-mul v1 v2)
(vec2-op v1 v2 *))
(define (vec2-scalar-mul v1 f)
(make-vec2 (* (vec2-x v1) f)
(* (vec2-y v1) f)))
(define (vec2-length v1)
(sqrt (+ (* (vec2-x v1) (vec2-x v1))
(* (vec2-y v1) (vec2-y v1)))))
(define (vec3-equal? v1 v2)
(and (eq? (vec3-x v1) (vec3-x v2))
(eq? (vec3-y v1) (vec3-y v2))
(eq? (vec3-z v1) (vec3-z v2))))
(define (vec3-op v1 v2 op)
(make-vec3 (op (vec3-x v1) (vec3-x v2))
(op (vec3-y v1) (vec3-y v2))
(op (vec3-z v1) (vec3-z v2))))
(define (vec3-add v1 v2)
(vec3-op v1 v2 +))
(define (vec3-sub v1 v2)
(vec3-op v1 v2 -))
(define (vec3-component-mul v1 v2)
(vec3-op v1 v2 *))
(define (vec3-scalar-mul v1 f)
(make-vec3 (* (vec3-x v1) f)
(* (vec3-y v1) f)
(* (vec3-z v1) f)))
(define (vec3-length v1)
(flsqrt (vec3-dot v1 v1)))
(define (vec3-unit v1)
(vec3-scalar-mul v1 (/ (vec3-length v1))))
(define (vec3-dot v1 v2)
(+ (* (vec3-x v1) (vec3-x v2))
(* (vec3-y v1) (vec3-y v2))
(* (vec3-z v1) (vec3-z v2))))
(define (vec3-cross v1 v2)
(let ((v1-x (vec3-x v1)) (v2-x (vec3-x v2))
(v1-y (vec3-y v1)) (v2-y (vec3-y v2))
(v1-z (vec3-z v1)) (v2-z (vec3-z v2)))
(make-vec3 (- (* v1-y v2-z)
(* v1-z v2-y))
(- (* v1-z v2-x)
(* v1-x v2-z))
(- (* v1-x v2-y)
(* v1-y v2-x)))))
(define (random-vec2 #!optional scale)
(make-vec2 (* (- (* (random-real) 2.) 1.) (or scale 1.0))
(* (- (* (random-real) 2.) 1.) (or scale 1.0))))
(define (random-vec3 #!optional scale)
(make-vec3 (* (random-real) (or scale 1.0))
(* (random-real) (or scale 1.0))
(* (random-real) (or scale 1.0))))
|
b17c3638f389708bfbbbec69dc1801a43d2e2a9be96c00401292b32ed6187f12 | callum-oakley/advent-of-code | 01.clj | (ns aoc.2021.01
(:require
[clojure.string :as str]
[clojure.test :refer [deftest is]]))
(defn parse [s]
(map read-string (str/split-lines s)))
(defn part-1 [xs]
(->> xs (partition 2 1) (filter #(apply < %)) count))
(defn part-2 [xs]
(->> xs (partition 3 1) (map #(apply + %)) part-1))
(deftest test-example
(is (= 7 (part-1 [199 200 208 210 200 207 240 269 260 263])))
(is (= 5 (part-2 [199 200 208 210 200 207 240 269 260 263]))))
| null | https://raw.githubusercontent.com/callum-oakley/advent-of-code/3cf44bcb8c57693639630f95f29d4abf49a6f0e4/src/aoc/2021/01.clj | clojure | (ns aoc.2021.01
(:require
[clojure.string :as str]
[clojure.test :refer [deftest is]]))
(defn parse [s]
(map read-string (str/split-lines s)))
(defn part-1 [xs]
(->> xs (partition 2 1) (filter #(apply < %)) count))
(defn part-2 [xs]
(->> xs (partition 3 1) (map #(apply + %)) part-1))
(deftest test-example
(is (= 7 (part-1 [199 200 208 210 200 207 240 269 260 263])))
(is (= 5 (part-2 [199 200 208 210 200 207 240 269 260 263]))))
| |
1b942fcf0f0299a1940515b451dd52cf3fd11002de97f084c04d51153b98735b | EasyCrypt/easycrypt | ecGenRegexp.ml | (* -------------------------------------------------------------------- *)
open EcUtils
open EcMaps
(* -------------------------------------------------------------------- *)
type anchor =
| Start
| End
type 'base gen_regexp =
| Anchor of anchor
| Any
| Base of 'base
| Choice of 'base gen_regexp list
| Named of 'base gen_regexp * string
| Repeat of 'base gen_regexp * int option pair * [ `Greedy | `Lazy ]
| Seq of 'base gen_regexp list
(* -------------------------------------------------------------------- *)
exception NoMatch
exception InvalidRange
(* -------------------------------------------------------------------- *)
module type IRegexpBase = sig
type subject
type engine
type regexp1
type path
type pos = int
type regexp = regexp1 gen_regexp
val mkengine : subject -> engine
val at_start : engine -> bool
val at_end : engine -> bool
val eat : engine -> engine
val eat_base : engine -> regexp1 -> engine * (engine * regexp) list
val position : engine -> pos
val extract : engine -> (pos * pos) -> subject
val next : engine -> engine option
val path : engine -> path
end
(* -------------------------------------------------------------------- *)
module Regexp(B : IRegexpBase) : sig
type regexp = B.regexp
type subject = B.subject
type matches = subject Mstr.t
val search : regexp -> subject -> matches option
end = struct
type regexp = B.regexp
(* ------------------------------------------------------------------ *)
type subject = B.subject
type matches = subject Mstr.t
type engine = { e_sub : B.engine; e_grp : matches; }
type pos = B.pos
(* ------------------------------------------------------------------ *)
let mkengine (s : subject) =
{ e_sub = B.mkengine s; e_grp = Mstr.empty; }
(* ------------------------------------------------------------------ *)
let eat (e : engine) =
{ e with e_sub = B.eat e.e_sub }
(* ------------------------------------------------------------------ *)
type continuation = Cont of (continuation1 * continuation) Lazy.t
and matchr = engine * continuation
and continuation1 = [
| `Result of engine
| `Regexp of engine * regexp
]
(* ------------------------------------------------------------------ *)
let no_continuation =
Cont (Lazy.from_fun (fun () -> raise NoMatch))
(* ------------------------------------------------------------------ *)
let single_continuation (ctn : continuation1) =
Cont (Lazy.from_val (ctn, no_continuation))
(* ------------------------------------------------------------------ *)
let single_mr (e : engine) : matchr =
(e, no_continuation)
(* -------------------------------------------------------------------- *)
let add_match (e : engine) (name : string) (range : pos * pos) =
{ e with e_grp = Mstr.add name (B.extract e.e_sub range) e.e_grp }
(* ------------------------------------------------------------------ *)
let rec search (e : engine) (r : regexp) : matchr =
match r with
| Anchor Start when B.at_start e.e_sub -> (e, no_continuation)
| Anchor End when B.at_end e.e_sub -> (e, no_continuation)
| Anchor _ ->
raise NoMatch
| Any ->
(eat e, no_continuation)
| Base br ->
let sub, aux = B.eat_base e.e_sub br in
let grp = List.fold_left search_sub e.e_grp aux in
({ e_sub = sub; e_grp = grp; }, no_continuation)
| Named (subr, name) ->
let decorate res =
let start = B.position e.e_sub in
let end_ = B.position res.e_sub in
add_match res name (start, end_)
in apply1_on_mr decorate (search e subr)
| Choice rs ->
let ctn =
let do1 r ctn =
let ctn1 = `Regexp (e, r) in
Cont (Lazy.from_val (ctn1, ctn))
in List.fold_right do1 rs no_continuation
in force_continuation ctn
| Seq [] ->
(e, no_continuation)
| Seq (r :: rs) ->
apply_on_mr (fun e -> search e (Seq rs)) (search e r)
| Repeat (subr, (imin, imax), mode) -> begin
let imin = odfl 0 imin in
let imax = odfl max_int imax in
if imax < imin then raise NoMatch else
let mr =
let rec aux (count : int) (e : engine) =
if count <= 0
then (e, no_continuation)
else apply_on_mr (aux (count - 1)) (search e subr)
in aux imin e in
if imax <= imin then mr else
let module E = struct exception Error end in
let rec next1 (count : int) (e : engine) =
if count <= 0
then raise NoMatch
else
apply_on_mr
(next (Some (B.path e.e_sub)) (count - 1))
(search e subr)
and next start count (e : engine) =
if Some (B.path e.e_sub) = start then raise NoMatch;
try
try
match mode with
| `Lazy ->
(e, continuation_of_mr (next1 count e))
| `Greedy ->
chain_mr
(next1 count e)
(continuation_of_mr (e, no_continuation))
with NoMatch -> raise E.Error
with E.Error -> (e, no_continuation)
in apply_on_mr (next None (imax - imin)) mr
end
(* ------------------------------------------------------------------ *)
and continuation_of_mr (e, ctn) : continuation =
Cont (Lazy.from_val (`Result e, ctn))
(* ------------------------------------------------------------------ *)
and chain_continuation (Cont ctn1) (Cont ctn2) =
Cont (Lazy.from_fun (fun () ->
try
let (x, ctn1) = Lazy.force ctn1 in
(x, chain_continuation ctn1 (Cont ctn2))
with NoMatch -> Lazy.force ctn2))
(* ------------------------------------------------------------------ *)
and force_continuation (Cont (lazy (ctn1, ctn))) : matchr =
match ctn1 with
| `Result e -> (e, ctn)
| `Regexp (e, r) ->
try
let (e, ectn) = search e r in
(e, chain_continuation ectn ctn)
with NoMatch -> force_continuation ctn
(* ------------------------------------------------------------------ *)
and apply_on_continuation f ctn =
Cont (Lazy.from_fun (fun () ->
let e, ctn = apply_on_mr f (force_continuation ctn) in
(`Result e, ctn)))
(* ------------------------------------------------------------------ *)
and apply_on_mr (f : engine -> matchr) ((e, ctn) : matchr) : matchr =
try chain_mr (f e) (apply_on_continuation f ctn)
with NoMatch -> apply_on_mr f (force_continuation ctn)
(* ------------------------------------------------------------------ *)
and chain_mr ((e, ctn1) : matchr) (ctn2 : continuation) =
(e, chain_continuation ctn1 ctn2)
(* ------------------------------------------------------------------ *)
and apply1_on_continuation f (ctn : continuation) : continuation =
apply_on_continuation (fun e -> (f e, no_continuation)) ctn
(* ------------------------------------------------------------------ *)
and apply1_on_mr f (mr : matchr) : matchr =
apply_on_mr (fun e -> (f e, no_continuation)) mr
(* ------------------------------------------------------------------ *)
and next_continuation (e : engine) : continuation =
let next () : continuation1 * continuation =
let e = { e with e_sub = oget ~exn:NoMatch (B.next e.e_sub) } in
(`Result e, next_continuation e)
in Cont (Lazy.from_fun next)
(* ------------------------------------------------------------------ *)
and next_mr (e : engine) : matchr =
(e, next_continuation e)
(* ------------------------------------------------------------------ *)
and search_sub (grp : matches) ((e, r) : B.engine * regexp) =
let mr = next_mr { e_sub = e; e_grp = grp; } in
(fst (apply_on_mr (fun e -> search e r) mr)).e_grp
(* ------------------------------------------------------------------ *)
let search (re : regexp) (subject : subject) =
let mr = next_mr (mkengine subject) in
try Some (fst (apply_on_mr (fun e -> search e re) mr)).e_grp
with NoMatch -> None
end
(* -------------------------------------------------------------------- *)
type string_regexp = String of string
module StringBaseRegexp
: IRegexpBase with type subject = string
and type regexp1 = string_regexp
= struct
type subject = string
type regexp1 = string_regexp
type engine = { e_sbj : string; e_pos : int; }
type pos = int
type path = int
type regexp = regexp1 gen_regexp
(* ------------------------------------------------------------------ *)
let mkengine (s : string) =
{ e_sbj = s; e_pos = 0; }
(* ------------------------------------------------------------------ *)
let at_start (e : engine) = e.e_pos = 0
let at_end (e : engine) = e.e_pos = String.length e.e_sbj
(* ------------------------------------------------------------------ *)
let path (e : engine) : path =
e.e_pos
(* ------------------------------------------------------------------ *)
let position (e : engine) = e.e_pos
(* ------------------------------------------------------------------ *)
let eat (e : engine) (n : int) =
if String.length e.e_sbj - e.e_pos < n
then raise NoMatch
else { e with e_pos = e.e_pos + 1 }
(* ------------------------------------------------------------------ *)
let eat e = eat e 1
(* ------------------------------------------------------------------ *)
let eat_base (e : engine) (String s : regexp1) =
let len = String.length s in
if String.length e.e_sbj - e.e_pos < len then
raise NoMatch;
s |> String.iteri (fun i c ->
if c <> e.e_sbj.[e.e_pos + i] then raise NoMatch);
{ e with e_pos = e.e_pos + len }, []
(* ------------------------------------------------------------------ *)
let extract (e : engine) ((r1, r2) : int * int) =
try String.sub e.e_sbj r1 (r2 - r1)
with Invalid_argument _ -> raise InvalidRange
(* ------------------------------------------------------------------ *)
let next (e : engine) =
if at_end e then None else Some { e with e_pos = e.e_pos + 1 }
end
(* -------------------------------------------------------------------- *)
module StringRegexp = Regexp(StringBaseRegexp)
| null | https://raw.githubusercontent.com/EasyCrypt/easycrypt/f87695472e70c313ef2966e20979b1afcc2e543e/src/ecGenRegexp.ml | ocaml | --------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
-------------------------------------------------------------------- | open EcUtils
open EcMaps
type anchor =
| Start
| End
type 'base gen_regexp =
| Anchor of anchor
| Any
| Base of 'base
| Choice of 'base gen_regexp list
| Named of 'base gen_regexp * string
| Repeat of 'base gen_regexp * int option pair * [ `Greedy | `Lazy ]
| Seq of 'base gen_regexp list
exception NoMatch
exception InvalidRange
module type IRegexpBase = sig
type subject
type engine
type regexp1
type path
type pos = int
type regexp = regexp1 gen_regexp
val mkengine : subject -> engine
val at_start : engine -> bool
val at_end : engine -> bool
val eat : engine -> engine
val eat_base : engine -> regexp1 -> engine * (engine * regexp) list
val position : engine -> pos
val extract : engine -> (pos * pos) -> subject
val next : engine -> engine option
val path : engine -> path
end
module Regexp(B : IRegexpBase) : sig
type regexp = B.regexp
type subject = B.subject
type matches = subject Mstr.t
val search : regexp -> subject -> matches option
end = struct
type regexp = B.regexp
type subject = B.subject
type matches = subject Mstr.t
type engine = { e_sub : B.engine; e_grp : matches; }
type pos = B.pos
let mkengine (s : subject) =
{ e_sub = B.mkengine s; e_grp = Mstr.empty; }
let eat (e : engine) =
{ e with e_sub = B.eat e.e_sub }
type continuation = Cont of (continuation1 * continuation) Lazy.t
and matchr = engine * continuation
and continuation1 = [
| `Result of engine
| `Regexp of engine * regexp
]
let no_continuation =
Cont (Lazy.from_fun (fun () -> raise NoMatch))
let single_continuation (ctn : continuation1) =
Cont (Lazy.from_val (ctn, no_continuation))
let single_mr (e : engine) : matchr =
(e, no_continuation)
let add_match (e : engine) (name : string) (range : pos * pos) =
{ e with e_grp = Mstr.add name (B.extract e.e_sub range) e.e_grp }
let rec search (e : engine) (r : regexp) : matchr =
match r with
| Anchor Start when B.at_start e.e_sub -> (e, no_continuation)
| Anchor End when B.at_end e.e_sub -> (e, no_continuation)
| Anchor _ ->
raise NoMatch
| Any ->
(eat e, no_continuation)
| Base br ->
let sub, aux = B.eat_base e.e_sub br in
let grp = List.fold_left search_sub e.e_grp aux in
({ e_sub = sub; e_grp = grp; }, no_continuation)
| Named (subr, name) ->
let decorate res =
let start = B.position e.e_sub in
let end_ = B.position res.e_sub in
add_match res name (start, end_)
in apply1_on_mr decorate (search e subr)
| Choice rs ->
let ctn =
let do1 r ctn =
let ctn1 = `Regexp (e, r) in
Cont (Lazy.from_val (ctn1, ctn))
in List.fold_right do1 rs no_continuation
in force_continuation ctn
| Seq [] ->
(e, no_continuation)
| Seq (r :: rs) ->
apply_on_mr (fun e -> search e (Seq rs)) (search e r)
| Repeat (subr, (imin, imax), mode) -> begin
let imin = odfl 0 imin in
let imax = odfl max_int imax in
if imax < imin then raise NoMatch else
let mr =
let rec aux (count : int) (e : engine) =
if count <= 0
then (e, no_continuation)
else apply_on_mr (aux (count - 1)) (search e subr)
in aux imin e in
if imax <= imin then mr else
let module E = struct exception Error end in
let rec next1 (count : int) (e : engine) =
if count <= 0
then raise NoMatch
else
apply_on_mr
(next (Some (B.path e.e_sub)) (count - 1))
(search e subr)
and next start count (e : engine) =
if Some (B.path e.e_sub) = start then raise NoMatch;
try
try
match mode with
| `Lazy ->
(e, continuation_of_mr (next1 count e))
| `Greedy ->
chain_mr
(next1 count e)
(continuation_of_mr (e, no_continuation))
with NoMatch -> raise E.Error
with E.Error -> (e, no_continuation)
in apply_on_mr (next None (imax - imin)) mr
end
and continuation_of_mr (e, ctn) : continuation =
Cont (Lazy.from_val (`Result e, ctn))
and chain_continuation (Cont ctn1) (Cont ctn2) =
Cont (Lazy.from_fun (fun () ->
try
let (x, ctn1) = Lazy.force ctn1 in
(x, chain_continuation ctn1 (Cont ctn2))
with NoMatch -> Lazy.force ctn2))
and force_continuation (Cont (lazy (ctn1, ctn))) : matchr =
match ctn1 with
| `Result e -> (e, ctn)
| `Regexp (e, r) ->
try
let (e, ectn) = search e r in
(e, chain_continuation ectn ctn)
with NoMatch -> force_continuation ctn
and apply_on_continuation f ctn =
Cont (Lazy.from_fun (fun () ->
let e, ctn = apply_on_mr f (force_continuation ctn) in
(`Result e, ctn)))
and apply_on_mr (f : engine -> matchr) ((e, ctn) : matchr) : matchr =
try chain_mr (f e) (apply_on_continuation f ctn)
with NoMatch -> apply_on_mr f (force_continuation ctn)
and chain_mr ((e, ctn1) : matchr) (ctn2 : continuation) =
(e, chain_continuation ctn1 ctn2)
and apply1_on_continuation f (ctn : continuation) : continuation =
apply_on_continuation (fun e -> (f e, no_continuation)) ctn
and apply1_on_mr f (mr : matchr) : matchr =
apply_on_mr (fun e -> (f e, no_continuation)) mr
and next_continuation (e : engine) : continuation =
let next () : continuation1 * continuation =
let e = { e with e_sub = oget ~exn:NoMatch (B.next e.e_sub) } in
(`Result e, next_continuation e)
in Cont (Lazy.from_fun next)
and next_mr (e : engine) : matchr =
(e, next_continuation e)
and search_sub (grp : matches) ((e, r) : B.engine * regexp) =
let mr = next_mr { e_sub = e; e_grp = grp; } in
(fst (apply_on_mr (fun e -> search e r) mr)).e_grp
let search (re : regexp) (subject : subject) =
let mr = next_mr (mkengine subject) in
try Some (fst (apply_on_mr (fun e -> search e re) mr)).e_grp
with NoMatch -> None
end
type string_regexp = String of string
module StringBaseRegexp
: IRegexpBase with type subject = string
and type regexp1 = string_regexp
= struct
type subject = string
type regexp1 = string_regexp
type engine = { e_sbj : string; e_pos : int; }
type pos = int
type path = int
type regexp = regexp1 gen_regexp
let mkengine (s : string) =
{ e_sbj = s; e_pos = 0; }
let at_start (e : engine) = e.e_pos = 0
let at_end (e : engine) = e.e_pos = String.length e.e_sbj
let path (e : engine) : path =
e.e_pos
let position (e : engine) = e.e_pos
let eat (e : engine) (n : int) =
if String.length e.e_sbj - e.e_pos < n
then raise NoMatch
else { e with e_pos = e.e_pos + 1 }
let eat e = eat e 1
let eat_base (e : engine) (String s : regexp1) =
let len = String.length s in
if String.length e.e_sbj - e.e_pos < len then
raise NoMatch;
s |> String.iteri (fun i c ->
if c <> e.e_sbj.[e.e_pos + i] then raise NoMatch);
{ e with e_pos = e.e_pos + len }, []
let extract (e : engine) ((r1, r2) : int * int) =
try String.sub e.e_sbj r1 (r2 - r1)
with Invalid_argument _ -> raise InvalidRange
let next (e : engine) =
if at_end e then None else Some { e with e_pos = e.e_pos + 1 }
end
module StringRegexp = Regexp(StringBaseRegexp)
|
0e90d976bb18a3712bd9ea911df2a64eea11ef1f3e22d27e955f5ad80340fcd0 | reborg/clojure-essential-reference | 5.clj | < 1 >
(intern 'disappear 'my-var 0)
(refer 'disappear :only ['my-var])
my-var
0
< 2 >
(.ns #'my-var)
;; #object[clojure.lang.Namespace 0x1f780201 "disappear"]
(create-ns 'disappear) ; <3>
(intern 'disappear 'my-var 1)
< 4 >
0
< 5 >
1 | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/OtherFunctions/VarsandNamespaces/ns%2Cin-ns%2Ccreate-nsandremove-ns/5.clj | clojure | #object[clojure.lang.Namespace 0x1f780201 "disappear"]
<3> | < 1 >
(intern 'disappear 'my-var 0)
(refer 'disappear :only ['my-var])
my-var
0
< 2 >
(.ns #'my-var)
(intern 'disappear 'my-var 1)
< 4 >
0
< 5 >
1 |
e323d2e6a267bb0e78f5f7ead9aa2632b09abf7d59ad92795343b58d11849a9b | inclojure-org/intermediate-clojure-workshop | users_pure_test.clj | (ns workshop-app.handlers.users-pure-test
(:require [clojure.test :refer :all]
[workshop-app.handlers.users :as wahu])
(:import (java.time LocalDate)))
(deftest pure-get-person-test
(is (= {:status 200
:headers {"content-type" "application/json"}
:body "{\"dob\":\"2000-01-01\",\"age\":20}"}
(wahu/get-person "2000-01-01" (LocalDate/parse "2020-02-14")))
"Is our pure get handler working as expected.")) | null | https://raw.githubusercontent.com/inclojure-org/intermediate-clojure-workshop/3338a476aa815a587fa9e0b8b3804aa43492d15e/save-points/final/test/workshop_app/handlers/users_pure_test.clj | clojure | (ns workshop-app.handlers.users-pure-test
(:require [clojure.test :refer :all]
[workshop-app.handlers.users :as wahu])
(:import (java.time LocalDate)))
(deftest pure-get-person-test
(is (= {:status 200
:headers {"content-type" "application/json"}
:body "{\"dob\":\"2000-01-01\",\"age\":20}"}
(wahu/get-person "2000-01-01" (LocalDate/parse "2020-02-14")))
"Is our pure get handler working as expected.")) | |
ee14faaacd3ecf6a3bced4971cbc30978e5c77733d3e1aed5b5560b9e4b8804b | AbstractMachinesLab/caramel | typer_raw.ml | { { { COPYING * (
This file is part of Merlin , an helper for ocaml editors
Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net >
refis.thomas(_)gmail.com >
< simon.castellan(_)iuwt.fr >
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation the
rights to use , copy , modify , merge , publish , distribute , sublicense , and/or
sell copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
The Software is provided " as is " , without warranty of any kind , express or
implied , including but not limited to the warranties of merchantability ,
fitness for a particular purpose and noninfringement . In no event shall
the authors or copyright holders be liable for any claim , damages or other
liability , whether in an action of contract , tort or otherwise , arising
from , out of or in connection with the software or the use or other dealings
in the Software .
) * } } }
This file is part of Merlin, an helper for ocaml editors
Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net>
Thomas Refis <refis.thomas(_)gmail.com>
Simon Castellan <simon.castellan(_)iuwt.fr>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
The Software is provided "as is", without warranty of any kind, express or
implied, including but not limited to the warranties of merchantability,
fitness for a particular purpose and noninfringement. In no event shall
the authors or copyright holders be liable for any claim, damages or other
liability, whether in an action of contract, tort or otherwise, arising
from, out of or in connection with the software or the use or other dealings
in the Software.
)* }}} *)
open Std
open Location
open Parsetree
let open_implicit_module m env =
let open Asttypes in
let lid = {loc = Location.in_file "command line";
txt = Longident.parse m } in
match snd (Typemod.type_open_ Override env lid.loc lid) with
| env -> env
| exception exn ->
Msupport.raise_error exn;
env
let fresh_env () =
(*Ident.reinit();*)
let initial =
if !Clflags.unsafe_string then
Env.initial_unsafe_string
else
Env.initial_safe_string in
let env =
if !Clflags.nopervasives then
initial
else
open_implicit_module "Pervasives" initial in
List.fold_right ~f:open_implicit_module
!Clflags.open_modules ~init:env
module Rewrite_loc = struct
let queue = ref []
let update l =
if l <> none then
match !queue with
| [] -> assert false
| l' :: ls -> queue := Location_aux.union l l' :: ls
let enter () = queue := Location.none :: !queue
let leave l0 = match !queue with
| [] -> assert false
| [l] -> queue := []; Location_aux.extend l0 l
| l :: l' :: ls ->
let l = Location_aux.extend l0 l in
queue := Location_aux.union l l' :: ls;
l
let start () = assert (!queue = []); enter ()
let exit () = match !queue with
| [_] -> queue := []
| _ -> assert false
let u_option f = function
| None -> None
| Some x -> Some (f x)
let u_loc (loc : _ Location.loc) =
update loc.loc; loc
let rec u_attribute (loc, payload) =
let loc = if Location_aux.is_relaxed_location loc then loc else u_loc loc in
(loc, u_payload payload)
and u_extension x = u_attribute x
and u_attributes l =
List.map ~f:u_attribute l
and u_payload = function
| PStr str -> PStr (u_structure str)
| PTyp ct -> PTyp (u_core_type ct)
| PPat (p, eo) -> PPat (u_pattern p, u_option u_expression eo)
and u_core_type {ptyp_desc; ptyp_attributes; ptyp_loc} =
enter ();
let ptyp_desc = u_core_type_desc ptyp_desc in
let ptyp_attributes = u_attributes ptyp_attributes in
let ptyp_loc = leave ptyp_loc in
{ptyp_desc; ptyp_loc; ptyp_attributes}
and u_core_type_desc = function
| Ptyp_any | Ptyp_var _ as desc -> desc
| Ptyp_arrow (l, t1, t2) -> Ptyp_arrow (l, u_core_type t1, u_core_type t2)
| Ptyp_tuple ts -> Ptyp_tuple (List.map ~f:u_core_type ts)
| Ptyp_constr (loc, ts) -> Ptyp_constr (u_loc loc, List.map ~f:u_core_type ts)
| Ptyp_object (fields, flag) ->
Ptyp_object (List.map ~f:(fun (s,a,ct) -> (s, u_attributes a, u_core_type ct)) fields, flag)
| Ptyp_class (loc, ts) -> Ptyp_class (u_loc loc, List.map ~f:u_core_type ts)
| Ptyp_alias (ct, name) -> Ptyp_alias (u_core_type ct, name)
| Ptyp_variant (fields, flag, label) -> Ptyp_variant (List.map ~f:u_row_field fields, flag, label)
| Ptyp_poly (ss,ct) -> Ptyp_poly (ss, u_core_type ct)
| Ptyp_package pt -> Ptyp_package (u_package_type pt)
| Ptyp_extension ext -> Ptyp_extension (u_extension ext)
and u_package_type (loc, cts) =
(u_loc loc, List.map ~f:(fun (l,ct) -> u_loc l, u_core_type ct) cts)
and u_row_field = function
| Rtag (l,attrs,has_const,cts) ->
Rtag (l, u_attributes attrs, has_const, List.map ~f:u_core_type cts)
| Rinherit ct -> Rinherit (u_core_type ct)
and u_pattern {ppat_desc; ppat_loc; ppat_attributes} =
enter ();
let ppat_desc = u_pattern_desc ppat_desc in
let ppat_attributes = u_attributes ppat_attributes in
let ppat_loc = leave ppat_loc in
{ppat_desc; ppat_loc; ppat_attributes}
and u_pattern_desc = function
| Ppat_any | Ppat_constant _ | Ppat_interval _ as p -> p
| Ppat_var l -> Ppat_var (u_loc l)
| Ppat_alias (p, l) -> Ppat_alias (u_pattern p, u_loc l)
| Ppat_tuple ps -> Ppat_tuple (List.map ~f:u_pattern ps)
| Ppat_construct (loc, po) -> Ppat_construct (u_loc loc, u_option u_pattern po)
| Ppat_variant (lbl, po) -> Ppat_variant (lbl, u_option u_pattern po)
| Ppat_record (fields, flag) -> Ppat_record (List.map ~f:(fun (l,p) -> (u_loc l, u_pattern p)) fields, flag)
| Ppat_array ps -> Ppat_array (List.map ~f:u_pattern ps)
| Ppat_or (p1, p2) -> Ppat_or (u_pattern p1, u_pattern p2)
| Ppat_constraint (p, ct) -> Ppat_constraint (u_pattern p, u_core_type ct)
| Ppat_type loc -> Ppat_type (u_loc loc)
| Ppat_lazy p -> Ppat_lazy (u_pattern p)
| Ppat_unpack loc -> Ppat_unpack (u_loc loc)
| Ppat_exception p -> Ppat_exception (u_pattern p)
| Ppat_extension ext -> Ppat_extension (u_extension ext)
and u_expression {pexp_desc; pexp_loc; pexp_attributes} =
enter ();
let pexp_desc = u_expression_desc pexp_desc in
let pexp_attributes = u_attributes pexp_attributes in
let pexp_loc = leave pexp_loc in
{pexp_desc; pexp_loc; pexp_attributes}
and u_expression_desc = function
| Pexp_ident loc -> Pexp_ident (u_loc loc)
| Pexp_constant _ as e -> e
| Pexp_let (flag, vs, e) ->
Pexp_let (flag, List.map ~f:u_value_binding vs, u_expression e)
| Pexp_function cs ->
Pexp_function (List.map ~f:u_case cs)
| Pexp_fun (lbl, eo, pattern, expr) ->
Pexp_fun (lbl, u_option u_expression eo, u_pattern pattern, u_expression expr)
| Pexp_apply (e, les) ->
Pexp_apply (u_expression e, List.map ~f:(fun (l,e) -> (l, u_expression e)) les)
| Pexp_match (e, cs) -> Pexp_match (u_expression e, List.map ~f:u_case cs)
| Pexp_try (e, cs) -> Pexp_try (u_expression e, List.map ~f:u_case cs)
| Pexp_tuple es -> Pexp_tuple (List.map ~f:u_expression es)
| Pexp_construct (loc, eo) ->
Pexp_construct (u_loc loc, u_option u_expression eo)
| Pexp_variant (lbl, eo) ->
Pexp_variant (lbl, u_option u_expression eo)
| Pexp_record (les, eo) ->
Pexp_record (List.map ~f:(fun (loc,e) -> (u_loc loc, u_expression e)) les, u_option u_expression eo)
| Pexp_field (e, loc) -> Pexp_field (u_expression e, u_loc loc)
| Pexp_setfield (e1, loc, e2) -> Pexp_setfield (u_expression e1, u_loc loc, u_expression e2)
| Pexp_array es -> Pexp_array (List.map ~f:u_expression es)
| Pexp_ifthenelse (e1,e2,e3) -> Pexp_ifthenelse (u_expression e1, u_expression e2, u_option u_expression e3)
| Pexp_sequence (e1, e2) -> Pexp_sequence (u_expression e1, u_expression e2)
| Pexp_while (e1, e2) -> Pexp_while (u_expression e1, u_expression e2)
| Pexp_for (p, e1, e2, flag, e3) -> Pexp_for (u_pattern p, u_expression e1, u_expression e2, flag, u_expression e3)
| Pexp_constraint (e, ct) -> Pexp_constraint (u_expression e, u_core_type ct)
| Pexp_coerce (e, cto, ct) -> Pexp_coerce (u_expression e, u_option u_core_type cto, u_core_type ct)
| Pexp_send (e, s) -> Pexp_send (u_expression e, s)
| Pexp_new loc -> Pexp_new (u_loc loc)
| Pexp_setinstvar (s, e) -> Pexp_setinstvar (u_loc s, u_expression e)
| Pexp_override es -> Pexp_override (List.map ~f:(fun (loc,e) -> (u_loc loc, u_expression e)) es)
| Pexp_letmodule (s, me, e) -> Pexp_letmodule (u_loc s, u_module_expr me, u_expression e)
| Pexp_assert e -> Pexp_assert (u_expression e)
| Pexp_lazy e -> Pexp_lazy (u_expression e)
| Pexp_poly (e, cto) -> Pexp_poly (u_expression e, u_option u_core_type cto)
| Pexp_object cs -> Pexp_object (u_class_structure cs)
| Pexp_newtype (s, e) -> Pexp_newtype (s, u_expression e)
| Pexp_pack me -> Pexp_pack (u_module_expr me)
| Pexp_open (flag, loc, e) -> Pexp_open (flag, u_loc loc, u_expression e)
| Pexp_extension ext -> Pexp_extension (u_extension ext)
and u_case {pc_lhs; pc_guard; pc_rhs} = {
pc_lhs = u_pattern pc_lhs;
pc_guard = u_option u_expression pc_guard;
pc_rhs = u_expression pc_rhs;
}
and u_value_description {pval_name; pval_type; pval_prim; pval_attributes; pval_loc} =
enter ();
let pval_name = u_loc pval_name in
let pval_type = u_core_type pval_type in
let pval_attributes = u_attributes pval_attributes in
let pval_loc = leave pval_loc in
{pval_name; pval_type; pval_prim; pval_attributes; pval_loc}
and u_type_declaration {ptype_name; ptype_params; ptype_cstrs; ptype_kind;
ptype_private; ptype_manifest; ptype_attributes; ptype_loc} =
enter ();
let ptype_name = u_loc ptype_name
and ptype_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) ptype_params
and ptype_cstrs = List.map ~f:(fun (ct1,ct2,l) ->
update l; (u_core_type ct1, u_core_type ct2, l)) ptype_cstrs
and ptype_kind = u_type_kind ptype_kind
and ptype_manifest = u_option u_core_type ptype_manifest
and ptype_attributes = u_attributes ptype_attributes
in
let ptype_loc = leave ptype_loc in
{ptype_name; ptype_params; ptype_cstrs; ptype_kind;
ptype_private; ptype_manifest; ptype_attributes; ptype_loc}
and u_type_kind = function
| Ptype_abstract | Ptype_open as k -> k
| Ptype_variant cstrs -> Ptype_variant (List.map ~f:u_constructor_declaration cstrs)
| Ptype_record lbls -> Ptype_record (List.map ~f:u_label_declaration lbls)
and u_label_declaration {pld_name; pld_mutable; pld_type; pld_loc; pld_attributes} =
enter ();
let pld_name = u_loc pld_name in
let pld_type = u_core_type pld_type in
let pld_attributes = u_attributes pld_attributes in
let pld_loc = leave pld_loc in
{pld_name; pld_mutable; pld_type; pld_loc; pld_attributes}
and u_constructor_declaration {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} =
enter ();
let pcd_name = u_loc pcd_name in
let pcd_args = List.map ~f:u_core_type pcd_args in
let pcd_res = u_option u_core_type pcd_res in
let pcd_attributes = u_attributes pcd_attributes in
let pcd_loc = leave pcd_loc in
{pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes}
and u_type_extension {ptyext_path; ptyext_params; ptyext_constructors; ptyext_private; ptyext_attributes} =
let ptyext_path = u_loc ptyext_path in
let ptyext_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) ptyext_params in
let ptyext_constructors = List.map ~f:u_extension_constructor ptyext_constructors in
let ptyext_attributes = u_attributes ptyext_attributes in
{ptyext_path; ptyext_params; ptyext_constructors; ptyext_private; ptyext_attributes}
and u_extension_constructor {pext_name; pext_kind; pext_loc; pext_attributes} =
enter ();
let pext_name = u_loc pext_name in
let pext_kind = u_extension_constructor_kind pext_kind in
let pext_attributes = u_attributes pext_attributes in
let pext_loc = leave pext_loc in
{pext_name; pext_kind; pext_loc; pext_attributes}
and u_extension_constructor_kind = function
| Pext_decl (cts, cto) ->
Pext_decl (List.map ~f:u_core_type cts, u_option u_core_type cto)
| Pext_rebind loc -> Pext_rebind (u_loc loc)
* { 2 Class language }
(* Type expressions for the class language *)
and u_class_type {pcty_desc; pcty_loc; pcty_attributes} =
enter ();
let pcty_desc = u_class_type_desc pcty_desc in
let pcty_attributes = u_attributes pcty_attributes in
let pcty_loc = leave pcty_loc in
{pcty_desc; pcty_loc; pcty_attributes}
and u_class_type_desc = function
| Pcty_constr (loc, cts) ->
Pcty_constr (u_loc loc, List.map ~f:u_core_type cts)
| Pcty_signature cs -> Pcty_signature (u_class_signature cs)
| Pcty_arrow (lbl, ct, clt) ->
Pcty_arrow (lbl, u_core_type ct, u_class_type clt)
| Pcty_extension ext ->
Pcty_extension (u_extension ext)
and u_class_signature {pcsig_self; pcsig_fields} =
let pcsig_self = u_core_type pcsig_self in
let pcsig_fields = List.map ~f:u_class_type_field pcsig_fields in
{pcsig_self; pcsig_fields}
and u_class_type_field {pctf_desc; pctf_loc; pctf_attributes} =
enter ();
let pctf_desc = u_class_type_field_desc pctf_desc in
let pctf_attributes = u_attributes pctf_attributes in
let pctf_loc = leave pctf_loc in
{pctf_desc; pctf_loc; pctf_attributes}
and u_class_type_field_desc = function
| Pctf_inherit clt -> Pctf_inherit (u_class_type clt)
| Pctf_val (s, fl1, fl2, ct) -> Pctf_val (s, fl1, fl2, u_core_type ct)
| Pctf_method (s, fl1, fl2, ct) -> Pctf_method (s, fl1, fl2, u_core_type ct)
| Pctf_constraint (ct1, ct2) -> Pctf_constraint (u_core_type ct1, u_core_type ct2)
| Pctf_attribute attr -> Pctf_attribute (u_attribute attr)
| Pctf_extension ext -> Pctf_extension (u_extension ext)
and u_class_infos : 'a 'b. ('a -> 'b) -> 'a class_infos -> 'b class_infos =
fun u_a {pci_virt; pci_params; pci_name; pci_expr; pci_loc; pci_attributes} ->
enter ();
let pci_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) pci_params in
let pci_name = u_loc pci_name in
let pci_expr = u_a pci_expr in
let pci_attributes = u_attributes pci_attributes in
let pci_loc = leave pci_loc in
{pci_virt; pci_params; pci_name; pci_expr; pci_loc; pci_attributes}
and u_class_description clt = u_class_infos u_class_type clt
and u_class_type_declaration clt = u_class_infos u_class_type clt
and u_class_expr {pcl_desc; pcl_loc; pcl_attributes} =
enter ();
let pcl_desc = u_class_expr_desc pcl_desc in
let pcl_attributes = u_attributes pcl_attributes in
let pcl_loc = leave pcl_loc in
{pcl_desc; pcl_loc; pcl_attributes}
and u_class_expr_desc = function
| Pcl_constr (loc, cts) -> Pcl_constr (u_loc loc, List.map ~f:u_core_type cts)
| Pcl_structure cs -> Pcl_structure (u_class_structure cs)
| Pcl_fun (lbl, eo, p, ce) ->
Pcl_fun (lbl, u_option u_expression eo, u_pattern p, u_class_expr ce)
| Pcl_apply (ce, les) ->
Pcl_apply (u_class_expr ce, List.map ~f:(fun (l,e) -> (l, u_expression e)) les)
| Pcl_let (rf, vbs, ce) ->
Pcl_let (rf, List.map ~f:u_value_binding vbs, u_class_expr ce)
| Pcl_constraint (ce, ct) -> Pcl_constraint (u_class_expr ce, u_class_type ct)
| Pcl_extension ext -> Pcl_extension (u_extension ext)
and u_class_structure {pcstr_self; pcstr_fields} =
let pcstr_self = u_pattern pcstr_self in
let pcstr_fields = List.map ~f:u_class_field pcstr_fields in
{pcstr_self; pcstr_fields}
and u_class_field {pcf_desc; pcf_loc; pcf_attributes} =
enter ();
let pcf_desc = u_class_field_desc pcf_desc in
let pcf_attributes = u_attributes pcf_attributes in
let pcf_loc = leave pcf_loc in
{pcf_desc; pcf_loc; pcf_attributes}
and u_class_field_desc = function
| Pcf_inherit (fl, ce, so) -> Pcf_inherit (fl, u_class_expr ce, so)
| Pcf_val (loc, fl, cfk) -> Pcf_val (u_loc loc, fl, u_class_field_kind cfk)
| Pcf_method (loc, fl, cfk) -> Pcf_method (u_loc loc, fl, u_class_field_kind cfk)
| Pcf_constraint (c1, c2) -> Pcf_constraint (u_core_type c1, u_core_type c2)
| Pcf_initializer e -> Pcf_initializer (u_expression e)
| Pcf_attribute attr -> Pcf_attribute (u_attribute attr)
| Pcf_extension ext -> Pcf_extension (u_extension ext)
and u_class_field_kind = function
| Cfk_virtual ct -> Cfk_virtual (u_core_type ct)
| Cfk_concrete (fl,e) -> Cfk_concrete (fl, u_expression e)
and u_class_declaration cd = u_class_infos u_class_expr cd
and u_module_type {pmty_desc; pmty_loc; pmty_attributes} =
enter ();
let pmty_desc = u_module_type_desc pmty_desc in
let pmty_attributes = u_attributes pmty_attributes in
let pmty_loc = leave pmty_loc in
{pmty_desc; pmty_loc; pmty_attributes}
and u_module_type_desc = function
| Pmty_ident loc -> Pmty_ident (u_loc loc)
| Pmty_signature sg -> Pmty_signature (u_signature sg)
| Pmty_functor (loc, mto, mt) -> Pmty_functor (u_loc loc, u_option u_module_type mto, u_module_type mt)
| Pmty_with (mt, wts) -> Pmty_with (u_module_type mt, List.map ~f:u_with_constraint wts)
| Pmty_typeof me -> Pmty_typeof (u_module_expr me)
| Pmty_extension ext -> Pmty_extension (u_extension ext)
| Pmty_alias loc -> Pmty_alias (u_loc loc)
and u_signature l = List.map ~f:u_signature_item l
and u_signature_item {psig_desc; psig_loc} =
enter ();
let psig_desc = u_signature_item_desc psig_desc in
let psig_loc = leave psig_loc in
{psig_desc; psig_loc}
and u_signature_item_desc = function
| Psig_value vd -> Psig_value (u_value_description vd)
| Psig_type tds -> Psig_type (List.map ~f:u_type_declaration tds)
| Psig_typext text -> Psig_typext (u_type_extension text)
| Psig_exception ec -> Psig_exception (u_extension_constructor ec)
| Psig_module md -> Psig_module (u_module_declaration md)
| Psig_recmodule mds -> Psig_recmodule (List.map ~f:u_module_declaration mds)
| Psig_modtype mtd -> Psig_modtype (u_module_type_declaration mtd)
| Psig_open od -> Psig_open (u_open_description od)
| Psig_include id -> Psig_include (u_include_description id)
| Psig_class cds -> Psig_class (List.map ~f:u_class_description cds)
| Psig_class_type cts -> Psig_class_type (List.map ~f:u_class_type_declaration cts)
| Psig_attribute attr -> Psig_attribute (u_attribute attr)
| Psig_extension (ext, attrs) -> Psig_extension (u_extension ext, u_attributes attrs)
and u_module_declaration {pmd_name; pmd_type; pmd_attributes; pmd_loc} =
enter ();
let pmd_name = u_loc pmd_name in
let pmd_type = u_module_type pmd_type in
let pmd_attributes = u_attributes pmd_attributes in
let pmd_loc = leave pmd_loc in
{pmd_name; pmd_type; pmd_attributes; pmd_loc}
and u_module_type_declaration {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} =
enter ();
let pmtd_name = u_loc pmtd_name in
let pmtd_type = u_option u_module_type pmtd_type in
let pmtd_attributes = u_attributes pmtd_attributes in
let pmtd_loc = leave pmtd_loc in
{pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc}
and u_open_description {popen_lid; popen_override; popen_loc; popen_attributes} =
enter ();
let popen_lid = u_loc popen_lid in
let popen_attributes = u_attributes popen_attributes in
let popen_loc = leave popen_loc in
{popen_lid; popen_override; popen_loc; popen_attributes}
and u_include_infos : 'a 'b . ('a -> 'b) -> 'a include_infos -> 'b include_infos =
fun u_a {pincl_mod; pincl_loc; pincl_attributes} ->
enter ();
let pincl_mod = u_a pincl_mod in
let pincl_attributes = u_attributes pincl_attributes in
let pincl_loc = leave pincl_loc in
{pincl_mod; pincl_loc; pincl_attributes}
and u_include_description id = u_include_infos u_module_type id
and u_include_declaration id = u_include_infos u_module_expr id
and u_with_constraint = function
| Pwith_type (loc, td) -> Pwith_type (u_loc loc, u_type_declaration td)
| Pwith_module (loc1, loc2) -> Pwith_module (u_loc loc1, u_loc loc2)
| Pwith_typesubst td -> Pwith_typesubst (u_type_declaration td)
| Pwith_modsubst (loc1, loc2) -> Pwith_modsubst (u_loc loc1, u_loc loc2)
and u_module_expr {pmod_desc; pmod_loc; pmod_attributes} =
enter ();
let pmod_desc = u_module_expr_desc pmod_desc in
let pmod_attributes = u_attributes pmod_attributes in
let pmod_loc = leave pmod_loc in
{pmod_desc; pmod_loc; pmod_attributes}
and u_module_expr_desc = function
| Pmod_ident loc -> Pmod_ident (u_loc loc)
| Pmod_structure str -> Pmod_structure (u_structure str)
| Pmod_functor (loc, mto, me) ->
Pmod_functor (u_loc loc, u_option u_module_type mto, u_module_expr me)
| Pmod_apply (me1, me2) ->
Pmod_apply (u_module_expr me1, u_module_expr me2)
| Pmod_constraint (me, mt) ->
Pmod_constraint (u_module_expr me, u_module_type mt)
| Pmod_unpack e -> Pmod_unpack (u_expression e)
| Pmod_extension ext -> Pmod_extension (u_extension ext)
and u_structure l = List.map ~f:u_structure_item l
and u_structure_item {pstr_desc; pstr_loc} =
enter ();
let pstr_desc = u_structure_item_desc pstr_desc in
let pstr_loc = leave pstr_loc in
{pstr_desc; pstr_loc}
and u_structure_item_desc = function
| Pstr_eval (expr, attrs) -> Pstr_eval (u_expression expr, u_attributes attrs)
| Pstr_value (fl, vbs) -> Pstr_value (fl, List.map ~f:u_value_binding vbs)
| Pstr_primitive vd -> Pstr_primitive (u_value_description vd)
| Pstr_type tds -> Pstr_type (List.map ~f:u_type_declaration tds)
| Pstr_typext text -> Pstr_typext (u_type_extension text)
| Pstr_exception ext -> Pstr_exception (u_extension_constructor ext)
| Pstr_module mb -> Pstr_module (u_module_binding mb)
| Pstr_recmodule mbs -> Pstr_recmodule (List.map ~f:u_module_binding mbs)
| Pstr_modtype mtd -> Pstr_modtype (u_module_type_declaration mtd)
| Pstr_open od -> Pstr_open (u_open_description od)
| Pstr_class cds -> Pstr_class (List.map ~f:u_class_declaration cds)
| Pstr_class_type ctds -> Pstr_class_type (List.map ~f:u_class_type_declaration ctds)
| Pstr_include id -> Pstr_include (u_include_declaration id)
| Pstr_attribute attr -> Pstr_attribute (u_attribute attr)
| Pstr_extension (ext, attrs) -> Pstr_extension (u_extension ext, u_attributes attrs)
and u_value_binding {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} =
enter ();
let pvb_pat = u_pattern pvb_pat in
let pvb_expr = u_expression pvb_expr in
let pvb_attributes = u_attributes pvb_attributes in
let pvb_loc = leave pvb_loc in
{pvb_pat; pvb_expr; pvb_attributes; pvb_loc}
and u_module_binding {pmb_name; pmb_expr; pmb_attributes; pmb_loc} =
enter ();
let pmb_name = u_loc pmb_name in
let pmb_expr = u_module_expr pmb_expr in
let pmb_attributes = u_attributes pmb_attributes in
let pmb_loc = leave pmb_loc in
{pmb_name; pmb_expr; pmb_attributes; pmb_loc}
end
let rewrite_loc t =
Rewrite_loc.start ();
let t = match t with
| `str str -> `str (Rewrite_loc.u_structure str)
| `fake str -> `fake (Rewrite_loc.u_structure str)
| `sg sg -> `sg (Rewrite_loc.u_signature sg)
in
Rewrite_loc.exit ();
t
| null | https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/ocaml-lsp-server/vendor/merlin/src/ocaml/merlin_specific/402/typer_raw.ml | ocaml | Ident.reinit();
Type expressions for the class language | { { { COPYING * (
This file is part of Merlin , an helper for ocaml editors
Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net >
refis.thomas(_)gmail.com >
< simon.castellan(_)iuwt.fr >
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation the
rights to use , copy , modify , merge , publish , distribute , sublicense , and/or
sell copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
The Software is provided " as is " , without warranty of any kind , express or
implied , including but not limited to the warranties of merchantability ,
fitness for a particular purpose and noninfringement . In no event shall
the authors or copyright holders be liable for any claim , damages or other
liability , whether in an action of contract , tort or otherwise , arising
from , out of or in connection with the software or the use or other dealings
in the Software .
) * } } }
This file is part of Merlin, an helper for ocaml editors
Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net>
Thomas Refis <refis.thomas(_)gmail.com>
Simon Castellan <simon.castellan(_)iuwt.fr>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
The Software is provided "as is", without warranty of any kind, express or
implied, including but not limited to the warranties of merchantability,
fitness for a particular purpose and noninfringement. In no event shall
the authors or copyright holders be liable for any claim, damages or other
liability, whether in an action of contract, tort or otherwise, arising
from, out of or in connection with the software or the use or other dealings
in the Software.
)* }}} *)
open Std
open Location
open Parsetree
let open_implicit_module m env =
let open Asttypes in
let lid = {loc = Location.in_file "command line";
txt = Longident.parse m } in
match snd (Typemod.type_open_ Override env lid.loc lid) with
| env -> env
| exception exn ->
Msupport.raise_error exn;
env
let fresh_env () =
let initial =
if !Clflags.unsafe_string then
Env.initial_unsafe_string
else
Env.initial_safe_string in
let env =
if !Clflags.nopervasives then
initial
else
open_implicit_module "Pervasives" initial in
List.fold_right ~f:open_implicit_module
!Clflags.open_modules ~init:env
module Rewrite_loc = struct
let queue = ref []
let update l =
if l <> none then
match !queue with
| [] -> assert false
| l' :: ls -> queue := Location_aux.union l l' :: ls
let enter () = queue := Location.none :: !queue
let leave l0 = match !queue with
| [] -> assert false
| [l] -> queue := []; Location_aux.extend l0 l
| l :: l' :: ls ->
let l = Location_aux.extend l0 l in
queue := Location_aux.union l l' :: ls;
l
let start () = assert (!queue = []); enter ()
let exit () = match !queue with
| [_] -> queue := []
| _ -> assert false
let u_option f = function
| None -> None
| Some x -> Some (f x)
let u_loc (loc : _ Location.loc) =
update loc.loc; loc
let rec u_attribute (loc, payload) =
let loc = if Location_aux.is_relaxed_location loc then loc else u_loc loc in
(loc, u_payload payload)
and u_extension x = u_attribute x
and u_attributes l =
List.map ~f:u_attribute l
and u_payload = function
| PStr str -> PStr (u_structure str)
| PTyp ct -> PTyp (u_core_type ct)
| PPat (p, eo) -> PPat (u_pattern p, u_option u_expression eo)
and u_core_type {ptyp_desc; ptyp_attributes; ptyp_loc} =
enter ();
let ptyp_desc = u_core_type_desc ptyp_desc in
let ptyp_attributes = u_attributes ptyp_attributes in
let ptyp_loc = leave ptyp_loc in
{ptyp_desc; ptyp_loc; ptyp_attributes}
and u_core_type_desc = function
| Ptyp_any | Ptyp_var _ as desc -> desc
| Ptyp_arrow (l, t1, t2) -> Ptyp_arrow (l, u_core_type t1, u_core_type t2)
| Ptyp_tuple ts -> Ptyp_tuple (List.map ~f:u_core_type ts)
| Ptyp_constr (loc, ts) -> Ptyp_constr (u_loc loc, List.map ~f:u_core_type ts)
| Ptyp_object (fields, flag) ->
Ptyp_object (List.map ~f:(fun (s,a,ct) -> (s, u_attributes a, u_core_type ct)) fields, flag)
| Ptyp_class (loc, ts) -> Ptyp_class (u_loc loc, List.map ~f:u_core_type ts)
| Ptyp_alias (ct, name) -> Ptyp_alias (u_core_type ct, name)
| Ptyp_variant (fields, flag, label) -> Ptyp_variant (List.map ~f:u_row_field fields, flag, label)
| Ptyp_poly (ss,ct) -> Ptyp_poly (ss, u_core_type ct)
| Ptyp_package pt -> Ptyp_package (u_package_type pt)
| Ptyp_extension ext -> Ptyp_extension (u_extension ext)
and u_package_type (loc, cts) =
(u_loc loc, List.map ~f:(fun (l,ct) -> u_loc l, u_core_type ct) cts)
and u_row_field = function
| Rtag (l,attrs,has_const,cts) ->
Rtag (l, u_attributes attrs, has_const, List.map ~f:u_core_type cts)
| Rinherit ct -> Rinherit (u_core_type ct)
and u_pattern {ppat_desc; ppat_loc; ppat_attributes} =
enter ();
let ppat_desc = u_pattern_desc ppat_desc in
let ppat_attributes = u_attributes ppat_attributes in
let ppat_loc = leave ppat_loc in
{ppat_desc; ppat_loc; ppat_attributes}
and u_pattern_desc = function
| Ppat_any | Ppat_constant _ | Ppat_interval _ as p -> p
| Ppat_var l -> Ppat_var (u_loc l)
| Ppat_alias (p, l) -> Ppat_alias (u_pattern p, u_loc l)
| Ppat_tuple ps -> Ppat_tuple (List.map ~f:u_pattern ps)
| Ppat_construct (loc, po) -> Ppat_construct (u_loc loc, u_option u_pattern po)
| Ppat_variant (lbl, po) -> Ppat_variant (lbl, u_option u_pattern po)
| Ppat_record (fields, flag) -> Ppat_record (List.map ~f:(fun (l,p) -> (u_loc l, u_pattern p)) fields, flag)
| Ppat_array ps -> Ppat_array (List.map ~f:u_pattern ps)
| Ppat_or (p1, p2) -> Ppat_or (u_pattern p1, u_pattern p2)
| Ppat_constraint (p, ct) -> Ppat_constraint (u_pattern p, u_core_type ct)
| Ppat_type loc -> Ppat_type (u_loc loc)
| Ppat_lazy p -> Ppat_lazy (u_pattern p)
| Ppat_unpack loc -> Ppat_unpack (u_loc loc)
| Ppat_exception p -> Ppat_exception (u_pattern p)
| Ppat_extension ext -> Ppat_extension (u_extension ext)
and u_expression {pexp_desc; pexp_loc; pexp_attributes} =
enter ();
let pexp_desc = u_expression_desc pexp_desc in
let pexp_attributes = u_attributes pexp_attributes in
let pexp_loc = leave pexp_loc in
{pexp_desc; pexp_loc; pexp_attributes}
and u_expression_desc = function
| Pexp_ident loc -> Pexp_ident (u_loc loc)
| Pexp_constant _ as e -> e
| Pexp_let (flag, vs, e) ->
Pexp_let (flag, List.map ~f:u_value_binding vs, u_expression e)
| Pexp_function cs ->
Pexp_function (List.map ~f:u_case cs)
| Pexp_fun (lbl, eo, pattern, expr) ->
Pexp_fun (lbl, u_option u_expression eo, u_pattern pattern, u_expression expr)
| Pexp_apply (e, les) ->
Pexp_apply (u_expression e, List.map ~f:(fun (l,e) -> (l, u_expression e)) les)
| Pexp_match (e, cs) -> Pexp_match (u_expression e, List.map ~f:u_case cs)
| Pexp_try (e, cs) -> Pexp_try (u_expression e, List.map ~f:u_case cs)
| Pexp_tuple es -> Pexp_tuple (List.map ~f:u_expression es)
| Pexp_construct (loc, eo) ->
Pexp_construct (u_loc loc, u_option u_expression eo)
| Pexp_variant (lbl, eo) ->
Pexp_variant (lbl, u_option u_expression eo)
| Pexp_record (les, eo) ->
Pexp_record (List.map ~f:(fun (loc,e) -> (u_loc loc, u_expression e)) les, u_option u_expression eo)
| Pexp_field (e, loc) -> Pexp_field (u_expression e, u_loc loc)
| Pexp_setfield (e1, loc, e2) -> Pexp_setfield (u_expression e1, u_loc loc, u_expression e2)
| Pexp_array es -> Pexp_array (List.map ~f:u_expression es)
| Pexp_ifthenelse (e1,e2,e3) -> Pexp_ifthenelse (u_expression e1, u_expression e2, u_option u_expression e3)
| Pexp_sequence (e1, e2) -> Pexp_sequence (u_expression e1, u_expression e2)
| Pexp_while (e1, e2) -> Pexp_while (u_expression e1, u_expression e2)
| Pexp_for (p, e1, e2, flag, e3) -> Pexp_for (u_pattern p, u_expression e1, u_expression e2, flag, u_expression e3)
| Pexp_constraint (e, ct) -> Pexp_constraint (u_expression e, u_core_type ct)
| Pexp_coerce (e, cto, ct) -> Pexp_coerce (u_expression e, u_option u_core_type cto, u_core_type ct)
| Pexp_send (e, s) -> Pexp_send (u_expression e, s)
| Pexp_new loc -> Pexp_new (u_loc loc)
| Pexp_setinstvar (s, e) -> Pexp_setinstvar (u_loc s, u_expression e)
| Pexp_override es -> Pexp_override (List.map ~f:(fun (loc,e) -> (u_loc loc, u_expression e)) es)
| Pexp_letmodule (s, me, e) -> Pexp_letmodule (u_loc s, u_module_expr me, u_expression e)
| Pexp_assert e -> Pexp_assert (u_expression e)
| Pexp_lazy e -> Pexp_lazy (u_expression e)
| Pexp_poly (e, cto) -> Pexp_poly (u_expression e, u_option u_core_type cto)
| Pexp_object cs -> Pexp_object (u_class_structure cs)
| Pexp_newtype (s, e) -> Pexp_newtype (s, u_expression e)
| Pexp_pack me -> Pexp_pack (u_module_expr me)
| Pexp_open (flag, loc, e) -> Pexp_open (flag, u_loc loc, u_expression e)
| Pexp_extension ext -> Pexp_extension (u_extension ext)
and u_case {pc_lhs; pc_guard; pc_rhs} = {
pc_lhs = u_pattern pc_lhs;
pc_guard = u_option u_expression pc_guard;
pc_rhs = u_expression pc_rhs;
}
and u_value_description {pval_name; pval_type; pval_prim; pval_attributes; pval_loc} =
enter ();
let pval_name = u_loc pval_name in
let pval_type = u_core_type pval_type in
let pval_attributes = u_attributes pval_attributes in
let pval_loc = leave pval_loc in
{pval_name; pval_type; pval_prim; pval_attributes; pval_loc}
and u_type_declaration {ptype_name; ptype_params; ptype_cstrs; ptype_kind;
ptype_private; ptype_manifest; ptype_attributes; ptype_loc} =
enter ();
let ptype_name = u_loc ptype_name
and ptype_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) ptype_params
and ptype_cstrs = List.map ~f:(fun (ct1,ct2,l) ->
update l; (u_core_type ct1, u_core_type ct2, l)) ptype_cstrs
and ptype_kind = u_type_kind ptype_kind
and ptype_manifest = u_option u_core_type ptype_manifest
and ptype_attributes = u_attributes ptype_attributes
in
let ptype_loc = leave ptype_loc in
{ptype_name; ptype_params; ptype_cstrs; ptype_kind;
ptype_private; ptype_manifest; ptype_attributes; ptype_loc}
and u_type_kind = function
| Ptype_abstract | Ptype_open as k -> k
| Ptype_variant cstrs -> Ptype_variant (List.map ~f:u_constructor_declaration cstrs)
| Ptype_record lbls -> Ptype_record (List.map ~f:u_label_declaration lbls)
and u_label_declaration {pld_name; pld_mutable; pld_type; pld_loc; pld_attributes} =
enter ();
let pld_name = u_loc pld_name in
let pld_type = u_core_type pld_type in
let pld_attributes = u_attributes pld_attributes in
let pld_loc = leave pld_loc in
{pld_name; pld_mutable; pld_type; pld_loc; pld_attributes}
and u_constructor_declaration {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} =
enter ();
let pcd_name = u_loc pcd_name in
let pcd_args = List.map ~f:u_core_type pcd_args in
let pcd_res = u_option u_core_type pcd_res in
let pcd_attributes = u_attributes pcd_attributes in
let pcd_loc = leave pcd_loc in
{pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes}
and u_type_extension {ptyext_path; ptyext_params; ptyext_constructors; ptyext_private; ptyext_attributes} =
let ptyext_path = u_loc ptyext_path in
let ptyext_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) ptyext_params in
let ptyext_constructors = List.map ~f:u_extension_constructor ptyext_constructors in
let ptyext_attributes = u_attributes ptyext_attributes in
{ptyext_path; ptyext_params; ptyext_constructors; ptyext_private; ptyext_attributes}
and u_extension_constructor {pext_name; pext_kind; pext_loc; pext_attributes} =
enter ();
let pext_name = u_loc pext_name in
let pext_kind = u_extension_constructor_kind pext_kind in
let pext_attributes = u_attributes pext_attributes in
let pext_loc = leave pext_loc in
{pext_name; pext_kind; pext_loc; pext_attributes}
and u_extension_constructor_kind = function
| Pext_decl (cts, cto) ->
Pext_decl (List.map ~f:u_core_type cts, u_option u_core_type cto)
| Pext_rebind loc -> Pext_rebind (u_loc loc)
* { 2 Class language }
and u_class_type {pcty_desc; pcty_loc; pcty_attributes} =
enter ();
let pcty_desc = u_class_type_desc pcty_desc in
let pcty_attributes = u_attributes pcty_attributes in
let pcty_loc = leave pcty_loc in
{pcty_desc; pcty_loc; pcty_attributes}
and u_class_type_desc = function
| Pcty_constr (loc, cts) ->
Pcty_constr (u_loc loc, List.map ~f:u_core_type cts)
| Pcty_signature cs -> Pcty_signature (u_class_signature cs)
| Pcty_arrow (lbl, ct, clt) ->
Pcty_arrow (lbl, u_core_type ct, u_class_type clt)
| Pcty_extension ext ->
Pcty_extension (u_extension ext)
and u_class_signature {pcsig_self; pcsig_fields} =
let pcsig_self = u_core_type pcsig_self in
let pcsig_fields = List.map ~f:u_class_type_field pcsig_fields in
{pcsig_self; pcsig_fields}
and u_class_type_field {pctf_desc; pctf_loc; pctf_attributes} =
enter ();
let pctf_desc = u_class_type_field_desc pctf_desc in
let pctf_attributes = u_attributes pctf_attributes in
let pctf_loc = leave pctf_loc in
{pctf_desc; pctf_loc; pctf_attributes}
and u_class_type_field_desc = function
| Pctf_inherit clt -> Pctf_inherit (u_class_type clt)
| Pctf_val (s, fl1, fl2, ct) -> Pctf_val (s, fl1, fl2, u_core_type ct)
| Pctf_method (s, fl1, fl2, ct) -> Pctf_method (s, fl1, fl2, u_core_type ct)
| Pctf_constraint (ct1, ct2) -> Pctf_constraint (u_core_type ct1, u_core_type ct2)
| Pctf_attribute attr -> Pctf_attribute (u_attribute attr)
| Pctf_extension ext -> Pctf_extension (u_extension ext)
and u_class_infos : 'a 'b. ('a -> 'b) -> 'a class_infos -> 'b class_infos =
fun u_a {pci_virt; pci_params; pci_name; pci_expr; pci_loc; pci_attributes} ->
enter ();
let pci_params = List.map ~f:(fun (ct,v) -> (u_core_type ct, v)) pci_params in
let pci_name = u_loc pci_name in
let pci_expr = u_a pci_expr in
let pci_attributes = u_attributes pci_attributes in
let pci_loc = leave pci_loc in
{pci_virt; pci_params; pci_name; pci_expr; pci_loc; pci_attributes}
and u_class_description clt = u_class_infos u_class_type clt
and u_class_type_declaration clt = u_class_infos u_class_type clt
and u_class_expr {pcl_desc; pcl_loc; pcl_attributes} =
enter ();
let pcl_desc = u_class_expr_desc pcl_desc in
let pcl_attributes = u_attributes pcl_attributes in
let pcl_loc = leave pcl_loc in
{pcl_desc; pcl_loc; pcl_attributes}
and u_class_expr_desc = function
| Pcl_constr (loc, cts) -> Pcl_constr (u_loc loc, List.map ~f:u_core_type cts)
| Pcl_structure cs -> Pcl_structure (u_class_structure cs)
| Pcl_fun (lbl, eo, p, ce) ->
Pcl_fun (lbl, u_option u_expression eo, u_pattern p, u_class_expr ce)
| Pcl_apply (ce, les) ->
Pcl_apply (u_class_expr ce, List.map ~f:(fun (l,e) -> (l, u_expression e)) les)
| Pcl_let (rf, vbs, ce) ->
Pcl_let (rf, List.map ~f:u_value_binding vbs, u_class_expr ce)
| Pcl_constraint (ce, ct) -> Pcl_constraint (u_class_expr ce, u_class_type ct)
| Pcl_extension ext -> Pcl_extension (u_extension ext)
and u_class_structure {pcstr_self; pcstr_fields} =
let pcstr_self = u_pattern pcstr_self in
let pcstr_fields = List.map ~f:u_class_field pcstr_fields in
{pcstr_self; pcstr_fields}
and u_class_field {pcf_desc; pcf_loc; pcf_attributes} =
enter ();
let pcf_desc = u_class_field_desc pcf_desc in
let pcf_attributes = u_attributes pcf_attributes in
let pcf_loc = leave pcf_loc in
{pcf_desc; pcf_loc; pcf_attributes}
and u_class_field_desc = function
| Pcf_inherit (fl, ce, so) -> Pcf_inherit (fl, u_class_expr ce, so)
| Pcf_val (loc, fl, cfk) -> Pcf_val (u_loc loc, fl, u_class_field_kind cfk)
| Pcf_method (loc, fl, cfk) -> Pcf_method (u_loc loc, fl, u_class_field_kind cfk)
| Pcf_constraint (c1, c2) -> Pcf_constraint (u_core_type c1, u_core_type c2)
| Pcf_initializer e -> Pcf_initializer (u_expression e)
| Pcf_attribute attr -> Pcf_attribute (u_attribute attr)
| Pcf_extension ext -> Pcf_extension (u_extension ext)
and u_class_field_kind = function
| Cfk_virtual ct -> Cfk_virtual (u_core_type ct)
| Cfk_concrete (fl,e) -> Cfk_concrete (fl, u_expression e)
and u_class_declaration cd = u_class_infos u_class_expr cd
and u_module_type {pmty_desc; pmty_loc; pmty_attributes} =
enter ();
let pmty_desc = u_module_type_desc pmty_desc in
let pmty_attributes = u_attributes pmty_attributes in
let pmty_loc = leave pmty_loc in
{pmty_desc; pmty_loc; pmty_attributes}
and u_module_type_desc = function
| Pmty_ident loc -> Pmty_ident (u_loc loc)
| Pmty_signature sg -> Pmty_signature (u_signature sg)
| Pmty_functor (loc, mto, mt) -> Pmty_functor (u_loc loc, u_option u_module_type mto, u_module_type mt)
| Pmty_with (mt, wts) -> Pmty_with (u_module_type mt, List.map ~f:u_with_constraint wts)
| Pmty_typeof me -> Pmty_typeof (u_module_expr me)
| Pmty_extension ext -> Pmty_extension (u_extension ext)
| Pmty_alias loc -> Pmty_alias (u_loc loc)
and u_signature l = List.map ~f:u_signature_item l
and u_signature_item {psig_desc; psig_loc} =
enter ();
let psig_desc = u_signature_item_desc psig_desc in
let psig_loc = leave psig_loc in
{psig_desc; psig_loc}
and u_signature_item_desc = function
| Psig_value vd -> Psig_value (u_value_description vd)
| Psig_type tds -> Psig_type (List.map ~f:u_type_declaration tds)
| Psig_typext text -> Psig_typext (u_type_extension text)
| Psig_exception ec -> Psig_exception (u_extension_constructor ec)
| Psig_module md -> Psig_module (u_module_declaration md)
| Psig_recmodule mds -> Psig_recmodule (List.map ~f:u_module_declaration mds)
| Psig_modtype mtd -> Psig_modtype (u_module_type_declaration mtd)
| Psig_open od -> Psig_open (u_open_description od)
| Psig_include id -> Psig_include (u_include_description id)
| Psig_class cds -> Psig_class (List.map ~f:u_class_description cds)
| Psig_class_type cts -> Psig_class_type (List.map ~f:u_class_type_declaration cts)
| Psig_attribute attr -> Psig_attribute (u_attribute attr)
| Psig_extension (ext, attrs) -> Psig_extension (u_extension ext, u_attributes attrs)
and u_module_declaration {pmd_name; pmd_type; pmd_attributes; pmd_loc} =
enter ();
let pmd_name = u_loc pmd_name in
let pmd_type = u_module_type pmd_type in
let pmd_attributes = u_attributes pmd_attributes in
let pmd_loc = leave pmd_loc in
{pmd_name; pmd_type; pmd_attributes; pmd_loc}
and u_module_type_declaration {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} =
enter ();
let pmtd_name = u_loc pmtd_name in
let pmtd_type = u_option u_module_type pmtd_type in
let pmtd_attributes = u_attributes pmtd_attributes in
let pmtd_loc = leave pmtd_loc in
{pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc}
and u_open_description {popen_lid; popen_override; popen_loc; popen_attributes} =
enter ();
let popen_lid = u_loc popen_lid in
let popen_attributes = u_attributes popen_attributes in
let popen_loc = leave popen_loc in
{popen_lid; popen_override; popen_loc; popen_attributes}
and u_include_infos : 'a 'b . ('a -> 'b) -> 'a include_infos -> 'b include_infos =
fun u_a {pincl_mod; pincl_loc; pincl_attributes} ->
enter ();
let pincl_mod = u_a pincl_mod in
let pincl_attributes = u_attributes pincl_attributes in
let pincl_loc = leave pincl_loc in
{pincl_mod; pincl_loc; pincl_attributes}
and u_include_description id = u_include_infos u_module_type id
and u_include_declaration id = u_include_infos u_module_expr id
and u_with_constraint = function
| Pwith_type (loc, td) -> Pwith_type (u_loc loc, u_type_declaration td)
| Pwith_module (loc1, loc2) -> Pwith_module (u_loc loc1, u_loc loc2)
| Pwith_typesubst td -> Pwith_typesubst (u_type_declaration td)
| Pwith_modsubst (loc1, loc2) -> Pwith_modsubst (u_loc loc1, u_loc loc2)
and u_module_expr {pmod_desc; pmod_loc; pmod_attributes} =
enter ();
let pmod_desc = u_module_expr_desc pmod_desc in
let pmod_attributes = u_attributes pmod_attributes in
let pmod_loc = leave pmod_loc in
{pmod_desc; pmod_loc; pmod_attributes}
and u_module_expr_desc = function
| Pmod_ident loc -> Pmod_ident (u_loc loc)
| Pmod_structure str -> Pmod_structure (u_structure str)
| Pmod_functor (loc, mto, me) ->
Pmod_functor (u_loc loc, u_option u_module_type mto, u_module_expr me)
| Pmod_apply (me1, me2) ->
Pmod_apply (u_module_expr me1, u_module_expr me2)
| Pmod_constraint (me, mt) ->
Pmod_constraint (u_module_expr me, u_module_type mt)
| Pmod_unpack e -> Pmod_unpack (u_expression e)
| Pmod_extension ext -> Pmod_extension (u_extension ext)
and u_structure l = List.map ~f:u_structure_item l
and u_structure_item {pstr_desc; pstr_loc} =
enter ();
let pstr_desc = u_structure_item_desc pstr_desc in
let pstr_loc = leave pstr_loc in
{pstr_desc; pstr_loc}
and u_structure_item_desc = function
| Pstr_eval (expr, attrs) -> Pstr_eval (u_expression expr, u_attributes attrs)
| Pstr_value (fl, vbs) -> Pstr_value (fl, List.map ~f:u_value_binding vbs)
| Pstr_primitive vd -> Pstr_primitive (u_value_description vd)
| Pstr_type tds -> Pstr_type (List.map ~f:u_type_declaration tds)
| Pstr_typext text -> Pstr_typext (u_type_extension text)
| Pstr_exception ext -> Pstr_exception (u_extension_constructor ext)
| Pstr_module mb -> Pstr_module (u_module_binding mb)
| Pstr_recmodule mbs -> Pstr_recmodule (List.map ~f:u_module_binding mbs)
| Pstr_modtype mtd -> Pstr_modtype (u_module_type_declaration mtd)
| Pstr_open od -> Pstr_open (u_open_description od)
| Pstr_class cds -> Pstr_class (List.map ~f:u_class_declaration cds)
| Pstr_class_type ctds -> Pstr_class_type (List.map ~f:u_class_type_declaration ctds)
| Pstr_include id -> Pstr_include (u_include_declaration id)
| Pstr_attribute attr -> Pstr_attribute (u_attribute attr)
| Pstr_extension (ext, attrs) -> Pstr_extension (u_extension ext, u_attributes attrs)
and u_value_binding {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} =
enter ();
let pvb_pat = u_pattern pvb_pat in
let pvb_expr = u_expression pvb_expr in
let pvb_attributes = u_attributes pvb_attributes in
let pvb_loc = leave pvb_loc in
{pvb_pat; pvb_expr; pvb_attributes; pvb_loc}
and u_module_binding {pmb_name; pmb_expr; pmb_attributes; pmb_loc} =
enter ();
let pmb_name = u_loc pmb_name in
let pmb_expr = u_module_expr pmb_expr in
let pmb_attributes = u_attributes pmb_attributes in
let pmb_loc = leave pmb_loc in
{pmb_name; pmb_expr; pmb_attributes; pmb_loc}
end
let rewrite_loc t =
Rewrite_loc.start ();
let t = match t with
| `str str -> `str (Rewrite_loc.u_structure str)
| `fake str -> `fake (Rewrite_loc.u_structure str)
| `sg sg -> `sg (Rewrite_loc.u_signature sg)
in
Rewrite_loc.exit ();
t
|
7aa12b4be88167f58a6600044b2ec77c52a9e512f49f90a5133a0799c4ea1c59 | xhtmlboi/yocaml | util.mli | (** An invasive but probably useful tooling. *)
(** {1 String util}
As I was not very serious... strings occupy a very large place in
Wordpress... so it is necessary to be able to work correctly with them. *)
val split_metadata : string -> string option * string
* { 1 Infix operators }
Even if sometimes , infix operators can seem unreadable ... the immoderate
use of Arrows has already made the code incomprehensible ... so why deprive
yourself ?
Even if sometimes, infix operators can seem unreadable... the immoderate
use of Arrows has already made the code incomprehensible... so why deprive
yourself? *)
(** [f $ x] is [f @@ x] which is [f x]... but I don't like [@@]. *)
val ( $ ) : ('a -> 'b) -> 'a -> 'b
* { 1 Working with file name }
include module type of Filepath (** @closed *)
| null | https://raw.githubusercontent.com/xhtmlboi/yocaml/8b67d643da565993c2adf6530ea98149774445bd/lib/yocaml/util.mli | ocaml | * An invasive but probably useful tooling.
* {1 String util}
As I was not very serious... strings occupy a very large place in
Wordpress... so it is necessary to be able to work correctly with them.
* [f $ x] is [f @@ x] which is [f x]... but I don't like [@@].
* @closed |
val split_metadata : string -> string option * string
* { 1 Infix operators }
Even if sometimes , infix operators can seem unreadable ... the immoderate
use of Arrows has already made the code incomprehensible ... so why deprive
yourself ?
Even if sometimes, infix operators can seem unreadable... the immoderate
use of Arrows has already made the code incomprehensible... so why deprive
yourself? *)
val ( $ ) : ('a -> 'b) -> 'a -> 'b
* { 1 Working with file name }
|
f7e40dff385a45697875fa3b103d7d4f989daa0225be57d05419bb108f7744a6 | cmk/rings | Free.hs | {-# LANGUAGE Safe #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE NoImplicitPrelude #-}
# LANGUAGE RebindableSyntax #
{-# LANGUAGE TypeOperators #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ViewPatterns #
-- | < +module >
module Data.Semimodule.Free (
-- * Types
type (**), type (++)
, type Free
, type FreeModule
, type FreeSemimodule
, type FreeAlgebra
, type FreeUnital
, type FreeCoalgebra
, type FreeCounital
, type FreeBialgebra
-- * Coltor accessors and constructors
, at
, unit
, counit
, indexed
, tabulated
-- * Coltor operations
, (.*)
, (*.)
, (/^)
, (^/)
, (!*!)
, lerp
, dot
, outer
, quadrance
, projectL
, projectR
-- * Matrix accessors and constructors
, idx2
, row
, fromRows
, fromRow
, col
, fromCols
, fromCol
, diag
, codiag
, scalar
, identity
, transform
, Transform(..)
-- * Matrix operations
, (!#)
, (#!)
, (!#!)
, lcomp
, rcomp
, dicomp
, trace
, transpose
) where
import safe Control.Category
import safe Data.Bool
import safe Data.Functor.Apply
import safe Data.Functor.Compose
import safe Data.Functor.Rep hiding (tabulated)
import safe Data.Profunctor.Composition (eta)
import safe Data.Ring
import safe Data.Semiring
import safe Data.Semimodule
import safe Data.Semimodule.Algebra
import safe Data.Semimodule.Transform
import safe Prelude hiding (Num(..), Fractional(..), (.), id, init, negate, sum, product)
import safe qualified Control.Monad as M
-- >>> :set -XDataKinds
-- >>> import Data.Coltor.Sized
type FreeModule a f = (Free f, Bimodule a a (f a))
type FreeSemimodule a f = (Free f, Bisemimodule a a (f a))
-- | An algebra over a free module /f/^
--
-- Note that this is distinct from a < free algebra >.
--
type FreeAlgebra a f = (FreeSemimodule a f, Algebra a (Rep f))
-- | A unital algebra over a free semimodule /f/^
--
type FreeUnital a f = (FreeAlgebra a f, Unital a (Rep f))
-- | A coalgebra over a free semimodule /f/^
--
type FreeCoalgebra a f = (FreeSemimodule a f, Coalgebra a (Rep f))
-- | A counital coalgebra over a free semimodule /f/^
--
type FreeCounital a f = (FreeCoalgebra a f, Counital a (Rep f))
-- | A bialgebra over a free semimodule /f/^
--
type FreeBialgebra a f = (FreeAlgebra a f, FreeCoalgebra a f, Bialgebra a (Rep f))
-------------------------------------------------------------------------------
-- Coltors & Rowtors
-------------------------------------------------------------------------------
-- | Create a unit vector at an index.
--
> > > i = 4 : : Finite 5
-- >>> at i 1 :: Coltor 5 Double
Coltor [ 0.0,0.0,0.0,0.0,1.0 ]
--
> > > at E21 1 : : V2 Int
V2 1 0
> > > at E42 1 : : V4 Int
V4 0 1 0 0
--
at :: (Semiring a, Free f, Eq (Rep f)) => Rep f -> a -> f a
at i x = tabulate $ \j -> bool zero x (i == j)
# INLINE at #
-- | Retrieve the coefficient of a basis element
--
> > > idx E21 ( V2 1 2 )
1
--
idx :: Free f => Rep f -> f a -> a
idx = flip index
# INLINE idx #
-- | Insert an element into an algebra.
--
-- When the algebra is trivial this is equal to 'pureRep'.
--
> > > V4 1 2 3 4 ! * ! unit two : : V4 Int
V4 2 4 6 8
--
unit :: FreeUnital a f => a -> f a
unit = tabulate . unital
-- | Reduce a coalgebra over a free semimodule.
--
/Note/ : for the stock ' Counital ' instances ( e.g. ' E2 ' , ' Finite ' , etc ) this is summation .
--
> > > x = fromTuple ( 7 , 4 ) : : Coltor 2 Int
-- >>> counit x
11
--
counit :: FreeCounital a f => f a -> a
counit = counital . index
-- | Obtain a vector from an etaay of coefficients and a basis.
--
indexed :: FreeUnital a f => f a -> Col a (Rep f)
indexed = Col . index
-- | Obtain a covector from an etaay of coefficients and a basis.
--
> > > x = fromTuple ( 7 , 4 ) : : Coltor 2 Int
> > > y = fromTuple ( 1 , 2 ) : : Coltor 2 Int
-- >>> tabulated x !* index y :: Int
-- >>> tabulated (V2 7 4) !* index (V2 1 2) :: Int
11
--
tabulated :: FreeCounital a f => f a -> Row a (Rep f)
tabulated f = Row $ \k -> f `dot` tabulate k
-------------------------------------------------------------------------------
-- Coltor operations
-------------------------------------------------------------------------------
infixl 7 !*!
-- | Multiplication operator on an algebra over a free semimodule.
--
> > > E22 & ( index $ V2 1 2 ) ! * ! ( index $ V2 7 4 )
8
--
-- /Caution/ in general '!*!' needn't be commutative, nor associative.
--
(!*!) :: FreeAlgebra a f => f a -> f a -> f a
(!*!) x y = tabulate $ joined (\i j -> index x i * index y j)
infix 6 `dot`
-- | Inner (i.e. dot) product.
--
> > > 1 : + 2 ` dot ` 3 : + 4
11
--
See also ' Data.Semimodule.Transform.inner ' .
--
dot :: FreeCounital a f => f a -> f a -> a
dot x y = counit $ liftR2 (*) x y
# INLINE dot #
infix 6 `outer`
-- | Outer product.
--
> > > V2 1 1 ` outer ` V2 1 1
-- Compose (V2 (V2 1 1) (V2 1 1))
--
outer :: (Semiring a, Free f, Free g) => f a -> g a -> (f**g) a
outer x y = Compose $ fmap (\z-> fmap (*z) y) x
# INLINE outer #
-- | Squared /l2/ norm of a vector.
--
quadrance :: FreeCounital a f => f a -> a
quadrance = M.join dot
# INLINE quadrance #
-- | Project onto the left-hand component of a direct sum.
--
projectL :: (Free f, Free g) => (f++g) a -> f a
projectL fg = eta Left .# fg
# INLINE projectL #
-- | Project onto the right-hand component of a direct sum.
--
projectR :: (Free f, Free g) => (f++g) a -> g a
projectR fg = eta Right .# fg
{-# INLINE projectR #-}
-------------------------------------------------------------------------------
Matrix accessors and constructors
-------------------------------------------------------------------------------
-- | Obtain a linear transformation from a matrix.
--
-- @ ('!#') = ('.#') . 'transform' @
--
transform :: (Free f, FreeCounital a g) => (f**g) a -> Transform a (Rep f) (Rep g)
transform x = Transform $ \k -> index (x !# tabulate k)
-- | Retrieve an element of a matrix.
--
idx2 :: (Free f, Free g) => Rep f -> Rep g -> (f**g) a -> a
idx2 i j = idx i . col j
# INLINE idx2 #
-- | Retrieve a row of a matrix.
--
row :: Free f => Rep f -> (f**g) a -> g a
row i = idx i . getCompose
# INLINE row #
-- | Retrieve a column of a matrix.
--
col :: (Free f, Free g) => Rep g -> (f**g) a -> f a
col j = idx j . distributeRep . getCompose
# INLINE col #
-- | Obtain a matrix by repeating a row.
--
fromRow :: (Free f, Free g) => g a -> (f**g) a
fromRow g = eta snd .# g
# INLINE fromRow #
-- | Obtain a matrix from a collection of rows.
--
fromRows :: (Free f, Free g) => f (g a) -> (f**g) a
fromRows = Compose
# INLINE fromRows #
-- | Obtain a matrix by repeating a column.
--
fromCol :: (Free f, Free g) => f a -> (f**g) a
fromCol f = eta fst .# f
# INLINE fromCol #
-- | Obtain a matrix from a collection of columns.
--
fromCols :: (Free f, Free g) => g (f a) -> (f**g) a
fromCols = transpose . Compose
-- | Obtain a vector from a tensor.
--
-- @ 'diag' f = 'diagonal' '.#' f @
--
-- When the algebra is trivial we have:
--
-- @ 'diag' f = 'tabulate' $ 'joined' $ 'index' . 'index' ('getCompose' f) @
--
diag :: FreeAlgebra a f => (f**f) a -> f a
diag f = diagonal .# f
-- | Obtain a tensor from a vector.
--
@ ' ' f = ' codiagonal ' ' . # ' f @
--
-- When the coalgebra is trivial we have:
--
@ ' ' = ' flip ' ' bindRep ' ' i d ' ' . ' ' getCompose ' @
--
codiag :: FreeCoalgebra a f => f a -> (f**f) a
codiag f = codiagonal .# f
-- | Obtain a < #Scalar_matrix scalar matrix > from a scalar.
--
scalar :: FreeCoalgebra a f => a -> (f**f) a
scalar = codiag . pureRep
-- | The identity matrix.
--
identity :: FreeCoalgebra a f => (f**f) a
identity = scalar one
# INLINE identity #
-------------------------------------------------------------------------------
-- Matrix operators
-------------------------------------------------------------------------------
infixr 7 !#
-- | Multiply a matrix on the right by a column vector.
--
-- @ ('!#') = ('.#') . 'transform' @
--
(!#) :: (Free f, FreeCounital a g) => (f**g) a -> g a -> f a
x !# y = tabulate (\i -> row i x `dot` y)
{-# INLINE (!#) #-}
infixl 7 #!
-- | Multiply a matrix on the left by a row vector.
--
(#!) :: (Free g, FreeCounital a f) => f a -> (f**g) a -> g a
x #! y = tabulate (\j -> x `dot` col j y)
# INLINE ( # ! ) #
infixr 7 !#!
| Multiply two matrices .
--
(!#!) :: (Free f, Free h, FreeCounital a g) => (f**g) a -> (g**h) a -> (f**h) a
(!#!) x y = tabulate (\(i,j) -> row i x `dot` col j y)
{-# INLINE (!#!) #-}
-- | Trace of an endomorphism.
--
trace :: FreeBialgebra a f => (f**f) a -> a
trace = counit . diag
# INLINE trace #
-- | Transpose a matrix.
--
transpose :: (Free f, Free g) => (f**g) a -> (g**f) a
transpose fg = braid .# fg
{-# INLINE transpose #-}
| null | https://raw.githubusercontent.com/cmk/rings/f1203d693d0069169582d478c663159de9416d87/src/Data/Semimodule/Free.hs | haskell | # LANGUAGE Safe #
# LANGUAGE ConstraintKinds #
# LANGUAGE DefaultSignatures #
# LANGUAGE DeriveGeneric #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TypeOperators #
# LANGUAGE RankNTypes #
| < +module >
* Types
* Coltor accessors and constructors
* Coltor operations
* Matrix accessors and constructors
* Matrix operations
>>> :set -XDataKinds
>>> import Data.Coltor.Sized
| An algebra over a free module /f/^
Note that this is distinct from a < free algebra >.
| A unital algebra over a free semimodule /f/^
| A coalgebra over a free semimodule /f/^
| A counital coalgebra over a free semimodule /f/^
| A bialgebra over a free semimodule /f/^
-----------------------------------------------------------------------------
Coltors & Rowtors
-----------------------------------------------------------------------------
| Create a unit vector at an index.
>>> at i 1 :: Coltor 5 Double
| Retrieve the coefficient of a basis element
| Insert an element into an algebra.
When the algebra is trivial this is equal to 'pureRep'.
| Reduce a coalgebra over a free semimodule.
>>> counit x
| Obtain a vector from an etaay of coefficients and a basis.
| Obtain a covector from an etaay of coefficients and a basis.
>>> tabulated x !* index y :: Int
>>> tabulated (V2 7 4) !* index (V2 1 2) :: Int
-----------------------------------------------------------------------------
Coltor operations
-----------------------------------------------------------------------------
| Multiplication operator on an algebra over a free semimodule.
/Caution/ in general '!*!' needn't be commutative, nor associative.
| Inner (i.e. dot) product.
| Outer product.
Compose (V2 (V2 1 1) (V2 1 1))
| Squared /l2/ norm of a vector.
| Project onto the left-hand component of a direct sum.
| Project onto the right-hand component of a direct sum.
# INLINE projectR #
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Obtain a linear transformation from a matrix.
@ ('!#') = ('.#') . 'transform' @
| Retrieve an element of a matrix.
| Retrieve a row of a matrix.
| Retrieve a column of a matrix.
| Obtain a matrix by repeating a row.
| Obtain a matrix from a collection of rows.
| Obtain a matrix by repeating a column.
| Obtain a matrix from a collection of columns.
| Obtain a vector from a tensor.
@ 'diag' f = 'diagonal' '.#' f @
When the algebra is trivial we have:
@ 'diag' f = 'tabulate' $ 'joined' $ 'index' . 'index' ('getCompose' f) @
| Obtain a tensor from a vector.
When the coalgebra is trivial we have:
| Obtain a < #Scalar_matrix scalar matrix > from a scalar.
| The identity matrix.
-----------------------------------------------------------------------------
Matrix operators
-----------------------------------------------------------------------------
| Multiply a matrix on the right by a column vector.
@ ('!#') = ('.#') . 'transform' @
# INLINE (!#) #
| Multiply a matrix on the left by a row vector.
# INLINE (!#!) #
| Trace of an endomorphism.
| Transpose a matrix.
# INLINE transpose # | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE RebindableSyntax #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
module Data.Semimodule.Free (
type (**), type (++)
, type Free
, type FreeModule
, type FreeSemimodule
, type FreeAlgebra
, type FreeUnital
, type FreeCoalgebra
, type FreeCounital
, type FreeBialgebra
, at
, unit
, counit
, indexed
, tabulated
, (.*)
, (*.)
, (/^)
, (^/)
, (!*!)
, lerp
, dot
, outer
, quadrance
, projectL
, projectR
, idx2
, row
, fromRows
, fromRow
, col
, fromCols
, fromCol
, diag
, codiag
, scalar
, identity
, transform
, Transform(..)
, (!#)
, (#!)
, (!#!)
, lcomp
, rcomp
, dicomp
, trace
, transpose
) where
import safe Control.Category
import safe Data.Bool
import safe Data.Functor.Apply
import safe Data.Functor.Compose
import safe Data.Functor.Rep hiding (tabulated)
import safe Data.Profunctor.Composition (eta)
import safe Data.Ring
import safe Data.Semiring
import safe Data.Semimodule
import safe Data.Semimodule.Algebra
import safe Data.Semimodule.Transform
import safe Prelude hiding (Num(..), Fractional(..), (.), id, init, negate, sum, product)
import safe qualified Control.Monad as M
type FreeModule a f = (Free f, Bimodule a a (f a))
type FreeSemimodule a f = (Free f, Bisemimodule a a (f a))
type FreeAlgebra a f = (FreeSemimodule a f, Algebra a (Rep f))
type FreeUnital a f = (FreeAlgebra a f, Unital a (Rep f))
type FreeCoalgebra a f = (FreeSemimodule a f, Coalgebra a (Rep f))
type FreeCounital a f = (FreeCoalgebra a f, Counital a (Rep f))
type FreeBialgebra a f = (FreeAlgebra a f, FreeCoalgebra a f, Bialgebra a (Rep f))
> > > i = 4 : : Finite 5
Coltor [ 0.0,0.0,0.0,0.0,1.0 ]
> > > at E21 1 : : V2 Int
V2 1 0
> > > at E42 1 : : V4 Int
V4 0 1 0 0
at :: (Semiring a, Free f, Eq (Rep f)) => Rep f -> a -> f a
at i x = tabulate $ \j -> bool zero x (i == j)
# INLINE at #
> > > idx E21 ( V2 1 2 )
1
idx :: Free f => Rep f -> f a -> a
idx = flip index
# INLINE idx #
> > > V4 1 2 3 4 ! * ! unit two : : V4 Int
V4 2 4 6 8
unit :: FreeUnital a f => a -> f a
unit = tabulate . unital
/Note/ : for the stock ' Counital ' instances ( e.g. ' E2 ' , ' Finite ' , etc ) this is summation .
> > > x = fromTuple ( 7 , 4 ) : : Coltor 2 Int
11
counit :: FreeCounital a f => f a -> a
counit = counital . index
indexed :: FreeUnital a f => f a -> Col a (Rep f)
indexed = Col . index
> > > x = fromTuple ( 7 , 4 ) : : Coltor 2 Int
> > > y = fromTuple ( 1 , 2 ) : : Coltor 2 Int
11
tabulated :: FreeCounital a f => f a -> Row a (Rep f)
tabulated f = Row $ \k -> f `dot` tabulate k
infixl 7 !*!
> > > E22 & ( index $ V2 1 2 ) ! * ! ( index $ V2 7 4 )
8
(!*!) :: FreeAlgebra a f => f a -> f a -> f a
(!*!) x y = tabulate $ joined (\i j -> index x i * index y j)
infix 6 `dot`
> > > 1 : + 2 ` dot ` 3 : + 4
11
See also ' Data.Semimodule.Transform.inner ' .
dot :: FreeCounital a f => f a -> f a -> a
dot x y = counit $ liftR2 (*) x y
# INLINE dot #
infix 6 `outer`
> > > V2 1 1 ` outer ` V2 1 1
outer :: (Semiring a, Free f, Free g) => f a -> g a -> (f**g) a
outer x y = Compose $ fmap (\z-> fmap (*z) y) x
# INLINE outer #
quadrance :: FreeCounital a f => f a -> a
quadrance = M.join dot
# INLINE quadrance #
projectL :: (Free f, Free g) => (f++g) a -> f a
projectL fg = eta Left .# fg
# INLINE projectL #
projectR :: (Free f, Free g) => (f++g) a -> g a
projectR fg = eta Right .# fg
Matrix accessors and constructors
transform :: (Free f, FreeCounital a g) => (f**g) a -> Transform a (Rep f) (Rep g)
transform x = Transform $ \k -> index (x !# tabulate k)
idx2 :: (Free f, Free g) => Rep f -> Rep g -> (f**g) a -> a
idx2 i j = idx i . col j
# INLINE idx2 #
row :: Free f => Rep f -> (f**g) a -> g a
row i = idx i . getCompose
# INLINE row #
col :: (Free f, Free g) => Rep g -> (f**g) a -> f a
col j = idx j . distributeRep . getCompose
# INLINE col #
fromRow :: (Free f, Free g) => g a -> (f**g) a
fromRow g = eta snd .# g
# INLINE fromRow #
fromRows :: (Free f, Free g) => f (g a) -> (f**g) a
fromRows = Compose
# INLINE fromRows #
fromCol :: (Free f, Free g) => f a -> (f**g) a
fromCol f = eta fst .# f
# INLINE fromCol #
fromCols :: (Free f, Free g) => g (f a) -> (f**g) a
fromCols = transpose . Compose
diag :: FreeAlgebra a f => (f**f) a -> f a
diag f = diagonal .# f
@ ' ' f = ' codiagonal ' ' . # ' f @
@ ' ' = ' flip ' ' bindRep ' ' i d ' ' . ' ' getCompose ' @
codiag :: FreeCoalgebra a f => f a -> (f**f) a
codiag f = codiagonal .# f
scalar :: FreeCoalgebra a f => a -> (f**f) a
scalar = codiag . pureRep
identity :: FreeCoalgebra a f => (f**f) a
identity = scalar one
# INLINE identity #
infixr 7 !#
(!#) :: (Free f, FreeCounital a g) => (f**g) a -> g a -> f a
x !# y = tabulate (\i -> row i x `dot` y)
infixl 7 #!
(#!) :: (Free g, FreeCounital a f) => f a -> (f**g) a -> g a
x #! y = tabulate (\j -> x `dot` col j y)
# INLINE ( # ! ) #
infixr 7 !#!
| Multiply two matrices .
(!#!) :: (Free f, Free h, FreeCounital a g) => (f**g) a -> (g**h) a -> (f**h) a
(!#!) x y = tabulate (\(i,j) -> row i x `dot` col j y)
trace :: FreeBialgebra a f => (f**f) a -> a
trace = counit . diag
# INLINE trace #
transpose :: (Free f, Free g) => (f**g) a -> (g**f) a
transpose fg = braid .# fg
|
587159d3e03f03c02685eda529569a3319606944321416351b53e5cf077e50ce | tranma/shitty-complexity | Plot.hs | # LANGUAGE FlexibleContexts #
module Test.BigOh.Plot where
import Control.Arrow
import Control.Monad
import Control.Monad.ST
import qualified Data.Array.MArray as AM
import Data.Array.ST
import qualified Data.Array.ST as AS
import Data.Array.Unboxed
import qualified Data.List as L
import Data.Ord
import Data.STRef
import System.Console.ANSI
import System.Console.Ansigraph
type Range = (Double, Double)
data Plot
= Plot
{ plotWidth :: Int
, plotHeight :: Int
, plotPoints :: [(Double, Double)] }
deriving Show
graphPoints :: [(Double, Double)] -> IO ()
graphPoints points
| ps <- shiftUp points
= mapM_ (posgraph . fmap (fromIntegral :: Int -> Double))
(plotToGraphs $ Plot 32 64 ps)
-- | Translate all points upward so that no y-coordinate is negative,
--   leaving the data unchanged when the minimum y is already >= 0.
--   The empty list is returned as-is, which avoids calling
--   'L.minimumBy' on [] (a runtime error in the original version).
shiftUp :: [(Double, Double)] -> [(Double, Double)]
shiftUp [] = []
shiftUp ps
  | minY < 0  = fmap (second (+ abs minY)) ps
  | otherwise = ps
  where
    -- smallest y-coordinate among the points
    minY = snd (L.minimumBy (comparing snd) ps)
plotToGraphs :: Plot -> [[Int]]
plotToGraphs p@(Plot _ height _)
= let ys = plotToYs p
slice n = map (\v -> ((v - (n*8)) `max` 0) `min` 8) ys
in reverse $ map slice [0..height `div` 8]
plotToYs :: Plot -> [Int]
plotToYs = grabYs . plotToArray
-- | Grab the highest y for each x.
--   For every column x of the rendered character grid, scan the rows
--   starting from the largest row index and report the first non-blank
--   cell, converted to a height above the last row as (yn - y').
--   NOTE(review): plotToArray stores rows as (height - round y), i.e.
--   row index 0 is the top of the picture — confirm before reuse.
grabYs :: UArray (Int, Int) Char -> [Int]
grabYs a
-- bounds of the 2-d grid: x in [x0..xn], y in [y0..yn]
= let ((x0, y0), (xn, yn)) = bounds a
-- row indices in descending order, so the scan starts at yn
ugh = reverse [y0..yn]
in flip fmap [x0..xn]
-- drop blank cells; the head of the remainder is the first mark found
$ \x -> case dropWhile (\y -> (a ! (x,y)) == ' ') ugh of
[] -> 0
(y':_) -> yn - y'
plotToArray :: Plot -> UArray (Int, Int) Char
plotToArray (Plot width height points)
= AS.runSTUArray
$ do let maxX = L.maximum $ fmap fst points
maxY = L.maximum $ fmap snd points
scaleX = maxX / fromIntegral width
scaleY = maxY / fromIntegral height
scaled = fmap ((/scaleX) *** (/scaleY)) points
a <- AM.newArray ((0,0), (width, height)) ' '
let scaled' = fmap go scaled
let pairs = zip scaled' (drop 1 scaled')
forM_ pairs $ uncurry (bresenham a 'x')
return a
where
go (x,y) = (round x, height - round y)
printArray :: UArray (Int, Int) Char -> IO ()
printArray a
= do let (minB, maxB) = bounds a
row i = [ a ! (x, i) | x <- [fst minB .. snd maxB] ]
thing = fmap row [snd minB .. snd maxB]
mapM_ putStrLn thing
-- | Rasterise the line segment from (xa,ya) to (xb,yb) into the mutable
--   character grid @vec@, writing @val@ into every covered cell, using
--   the classic integer-only Bresenham algorithm.
--   Steep segments (|dy| > |dx|) are drawn with x and y transposed, and
--   the endpoints are reordered so the main loop always walks x1 -> x2
--   left to right.
bresenham
:: STUArray s (Int, Int) Char -> Char -> (Int, Int) -> (Int, Int) -> ST s ()
bresenham vec val (xa, ya) (xb, yb)
= do yV <- var y1
-- accumulated error term, seeded with deltax/2 as in the standard form
errorV <- var $ deltax `div` 2
forM_ [x1 .. x2] (\x -> do
y <- get yV
-- undo the steep-case transposition when actually plotting
draw $ if steep then (y, x) else (x, y)
mutate errorV $ subtract deltay
err <- get errorV
-- error dropped below zero: step y and recharge the error term
when (err < 0) (do
mutate yV (+ ystep)
mutate errorV (+ deltax)))
where steep = abs (yb - ya) > abs (xb - xa)
-- transpose the segment when it is steep
(xa', ya', xb', yb')
= if steep
then (ya, xa, yb, xb)
else (xa, ya, xb, yb)
-- order the endpoints left-to-right
(x1, y1, x2, y2)
= if xa' > xb'
then (xb', yb', xa', ya')
else (xa', ya', xb', yb')
deltax = x2 - x1
deltay = abs $ y2 - y1
ystep = if y1 < y2 then 1 else -1
-- short aliases over the STRef primitives
var = Data.STRef.newSTRef
get = Data.STRef.readSTRef
mutate = Data.STRef.modifySTRef
draw (x,y) = AM.writeArray vec (x,y) val
--------------------------------------------------------------------------------
withColor :: Color -> a -> String -> IO a
withColor c r x = do
setSGR [SetColor Foreground Vivid c]
putStrLn x
setSGR [Reset]
return r
passed, failed, inconclusive :: String -> IO Bool
passed = withColor Green True
failed = withColor Red False
inconclusive = withColor Yellow False
header = withColor Blue ()
-- | Render an 'Int' using Unicode superscript characters,
--   e.g. @superscript 12 == "¹²"@.
--   A leading minus sign is rendered as the superscript minus '⁻'
--   (the original left an ASCII '-' amid superscript digits).
superscript :: Int -> String
superscript = map go . show
  where
    go '0' = '⁰'
    go '1' = '¹'
    go '2' = '²'
    go '3' = '³'
    go '4' = '⁴'
    go '5' = '⁵'
    go '6' = '⁶'
    go '7' = '⁷'
    go '8' = '⁸'
    go '9' = '⁹'
    go '-' = '⁻'
    -- any other character (shouldn't occur for Int) passes through
    go x   = x
| null | https://raw.githubusercontent.com/tranma/shitty-complexity/573815a8af9d5f3cda3c96e22d59b71ec9f29de9/src/Test/BigOh/Plot.hs | haskell | | Grab the highest y for each x
------------------------------------------------------------------------------ | # LANGUAGE FlexibleContexts #
module Test.BigOh.Plot where
import Control.Arrow
import Control.Monad
import Control.Monad.ST
import qualified Data.Array.MArray as AM
import Data.Array.ST
import qualified Data.Array.ST as AS
import Data.Array.Unboxed
import qualified Data.List as L
import Data.Ord
import Data.STRef
import System.Console.ANSI
import System.Console.Ansigraph
type Range = (Double, Double)
data Plot
= Plot
{ plotWidth :: Int
, plotHeight :: Int
, plotPoints :: [(Double, Double)] }
deriving Show
graphPoints :: [(Double, Double)] -> IO ()
graphPoints points
| ps <- shiftUp points
= mapM_ (posgraph . fmap (fromIntegral :: Int -> Double))
(plotToGraphs $ Plot 32 64 ps)
shiftUp :: [(Double, Double)] -> [(Double, Double)]
shiftUp ps
= let minY = snd $ L.minimumBy (comparing snd) ps
in if minY < 0
then fmap (second (+ abs minY)) ps
else ps
plotToGraphs :: Plot -> [[Int]]
plotToGraphs p@(Plot _ height _)
= let ys = plotToYs p
slice n = map (\v -> ((v - (n*8)) `max` 0) `min` 8) ys
in reverse $ map slice [0..height `div` 8]
plotToYs :: Plot -> [Int]
plotToYs = grabYs . plotToArray
grabYs :: UArray (Int, Int) Char -> [Int]
grabYs a
= let ((x0, y0), (xn, yn)) = bounds a
ugh = reverse [y0..yn]
in flip fmap [x0..xn]
$ \x -> case dropWhile (\y -> (a ! (x,y)) == ' ') ugh of
[] -> 0
(y':_) -> yn - y'
plotToArray :: Plot -> UArray (Int, Int) Char
plotToArray (Plot width height points)
= AS.runSTUArray
$ do let maxX = L.maximum $ fmap fst points
maxY = L.maximum $ fmap snd points
scaleX = maxX / fromIntegral width
scaleY = maxY / fromIntegral height
scaled = fmap ((/scaleX) *** (/scaleY)) points
a <- AM.newArray ((0,0), (width, height)) ' '
let scaled' = fmap go scaled
let pairs = zip scaled' (drop 1 scaled')
forM_ pairs $ uncurry (bresenham a 'x')
return a
where
go (x,y) = (round x, height - round y)
printArray :: UArray (Int, Int) Char -> IO ()
printArray a
= do let (minB, maxB) = bounds a
row i = [ a ! (x, i) | x <- [fst minB .. snd maxB] ]
thing = fmap row [snd minB .. snd maxB]
mapM_ putStrLn thing
bresenham
:: STUArray s (Int, Int) Char -> Char -> (Int, Int) -> (Int, Int) -> ST s ()
bresenham vec val (xa, ya) (xb, yb)
= do yV <- var y1
errorV <- var $ deltax `div` 2
forM_ [x1 .. x2] (\x -> do
y <- get yV
draw $ if steep then (y, x) else (x, y)
mutate errorV $ subtract deltay
err <- get errorV
when (err < 0) (do
mutate yV (+ ystep)
mutate errorV (+ deltax)))
where steep = abs (yb - ya) > abs (xb - xa)
(xa', ya', xb', yb')
= if steep
then (ya, xa, yb, xb)
else (xa, ya, xb, yb)
(x1, y1, x2, y2)
= if xa' > xb'
then (xb', yb', xa', ya')
else (xa', ya', xb', yb')
deltax = x2 - x1
deltay = abs $ y2 - y1
ystep = if y1 < y2 then 1 else -1
var = Data.STRef.newSTRef
get = Data.STRef.readSTRef
mutate = Data.STRef.modifySTRef
draw (x,y) = AM.writeArray vec (x,y) val
withColor :: Color -> a -> String -> IO a
withColor c r x = do
setSGR [SetColor Foreground Vivid c]
putStrLn x
setSGR [Reset]
return r
passed, failed, inconclusive :: String -> IO Bool
passed = withColor Green True
failed = withColor Red False
inconclusive = withColor Yellow False
header = withColor Blue ()
superscript :: Int -> String
superscript = map go . show
where
go '0' = '⁰'
go '1' = '¹'
go '2' = '²'
go '3' = '³'
go '4' = '⁴'
go '5' = '⁵'
go '6' = '⁶'
go '7' = '⁷'
go '8' = '⁸'
go '9' = '⁹'
go x = x
|
313a05fec5ad0a0953b0fd8fd9455ca081f580792c89b6caab219130f1f0f6a5 | noschinl/cyp | Tasty.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE LambdaCase #
module Test.Info2.Cyp.Tasty (
CypTest(..)
, findTests
) where
import Data.List
import Data.Tagged (Tagged(..))
import Data.Typeable (Typeable)
import qualified Test.Info2.Cyp as Cyp
import Test.Info2.Cyp.Util
import Test.Tasty
import Test.Tasty.Providers
import Text.PrettyPrint (empty, render, text, ($+$))
import System.Directory
import System.FilePath
data CypTest = CypTest { theory :: FilePath
, proof :: FilePath
} deriving Typeable
instance IsTest CypTest where
testOptions = Tagged []
run _ t _ = either (testFailed . render) (const $ testPassed "Proof is valid") <$> Cyp.proofFile (theory t) (proof t)
data NegCypTest = NegCypTest FilePath CypTest deriving Typeable
instance IsTest NegCypTest where
testOptions = Tagged []
run _ (NegCypTest expected t) _ =
Cyp.proofFile (theory t) (proof t) >>= \case
Left failure -> do
contents <- readFile expected
let doc = foldr ($+$) empty $ map text $ lines contents
return $
if contents /= render failure then
testFailed $ render $
text "Proof is invalid as expected, but with the wrong error message" `indent`
(text "Expected failure:" `indent` doc $+$ text "Actual failure:" `indent` failure)
else
testPassed "Proof is invalid as expected"
Right () ->
return $ testFailed "Proof is valid, but expected failure"
-- | Discover cyp test cases under @path@ and assemble them into a
--   'TestTree'.  Expected layout: @path/pos/<case>/@ holds proofs that
--   must succeed and @path/neg/<case>/@ proofs that must fail; each
--   case directory contains a theory file @cthy@ and a proof file
--   @cprf@, and negative cases additionally a @cout@ file with the
--   expected error output.
findTests :: FilePath -> IO TestTree
findTests path = do
allPos <- findAll pos
allNeg <- findAll neg
return $ testGroup ("Tests for " ++ show path)
[ testGroup "Valid proofs" $ map (mkPos pos) allPos
, testGroup "Invalid proofs" $ map (mkNeg neg) allNeg
]
where pos = path </> "pos"
neg = path </> "neg"
-- list directory entries, skipping ".", ".." and other dot-entries
findAll path =
filter (not . isPrefixOf ".") <$> getDirectoryContents path
mkTest root item = CypTest { theory = root </> item </> "cthy", proof = root </> item </> "cprf" }
mkNeg root item = singleTest item $ NegCypTest (root </> item </> "cout") $ mkTest root item
mkPos root item = singleTest item $ mkTest root item
| null | https://raw.githubusercontent.com/noschinl/cyp/48e840ef0b1cf358eb3c2c6a42589b005c315164/src/Test/Info2/Cyp/Tasty.hs | haskell | # LANGUAGE DeriveDataTypeable # | # LANGUAGE LambdaCase #
module Test.Info2.Cyp.Tasty (
CypTest(..)
, findTests
) where
import Data.List
import Data.Tagged (Tagged(..))
import Data.Typeable (Typeable)
import qualified Test.Info2.Cyp as Cyp
import Test.Info2.Cyp.Util
import Test.Tasty
import Test.Tasty.Providers
import Text.PrettyPrint (empty, render, text, ($+$))
import System.Directory
import System.FilePath
data CypTest = CypTest { theory :: FilePath
, proof :: FilePath
} deriving Typeable
instance IsTest CypTest where
testOptions = Tagged []
run _ t _ = either (testFailed . render) (const $ testPassed "Proof is valid") <$> Cyp.proofFile (theory t) (proof t)
data NegCypTest = NegCypTest FilePath CypTest deriving Typeable
instance IsTest NegCypTest where
testOptions = Tagged []
run _ (NegCypTest expected t) _ =
Cyp.proofFile (theory t) (proof t) >>= \case
Left failure -> do
contents <- readFile expected
let doc = foldr ($+$) empty $ map text $ lines contents
return $
if contents /= render failure then
testFailed $ render $
text "Proof is invalid as expected, but with the wrong error message" `indent`
(text "Expected failure:" `indent` doc $+$ text "Actual failure:" `indent` failure)
else
testPassed "Proof is invalid as expected"
Right () ->
return $ testFailed "Proof is valid, but expected failure"
findTests :: FilePath -> IO TestTree
findTests path = do
allPos <- findAll pos
allNeg <- findAll neg
return $ testGroup ("Tests for " ++ show path)
[ testGroup "Valid proofs" $ map (mkPos pos) allPos
, testGroup "Invalid proofs" $ map (mkNeg neg) allNeg
]
where pos = path </> "pos"
neg = path </> "neg"
findAll path =
filter (not . isPrefixOf ".") <$> getDirectoryContents path
mkTest root item = CypTest { theory = root </> item </> "cthy", proof = root </> item </> "cprf" }
mkNeg root item = singleTest item $ NegCypTest (root </> item </> "cout") $ mkTest root item
mkPos root item = singleTest item $ mkTest root item
|
686bde6efc0e4676203068f9b9f1737176fce7919fdd640205ca52b109c019e7 | PEZ/rich4clojure | problem_068.clj | (ns rich4clojure.elementary.problem-068
(:require [hyperfiddle.rcf :refer [tests]]))
;; = Recurring Theme =
By 4Clojure user :
;; Difficulty: Elementary
;; Tags: [recursion]
;;
Clojure only has one non - stack - consuming looping
;; construct: recur. Either a function or a loop can be
;; used as the recursion point. Either way, recur rebinds
;; the bindings of the recursion point to the values it is
passed . Recur must be called from the tail - position ,
;; and calling it elsewhere will result in an error.
(def __ :tests-will-fail)
(comment
)
(tests
__ :=
(loop [x 5
result []]
(if (> x 0)
(recur (dec x) (conj result (+ 2 x)))
result)))
;; Share your solution, and/or check how others did it:
| null | https://raw.githubusercontent.com/PEZ/rich4clojure/28ea575ede8677f3a97437a646cdb3376a28ebc9/src/rich4clojure/elementary/problem_068.clj | clojure | = Recurring Theme =
Difficulty: Elementary
Tags: [recursion]
construct: recur. Either a function or a loop can be
used as the recursion point. Either way, recur rebinds
the bindings of the recursion point to the values it is
and calling it elsewhere will result in an error.
Share your solution, and/or check how others did it: | (ns rich4clojure.elementary.problem-068
(:require [hyperfiddle.rcf :refer [tests]]))
By 4Clojure user :
Clojure only has one non - stack - consuming looping
passed . Recur must be called from the tail - position ,
(def __ :tests-will-fail)
(comment
)
(tests
__ :=
(loop [x 5
result []]
(if (> x 0)
(recur (dec x) (conj result (+ 2 x)))
result)))
|
5af2ff1ffdd3672d17c5bf048229d9a116e3e99b8f35a9350981085261cd6f9c | TokTok/hs-toxcore | Result.hs | {-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE Safe #-}
# LANGUAGE StrictData #
module Data.Result
( Result (..)
) where
import Control.Applicative (Alternative (..))
-- | The result of a computation that can fail with a textual message:
--   essentially @Either String@ with self-documenting constructor names.
data Result a
= Success a
| Failure String
deriving (Read, Show, Eq, Functor, Foldable, Traversable)
-- | 'Failure' is absorbing: applying anything to a failure keeps the
--   first failure's message.
instance Applicative Result where
pure = Success
Success f <*> x = fmap f x
Failure msg <*> _ = Failure msg
-- | '(<|>)' keeps the first 'Success'; otherwise the right operand wins.
instance Alternative Result where
empty = Failure "empty alternative"
s@Success {} <|> _ = s
_ <|> r = r
-- | Failures short-circuit the remainder of the computation.
instance Monad Result where
return = Success
Success x >>= f = f x
Failure msg >>= _ = Failure msg
-- | 'fail' records the message in a 'Failure' instead of throwing.
instance MonadFail Result where
fail = Failure
| null | https://raw.githubusercontent.com/TokTok/hs-toxcore/647c3070cab29aee3d795a456be534d77c167d81/test/Data/Result.hs | haskell | # LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE DeriveTraversable #
# LANGUAGE Safe # | # LANGUAGE StrictData #
module Data.Result
( Result (..)
) where
import Control.Applicative (Alternative (..))
data Result a
= Success a
| Failure String
deriving (Read, Show, Eq, Functor, Foldable, Traversable)
instance Applicative Result where
pure = Success
Success f <*> x = fmap f x
Failure msg <*> _ = Failure msg
instance Alternative Result where
empty = Failure "empty alternative"
s@Success {} <|> _ = s
_ <|> r = r
instance Monad Result where
return = Success
Success x >>= f = f x
Failure msg >>= _ = Failure msg
instance MonadFail Result where
fail = Failure
|
6315b4541c181c5264d2b2a9eea83e38d3f8e1ec2a2f43b31708cd1f3a43ee7e | logicshan/pj-lester-book | Template2.hs | module Template2 where
import Language
import Utils
runProg :: [Char] -> [Char]
compile :: CoreProgram -> TiState
eval :: TiState -> [TiState]
showResults :: [TiState] -> [Char]
runProg = showResults . eval . compile . parse
type TiState = (TiStack, TiDump, TiHeap, TiGlobals, TiStats)
type TiStack = [Addr]
data TiDump = DummyTiDump
initialTiDump = DummyTiDump
type TiHeap = Heap Node
data Node = NAp Addr Addr -- Application
| NSupercomb Name [Name] CoreExpr -- Supercombinator
| NNum Int -- A number
type TiGlobals = ASSOC Name Addr
-- | Runtime statistics gathered by the template-instantiation machine.
--   For now this is just a step counter.
type TiStats = Int

-- | Statistics for a machine that has not taken any steps yet.
tiStatInitial :: TiStats
tiStatInitial = 0

-- | Record one more evaluation step.
tiStatIncSteps :: TiStats -> TiStats
tiStatIncSteps = (+ 1)

-- | Read back the number of steps taken.
tiStatGetSteps :: TiStats -> Int
tiStatGetSteps = id
applyToStats :: (TiStats -> TiStats) -> TiState -> TiState
applyToStats stats_fun (stack, dump, heap, sc_defs, stats)
= (stack, dump, heap, sc_defs, stats_fun stats)
compile program
= (initial_stack, initialTiDump, initial_heap, globals, tiStatInitial)
where
sc_defs = program ++ preludeDefs ++ extraPreludeDefs
(initial_heap, globals) = buildInitialHeap sc_defs
initial_stack = [address_of_main]
address_of_main = aLookup globals "main" (error "main is not defined")
extraPreludeDefs = []
buildInitialHeap :: [CoreScDefn] -> (TiHeap, TiGlobals)
buildInitialHeap sc_defs = mapAccuml allocateSc hInitial sc_defs
allocateSc :: TiHeap -> CoreScDefn -> (TiHeap, (Name, Addr))
allocateSc heap (name, args, body)
= (heap', (name, addr))
where
(heap', addr) = hAlloc heap (NSupercomb name args body)
eval state = state : rest_states
where
rest_states | tiFinal state = []
| otherwise = eval next_state
next_state = doAdmin (step state)
doAdmin :: TiState -> TiState
doAdmin state = applyToStats tiStatIncSteps state
-- | The machine halts when the stack holds exactly one address and that
--   address points at a data node (a number, in this Mark 1 machine).
--   An empty stack is an error; a stack with more than one item means
--   evaluation must continue.
tiFinal :: TiState -> Bool
tiFinal ([sole_addr], dump, heap, globals, stats)
  = isDataNode (hLookup heap sole_addr)
tiFinal ([], dump, heap, globals, stats) = error "Empty stack!"
tiFinal state = False  -- Stack contains more than one item
isDataNode :: Node -> Bool
isDataNode (NNum n) = True
isDataNode node = False
step :: TiState -> TiState
step state
= dispatch (hLookup heap (hd stack))
where
(stack, dump, heap, globals, stats) = state
dispatch (NNum n) = numStep state n
dispatch (NAp a1 a2) = apStep state a1 a2
dispatch (NSupercomb sc args body) = scStep state sc args body
numStep :: TiState -> Int -> TiState
numStep state n = error "Number applied as a function!"
apStep :: TiState -> Addr -> Addr -> TiState
apStep (stack, dump, heap, globals, stats) a1 a2
= (a1 : stack, dump, heap, globals, stats)
scStep :: TiState -> Name -> [Name] -> CoreExpr -> TiState
scStep (stack, dump, heap, globals, stats) sc_name arg_names body
= (new_stack, dump, new_heap, globals, stats)
where
new_stack = result_addr : (drop (length arg_names+1) stack)
(new_heap, result_addr) = instantiate body heap env
env = arg_bindings ++ globals
arg_bindings = zip2 arg_names (getargs heap stack)
getargs :: TiHeap -> TiStack -> [Addr]
getargs heap (sc:stack)
= map get_arg stack
where get_arg addr = arg where (NAp fun arg) = hLookup heap addr
instantiate :: CoreExpr -- Body of supercombinator
Heap before instantiation
-> ASSOC Name Addr -- Association of names to addresses
Heap after instantiation , and
-- address of root of instance
instantiate (ENum n) heap env = hAlloc heap (NNum n)
instantiate (EAp e1 e2) heap env
= hAlloc heap2 (NAp a1 a2) where (heap1, a1) = instantiate e1 heap env
(heap2, a2) = instantiate e2 heap1 env
instantiate (EVar v) heap env
= (heap, aLookup env v (error ("Undefined name " ++ show v)))
instantiate (EConstr tag arity) heap env
= instantiateConstr tag arity heap env
instantiate (ELet isrec defs body) heap env
= instantiateLet isrec defs body heap env
instantiate (ECase e alts) heap env = error "Can't instantiate case exprs"
instantiateLet isrec defs body heap old_env
= instantiate body heap1 new_env
where
(heap1, extra_bindings) = mapAccuml instantiate_rhs heap defs
new_env = extra_bindings ++ old_env
rhs_env | isrec = new_env
| otherwise = old_env
instantiate_rhs heap (name, rhs)
= (heap1, (name, addr))
where
(heap1, addr) = instantiate rhs heap rhs_env
instantiateConstr tag arity heap env
= error "Can't instantiate constructors yet"
showResults states
= iDisplay (iConcat [ iLayn (map showState states),
showStats (last states)
])
showState :: TiState -> Iseq
showState (stack, dump, heap, globals, stats)
= iConcat [ showStack heap stack, iNewline ]
showStack :: TiHeap -> TiStack -> Iseq
showStack heap stack
= iConcat [
iStr "Stk [",
iIndent (iInterleave iNewline (map show_stack_item stack)),
iStr " ]"
]
where
show_stack_item addr
= iConcat [ showFWAddr addr, iStr ": ",
showStkNode heap (hLookup heap addr)
]
showStkNode :: TiHeap -> Node -> Iseq
showStkNode heap (NAp fun_addr arg_addr)
= iConcat [ iStr "NAp ", showFWAddr fun_addr,
iStr " ", showFWAddr arg_addr, iStr " (",
showNode (hLookup heap arg_addr), iStr ")"
]
showStkNode heap node = showNode node
showNode :: Node -> Iseq
showNode (NAp a1 a2) = iConcat [ iStr "NAp ", showAddr a1,
iStr " ", showAddr a2
]
showNode (NSupercomb name args body) = iStr ("NSupercomb " ++ name)
showNode (NNum n) = (iStr "NNum ") `iAppend` (iNum n)
showAddr :: Addr -> Iseq
showAddr addr = iStr (show addr)
Show address in field of width 4
showFWAddr addr = iStr (space (4 - length str) ++ str)
where
str = show addr
showStats :: TiState -> Iseq
showStats (stack, dump, heap, globals, stats)
= iConcat [ iNewline, iNewline, iStr "Total number of steps = ",
iNum (tiStatGetSteps stats)
]
| null | https://raw.githubusercontent.com/logicshan/pj-lester-book/2ddb3fadc35800582002dd34be351980df2dec65/Template2.hs | haskell | Application
Supercombinator
A number
Body of supercombinator
Association of names to addresses
address of root of instance | module Template2 where
import Language
import Utils
runProg :: [Char] -> [Char]
compile :: CoreProgram -> TiState
eval :: TiState -> [TiState]
showResults :: [TiState] -> [Char]
runProg = showResults . eval . compile . parse
type TiState = (TiStack, TiDump, TiHeap, TiGlobals, TiStats)
type TiStack = [Addr]
data TiDump = DummyTiDump
initialTiDump = DummyTiDump
type TiHeap = Heap Node
type TiGlobals = ASSOC Name Addr
tiStatInitial :: TiStats
tiStatIncSteps :: TiStats -> TiStats
tiStatGetSteps :: TiStats -> Int
type TiStats = Int
tiStatInitial = 0
tiStatIncSteps s = s+1
tiStatGetSteps s = s
applyToStats :: (TiStats -> TiStats) -> TiState -> TiState
applyToStats stats_fun (stack, dump, heap, sc_defs, stats)
= (stack, dump, heap, sc_defs, stats_fun stats)
compile program
= (initial_stack, initialTiDump, initial_heap, globals, tiStatInitial)
where
sc_defs = program ++ preludeDefs ++ extraPreludeDefs
(initial_heap, globals) = buildInitialHeap sc_defs
initial_stack = [address_of_main]
address_of_main = aLookup globals "main" (error "main is not defined")
extraPreludeDefs = []
buildInitialHeap :: [CoreScDefn] -> (TiHeap, TiGlobals)
buildInitialHeap sc_defs = mapAccuml allocateSc hInitial sc_defs
allocateSc :: TiHeap -> CoreScDefn -> (TiHeap, (Name, Addr))
allocateSc heap (name, args, body)
= (heap', (name, addr))
where
(heap', addr) = hAlloc heap (NSupercomb name args body)
eval state = state : rest_states
where
rest_states | tiFinal state = []
| otherwise = eval next_state
next_state = doAdmin (step state)
doAdmin :: TiState -> TiState
doAdmin state = applyToStats tiStatIncSteps state
tiFinal :: TiState -> Bool
tiFinal ([sole_addr], dump, heap, globals, stats)
= isDataNode (hLookup heap sole_addr)
tiFinal ([], dump, heap, globals, stats) = error "Empty stack!"
Stack contains more than one item
isDataNode :: Node -> Bool
isDataNode (NNum n) = True
isDataNode node = False
step :: TiState -> TiState
step state
= dispatch (hLookup heap (hd stack))
where
(stack, dump, heap, globals, stats) = state
dispatch (NNum n) = numStep state n
dispatch (NAp a1 a2) = apStep state a1 a2
dispatch (NSupercomb sc args body) = scStep state sc args body
numStep :: TiState -> Int -> TiState
numStep state n = error "Number applied as a function!"
apStep :: TiState -> Addr -> Addr -> TiState
apStep (stack, dump, heap, globals, stats) a1 a2
= (a1 : stack, dump, heap, globals, stats)
scStep :: TiState -> Name -> [Name] -> CoreExpr -> TiState
scStep (stack, dump, heap, globals, stats) sc_name arg_names body
= (new_stack, dump, new_heap, globals, stats)
where
new_stack = result_addr : (drop (length arg_names+1) stack)
(new_heap, result_addr) = instantiate body heap env
env = arg_bindings ++ globals
arg_bindings = zip2 arg_names (getargs heap stack)
getargs :: TiHeap -> TiStack -> [Addr]
getargs heap (sc:stack)
= map get_arg stack
where get_arg addr = arg where (NAp fun arg) = hLookup heap addr
Heap before instantiation
Heap after instantiation , and
instantiate (ENum n) heap env = hAlloc heap (NNum n)
instantiate (EAp e1 e2) heap env
= hAlloc heap2 (NAp a1 a2) where (heap1, a1) = instantiate e1 heap env
(heap2, a2) = instantiate e2 heap1 env
instantiate (EVar v) heap env
= (heap, aLookup env v (error ("Undefined name " ++ show v)))
instantiate (EConstr tag arity) heap env
= instantiateConstr tag arity heap env
instantiate (ELet isrec defs body) heap env
= instantiateLet isrec defs body heap env
instantiate (ECase e alts) heap env = error "Can't instantiate case exprs"
instantiateLet isrec defs body heap old_env
= instantiate body heap1 new_env
where
(heap1, extra_bindings) = mapAccuml instantiate_rhs heap defs
new_env = extra_bindings ++ old_env
rhs_env | isrec = new_env
| otherwise = old_env
instantiate_rhs heap (name, rhs)
= (heap1, (name, addr))
where
(heap1, addr) = instantiate rhs heap rhs_env
instantiateConstr tag arity heap env
= error "Can't instantiate constructors yet"
showResults states
= iDisplay (iConcat [ iLayn (map showState states),
showStats (last states)
])
showState :: TiState -> Iseq
showState (stack, dump, heap, globals, stats)
= iConcat [ showStack heap stack, iNewline ]
showStack :: TiHeap -> TiStack -> Iseq
showStack heap stack
= iConcat [
iStr "Stk [",
iIndent (iInterleave iNewline (map show_stack_item stack)),
iStr " ]"
]
where
show_stack_item addr
= iConcat [ showFWAddr addr, iStr ": ",
showStkNode heap (hLookup heap addr)
]
showStkNode :: TiHeap -> Node -> Iseq
showStkNode heap (NAp fun_addr arg_addr)
= iConcat [ iStr "NAp ", showFWAddr fun_addr,
iStr " ", showFWAddr arg_addr, iStr " (",
showNode (hLookup heap arg_addr), iStr ")"
]
showStkNode heap node = showNode node
showNode :: Node -> Iseq
showNode (NAp a1 a2) = iConcat [ iStr "NAp ", showAddr a1,
iStr " ", showAddr a2
]
showNode (NSupercomb name args body) = iStr ("NSupercomb " ++ name)
showNode (NNum n) = (iStr "NNum ") `iAppend` (iNum n)
showAddr :: Addr -> Iseq
showAddr addr = iStr (show addr)
Show address in field of width 4
showFWAddr addr = iStr (space (4 - length str) ++ str)
where
str = show addr
showStats :: TiState -> Iseq
showStats (stack, dump, heap, globals, stats)
= iConcat [ iNewline, iNewline, iStr "Total number of steps = ",
iNum (tiStatGetSteps stats)
]
|
df37b62603151f5e5ad4165656bb1f63742256d4b9ca224ee4fe7cc2892f9b14 | Bogdanp/racket-gui-easy | slider.rkt | #lang racket/base
(require racket/gui/easy
racket/gui/easy/operator)
(define @n (@ 50))
(define @n-str (@n . ~> . number->string))
(render
(window
(vpanel
(text @n-str)
(slider
@n (λ:= @n)
#:style '(horizontal plain))
(progress @n #:style '(vertical))
(input @n-str (λ (_ text)
(define n (string->number text))
(when (and n (>= n 0) (<= n 100))
(@n . := . n)))))))
| null | https://raw.githubusercontent.com/Bogdanp/racket-gui-easy/e3bcbfe912b9914b5fda437546c53a8caaa65060/examples/slider.rkt | racket | #lang racket/base
(require racket/gui/easy
racket/gui/easy/operator)
(define @n (@ 50))
(define @n-str (@n . ~> . number->string))
(render
(window
(vpanel
(text @n-str)
(slider
@n (λ:= @n)
#:style '(horizontal plain))
(progress @n #:style '(vertical))
(input @n-str (λ (_ text)
(define n (string->number text))
(when (and n (>= n 0) (<= n 100))
(@n . := . n)))))))
| |
d130de5fc52f107e179a8a7317967e5e35352f9875be4c5104726ce602d7eb0e | circuithub/rel8 | Window.hs | module Rel8.Expr.Window
( cumulative
, rowNumber
, rank
, denseRank
, percentRank
, cumeDist
, ntile
, lag
, lead
, firstValue
, lastValue
, nthValue
)
where
-- base
import Data.Int ( Int32, Int64 )
import Prelude
-- opaleye
import qualified Opaleye.Internal.Aggregate as Opaleye
import qualified Opaleye.Internal.PackMap as Opaleye
import qualified Opaleye.Internal.Window as Opaleye
import qualified Opaleye.Window as Opaleye
-- profunctors
import Data.Profunctor (dimap)
-- rel8
import Rel8.Aggregate ( Aggregate( Aggregate ) )
import Rel8.Expr ( Expr )
import Rel8.Expr.Opaleye ( fromColumn, fromPrimExpr, toColumn, toPrimExpr )
import Rel8.Schema.Null ( Nullify )
import Rel8.Window ( Window( Window ) )
-- | Use an aggregation function as a window function over the current
-- window frame.
cumulative :: (a -> Aggregate b) -> Window a (Expr b)
cumulative f =
  fromWindowFunction (Opaleye.aggregatorWindowFunction (fromAggregate f) id)
-- | [@row_number()@](https://www.postgresql.org/docs/current/functions-window.html)
rowNumber :: Window a (Expr Int64)
rowNumber = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.rowNumber
-- | [@rank()@](https://www.postgresql.org/docs/current/functions-window.html)
rank :: Window a (Expr Int64)
rank = fromWindowFunction (fmap (fromPrimExpr . fromColumn) Opaleye.rank)
-- | [@dense_rank()@](https://www.postgresql.org/docs/current/functions-window.html)
denseRank :: Window a (Expr Int64)
denseRank = fromWindowFunction (fmap (fromPrimExpr . fromColumn) Opaleye.denseRank)
-- | [@percent_rank()@](https://www.postgresql.org/docs/current/functions-window.html)
percentRank :: Window a (Expr Double)
percentRank = fromWindowFunction (fmap (fromPrimExpr . fromColumn) Opaleye.percentRank)
-- | [@cume_dist()@](https://www.postgresql.org/docs/current/functions-window.html)
cumeDist :: Window a (Expr Double)
cumeDist = fromWindowFunction (fmap (fromPrimExpr . fromColumn) Opaleye.cumeDist)
-- | [@ntile(num_buckets)@](https://www.postgresql.org/docs/current/functions-window.html)
ntile :: Expr Int32 -> Window a (Expr Int32)
ntile buckets =
  fromWindowFunction
    (fmap (fromPrimExpr . fromColumn)
          (Opaleye.ntile (toColumn (toPrimExpr buckets))))
-- | [@lag(value, offset, default)@](https://www.postgresql.org/docs/current/functions-window.html)
lag :: Expr Int32 -> Expr a -> Window (Expr a) (Expr a)
lag offset def = fromWindowFunction wf
  where
    wf = dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn)
               (Opaleye.lag (toColumn (toPrimExpr offset))
                            (toColumn (toPrimExpr def)))
-- | [@lead(value, offset, default)@](https://www.postgresql.org/docs/current/functions-window.html)
lead :: Expr Int32 -> Expr a -> Window (Expr a) (Expr a)
lead offset def =
  fromWindowFunction $
    dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn) $
      Opaleye.lead (toColumn (toPrimExpr offset)) (toColumn (toPrimExpr def))
-- | [@first_value(value)@](https://www.postgresql.org/docs/current/functions-window.html)
firstValue :: Window (Expr a) (Expr a)
firstValue =
  fromWindowFunction
    (dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn)
           Opaleye.firstValue)
-- | [@last_value(value)@](https://www.postgresql.org/docs/current/functions-window.html)
lastValue :: Window (Expr a) (Expr a)
lastValue =
  fromWindowFunction
    (dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn)
           Opaleye.lastValue)
-- | [@nth_value(value, n)@](https://www.postgresql.org/docs/current/functions-window.html)
nthValue :: Expr Int32 -> Window (Expr a) (Expr (Nullify a))
nthValue n =
  fromWindowFunction $
    dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn) $
      Opaleye.nthValue (toColumn (toPrimExpr n))
-- Unwrap the Opaleye aggregator hiding inside our 'Aggregate' newtype and
-- re-expose it as an 'Opaleye.Aggregator' over the pre-aggregation input.
-- NOTE(review): the inner aggregator is applied to '()' -- it apparently
-- ignores its input once built from @f a@; confirm against Rel8.Aggregate.
fromAggregate :: (a -> Aggregate b) -> Opaleye.Aggregator a (Expr b)
fromAggregate f = Opaleye.Aggregator $ Opaleye.PackMap $ \w a -> case f a of
  Aggregate (Opaleye.Aggregator (Opaleye.PackMap x)) -> x w ()
-- Wrap an Opaleye 'WindowFunction' as a Rel8 'Window', pairing each output
-- with 'mempty' -- presumably the empty window specification (no
-- partition/order); confirm against Opaleye.Internal.Window.
fromWindowFunction :: Opaleye.WindowFunction a b -> Window a b
fromWindowFunction (Opaleye.WindowFunction (Opaleye.PackMap w)) =
  Window $ Opaleye.Windows $ Opaleye.PackMap $ \f -> w $ \o -> f (o, mempty)
| null | https://raw.githubusercontent.com/circuithub/rel8/2e82fccb02470198297f4a71b9da8f535d8029b1/src/Rel8/Expr/Window.hs | haskell | base
opaleye
profunctors
rel8
| [@rank()@](-window.html)
| [@dense_rank()@](-window.html)
| [@percent_rank()@](-window.html)
| [@cume_dist()@](-window.html)
| [@ntile(num_buckets)@](-window.html)
| [@lag(value, offset, default)@](-window.html)
| [@first_value(value)@](-window.html)
| [@last_value(value)@](-window.html) | module Rel8.Expr.Window
( cumulative
, rowNumber
, rank
, denseRank
, percentRank
, cumeDist
, ntile
, lag
, lead
, firstValue
, lastValue
, nthValue
)
where
import Data.Int ( Int32, Int64 )
import Prelude
import qualified Opaleye.Internal.Aggregate as Opaleye
import qualified Opaleye.Internal.PackMap as Opaleye
import qualified Opaleye.Internal.Window as Opaleye
import qualified Opaleye.Window as Opaleye
import Data.Profunctor (dimap)
import Rel8.Aggregate ( Aggregate( Aggregate ) )
import Rel8.Expr ( Expr )
import Rel8.Expr.Opaleye ( fromColumn, fromPrimExpr, toColumn, toPrimExpr )
import Rel8.Schema.Null ( Nullify )
import Rel8.Window ( Window( Window ) )
cumulative :: (a -> Aggregate b) -> Window a (Expr b)
cumulative f =
fromWindowFunction $ Opaleye.aggregatorWindowFunction (fromAggregate f) id
| [ @row_number()@]( / docs / current / functions - window.html )
rowNumber :: Window a (Expr Int64)
rowNumber = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.rowNumber
rank :: Window a (Expr Int64)
rank = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.rank
denseRank :: Window a (Expr Int64)
denseRank = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.denseRank
percentRank :: Window a (Expr Double)
percentRank = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.percentRank
cumeDist :: Window a (Expr Double)
cumeDist = fromWindowFunction $ fromPrimExpr . fromColumn <$> Opaleye.cumeDist
ntile :: Expr Int32 -> Window a (Expr Int32)
ntile buckets = fromWindowFunction $ fromPrimExpr . fromColumn <$>
Opaleye.ntile (toColumn (toPrimExpr buckets))
lag :: Expr Int32 -> Expr a -> Window (Expr a) (Expr a)
lag offset def =
fromWindowFunction $
dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn) $
Opaleye.lag (toColumn (toPrimExpr offset)) (toColumn (toPrimExpr def))
| [ @lead(value , offset , default)@]( / docs / current / functions - window.html )
lead :: Expr Int32 -> Expr a -> Window (Expr a) (Expr a)
lead offset def =
fromWindowFunction $
dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn) $
Opaleye.lead (toColumn (toPrimExpr offset)) (toColumn (toPrimExpr def))
firstValue :: Window (Expr a) (Expr a)
firstValue =
fromWindowFunction $
dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn)
Opaleye.firstValue
lastValue :: Window (Expr a) (Expr a)
lastValue =
fromWindowFunction $
dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn)
Opaleye.lastValue
| [ @nth_value(value , n)@]( / docs / current / functions - window.html )
nthValue :: Expr Int32 -> Window (Expr a) (Expr (Nullify a))
nthValue n =
fromWindowFunction $
dimap (toColumn . toPrimExpr) (fromPrimExpr . fromColumn) $
Opaleye.nthValue (toColumn (toPrimExpr n))
fromAggregate :: (a -> Aggregate b) -> Opaleye.Aggregator a (Expr b)
fromAggregate f = Opaleye.Aggregator $ Opaleye.PackMap $ \w a -> case f a of
Aggregate (Opaleye.Aggregator (Opaleye.PackMap x)) -> x w ()
fromWindowFunction :: Opaleye.WindowFunction a b -> Window a b
fromWindowFunction (Opaleye.WindowFunction (Opaleye.PackMap w)) =
Window $ Opaleye.Windows $ Opaleye.PackMap $ \f -> w $ \o -> f (o, mempty)
|
b8b9597e8f7f1418e82d0f739816aab5fcb8a04c499e6778a48f22e6e608bb99 | chunsj/TH | genchars-obama-lstm.lisp | ;; from
;; -effectiveness/
(defpackage :genchars-obama-lstm
(:use #:common-lisp
#:mu
#:th
#:th.ex.data))
(in-package :genchars-obama-lstm)
;; Corpus: the non-empty lines of the :obama text, re-joined with newlines.
(defparameter *data-lines* (remove-if (lambda (line) (< ($count line) 1)) (text-lines :obama)))
(defparameter *data* (format nil "~{~A~^~%~}" *data-lines*))
;; Character vocabulary and the two-way char <-> index mapping.
(defparameter *chars* (remove-duplicates (coerce *data* 'list)))
(defparameter *data-size* ($count *data*))
(defparameter *vocab-size* ($count *chars*))
;; char -> index hash table, built positionally from *chars*.
(defparameter *char-to-idx* (let ((ht #{}))
                              (loop :for i :from 0 :below *vocab-size*
                                    :for ch = ($ *chars* i)
                                    :do (setf ($ ht ch) i))
                              ht))
;; index -> char is simply positional lookup into *chars*.
(defparameter *idx-to-char* *chars*)
(defun choose (probs)
  "Sample one index from the (possibly unnormalized) probability tensor PROBS."
  (let* ((total ($sum probs))
         (normalized ($div probs total)))
    ($ ($reshape! ($multinomial normalized 1) ($count normalized)) 0)))
;;
;; non batched lstm for example
;;
;; Model hyper-parameters.
(defparameter *hidden-size* 100)
(defparameter *sequence-length* 50)
;; Parameter container; each $push below registers one trainable tensor.
(defparameter *lstm* (parameters))
;; LSTM weights: one (W, U, b) triple per gate, named a/i/f/o to match the
;; argument order of the $lstm calls below (a = candidate/cell input,
;; i = input gate, f = forget gate, o = output gate). Initialized uniformly
;; in [-0.08, 0.08) -- assuming rnd samples uniform [0, 1); TODO confirm.
(defparameter *wa* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *ua* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *ba* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
(defparameter *wi* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *ui* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *bi* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
(defparameter *wf* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *uf* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
;; Forget-gate bias starts at 1 (a common LSTM initialization).
(defparameter *bf* ($push *lstm* (ones *hidden-size*)))
(defparameter *wo* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *uo* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *bo* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
;; Output projection from hidden state to vocabulary logits.
(defparameter *wy* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *vocab-size*)) 0.08)))
(defparameter *by* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size*)) 0.08)))
(defun lstm-write-weight-to (w fname)
  "Serialize the data tensor of parameter W to the file FNAME."
  (let ((file (file.disk fname "w")))
    ($fwrite ($data w) file)
    ($fclose file)))
(defun lstm-read-weight-from (w fname)
  "Fill the data tensor of parameter W from the file FNAME."
  (let ((file (file.disk fname "r")))
    ($fread ($data w) file)
    ($fclose file)))
(defun lstm-write-weights ()
  "Persist every LSTM parameter tensor to its .dat file under
examples/weights/genchar-obama-lstm/.
Refactored from 14 copy-pasted calls to a data-driven loop so a path typo
in a single line cannot silently desynchronize write and read."
  (loop :for (w name) :in (list (list *wa* "wa") (list *ua* "ua") (list *ba* "ba")
                                (list *wi* "wi") (list *ui* "ui") (list *bi* "bi")
                                (list *wf* "wf") (list *uf* "uf") (list *bf* "bf")
                                (list *wo* "wo") (list *uo* "uo") (list *bo* "bo")
                                (list *wy* "wy") (list *by* "by"))
        :do (lstm-write-weight-to
             w (format nil "examples/weights/genchar-obama-lstm/lstm-~a.dat" name))))
(defun lstm-read-weights ()
  "Restore every LSTM parameter tensor from its .dat file under
examples/weights/genchar-obama-lstm/ (the same layout lstm-write-weights
produces)."
  (loop :for (w name) :in (list (list *wa* "wa") (list *ua* "ua") (list *ba* "ba")
                                (list *wi* "wi") (list *ui* "ui") (list *bi* "bi")
                                (list *wf* "wf") (list *uf* "uf") (list *bf* "bf")
                                (list *wo* "wo") (list *uo* "uo") (list *bo* "bo")
                                (list *wy* "wy") (list *by* "by"))
        :do (lstm-read-weight-from
             w (format nil "examples/weights/genchar-obama-lstm/lstm-~a.dat" name))))
(defun cindices (str)
  "Return a one-hot matrix of shape (length STR) x *vocab-size* encoding STR."
  (let ((onehot (zeros ($count str) *vocab-size*)))
    (loop :for i :from 0 :below ($count str)
          :do (setf ($ onehot i ($ *char-to-idx* ($ str i))) 1))
    onehot))
(defun rstrings (indices)
  "Map the index list INDICES back to a string via *idx-to-char*."
  (coerce (loop :for i :in indices :collect ($ *idx-to-char* i)) 'string))
(defun seedh (str &optional (temperature 1))
  "Run STR through the LSTM to produce a warmed-up state.
Returns (list last-sampled-char-index hidden-state cell-state)."
  (let ((input (cindices str))
        (ph (zeros 1 *hidden-size*)) ;; previous hidden state h[t-1]
        (pc (zeros 1 *hidden-size*)) ;; previous cell state c[t-1]
        ;; plain weight tensors, unwrapped from the parameter objects via $data
        (wa ($data *wa*))
        (ua ($data *ua*))
        (ba ($data *ba*))
        (wi ($data *wi*))
        (ui ($data *ui*))
        (bi ($data *bi*))
        (wf ($data *wf*))
        (uf ($data *uf*))
        (bf ($data *bf*))
        (wo ($data *wo*))
        (uo ($data *uo*))
        (bo ($data *bo*))
        (wy ($data *wy*))
        (by ($data *by*))
        (ncidx 0))
    ;; One LSTM step per input character; only the final state and the index
    ;; sampled from the final (temperature-scaled) softmax are kept.
    (loop :for i :from 0 :below ($size input 0)
          :for xt = ($index input 0 i)
          :for (ht ct) = ($lstm xt ph pc wi ui wf uf wo uo wa ua bi bf bo ba)
          :for yt = ($affine ht wy by)
          :for ps = ($softmax ($/ yt temperature))
          :for nidx = (choose ps)
          :do (setf ph ht
                    pc ct
                    ncidx nidx))
    (list ncidx ph pc)))
(defun sample (str n &optional (temperature 1))
  "Generate N characters from the LSTM, optionally seeded with STR.
Returns STR (when given) with the generated characters appended."
  (let ((x (zeros 1 *vocab-size*)) ;; one-hot input for the next step
        (indices nil)              ;; sampled indices, newest first
        (sh (when str (seedh str temperature)))
        ;; plain weight tensors, unwrapped from the parameter objects
        (wa ($data *wa*))
        (ua ($data *ua*))
        (ba ($data *ba*))
        (wi ($data *wi*))
        (ui ($data *ui*))
        (bi ($data *bi*))
        (wf ($data *wf*))
        (uf ($data *uf*))
        (bf ($data *bf*))
        (wo ($data *wo*))
        (uo ($data *uo*))
        (bo ($data *bo*))
        (wy ($data *wy*))
        (by ($data *by*))
        (ph nil)
        (pc nil))
    ;; Start either from the seed string's final state, or from a zero state
    ;; and a uniformly random first character.
    (if sh
        (let ((idx0 ($0 sh))
              (h ($1 sh))
              (c ($2 sh)))
          (setf ($ x 0 idx0) 1)
          (setf ph h
                pc c)
          (push idx0 indices))
        (let ((idx0 (random *vocab-size*))
              (h (zeros 1 *hidden-size*))
              (c (zeros 1 *hidden-size*)))
          (setf ($ x 0 idx0) 1)
          (setf ph h
                pc c)
          (push idx0 indices)))
    ;; Autoregressive loop: sample an index, then feed it back in one-hot
    ;; form as the next input.
    (loop :for i :from 0 :below n
          :for (ht ct) = ($lstm x ph pc wi ui wf uf wo uo wa ua bi bf bo ba)
          :for yt = ($affine ht wy by)
          :for ps = ($softmax ($/ yt temperature))
          :for nidx = (choose ps)
          :do (progn
                (setf ph ht
                      pc ct)
                (push nidx indices)
                ($zero! x)
                (setf ($ x 0 nidx) 1)))
    (concatenate 'string str (rstrings (reverse indices)))))
;; Last corpus position from which a full (sequence + next char) slice fits.
(defparameter *upto* (- *data-size* *sequence-length* 1))
;; XXX of course, we need better strategy for building data
;; for example, breaking at the word level will be better one.
;; One-hot input matrices, one per non-overlapping *sequence-length* slice.
(defparameter *inputs* (loop :for p :from 0 :below *upto* :by *sequence-length*
                             :for input-str = (subseq *data* p (+ p *sequence-length*))
                             :collect (let ((m (zeros *sequence-length* *vocab-size*)))
                                        (loop :for i :from 0 :below *sequence-length*
                                              :for ch = ($ input-str i)
                                              :do (setf ($ m i ($ *char-to-idx* ch)) 1))
                                        m)))
;; Targets are the same slices shifted one character ahead.
(defparameter *targets* (loop :for p :from 0 :below *upto* :by *sequence-length*
                              :for target-str = (subseq *data* (1+ p) (+ p *sequence-length* 1))
                              :collect (let ((m (zeros *sequence-length* *vocab-size*)))
                                         (loop :for i :from 0 :below *sequence-length*
                                               :for ch = ($ target-str i)
                                               :do (setf ($ m i ($ *char-to-idx* ch)) 1))
                                         m)))
;; Exponentially smoothed training loss, initialized to the loss of a
;; uniform prediction (-log(1/V)) summed over one full sequence.
(defparameter *mloss* (* (- (log (/ 1 *vocab-size*))) *sequence-length*))
;; Best (lowest) smoothed-loss peak seen so far; guards checkpointing.
(defparameter *min-mloss* *mloss*)
(defparameter *epochs* 50)
;; NOTE(review): presumed TH semantics -- $cg! clears accumulated gradients
;; and gcf forces a garbage collection; confirm against the TH library.
($cg! *lstm*)
(gcf)
;; Training: truncated BPTT over each *sequence-length* slice, one parameter
;; update per slice ($rmgd! -- presumably an RMSprop-style update; confirm in
;; TH). Tracks an exponentially smoothed loss and checkpoints the weights
;; whenever an epoch's worst smoothed loss improves on the best seen so far.
(time
 (loop :for iter :from 1 :to *epochs*
       :for n = 0
       :for maxloss = 0
       :for maxloss-pos = -1
       :for max-mloss = 0
       :do (progn
             (loop :for input :in *inputs*
                   :for target :in *targets*
                   :do (let ((ph (zeros 1 *hidden-size*))
                             (pc (zeros 1 *hidden-size*))
                             (tloss 0))
                         ;; Forward pass over one slice, accumulating the
                         ;; cross-entropy loss per character.
                         (loop :for i :from 0 :below ($size input 0)
                               :for xt = ($index input 0 i)
                               :for (ht ct) = ($lstm xt ph pc
                                                     *wi* *ui* *wf* *uf*
                                                     *wo* *uo* *wa* *ua*
                                                     *bi* *bf* *bo* *ba*)
                               :for yt = ($affine ht *wy* *by*)
                               :for ps = ($softmax yt)
                               :for y = ($index target 0 i)
                               :for l = ($cee ps y)
                               :do (progn
                                     (setf ph ht
                                           pc ct)
                                     (incf tloss ($data l))))
                         ;; Remember the worst slice of this epoch.
                         (when (> tloss maxloss)
                           (setf maxloss-pos n)
                           (setf maxloss tloss))
                         ($rmgd! *lstm*)
                         ;; 0.999/0.001 exponential smoothing of the loss.
                         (setf *mloss* (+ (* 0.999 *mloss*) (* 0.001 tloss)))
                         (when (> *mloss* max-mloss) (setf max-mloss *mloss*))
                         (when (zerop (rem n 200))
                           (prn "[ITER]" iter n *mloss* maxloss maxloss-pos))
                         (incf n)))
             ;; Checkpoint when this epoch's loss peak beat the record.
             (when (< max-mloss *min-mloss*)
               (prn "*** BETTER MLOSS - WRITE WEIGHTS: FROM" *min-mloss* "TO" max-mloss)
               (setf *min-mloss* max-mloss)
               (lstm-write-weights)))))
;; REPL usage examples: sample text at temperature 0.5, then persist or
;; restore the trained weights.
(prn (sample "This is not correct." 200 0.5))
(prn (sample "I" 200 0.5))
(lstm-write-weights)
(lstm-read-weights)
;; rmgd 0.002 0.99 - 1.31868 - 1.61637
;; adgd - 1.551497 - 1.841827
;; amgd 0.002 - 1.3747485 - 1.70623
;; Print the training slices at the listed positions -- presumably slice
;; indices flagged during training (e.g. worst-loss sequences); confirm.
(loop :for p :from 0 :below *upto* :by *sequence-length*
      :for n :from 0
      :for input-str = (subseq *data* p (+ p *sequence-length*))
      :do (when (member n '(75856 44515 44514 21663 18796 1258 336 178))
            (prn (format nil "~6,d" n) input-str)))
| null | https://raw.githubusercontent.com/chunsj/TH/890f05ab81148d9fe558be3979c30c303b448480/examples/genchars/genchars-obama-lstm.lisp | lisp | from
-effectiveness/
non batched lstm for example
XXX of course, we need better strategy for building data
for example, breaking at the word level will be better one.
adgd - 1.551497 - 1.841827 |
(defpackage :genchars-obama-lstm
(:use #:common-lisp
#:mu
#:th
#:th.ex.data))
(in-package :genchars-obama-lstm)
(defparameter *data-lines* (remove-if (lambda (line) (< ($count line) 1)) (text-lines :obama)))
(defparameter *data* (format nil "~{~A~^~%~}" *data-lines*))
(defparameter *chars* (remove-duplicates (coerce *data* 'list)))
(defparameter *data-size* ($count *data*))
(defparameter *vocab-size* ($count *chars*))
(defparameter *char-to-idx* (let ((ht #{}))
(loop :for i :from 0 :below *vocab-size*
:for ch = ($ *chars* i)
:do (setf ($ ht ch) i))
ht))
(defparameter *idx-to-char* *chars*)
(defun choose (probs)
(let* ((sprobs ($sum probs))
(probs ($div probs sprobs)))
($ ($reshape! ($multinomial probs 1) ($count probs)) 0)))
(defparameter *hidden-size* 100)
(defparameter *sequence-length* 50)
(defparameter *lstm* (parameters))
(defparameter *wa* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *ua* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *ba* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
(defparameter *wi* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *ui* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *bi* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
(defparameter *wf* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *uf* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *bf* ($push *lstm* (ones *hidden-size*)))
(defparameter *wo* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size* *hidden-size*)) 0.08)))
(defparameter *uo* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *hidden-size*)) 0.08)))
(defparameter *bo* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size*)) 0.08)))
(defparameter *wy* ($push *lstm* ($- ($* 0.16 (rnd *hidden-size* *vocab-size*)) 0.08)))
(defparameter *by* ($push *lstm* ($- ($* 0.16 (rnd *vocab-size*)) 0.08)))
(defun lstm-write-weight-to (w fname)
(let ((f (file.disk fname "w")))
($fwrite ($data w) f)
($fclose f)))
(defun lstm-read-weight-from (w fname)
(let ((f (file.disk fname "r")))
($fread ($data w) f)
($fclose f)))
(defun lstm-write-weights ()
(lstm-write-weight-to *wa* "examples/weights/genchar-obama-lstm/lstm-wa.dat")
(lstm-write-weight-to *ua* "examples/weights/genchar-obama-lstm/lstm-ua.dat")
(lstm-write-weight-to *ba* "examples/weights/genchar-obama-lstm/lstm-ba.dat")
(lstm-write-weight-to *wi* "examples/weights/genchar-obama-lstm/lstm-wi.dat")
(lstm-write-weight-to *ui* "examples/weights/genchar-obama-lstm/lstm-ui.dat")
(lstm-write-weight-to *bi* "examples/weights/genchar-obama-lstm/lstm-bi.dat")
(lstm-write-weight-to *wf* "examples/weights/genchar-obama-lstm/lstm-wf.dat")
(lstm-write-weight-to *uf* "examples/weights/genchar-obama-lstm/lstm-uf.dat")
(lstm-write-weight-to *bf* "examples/weights/genchar-obama-lstm/lstm-bf.dat")
(lstm-write-weight-to *wo* "examples/weights/genchar-obama-lstm/lstm-wo.dat")
(lstm-write-weight-to *uo* "examples/weights/genchar-obama-lstm/lstm-uo.dat")
(lstm-write-weight-to *bo* "examples/weights/genchar-obama-lstm/lstm-bo.dat")
(lstm-write-weight-to *wy* "examples/weights/genchar-obama-lstm/lstm-wy.dat")
(lstm-write-weight-to *by* "examples/weights/genchar-obama-lstm/lstm-by.dat"))
(defun lstm-read-weights ()
(lstm-read-weight-from *wa* "examples/weights/genchar-obama-lstm/lstm-wa.dat")
(lstm-read-weight-from *ua* "examples/weights/genchar-obama-lstm/lstm-ua.dat")
(lstm-read-weight-from *ba* "examples/weights/genchar-obama-lstm/lstm-ba.dat")
(lstm-read-weight-from *wi* "examples/weights/genchar-obama-lstm/lstm-wi.dat")
(lstm-read-weight-from *ui* "examples/weights/genchar-obama-lstm/lstm-ui.dat")
(lstm-read-weight-from *bi* "examples/weights/genchar-obama-lstm/lstm-bi.dat")
(lstm-read-weight-from *wf* "examples/weights/genchar-obama-lstm/lstm-wf.dat")
(lstm-read-weight-from *uf* "examples/weights/genchar-obama-lstm/lstm-uf.dat")
(lstm-read-weight-from *bf* "examples/weights/genchar-obama-lstm/lstm-bf.dat")
(lstm-read-weight-from *wo* "examples/weights/genchar-obama-lstm/lstm-wo.dat")
(lstm-read-weight-from *uo* "examples/weights/genchar-obama-lstm/lstm-uo.dat")
(lstm-read-weight-from *bo* "examples/weights/genchar-obama-lstm/lstm-bo.dat")
(lstm-read-weight-from *wy* "examples/weights/genchar-obama-lstm/lstm-wy.dat")
(lstm-read-weight-from *by* "examples/weights/genchar-obama-lstm/lstm-by.dat"))
(defun cindices (str)
(let ((m (zeros ($count str) *vocab-size*)))
(loop :for i :from 0 :below ($count str)
:for ch = ($ str i)
:do (setf ($ m i ($ *char-to-idx* ch)) 1))
m))
(defun rstrings (indices) (coerce (mapcar (lambda (i) ($ *idx-to-char* i)) indices) 'string))
(defun seedh (str &optional (temperature 1))
(let ((input (cindices str))
(ph (zeros 1 *hidden-size*))
(pc (zeros 1 *hidden-size*))
(wa ($data *wa*))
(ua ($data *ua*))
(ba ($data *ba*))
(wi ($data *wi*))
(ui ($data *ui*))
(bi ($data *bi*))
(wf ($data *wf*))
(uf ($data *uf*))
(bf ($data *bf*))
(wo ($data *wo*))
(uo ($data *uo*))
(bo ($data *bo*))
(wy ($data *wy*))
(by ($data *by*))
(ncidx 0))
(loop :for i :from 0 :below ($size input 0)
:for xt = ($index input 0 i)
:for (ht ct) = ($lstm xt ph pc wi ui wf uf wo uo wa ua bi bf bo ba)
:for yt = ($affine ht wy by)
:for ps = ($softmax ($/ yt temperature))
:for nidx = (choose ps)
:do (setf ph ht
pc ct
ncidx nidx))
(list ncidx ph pc)))
(defun sample (str n &optional (temperature 1))
(let ((x (zeros 1 *vocab-size*))
(indices nil)
(sh (when str (seedh str temperature)))
(wa ($data *wa*))
(ua ($data *ua*))
(ba ($data *ba*))
(wi ($data *wi*))
(ui ($data *ui*))
(bi ($data *bi*))
(wf ($data *wf*))
(uf ($data *uf*))
(bf ($data *bf*))
(wo ($data *wo*))
(uo ($data *uo*))
(bo ($data *bo*))
(wy ($data *wy*))
(by ($data *by*))
(ph nil)
(pc nil))
(if sh
(let ((idx0 ($0 sh))
(h ($1 sh))
(c ($2 sh)))
(setf ($ x 0 idx0) 1)
(setf ph h
pc c)
(push idx0 indices))
(let ((idx0 (random *vocab-size*))
(h (zeros 1 *hidden-size*))
(c (zeros 1 *hidden-size*)))
(setf ($ x 0 idx0) 1)
(setf ph h
pc c)
(push idx0 indices)))
(loop :for i :from 0 :below n
:for (ht ct) = ($lstm x ph pc wi ui wf uf wo uo wa ua bi bf bo ba)
:for yt = ($affine ht wy by)
:for ps = ($softmax ($/ yt temperature))
:for nidx = (choose ps)
:do (progn
(setf ph ht
pc ct)
(push nidx indices)
($zero! x)
(setf ($ x 0 nidx) 1)))
(concatenate 'string str (rstrings (reverse indices)))))
(defparameter *upto* (- *data-size* *sequence-length* 1))
(defparameter *inputs* (loop :for p :from 0 :below *upto* :by *sequence-length*
:for input-str = (subseq *data* p (+ p *sequence-length*))
:collect (let ((m (zeros *sequence-length* *vocab-size*)))
(loop :for i :from 0 :below *sequence-length*
:for ch = ($ input-str i)
:do (setf ($ m i ($ *char-to-idx* ch)) 1))
m)))
(defparameter *targets* (loop :for p :from 0 :below *upto* :by *sequence-length*
:for target-str = (subseq *data* (1+ p) (+ p *sequence-length* 1))
:collect (let ((m (zeros *sequence-length* *vocab-size*)))
(loop :for i :from 0 :below *sequence-length*
:for ch = ($ target-str i)
:do (setf ($ m i ($ *char-to-idx* ch)) 1))
m)))
(defparameter *mloss* (* (- (log (/ 1 *vocab-size*))) *sequence-length*))
(defparameter *min-mloss* *mloss*)
(defparameter *epochs* 50)
($cg! *lstm*)
(gcf)
(time
(loop :for iter :from 1 :to *epochs*
:for n = 0
:for maxloss = 0
:for maxloss-pos = -1
:for max-mloss = 0
:do (progn
(loop :for input :in *inputs*
:for target :in *targets*
:do (let ((ph (zeros 1 *hidden-size*))
(pc (zeros 1 *hidden-size*))
(tloss 0))
(loop :for i :from 0 :below ($size input 0)
:for xt = ($index input 0 i)
:for (ht ct) = ($lstm xt ph pc
*wi* *ui* *wf* *uf*
*wo* *uo* *wa* *ua*
*bi* *bf* *bo* *ba*)
:for yt = ($affine ht *wy* *by*)
:for ps = ($softmax yt)
:for y = ($index target 0 i)
:for l = ($cee ps y)
:do (progn
(setf ph ht
pc ct)
(incf tloss ($data l))))
(when (> tloss maxloss)
(setf maxloss-pos n)
(setf maxloss tloss))
($rmgd! *lstm*)
(setf *mloss* (+ (* 0.999 *mloss*) (* 0.001 tloss)))
(when (> *mloss* max-mloss) (setf max-mloss *mloss*))
(when (zerop (rem n 200))
(prn "[ITER]" iter n *mloss* maxloss maxloss-pos))
(incf n)))
(when (< max-mloss *min-mloss*)
(prn "*** BETTER MLOSS - WRITE WEIGHTS: FROM" *min-mloss* "TO" max-mloss)
(setf *min-mloss* max-mloss)
(lstm-write-weights)))))
(prn (sample "This is not correct." 200 0.5))
(prn (sample "I" 200 0.5))
(lstm-write-weights)
(lstm-read-weights)
rmgd 0.002 0.99 - 1.31868 - 1.61637
amgd 0.002 - 1.3747485 - 1.70623
(loop :for p :from 0 :below *upto* :by *sequence-length*
:for n :from 0
:for input-str = (subseq *data* p (+ p *sequence-length*))
:do (when (member n '(75856 44515 44514 21663 18796 1258 336 178))
(prn (format nil "~6,d" n) input-str)))
|
953dc3ff75c7fb0ac2ca4bd8dfa26faa77f0ccf3c71a396045b9dfdc2e8be885 | ayato-p/bulkhead | bulkhead.clj | (ns org.panchromatic.bulkhead
(:require [integrant.core :as ig]
[org.panchromatic.bulkhead.internal :as internal]
[org.panchromatic.bulkhead.state :as state]))
(defn set-prep!
  "Set org.panchromatic.bulkhead.state/prep to `f`, the function that
  `with-bulkhead` (via `internal/detect-prep-fn`) uses to obtain the
  integrant configuration to start systems from. NOTE(review): confirm
  that detect-prep-fn reads this var."
  [f]
  ;; `(constantly f)` ignores the var's old root value, so this is a plain
  ;; assignment of `f` as the new root binding.
  (alter-var-root #'state/prep (constantly f)))
(defmacro with-bulkhead
  ;; Runs `body` against a freshly started integrant system in which the
  ;; components named in `mock-map` are replaced by mock implementations.
  ;; Halting the system and destroying the mocks are guaranteed by the
  ;; nested try/finally blocks, even if the body throws.
  {:arglists '([bindings mock-map body])}
  [& args]
  (let [[bindings mock-map body] (internal/parse-args args &env)
        prep (internal/detect-prep-fn &env)]
    `(let [mock-map# ~mock-map
           ;; presumably pairs of original-key -> mock-key, one per entry in
           ;; mock-map (confirm against internal/make-keypairs)
           org&mock-keypairs# (internal/make-keypairs (keys mock-map#))
           ;; keys to start: the bound keys, with each mocked original key
           ;; swapped for its mock key. NOTE(review): the `->` wraps a single
           ;; reduce-kv form and is redundant.
           start-keys# (-> (reduce-kv #(-> (disj %1 %2) (conj %3))
                                      ~(-> bindings vals set)
                                      org&mock-keypairs#))]
       (internal/create-mock-components! org&mock-keypairs# mock-map#)
       (try
         ;; Swap mock keys into the prepped config, then init only start-keys#
         ;; and bind the requested components for use inside the body.
         (let [~'$system (-> (reduce-kv #(-> (dissoc %1 %2) (assoc %3 {}))
                                        (~prep)
                                        org&mock-keypairs#)
                             (ig/init start-keys#))
               ~@(internal/bind-components '$system bindings)]
           (try
             ~@body
             (finally
               (ig/halt! ~'$system start-keys#))))
         (finally
           (internal/destroy-mock-components! org&mock-keypairs#))))))
| null | https://raw.githubusercontent.com/ayato-p/bulkhead/64c9571d6fb33860b8079f06e2db54b02d779b41/src/org/panchromatic/bulkhead.clj | clojure | (ns org.panchromatic.bulkhead
(:require [integrant.core :as ig]
[org.panchromatic.bulkhead.internal :as internal]
[org.panchromatic.bulkhead.state :as state]))
(defn set-prep! [f]
(alter-var-root #'state/prep (constantly f)))
(defmacro with-bulkhead
{:arglists '([bindings mock-map body])}
[& args]
(let [[bindings mock-map body] (internal/parse-args args &env)
prep (internal/detect-prep-fn &env)]
`(let [mock-map# ~mock-map
org&mock-keypairs# (internal/make-keypairs (keys mock-map#))
start-keys# (-> (reduce-kv #(-> (disj %1 %2) (conj %3))
~(-> bindings vals set)
org&mock-keypairs#))]
(internal/create-mock-components! org&mock-keypairs# mock-map#)
(try
(let [~'$system (-> (reduce-kv #(-> (dissoc %1 %2) (assoc %3 {}))
(~prep)
org&mock-keypairs#)
(ig/init start-keys#))
~@(internal/bind-components '$system bindings)]
(try
~@body
(finally
(ig/halt! ~'$system start-keys#))))
(finally
(internal/destroy-mock-components! org&mock-keypairs#))))))
| |
7eb63ac839ff904e7bef7934e7d84290aa7ce18c8425f893ba2d8f37b9d047f7 | jjmeyer0/gt | trie.mli | type 'a trie =
Tnone
| Texact of char list * 'a
| Tnext of 'a option * 'a trie array
(** [mk_trievec v] builds the child array used at a trie branch, every slot
    initialized to [v]. NOTE(review): the array length is not visible in
    this interface -- presumably one slot per character code; confirm in
    trie.ml. *)
val mk_trievec : 'a -> 'a array

(** [charlist_of_string s] is the list of the characters of [s]. *)
val charlist_of_string : string -> char list

(** [trie_insert t s v] returns [t] with value [v] bound to key [s]. *)
val trie_insert : 'a trie -> string -> 'a -> 'a trie

(** [trie_lookup t s] is [Some v] when key [s] is bound to [v] in [t], and
    [None] otherwise. *)
val trie_lookup : 'a trie -> string -> 'a option

(** [trie_contains t s] tells whether key [s] has a binding in [t]. *)
val trie_contains : 'a trie -> string -> bool
| null | https://raw.githubusercontent.com/jjmeyer0/gt/c0c7febc2e3fd532d44617f663b224cc0b9c7cf2/src/trie.mli | ocaml | type 'a trie =
Tnone
| Texact of char list * 'a
| Tnext of 'a option * 'a trie array
val mk_trievec : 'a -> 'a array
val charlist_of_string : string -> char list
val trie_insert : 'a trie -> string -> 'a -> 'a trie
val trie_lookup : 'a trie -> string -> 'a option
val trie_contains : 'a trie -> string -> bool
| |
07840d25cc92fb96877f71407dea728f62966e9bf34e953e23efd5145fcd291b | Elzair/nazghul | af-entry.scm | ;; ----------------------------------------------------------------------------
;; af-entry.scm
;;
;; This file defines the on-entry procedure executed whenever the player enters
;; the abandoned farm place. The purpose of this proc is to respawn some
;; monsters in the place.
;;
;; I never want there to be more than 5 each of trolls or spiders. I'll roll
;; to add monsters if there are 2 or less, and I'll never add more than 3.
;; ----------------------------------------------------------------------------
;; Summon n wood spiders, each at a random corner of kplace.
(define (af-spawn-spiders kplace n)
  (let loop ((remaining n))
    (if (> remaining 0)
        (begin
          (psummon (place-random-corner kplace)
                   mk-wood-spider
                   1)
          (loop (- remaining 1))))))
;; Place-entry hook: top up the local troll and spider populations, then
;; report success to the kernel.
(define (af-entry kplace kplayer)
  (let* ((chars (filter obj-is-char? (kern-place-get-objects kplace)))
         (trolls (filter char-is-troll? chars))
         (spiders (filter char-is-spider? chars)))
    ;; Keep at least a couple of trolls near the fixed spot (19 13).
    (if (< (length trolls) 2)
        (psummon (mk-loc kplace 19 13)
                 mk-troll
                 (kern-dice-roll "1d2")))
    ;; Spiders respawn scattered at random corners.
    (if (< (length spiders) 2)
        (af-spawn-spiders kplace (kern-dice-roll "1d2"))))
  #t)
| null | https://raw.githubusercontent.com/Elzair/nazghul/8f3a45ed6289cd9f469c4ff618d39366f2fbc1d8/worlds/haxima-1.001/af-entry.scm | scheme | ----------------------------------------------------------------------------
af-entry.scm
This file defines the on-entry procedure executed whenever the player enters
the abandoned farm place. The purpose of this proc is to respawn some
monsters in the place.
---------------------------------------------------------------------------- | I never want there to be more than 5 each of trolls or spiders . I 'll roll to
add monsters if there are 2 or less , and I 'll never add more than 3 .
(define (af-spawn-spiders kplace n)
(if (> n 0)
(begin
(psummon (place-random-corner kplace)
mk-wood-spider
1)
(af-spawn-spiders kplace (- n 1)))))
(define (af-entry kplace kplayer)
(let ((chars (filter obj-is-char? (kern-place-get-objects kplace))))
(let ((trolls (filter char-is-troll? chars))
(spiders (filter char-is-spider? chars)))
(if (< (length trolls) 2)
(psummon (mk-loc kplace 19 13)
mk-troll
(kern-dice-roll "1d2")))
(if (< (length spiders) 2)
(af-spawn-spiders kplace (kern-dice-roll "1d2")))))
#t)
|
96e1c7c96b4a8972b35f9aee143c2f7ef6b6223796317d8115dba69dfb2b005d | grzm/awyeah-api | signers.clj | Copyright ( c ) Cognitect , Inc.
;; All rights reserved.
(ns ^:skip-wiki com.grzm.awyeah.signers
"Impl, don't call directly."
(:require
[clojure.string :as str]
[com.grzm.awyeah.service :as service]
[com.grzm.awyeah.util :as util])
(:import
(java.net URI)
(java.net URLDecoder)))
(set! *warn-on-reflection* true)
(defmulti sign-http-request
"Sign the HTTP request."
(fn [service _endpoint _credentials _http-request]
(get-in service [:metadata :signatureVersion])))
(defn uri-encode
"Escape (%XX) special characters in the string `s`.
Letters, digits, and the characters `_-~.` are never encoded.
The optional `extra-chars` specifies extra characters to not encode."
([^String s]
(when s
(uri-encode s "")))
([^String s extra-chars]
(when s
(let [safe-chars (->> extra-chars
(into #{\_ \- \~ \.})
(into #{} (map int)))
builder (StringBuilder.)]
(doseq [b (.getBytes s "UTF-8")]
(.append builder
(if (or (Character/isLetterOrDigit (int b))
(contains? safe-chars b))
(char b)
(format "%%%02X" b))))
(.toString builder)))))
(defn credential-scope
[{:keys [region service]} request]
(str/join "/" [(->> (get-in request [:headers "x-amz-date"])
(util/parse-date util/x-amz-date-format)
(util/format-date util/x-amz-date-only-format))
region
service
"aws4_request"]))
(defn- canonical-method
[{:keys [request-method]}]
(-> request-method name str/upper-case))
(defn- canonical-uri
[{:keys [uri]}]
(let [encoded-path (-> uri
( URI . ) throws Exception on ' // ' .
( URI . ) throws Exception on space .
(URI.)
(.normalize)
(.getPath)
(uri-encode "/"))]
(if (.isEmpty ^String encoded-path)
"/"
encoded-path)))
(defn- canonical-query-string
[{:keys [uri query-string]}]
(let [qs (or query-string (second (str/split uri #"\?")))]
(when-not (str/blank? qs)
(->> (str/split qs #"&")
(map #(str/split % #"=" 2))
TODO ( dchelimsky 2019 - 01 - 30 ) decoding first because sometimes
;; it's already been encoding. Look into avoiding that!
(map (fn [kv] (map #(uri-encode (URLDecoder/decode %)) kv)))
(sort (fn [[k1 v1] [k2 v2]]
(if (= k1 k2)
(compare v1 v2)
(compare k1 k2))))
(map (fn [[k v]] (str k "=" v)))
(str/join "&")))))
(defn- canonical-headers
[{:keys [headers]}]
(reduce-kv (fn [m k v]
(assoc m (str/lower-case k) (-> v str/trim (str/replace #"\s+" " "))))
(sorted-map)
headers))
(defn- canonical-headers-string
[request]
(->> (canonical-headers request)
(map (fn [[k v]] (str k ":" v "\n")))
(str/join "")))
(defn signed-headers
[request]
(->> (canonical-headers request)
keys
(str/join ";")))
(defn hashed-body
[request]
(util/hex-encode (util/sha-256 (:body request))))
(defn canonical-request
[{:keys [headers] :as request}]
(str/join "\n" [(canonical-method request)
(canonical-uri request)
(canonical-query-string request)
(canonical-headers-string request)
(signed-headers request)
(or (get headers "x-amz-content-sha256")
(hashed-body request))]))
(defn string-to-sign
[request auth-info]
(let [bytes (.getBytes ^String (canonical-request request))]
(str/join "\n" ["AWS4-HMAC-SHA256"
(get-in request [:headers "x-amz-date"])
(credential-scope auth-info request)
(util/hex-encode (util/sha-256 bytes))])))
(defn signing-key
[request {:keys [secret-access-key region service]}]
(-> (.getBytes (str "AWS4" secret-access-key) "UTF-8")
(util/hmac-sha-256 (->> (get-in request [:headers "x-amz-date"])
(util/parse-date util/x-amz-date-format)
(util/format-date util/x-amz-date-only-format)))
(util/hmac-sha-256 region)
(util/hmac-sha-256 service)
(util/hmac-sha-256 "aws4_request")))
(defn signature
[auth-info request]
(util/hex-encode
(util/hmac-sha-256 (signing-key request auth-info)
(string-to-sign request auth-info))))
(defn v4-sign-http-request
[service endpoint credentials http-request & {:keys [content-sha256-header?]}]
(let [{:keys [:aws/access-key-id :aws/secret-access-key :aws/session-token]} credentials
auth-info {:access-key-id access-key-id
:secret-access-key secret-access-key
:service (or (service/signing-name service)
(service/endpoint-prefix service))
:region (or (get-in endpoint [:credentialScope :region])
(:region endpoint))}
req (cond-> http-request
session-token (assoc-in [:headers "x-amz-security-token"] session-token)
content-sha256-header? (assoc-in [:headers "x-amz-content-sha256"] (hashed-body http-request)))]
(assoc-in req
[:headers "authorization"]
(format "AWS4-HMAC-SHA256 Credential=%s/%s, SignedHeaders=%s, Signature=%s"
(:access-key-id auth-info)
(credential-scope auth-info req)
(signed-headers req)
(signature auth-info req)))))
(defmethod sign-http-request "v4"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request))
(defmethod sign-http-request "s3"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request :content-sha256-header? true))
(defmethod sign-http-request "s3v4"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request :content-sha256-header? true))
| null | https://raw.githubusercontent.com/grzm/awyeah-api/1810bf624da2be58c77813106a1d51e32db11690/src/com/grzm/awyeah/signers.clj | clojure | All rights reserved.
it's already been encoding. Look into avoiding that! | Copyright ( c ) Cognitect , Inc.
(ns ^:skip-wiki com.grzm.awyeah.signers
"Impl, don't call directly."
(:require
[clojure.string :as str]
[com.grzm.awyeah.service :as service]
[com.grzm.awyeah.util :as util])
(:import
(java.net URI)
(java.net URLDecoder)))
(set! *warn-on-reflection* true)
(defmulti sign-http-request
"Sign the HTTP request."
(fn [service _endpoint _credentials _http-request]
(get-in service [:metadata :signatureVersion])))
(defn uri-encode
"Escape (%XX) special characters in the string `s`.
Letters, digits, and the characters `_-~.` are never encoded.
The optional `extra-chars` specifies extra characters to not encode."
([^String s]
(when s
(uri-encode s "")))
([^String s extra-chars]
(when s
(let [safe-chars (->> extra-chars
(into #{\_ \- \~ \.})
(into #{} (map int)))
builder (StringBuilder.)]
(doseq [b (.getBytes s "UTF-8")]
(.append builder
(if (or (Character/isLetterOrDigit (int b))
(contains? safe-chars b))
(char b)
(format "%%%02X" b))))
(.toString builder)))))
(defn credential-scope
[{:keys [region service]} request]
(str/join "/" [(->> (get-in request [:headers "x-amz-date"])
(util/parse-date util/x-amz-date-format)
(util/format-date util/x-amz-date-only-format))
region
service
"aws4_request"]))
(defn- canonical-method
[{:keys [request-method]}]
(-> request-method name str/upper-case))
(defn- canonical-uri
[{:keys [uri]}]
(let [encoded-path (-> uri
( URI . ) throws Exception on ' // ' .
( URI . ) throws Exception on space .
(URI.)
(.normalize)
(.getPath)
(uri-encode "/"))]
(if (.isEmpty ^String encoded-path)
"/"
encoded-path)))
(defn- canonical-query-string
[{:keys [uri query-string]}]
(let [qs (or query-string (second (str/split uri #"\?")))]
(when-not (str/blank? qs)
(->> (str/split qs #"&")
(map #(str/split % #"=" 2))
TODO ( dchelimsky 2019 - 01 - 30 ) decoding first because sometimes
(map (fn [kv] (map #(uri-encode (URLDecoder/decode %)) kv)))
(sort (fn [[k1 v1] [k2 v2]]
(if (= k1 k2)
(compare v1 v2)
(compare k1 k2))))
(map (fn [[k v]] (str k "=" v)))
(str/join "&")))))
(defn- canonical-headers
[{:keys [headers]}]
(reduce-kv (fn [m k v]
(assoc m (str/lower-case k) (-> v str/trim (str/replace #"\s+" " "))))
(sorted-map)
headers))
(defn- canonical-headers-string
[request]
(->> (canonical-headers request)
(map (fn [[k v]] (str k ":" v "\n")))
(str/join "")))
(defn signed-headers
[request]
(->> (canonical-headers request)
keys
(str/join ";")))
(defn hashed-body
[request]
(util/hex-encode (util/sha-256 (:body request))))
(defn canonical-request
[{:keys [headers] :as request}]
(str/join "\n" [(canonical-method request)
(canonical-uri request)
(canonical-query-string request)
(canonical-headers-string request)
(signed-headers request)
(or (get headers "x-amz-content-sha256")
(hashed-body request))]))
(defn string-to-sign
[request auth-info]
(let [bytes (.getBytes ^String (canonical-request request))]
(str/join "\n" ["AWS4-HMAC-SHA256"
(get-in request [:headers "x-amz-date"])
(credential-scope auth-info request)
(util/hex-encode (util/sha-256 bytes))])))
(defn signing-key
[request {:keys [secret-access-key region service]}]
(-> (.getBytes (str "AWS4" secret-access-key) "UTF-8")
(util/hmac-sha-256 (->> (get-in request [:headers "x-amz-date"])
(util/parse-date util/x-amz-date-format)
(util/format-date util/x-amz-date-only-format)))
(util/hmac-sha-256 region)
(util/hmac-sha-256 service)
(util/hmac-sha-256 "aws4_request")))
(defn signature
[auth-info request]
(util/hex-encode
(util/hmac-sha-256 (signing-key request auth-info)
(string-to-sign request auth-info))))
(defn v4-sign-http-request
[service endpoint credentials http-request & {:keys [content-sha256-header?]}]
(let [{:keys [:aws/access-key-id :aws/secret-access-key :aws/session-token]} credentials
auth-info {:access-key-id access-key-id
:secret-access-key secret-access-key
:service (or (service/signing-name service)
(service/endpoint-prefix service))
:region (or (get-in endpoint [:credentialScope :region])
(:region endpoint))}
req (cond-> http-request
session-token (assoc-in [:headers "x-amz-security-token"] session-token)
content-sha256-header? (assoc-in [:headers "x-amz-content-sha256"] (hashed-body http-request)))]
(assoc-in req
[:headers "authorization"]
(format "AWS4-HMAC-SHA256 Credential=%s/%s, SignedHeaders=%s, Signature=%s"
(:access-key-id auth-info)
(credential-scope auth-info req)
(signed-headers req)
(signature auth-info req)))))
(defmethod sign-http-request "v4"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request))
(defmethod sign-http-request "s3"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request :content-sha256-header? true))
(defmethod sign-http-request "s3v4"
[service endpoint credentials http-request]
(v4-sign-http-request service endpoint credentials http-request :content-sha256-header? true))
|
2727ff4e82bd77542b180a895477f1d9b0a5df0eede41d6894371f8c805ac725 | yedi/rhyme-finder | streams.clj | (ns rhyme-finder.streams)
(defn indices [pred coll]
(keep-indexed #(when (pred %2) %1) coll))
(defn combos
"[3 4] 4 = 2 1234343412343434 => [[2, 4, 6], [10, 12, 14]]"
[val dist match?-fn min-combo-len coll]
(let [indexes (indices (partial match?-fn val) coll)]
(loop [rem (rest indexes)
curr (first indexes)
ret [[curr]]]
(if (seq rem)
(let [new (first rem)]
(recur (rest rem)
new
(if (> (- new curr) dist)
(conj ret [new])
(update-in ret [(dec (count ret))] conj new))))
(filterv #(<= min-combo-len (count %)) ret)))))
(defn trim-empty [streams]
(filter (fn [s] (seq (:streams s))) streams))
(defn get-streams
"[1 2 3 4] 3 = 2 1234343412343434 => [
{:value 1 :streams []}
{:value 2 :streams []}
{:value 3 :streams [[2, 4, 6], [10, 12, 14]]}
{:value 4 :streams [[3, 5, 7], [11, 13, 15]]}
]
[12, 23, 34, 43] 3 = 2 1234343412343434=> [
{:value 12 :streams []}
{:value 23 :streams []}
{:value 34 :streams [[2, 4, 6], [10, 12, 14]]}
{:value 43 :streams [[3, 5], [11, 13]]}
]
Takes the vals to check, the max distance between matching values, a matching fn,
the minimum # of streams a collection has and returns the streams."
[vals dist match?-fn min-len coll]
(let [append-fn (fn [ret val]
(conj ret {:value val
:streams (combos val dist match?-fn min-len coll)}))]
(trim-empty (reduce append-fn [] vals))))
(defn find-streams
[clen dist match?-fn min-len coll]
(let [coll (partition clen 1 coll)]
(get-streams (set coll) dist match?-fn min-len coll)))
| null | https://raw.githubusercontent.com/yedi/rhyme-finder/c2f994606794e16361f04b03950113ce82a4e090/src/clj/rhyme_finder/streams.clj | clojure | (ns rhyme-finder.streams)
(defn indices [pred coll]
(keep-indexed #(when (pred %2) %1) coll))
(defn combos
"[3 4] 4 = 2 1234343412343434 => [[2, 4, 6], [10, 12, 14]]"
[val dist match?-fn min-combo-len coll]
(let [indexes (indices (partial match?-fn val) coll)]
(loop [rem (rest indexes)
curr (first indexes)
ret [[curr]]]
(if (seq rem)
(let [new (first rem)]
(recur (rest rem)
new
(if (> (- new curr) dist)
(conj ret [new])
(update-in ret [(dec (count ret))] conj new))))
(filterv #(<= min-combo-len (count %)) ret)))))
(defn trim-empty [streams]
(filter (fn [s] (seq (:streams s))) streams))
(defn get-streams
"[1 2 3 4] 3 = 2 1234343412343434 => [
{:value 1 :streams []}
{:value 2 :streams []}
{:value 3 :streams [[2, 4, 6], [10, 12, 14]]}
{:value 4 :streams [[3, 5, 7], [11, 13, 15]]}
]
[12, 23, 34, 43] 3 = 2 1234343412343434=> [
{:value 12 :streams []}
{:value 23 :streams []}
{:value 34 :streams [[2, 4, 6], [10, 12, 14]]}
{:value 43 :streams [[3, 5], [11, 13]]}
]
Takes the vals to check, the max distance between matching values, a matching fn,
the minimum # of streams a collection has and returns the streams."
[vals dist match?-fn min-len coll]
(let [append-fn (fn [ret val]
(conj ret {:value val
:streams (combos val dist match?-fn min-len coll)}))]
(trim-empty (reduce append-fn [] vals))))
(defn find-streams
[clen dist match?-fn min-len coll]
(let [coll (partition clen 1 coll)]
(get-streams (set coll) dist match?-fn min-len coll)))
| |
e3e3a405f07458ebbb059ca8a771515122a08ee9c1970be975d03841bf238a37 | OCamlPro/directories | win_functions_functor.ml |
module Apply (F : Cstubs.FOREIGN) = struct
open Ctypes
open F
open Win_types
module Kernel32 = struct
(** see
-us/windows/win32/api/stringapiset/nf-stringapiset-widechartomultibyte *)
let wide_char_to_multi_byte =
foreign "WideCharToMultiByte" (
UINT CodePage
DWORD dwFlags
LPCWCH lpWideCharStr
Int.t @-> (* int cchWideChar *)
LPSTR lpMultiByteStr
Int.t @-> (* int cbMultiByte *)
LPCH.t @-> (* LPCCH lpDefaultChar *)
LPBOOL.t @-> (* LPBOOL lpUsedDefaultChar *)
returning Int.t (* int *)
)
end
module Shell32 = struct
(** see
-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetknownfolderpath *)
let sh_get_known_folder_path =
foreign "SHGetKnownFolderPath" (
ptr GUID.t @-> (* REFKNOWNFOLDERID rfid (= GUID * ) *)
DWORD dwFlags (= unsigned long )
Token.t @-> (* HANDLE hToken (= void * ) *)
ptr PWSTR.t @-> (* PWSTR * ppszPath (= short unsigned int ** ) *)
returning Hresult.t (* HRESULT *)
)
end
end
| null | https://raw.githubusercontent.com/OCamlPro/directories/b7597a0495da62a8f62a0c20f5a9071ac1c77eb5/src/windows/bindings/win_functions_functor.ml | ocaml | * see
-us/windows/win32/api/stringapiset/nf-stringapiset-widechartomultibyte
int cchWideChar
int cbMultiByte
LPCCH lpDefaultChar
LPBOOL lpUsedDefaultChar
int
* see
-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetknownfolderpath
REFKNOWNFOLDERID rfid (= GUID * )
HANDLE hToken (= void * )
PWSTR * ppszPath (= short unsigned int ** )
HRESULT |
module Apply (F : Cstubs.FOREIGN) = struct
open Ctypes
open F
open Win_types
module Kernel32 = struct
let wide_char_to_multi_byte =
foreign "WideCharToMultiByte" (
UINT CodePage
DWORD dwFlags
LPCWCH lpWideCharStr
LPSTR lpMultiByteStr
)
end
module Shell32 = struct
let sh_get_known_folder_path =
foreign "SHGetKnownFolderPath" (
DWORD dwFlags (= unsigned long )
)
end
end
|
933874a9cf066238569937dde9485ea0c11cfeb2627c1b3eb30ce816f53aa510 | fission-codes/fission | Types.hs | module Fission.CLI.Parser.Config.IPFS.Types (Config (..)) where
import qualified Network.IPFS.BinPath.Types as IPFS
import qualified Network.IPFS.Timeout.Types as IPFS
import Fission.Prelude
data Config = Config
^ Path to the IPFS binary ( defaults to system )
^ IPFS timeout
} deriving (Show, Eq)
| null | https://raw.githubusercontent.com/fission-codes/fission/11d14b729ccebfd69499a534445fb072ac3433a3/fission-cli/library/Fission/CLI/Parser/Config/IPFS/Types.hs | haskell | module Fission.CLI.Parser.Config.IPFS.Types (Config (..)) where
import qualified Network.IPFS.BinPath.Types as IPFS
import qualified Network.IPFS.Timeout.Types as IPFS
import Fission.Prelude
data Config = Config
^ Path to the IPFS binary ( defaults to system )
^ IPFS timeout
} deriving (Show, Eq)
| |
4f822c99d3f61b59dbdde1b09ca1f482851e0bc79e6cf5bca203e7d83b9be020 | portkey-cloud/aws-clj-sdk | codecommit.clj | (ns portkey.aws.codecommit (:require [portkey.aws]))
(def
endpoints
'{"ap-northeast-1"
{:credential-scope
{:service "codecommit", :region "ap-northeast-1"},
:ssl-common-name "codecommit.ap-northeast-1.amazonaws.com",
:endpoint "-northeast-1.amazonaws.com",
:signature-version :v4},
"eu-west-1"
{:credential-scope {:service "codecommit", :region "eu-west-1"},
:ssl-common-name "codecommit.eu-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"us-east-2"
{:credential-scope {:service "codecommit", :region "us-east-2"},
:ssl-common-name "codecommit.us-east-2.amazonaws.com",
:endpoint "-east-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-2"
{:credential-scope
{:service "codecommit", :region "ap-southeast-2"},
:ssl-common-name "codecommit.ap-southeast-2.amazonaws.com",
:endpoint "-southeast-2.amazonaws.com",
:signature-version :v4},
"sa-east-1"
{:credential-scope {:service "codecommit", :region "sa-east-1"},
:ssl-common-name "codecommit.sa-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"ap-southeast-1"
{:credential-scope
{:service "codecommit", :region "ap-southeast-1"},
:ssl-common-name "codecommit.ap-southeast-1.amazonaws.com",
:endpoint "-southeast-1.amazonaws.com",
:signature-version :v4},
"ap-northeast-2"
{:credential-scope
{:service "codecommit", :region "ap-northeast-2"},
:ssl-common-name "codecommit.ap-northeast-2.amazonaws.com",
:endpoint "-northeast-2.amazonaws.com",
:signature-version :v4},
"eu-west-3"
{:credential-scope {:service "codecommit", :region "eu-west-3"},
:ssl-common-name "codecommit.eu-west-3.amazonaws.com",
:endpoint "-west-3.amazonaws.com",
:signature-version :v4},
"ca-central-1"
{:credential-scope {:service "codecommit", :region "ca-central-1"},
:ssl-common-name "codecommit.ca-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-central-1"
{:credential-scope {:service "codecommit", :region "eu-central-1"},
:ssl-common-name "codecommit.eu-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-west-2"
{:credential-scope {:service "codecommit", :region "eu-west-2"},
:ssl-common-name "codecommit.eu-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-west-2"
{:credential-scope {:service "codecommit", :region "us-west-2"},
:ssl-common-name "codecommit.us-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-east-1"
{:credential-scope {:service "codecommit", :region "us-east-1"},
:ssl-common-name "codecommit.us-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"us-west-1"
{:credential-scope {:service "codecommit", :region "us-west-1"},
:ssl-common-name "codecommit.us-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"ap-south-1"
{:credential-scope {:service "codecommit", :region "ap-south-1"},
:ssl-common-name "codecommit.ap-south-1.amazonaws.com",
:endpoint "-south-1.amazonaws.com",
:signature-version :v4}})
(comment TODO support "json")
| null | https://raw.githubusercontent.com/portkey-cloud/aws-clj-sdk/10623a5c86bd56c8b312f56b76ae5ff52c26a945/src/portkey/aws/codecommit.clj | clojure | (ns portkey.aws.codecommit (:require [portkey.aws]))
(def
endpoints
'{"ap-northeast-1"
{:credential-scope
{:service "codecommit", :region "ap-northeast-1"},
:ssl-common-name "codecommit.ap-northeast-1.amazonaws.com",
:endpoint "-northeast-1.amazonaws.com",
:signature-version :v4},
"eu-west-1"
{:credential-scope {:service "codecommit", :region "eu-west-1"},
:ssl-common-name "codecommit.eu-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"us-east-2"
{:credential-scope {:service "codecommit", :region "us-east-2"},
:ssl-common-name "codecommit.us-east-2.amazonaws.com",
:endpoint "-east-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-2"
{:credential-scope
{:service "codecommit", :region "ap-southeast-2"},
:ssl-common-name "codecommit.ap-southeast-2.amazonaws.com",
:endpoint "-southeast-2.amazonaws.com",
:signature-version :v4},
"sa-east-1"
{:credential-scope {:service "codecommit", :region "sa-east-1"},
:ssl-common-name "codecommit.sa-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"ap-southeast-1"
{:credential-scope
{:service "codecommit", :region "ap-southeast-1"},
:ssl-common-name "codecommit.ap-southeast-1.amazonaws.com",
:endpoint "-southeast-1.amazonaws.com",
:signature-version :v4},
"ap-northeast-2"
{:credential-scope
{:service "codecommit", :region "ap-northeast-2"},
:ssl-common-name "codecommit.ap-northeast-2.amazonaws.com",
:endpoint "-northeast-2.amazonaws.com",
:signature-version :v4},
"eu-west-3"
{:credential-scope {:service "codecommit", :region "eu-west-3"},
:ssl-common-name "codecommit.eu-west-3.amazonaws.com",
:endpoint "-west-3.amazonaws.com",
:signature-version :v4},
"ca-central-1"
{:credential-scope {:service "codecommit", :region "ca-central-1"},
:ssl-common-name "codecommit.ca-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-central-1"
{:credential-scope {:service "codecommit", :region "eu-central-1"},
:ssl-common-name "codecommit.eu-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-west-2"
{:credential-scope {:service "codecommit", :region "eu-west-2"},
:ssl-common-name "codecommit.eu-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-west-2"
{:credential-scope {:service "codecommit", :region "us-west-2"},
:ssl-common-name "codecommit.us-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-east-1"
{:credential-scope {:service "codecommit", :region "us-east-1"},
:ssl-common-name "codecommit.us-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4},
"us-west-1"
{:credential-scope {:service "codecommit", :region "us-west-1"},
:ssl-common-name "codecommit.us-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"ap-south-1"
{:credential-scope {:service "codecommit", :region "ap-south-1"},
:ssl-common-name "codecommit.ap-south-1.amazonaws.com",
:endpoint "-south-1.amazonaws.com",
:signature-version :v4}})
(comment TODO support "json")
| |
38d7984d7c8ca466f607e95248e00f2207ec4ac3d6a6da8bea043c7d1ed9c9f8 | YoshikuniJujo/test_haskell | Device.hs | {-# LANGUAGE RankNTypes #-}
# LANGUAGE MonoLocalBinds #
# LANGUAGE PatternSynonyms #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module Gpu.Vulkan.Device (
D, create, M.CreateInfo(..), M.QueueCreateInfo(..), M.CreateFlags,
getQueue,
waitIdle
) where
import Foreign.Storable.PeekPoke
import Foreign.Storable.HeteroList
import Control.Exception
import Data.Word
import Gpu.Vulkan.Device.Type
import qualified Gpu.Vulkan.AllocationCallbacks as AllocationCallbacks
import qualified Gpu.Vulkan.PhysicalDevice as PhysicalDevice
import qualified Gpu.Vulkan.Device.Middle as M
import qualified Gpu.Vulkan.QueueFamily.Middle as QueueFamily
import qualified Gpu.Vulkan.Queue as Queue
create :: (Pokable n, WithPokedHeteroToListM ns, Pokable n3, Pokable n4) =>
PhysicalDevice.P -> M.CreateInfo n ns ->
Maybe (AllocationCallbacks.A n3) -> Maybe (AllocationCallbacks.A n4) ->
(forall s . D s -> IO a) -> IO a
create phdvc ci macc macd f =
bracket (M.create phdvc ci macc) (`M.destroy` macd) (f . D)
getQueue :: D s -> QueueFamily.Index -> Word32 -> IO Queue.Q
getQueue (D dvc) (QueueFamily.Index qfi) qi = M.getQueue dvc qfi qi
waitIdle :: D s -> IO ()
waitIdle (D d) = M.waitIdle d
| null | https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/fa60580c77c26494bf3a8ee2606e4cf5fd282b82/themes/gui/vulkan/try-my-vulkan-snd/src/Gpu/Vulkan/Device.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE MonoLocalBinds #
# LANGUAGE PatternSynonyms #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module Gpu.Vulkan.Device (
D, create, M.CreateInfo(..), M.QueueCreateInfo(..), M.CreateFlags,
getQueue,
waitIdle
) where
import Foreign.Storable.PeekPoke
import Foreign.Storable.HeteroList
import Control.Exception
import Data.Word
import Gpu.Vulkan.Device.Type
import qualified Gpu.Vulkan.AllocationCallbacks as AllocationCallbacks
import qualified Gpu.Vulkan.PhysicalDevice as PhysicalDevice
import qualified Gpu.Vulkan.Device.Middle as M
import qualified Gpu.Vulkan.QueueFamily.Middle as QueueFamily
import qualified Gpu.Vulkan.Queue as Queue
create :: (Pokable n, WithPokedHeteroToListM ns, Pokable n3, Pokable n4) =>
PhysicalDevice.P -> M.CreateInfo n ns ->
Maybe (AllocationCallbacks.A n3) -> Maybe (AllocationCallbacks.A n4) ->
(forall s . D s -> IO a) -> IO a
create phdvc ci macc macd f =
bracket (M.create phdvc ci macc) (`M.destroy` macd) (f . D)
getQueue :: D s -> QueueFamily.Index -> Word32 -> IO Queue.Q
getQueue (D dvc) (QueueFamily.Index qfi) qi = M.getQueue dvc qfi qi
waitIdle :: D s -> IO ()
waitIdle (D d) = M.waitIdle d
|
14e596dfe76a54c82ca890d51117870903208c5ea619faf170812c98814d7731 | dhleong/spade | demo.cljs | (ns spade.demo
(:require [clojure.string :as str]
[reagent.dom :as rdom]
[spade.core :refer [defclass defattrs defglobal defkeyframes]]
[spade.react :as spade]))
(defkeyframes anim-frames []
["0%" {:opacity 0}]
["100%" {:opacity 1}])
(defkeyframes parameterized-anim-frames [start end]
["0%" {:opacity start}]
["100%" {:opacity end}])
(defglobal background
[:body {:*my-var* "22pt"
:background "#333"}])
(defglobal text
[:body {:color "#fff"}])
(defclass serenity []
(at-media {:min-width "750px"}
{:padding "80px"})
{:padding "8px"}
[:.title {:font-size :*my-var*
:animation [[(parameterized-anim-frames 0 0.5) "560ms" 'ease-in-out]]}])
(defclass colorized-with-key [color]
^{:key (str/upper-case color)}
{:height "20px"
:width "20px"
:background-color color})
(defclass colorized-with-key-in-block [color]
(let [k (str/upper-case color)]
^{:key k}
{:height "20px"
:width "20px"
:background-color color}))
(defclass colorized [color]
{:height "20px"
:width "20px"
:background-color color})
(defclass flex []
{:display 'flex})
(defattrs composed-attrs []
{:composes (flex)})
(defn demo []
[:<>
[:div {:class (serenity)}
[:div.title "Test"]]
[:div {:class (flex)}
[:div {:class (colorized-with-key "red")}]
[:div {:class (colorized-with-key "blue")}]
[:div {:class (colorized-with-key "green")}]]
[:div {:class (flex)}
[:div {:class (colorized-with-key-in-block "red")}]
[:div {:class (colorized-with-key-in-block "blue")}]
[:div {:class (colorized-with-key-in-block "green")}]]
[:div {:class (flex)}
[:div {:class (colorized "red")}]
[:div {:class (colorized "blue")}]
[:div {:class (colorized "green")}]]
[:div (composed-attrs)
[:div {:class (colorized "red")}]
[:div {:class (colorized "blue")}]
[:div {:class (colorized "green")}]]
])
(defn view []
[:div
[:style#styles]
[demo]])
(defn mount-root []
(rdom/render
[spade/with-dom #(.getElementById js/document "styles")
[view]]
(.getElementById js/document "app")))
(defn init! []
(mount-root))
(init!)
| null | https://raw.githubusercontent.com/dhleong/spade/d77c2adcf451aa9c0b55bd0a835d53f95c7becf4/dev/spade/demo.cljs | clojure | (ns spade.demo
(:require [clojure.string :as str]
[reagent.dom :as rdom]
[spade.core :refer [defclass defattrs defglobal defkeyframes]]
[spade.react :as spade]))
(defkeyframes anim-frames []
["0%" {:opacity 0}]
["100%" {:opacity 1}])
(defkeyframes parameterized-anim-frames [start end]
["0%" {:opacity start}]
["100%" {:opacity end}])
(defglobal background
[:body {:*my-var* "22pt"
:background "#333"}])
(defglobal text
[:body {:color "#fff"}])
(defclass serenity []
(at-media {:min-width "750px"}
{:padding "80px"})
{:padding "8px"}
[:.title {:font-size :*my-var*
:animation [[(parameterized-anim-frames 0 0.5) "560ms" 'ease-in-out]]}])
(defclass colorized-with-key [color]
^{:key (str/upper-case color)}
{:height "20px"
:width "20px"
:background-color color})
(defclass colorized-with-key-in-block [color]
(let [k (str/upper-case color)]
^{:key k}
{:height "20px"
:width "20px"
:background-color color}))
(defclass colorized [color]
{:height "20px"
:width "20px"
:background-color color})
(defclass flex []
{:display 'flex})
(defattrs composed-attrs []
{:composes (flex)})
(defn demo []
[:<>
[:div {:class (serenity)}
[:div.title "Test"]]
[:div {:class (flex)}
[:div {:class (colorized-with-key "red")}]
[:div {:class (colorized-with-key "blue")}]
[:div {:class (colorized-with-key "green")}]]
[:div {:class (flex)}
[:div {:class (colorized-with-key-in-block "red")}]
[:div {:class (colorized-with-key-in-block "blue")}]
[:div {:class (colorized-with-key-in-block "green")}]]
[:div {:class (flex)}
[:div {:class (colorized "red")}]
[:div {:class (colorized "blue")}]
[:div {:class (colorized "green")}]]
[:div (composed-attrs)
[:div {:class (colorized "red")}]
[:div {:class (colorized "blue")}]
[:div {:class (colorized "green")}]]
])
(defn view []
[:div
[:style#styles]
[demo]])
(defn mount-root []
(rdom/render
[spade/with-dom #(.getElementById js/document "styles")
[view]]
(.getElementById js/document "app")))
(defn init! []
(mount-root))
(init!)
| |
d322947801cc5885be781d0e389ec749aa2f21465f888b8109742ec8f8f653c0 | bravit/hid-examples | BenchBuildIPGroups.hs | module BenchBuildIPGroups where
import Criterion.Main
import NFUtils ()
import ParseIP
bench_buildIP :: [Benchmark]
bench_buildIP = [
bgroup "buildIP" [
let theip = [17,0,32,2] in
bgroup "single" [
bench "default" $ nf buildIP theip
, bench "foldr" $ nf buildIP_foldr theip
, bench "foldl" $ nf buildIP_foldl theip
, bench "foldl-shl" $ nf buildIP_foldl_shl theip
]
, let ipcomps = [[0,0,0,1], [192,168,1,1], [17,0,32,2],
[255,255,252,41], [255,255,252,41]] in
bgroup "several" [
bench "default" $ nf (map buildIP) ipcomps
, bench "foldr" $ nf (map buildIP_foldr) ipcomps
, bench "foldl" $ nf (map buildIP_foldl) ipcomps
, bench "foldl-shl" $ nf (map buildIP_foldl_shl) ipcomps
]
]
]
| null | https://raw.githubusercontent.com/bravit/hid-examples/913e116b7ee9c7971bba10fe70ae0b61bfb9391b/benchmarks/iplookup/BenchBuildIPGroups.hs | haskell | module BenchBuildIPGroups where
import Criterion.Main
import NFUtils ()
import ParseIP
bench_buildIP :: [Benchmark]
bench_buildIP = [
bgroup "buildIP" [
let theip = [17,0,32,2] in
bgroup "single" [
bench "default" $ nf buildIP theip
, bench "foldr" $ nf buildIP_foldr theip
, bench "foldl" $ nf buildIP_foldl theip
, bench "foldl-shl" $ nf buildIP_foldl_shl theip
]
, let ipcomps = [[0,0,0,1], [192,168,1,1], [17,0,32,2],
[255,255,252,41], [255,255,252,41]] in
bgroup "several" [
bench "default" $ nf (map buildIP) ipcomps
, bench "foldr" $ nf (map buildIP_foldr) ipcomps
, bench "foldl" $ nf (map buildIP_foldl) ipcomps
, bench "foldl-shl" $ nf (map buildIP_foldl_shl) ipcomps
]
]
]
| |
47ca551dd14eb1df20654fbf676049bb6787c45cd12301a74fa7f7402579bed8 | hopv/MoCHi | typConst.ml | open Util
open Combinator
(** Type constants *)
* { 6 Constructors }
type t =
(* base types *)
| Unit
| Bool
| Int
| Real
| String
| Ext of string (* external types *)
| Bot
| Top
| Unknown
(* composed types *)
| Arrow
| List
(* intersection and union types *)
| Inter of int
| Union of int
(* refinement types *)
| Ref (*of t * Idnt.t * Formulat.t*)
(* abstraction types *)
| Abs (*of t * Idnt.t * Formulat.t*)
* { 6 Inspectors }
let arity_of = function
| Unit | Bool | Int | Real | String -> 0
| Ext(_) -> 0(*@todo*)
| Bot | Top -> 0
| Unknown -> 0
| Arrow -> 2
| List -> 1
| Inter(n) -> n
| Union(n) -> n
| Ref -> 2
| Abs -> 2
let rec string_of = function
| Unit -> "unit"
| Bool -> "bool"
| Int -> "int"
| Real -> "real"
| String -> "string"
| Ext(a) -> a
| Bot -> "bot"
| Top -> "top"
| Unknown -> "unknown"
| Arrow -> "arrow"
| List -> "list"
| Inter(n) -> "inter " ^ string_of_int n
| Union(n) -> "union " ^ string_of_int n
| Ref -> "refine"
| Abs -> "abst"
let sexp_of = function
| Unit -> "Unit"
| Bool -> "Bool"
| Int -> "Int"
| Real -> "Real"
| _ -> assert false
let is_base = function
| Unit | Bool | Int | Real | String
| Ext(_)(*@todo?*)
| Bot | Top
| Unknown -> true
| _ -> false
let is_ext = function
| Ext _ -> true
| _ -> false
let is_unknown = function
| Unknown -> true
| _ -> false
let equiv_mod_unknown tyc1 tyc2 =
tyc1 = tyc2
|| is_unknown tyc1
|| is_unknown tyc2
* { 6 Printers }
let pr uprs ppf c =
match c, uprs with
| Arrow, [upr1; upr2] ->
Format.fprintf
ppf
"@[<hov>%a ->@ %a@]"
upr1 ()
upr2 ()
| _, _ ->
Printer.concat_uprs_app
((Printer.upr_of String.pr (string_of c)) :: uprs)
"@ "
ppf
()
let pr_tex uprs ppf c =
match c, uprs with
| Arrow, [upr1; upr2] ->
Format.fprintf
ppf
"@[<hov>%a \\rightarrow@ %a@]"
upr1 ()
upr2 ()
| _, _ ->
Printer.concat_uprs_app
((Printer.upr_of String.pr (string_of c)) :: uprs)
"@ "
ppf
()
| null | https://raw.githubusercontent.com/hopv/MoCHi/b0ac0d626d64b1e3c779d8e98cb232121cc3196a/fpat/typConst.ml | ocaml | * Type constants
base types
external types
composed types
intersection and union types
refinement types
of t * Idnt.t * Formulat.t
abstraction types
of t * Idnt.t * Formulat.t
@todo
@todo? | open Util
open Combinator
* { 6 Constructors }
type t =
| Unit
| Bool
| Int
| Real
| String
| Bot
| Top
| Unknown
| Arrow
| List
| Inter of int
| Union of int
* { 6 Inspectors }
let arity_of = function
| Unit | Bool | Int | Real | String -> 0
| Bot | Top -> 0
| Unknown -> 0
| Arrow -> 2
| List -> 1
| Inter(n) -> n
| Union(n) -> n
| Ref -> 2
| Abs -> 2
let rec string_of = function
| Unit -> "unit"
| Bool -> "bool"
| Int -> "int"
| Real -> "real"
| String -> "string"
| Ext(a) -> a
| Bot -> "bot"
| Top -> "top"
| Unknown -> "unknown"
| Arrow -> "arrow"
| List -> "list"
| Inter(n) -> "inter " ^ string_of_int n
| Union(n) -> "union " ^ string_of_int n
| Ref -> "refine"
| Abs -> "abst"
let sexp_of = function
| Unit -> "Unit"
| Bool -> "Bool"
| Int -> "Int"
| Real -> "Real"
| _ -> assert false
let is_base = function
| Unit | Bool | Int | Real | String
| Bot | Top
| Unknown -> true
| _ -> false
let is_ext = function
| Ext _ -> true
| _ -> false
let is_unknown = function
| Unknown -> true
| _ -> false
let equiv_mod_unknown tyc1 tyc2 =
tyc1 = tyc2
|| is_unknown tyc1
|| is_unknown tyc2
* { 6 Printers }
let pr uprs ppf c =
match c, uprs with
| Arrow, [upr1; upr2] ->
Format.fprintf
ppf
"@[<hov>%a ->@ %a@]"
upr1 ()
upr2 ()
| _, _ ->
Printer.concat_uprs_app
((Printer.upr_of String.pr (string_of c)) :: uprs)
"@ "
ppf
()
let pr_tex uprs ppf c =
match c, uprs with
| Arrow, [upr1; upr2] ->
Format.fprintf
ppf
"@[<hov>%a \\rightarrow@ %a@]"
upr1 ()
upr2 ()
| _, _ ->
Printer.concat_uprs_app
((Printer.upr_of String.pr (string_of c)) :: uprs)
"@ "
ppf
()
|
f16f82c396e1f36aadbadffafdddeedbabfaa78393fb6ccd78bdbf10ac0d6e39 | pink-gorilla/webly | dialog.cljs | (ns frontend.dialog
(:require-macros [reagent.ratom :refer [reaction]])
(:require
[re-frame.core :refer [reg-sub-raw reg-event-db dispatch subscribe]]))
; stolen from:
; -frame-modal
; todo: incorporate this
; -tim.com/learning-lab/tailwind-starter-kit/documentation/vue/modals/small
(reg-event-db
:modal/open
(fn [db [_ child size close]]
(assoc-in db [:modal]
{:show? true
:child child
:close (or close nil) ; optionally dispatch on close reframe event
:size (or size :default)})))
(reg-event-db
:modal/close
(fn [db [_]]
(let [{:keys [show? close]} (:modal db)]
(if show?
(do (when close
(dispatch close))
(assoc-in db [:modal] {:show? false
:child nil
:size :default
:close nil}))
db))))
(defn modal-panel
[{:keys [child size]}]
[:div {:class "modal-wrapper"}
[:div {:class "modal-backdrop"
:on-click (fn [event]
(dispatch [:modal/close])
(.preventDefault event)
(.stopPropagation event))}]
[:div {:class "modal-child"
:style {:width (case size
:extra-small "15%"
:small "30%"
:large "70%"
:extra-large "85%"
"50%")}} child]])
(reg-sub-raw
:modal
(fn [db _] (reaction (:modal @db))))
(defn modal-container []
(let [modal (subscribe [:modal])]
(fn []
[:div
(when (:show? @modal)
[modal-panel @modal])])))
| null | https://raw.githubusercontent.com/pink-gorilla/webly/fcc124b1ad92849d783d7d71b064d0009223d6a6/frontend/src/frontend/dialog.cljs | clojure | stolen from:
-frame-modal
todo: incorporate this
-tim.com/learning-lab/tailwind-starter-kit/documentation/vue/modals/small
optionally dispatch on close reframe event | (ns frontend.dialog
(:require-macros [reagent.ratom :refer [reaction]])
(:require
[re-frame.core :refer [reg-sub-raw reg-event-db dispatch subscribe]]))
(reg-event-db
:modal/open
(fn [db [_ child size close]]
(assoc-in db [:modal]
{:show? true
:child child
:size (or size :default)})))
(reg-event-db
:modal/close
(fn [db [_]]
(let [{:keys [show? close]} (:modal db)]
(if show?
(do (when close
(dispatch close))
(assoc-in db [:modal] {:show? false
:child nil
:size :default
:close nil}))
db))))
(defn modal-panel
[{:keys [child size]}]
[:div {:class "modal-wrapper"}
[:div {:class "modal-backdrop"
:on-click (fn [event]
(dispatch [:modal/close])
(.preventDefault event)
(.stopPropagation event))}]
[:div {:class "modal-child"
:style {:width (case size
:extra-small "15%"
:small "30%"
:large "70%"
:extra-large "85%"
"50%")}} child]])
(reg-sub-raw
:modal
(fn [db _] (reaction (:modal @db))))
(defn modal-container []
(let [modal (subscribe [:modal])]
(fn []
[:div
(when (:show? @modal)
[modal-panel @modal])])))
|
c6e6699dd46070dda8122417d62a15cf3bdbb8e6d1c0dd6815bc2c43f7bee776 | elastic/eui-cljs | take_mounted_snapshot.cljs | (ns eui.test.take-mounted-snapshot
(:require ["@elastic/eui/lib/test/take_mounted_snapshot.js" :as eui]))
(def takeMountedSnapshot eui/takeMountedSnapshot)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/test/take_mounted_snapshot.cljs | clojure | (ns eui.test.take-mounted-snapshot
(:require ["@elastic/eui/lib/test/take_mounted_snapshot.js" :as eui]))
(def takeMountedSnapshot eui/takeMountedSnapshot)
| |
789c402834a1ecc85cf594b82b8bce4557e6e4effdd3b9274027b4c4403ec84d | russ/openpoker | betting.erl | Copyright ( C ) 2005 - 2008 Wager Labs , SA
%%%%
%%%% THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS
CREATIVE COMMONS PUBLIC LICENSE ( " CCPL " OR " LICENSE " ) . THE WORK IS
%%%% PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF
%%%% THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT
%%%% LAW IS PROHIBITED.
%%%%
%%%% BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT
%%%% AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT
%%%% THIS LICENSE MAY BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS
%%%% YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE
%%%% OF SUCH TERMS AND CONDITIONS.
%%%%
%%%% Please see LICENSE for full legal details and the following URL
%%%% for a human-readable explanation:
%%%%
-nc-sa/3.0/us/
%%%%
-module(betting).
-export([start/3, betting/3]).
-include_lib("eunit/include/eunit.hrl").
-include("common.hrl").
-include("texas.hrl").
-include("pp.hrl").
-include("game.hrl").
start(Game, Ctx, [MaxRaises, Stage]) ->
start(Game, Ctx, [MaxRaises, Stage, false]);
start(Game, Ctx, [MaxRaises, Stage, HaveBlinds]) ->
Ctx1 = Ctx#texas{
have_blinds = HaveBlinds,
max_raises = MaxRaises,
stage = Stage
},
Ctx2 = if
not HaveBlinds ->
Ctx1#texas{ call = 0.0 };
true ->
Ctx1
end,
B = Ctx2#texas.b,
Active = g:get_seats(Game, B, ?PS_PLAY),
PlayerCount = length(Active),
if
PlayerCount < 2 ->
{stop, Game, Ctx2};
true ->
Event = #game_stage{
game = Game#game.gid,
stage = Ctx2#texas.stage
},
Game1 = g:broadcast(Game, Event),
if
HaveBlinds ->
%% start with the player after the big blind
BB = Ctx2#texas.bb,
Temp = g:get_seats(Game1, BB, ?PS_PLAY),
Player = hd(Temp);
true ->
start with the first player after the button
Player = hd(Active)
end,
Game2 = Game1#game{ raise_count = 0 },
ask_for_bet(Game2, Ctx2, Player)
end.
betting(Game, Ctx, #raise{ player = Player })
when Ctx#texas.exp_player /= Player ->
{continue, Game, Ctx};
%%% Call
betting(Game, Ctx, #raise{ player = Player, raise = 0.0 }) ->
Game1 = g:cancel_timer(Game),
Amt = Ctx#texas.exp_amt,
Seat = g:get_seat(Game1, Ctx#texas.exp_seat),
Inplay = Seat#seat.inplay,
if
Amt > Inplay ->
betting(Game, Ctx, #fold{ player = Player });
true ->
%% proper bet
Game2 = g:set_state(Game1, Player, ?PS_BET),
Game3 = g:add_bet(Game2, Player, Amt),
R1 = #notify_raise{
game = Game3#game.gid,
player = Seat#seat.pid,
raise = 0.0,
call = Amt
},
Game4 = g:broadcast(Game3, R1),
next_turn(Game4, Ctx, Ctx#texas.exp_seat)
end;
%%% Raise
betting(Game, Ctx, #raise{ player = Player, raise = Amt }) ->
Game1 = g:cancel_timer(Game),
Call = Ctx#texas.exp_amt,
Min = Ctx#texas.exp_min,
Max = Ctx#texas.exp_max,
Seat = g:get_seat(Game, Ctx#texas.exp_seat),
Inplay = Seat#seat.inplay,
RC = Game1#game.raise_count,
if
(Amt > Inplay) or
(Amt > Max) or
(Max == 0.0) or % should have sent CALL
((Amt < Min) and ((Amt + Call) /= Inplay)) ->
betting(Game1, Ctx, #fold{ player = Player });
true ->
%% proper raise
RC1 = if
Call /= 0.0 ->
RC + 1;
true ->
RC
end,
Game2 = g:add_bet(Game1, Player, Amt + Call),
Game3 = g:reset_player_state(Game2, ?PS_BET, ?PS_PLAY),
Game4 = if
Amt + Call == Inplay ->
Game3;
true ->
g:set_state(Game3, Player, ?PS_BET)
end,
R1 = #notify_raise{
game = Game4#game.gid,
player = Seat#seat.pid,
raise = Amt,
call = Call
},
Game5 = g:broadcast(Game4, R1),
Game6 = Game5#game{ raise_count = RC1 },
Ctx1 = Ctx#texas{ call = Ctx#texas.call + Amt },
next_turn(Game6, Ctx1, Ctx1#texas.exp_seat)
end;
betting(Game, Ctx, R = #fold{}) ->
if
Ctx#texas.exp_player /= R#fold.player ->
{continue, Game, Ctx};
true ->
Game1 = g:cancel_timer(Game),
Game2 = g:set_state(Game1, Ctx#texas.exp_seat, ?PS_FOLD),
next_turn(Game2, Ctx, Ctx#texas.exp_seat)
end;
betting(Game, Ctx, {timeout, _, _}) ->
Game1 = g:cancel_timer(Game),
Player = Ctx#texas.exp_player,
error_logger:warning_report([{message, "Player timeout!"},
{module, ?MODULE},
{player, Player}
]),
betting(Game1, Ctx, #fold{ player = Player });
betting(Game, Ctx, R)
when is_record(R, join), Game#game.tourney /= none;
is_record(R, join), Game#game.tourney /= none;
is_record(R, sit_out), Game#game.tourney /= none;
is_record(R, come_back), Game#game.tourney /= none ->
{skip, Game, Ctx};
betting(Game, Ctx, R = #join{}) ->
Game1 = g:join(Game, R#join{ state = ?PS_FOLD }),
{continue, Game1, Ctx};
betting(Game, Ctx, R = #leave{}) ->
Game1 = g:leave(Game, R#leave{ state = ?PS_CAN_LEAVE }),
{continue, Game1, Ctx};
betting(Game, Ctx, Event) ->
error_logger:error_report([{module, ?MODULE},
{line, ?LINE},
{message, Event},
{self, self()}
]),
{continue, Game, Ctx}.
next_turn(Game, Ctx, N) ->
Active = g:get_seats(Game, N, ?PS_PLAY),
Standing = g:get_seats(Game, N, ?PS_STANDING),
ActiveCount = length(Active),
StandingCount = length(Standing),
if
StandingCount < 2 ->
%% last man standing wins
{goto, showdown, Game, Ctx};
ActiveCount == 0.0 ->
%% we are done with this stage
Game1 = g:reset_player_state(Game, ?PS_BET, ?PS_PLAY),
Game2 = g:new_stage(Game1),
Ctx1 = Ctx#texas{ call = 0.0 },
{stop, Game2, Ctx1 };
true ->
%% next player
ask_for_bet(Game, Ctx, hd(Active))
end.
ask_for_bet(Game, Ctx, N) ->
Seat = g:get_seat(Game, N),
Player = Seat#seat.player,
Inplay = Seat#seat.inplay,
Bet = Seat#seat.bet,
Stage = Ctx#texas.stage,
PotSize = g:pot_size(Game),
Call = Ctx#texas.call - Bet,
Low = Game#game.low,
High = Game#game.high,
{Min, Max} = (Game#game.limit):raise(Low, High, PotSize, Inplay, Stage),
Game1 = g:request_bet(Game, N, Call, Min, Max),
Game2 = g:restart_timer(Game1, Game1#game.timeout),
Ctx1 = Ctx#texas{
exp_player = Player,
exp_seat = N,
exp_amt = Call,
exp_min = Min,
exp_max = Max
},
{next, betting, Game2, Ctx1}.
| null | https://raw.githubusercontent.com/russ/openpoker/62edd72a35b9ef52f55da9303cf1e06142e95895/src/betting.erl | erlang |
THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS
PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF
THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT
LAW IS PROHIBITED.
BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT
AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT
THIS LICENSE MAY BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS
YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE
OF SUCH TERMS AND CONDITIONS.
Please see LICENSE for full legal details and the following URL
for a human-readable explanation:
start with the player after the big blind
Call
proper bet
Raise
should have sent CALL
proper raise
last man standing wins
we are done with this stage
next player | Copyright ( C ) 2005 - 2008 Wager Labs , SA
CREATIVE COMMONS PUBLIC LICENSE ( " CCPL " OR " LICENSE " ) . THE WORK IS
-nc-sa/3.0/us/
-module(betting).
-export([start/3, betting/3]).
-include_lib("eunit/include/eunit.hrl").
-include("common.hrl").
-include("texas.hrl").
-include("pp.hrl").
-include("game.hrl").
start(Game, Ctx, [MaxRaises, Stage]) ->
start(Game, Ctx, [MaxRaises, Stage, false]);
start(Game, Ctx, [MaxRaises, Stage, HaveBlinds]) ->
Ctx1 = Ctx#texas{
have_blinds = HaveBlinds,
max_raises = MaxRaises,
stage = Stage
},
Ctx2 = if
not HaveBlinds ->
Ctx1#texas{ call = 0.0 };
true ->
Ctx1
end,
B = Ctx2#texas.b,
Active = g:get_seats(Game, B, ?PS_PLAY),
PlayerCount = length(Active),
if
PlayerCount < 2 ->
{stop, Game, Ctx2};
true ->
Event = #game_stage{
game = Game#game.gid,
stage = Ctx2#texas.stage
},
Game1 = g:broadcast(Game, Event),
if
HaveBlinds ->
BB = Ctx2#texas.bb,
Temp = g:get_seats(Game1, BB, ?PS_PLAY),
Player = hd(Temp);
true ->
start with the first player after the button
Player = hd(Active)
end,
Game2 = Game1#game{ raise_count = 0 },
ask_for_bet(Game2, Ctx2, Player)
end.
betting(Game, Ctx, #raise{ player = Player })
when Ctx#texas.exp_player /= Player ->
{continue, Game, Ctx};
betting(Game, Ctx, #raise{ player = Player, raise = 0.0 }) ->
Game1 = g:cancel_timer(Game),
Amt = Ctx#texas.exp_amt,
Seat = g:get_seat(Game1, Ctx#texas.exp_seat),
Inplay = Seat#seat.inplay,
if
Amt > Inplay ->
betting(Game, Ctx, #fold{ player = Player });
true ->
Game2 = g:set_state(Game1, Player, ?PS_BET),
Game3 = g:add_bet(Game2, Player, Amt),
R1 = #notify_raise{
game = Game3#game.gid,
player = Seat#seat.pid,
raise = 0.0,
call = Amt
},
Game4 = g:broadcast(Game3, R1),
next_turn(Game4, Ctx, Ctx#texas.exp_seat)
end;
betting(Game, Ctx, #raise{ player = Player, raise = Amt }) ->
Game1 = g:cancel_timer(Game),
Call = Ctx#texas.exp_amt,
Min = Ctx#texas.exp_min,
Max = Ctx#texas.exp_max,
Seat = g:get_seat(Game, Ctx#texas.exp_seat),
Inplay = Seat#seat.inplay,
RC = Game1#game.raise_count,
if
(Amt > Inplay) or
(Amt > Max) or
((Amt < Min) and ((Amt + Call) /= Inplay)) ->
betting(Game1, Ctx, #fold{ player = Player });
true ->
RC1 = if
Call /= 0.0 ->
RC + 1;
true ->
RC
end,
Game2 = g:add_bet(Game1, Player, Amt + Call),
Game3 = g:reset_player_state(Game2, ?PS_BET, ?PS_PLAY),
Game4 = if
Amt + Call == Inplay ->
Game3;
true ->
g:set_state(Game3, Player, ?PS_BET)
end,
R1 = #notify_raise{
game = Game4#game.gid,
player = Seat#seat.pid,
raise = Amt,
call = Call
},
Game5 = g:broadcast(Game4, R1),
Game6 = Game5#game{ raise_count = RC1 },
Ctx1 = Ctx#texas{ call = Ctx#texas.call + Amt },
next_turn(Game6, Ctx1, Ctx1#texas.exp_seat)
end;
betting(Game, Ctx, R = #fold{}) ->
if
Ctx#texas.exp_player /= R#fold.player ->
{continue, Game, Ctx};
true ->
Game1 = g:cancel_timer(Game),
Game2 = g:set_state(Game1, Ctx#texas.exp_seat, ?PS_FOLD),
next_turn(Game2, Ctx, Ctx#texas.exp_seat)
end;
betting(Game, Ctx, {timeout, _, _}) ->
Game1 = g:cancel_timer(Game),
Player = Ctx#texas.exp_player,
error_logger:warning_report([{message, "Player timeout!"},
{module, ?MODULE},
{player, Player}
]),
betting(Game1, Ctx, #fold{ player = Player });
betting(Game, Ctx, R)
when is_record(R, join), Game#game.tourney /= none;
is_record(R, join), Game#game.tourney /= none;
is_record(R, sit_out), Game#game.tourney /= none;
is_record(R, come_back), Game#game.tourney /= none ->
{skip, Game, Ctx};
betting(Game, Ctx, R = #join{}) ->
Game1 = g:join(Game, R#join{ state = ?PS_FOLD }),
{continue, Game1, Ctx};
betting(Game, Ctx, R = #leave{}) ->
Game1 = g:leave(Game, R#leave{ state = ?PS_CAN_LEAVE }),
{continue, Game1, Ctx};
betting(Game, Ctx, Event) ->
error_logger:error_report([{module, ?MODULE},
{line, ?LINE},
{message, Event},
{self, self()}
]),
{continue, Game, Ctx}.
next_turn(Game, Ctx, N) ->
Active = g:get_seats(Game, N, ?PS_PLAY),
Standing = g:get_seats(Game, N, ?PS_STANDING),
ActiveCount = length(Active),
StandingCount = length(Standing),
if
StandingCount < 2 ->
{goto, showdown, Game, Ctx};
ActiveCount == 0.0 ->
Game1 = g:reset_player_state(Game, ?PS_BET, ?PS_PLAY),
Game2 = g:new_stage(Game1),
Ctx1 = Ctx#texas{ call = 0.0 },
{stop, Game2, Ctx1 };
true ->
ask_for_bet(Game, Ctx, hd(Active))
end.
ask_for_bet(Game, Ctx, N) ->
Seat = g:get_seat(Game, N),
Player = Seat#seat.player,
Inplay = Seat#seat.inplay,
Bet = Seat#seat.bet,
Stage = Ctx#texas.stage,
PotSize = g:pot_size(Game),
Call = Ctx#texas.call - Bet,
Low = Game#game.low,
High = Game#game.high,
{Min, Max} = (Game#game.limit):raise(Low, High, PotSize, Inplay, Stage),
Game1 = g:request_bet(Game, N, Call, Min, Max),
Game2 = g:restart_timer(Game1, Game1#game.timeout),
Ctx1 = Ctx#texas{
exp_player = Player,
exp_seat = N,
exp_amt = Call,
exp_min = Min,
exp_max = Max
},
{next, betting, Game2, Ctx1}.
|
dc8c22e173774fbe72e4d2083d1aeb6bc0c8ea68c49b8d4adb106c9e55c3632c | adrieng/pulsar | name.ml | This file is part of Pulsar , a temporal functional language .
* Copyright ( C ) 2017
*
* This program is free software : you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation , either version 3 of the License , or ( at your option ) any later
* version .
*
* This program is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE . See the LICENSE file in the top - level directory .
* Copyright (C) 2017 Adrien Guatto
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the LICENSE file in the top-level directory.
*)
(** Module name. *)
type modname = string
(** Top-level declaration name. *)
type shortname = string
(** Fully qualified name. *)
type t = { modname : modname; name : shortname; }
let print fmt { modname; name; } =
Format.fprintf fmt "%s.%s" modname name
let compare q1 q2 =
Warp.Utils.compare_both
(Warp.Utils.compare_string q1.modname q2.modname)
(fun () -> Warp.Utils.compare_string q1.name q2.name)
TODO check that modname conformance .
let make ~modname ~name =
{
modname;
name;
}
| null | https://raw.githubusercontent.com/adrieng/pulsar/c3901388659d9c7978b04dce0815e3ff9aea1a0c/pulsar-lib/name.ml | ocaml | * Module name.
* Top-level declaration name.
* Fully qualified name. | This file is part of Pulsar , a temporal functional language .
* Copyright ( C ) 2017
*
* This program is free software : you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation , either version 3 of the License , or ( at your option ) any later
* version .
*
* This program is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE . See the LICENSE file in the top - level directory .
* Copyright (C) 2017 Adrien Guatto
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the LICENSE file in the top-level directory.
*)
type modname = string
type shortname = string
type t = { modname : modname; name : shortname; }
let print fmt { modname; name; } =
Format.fprintf fmt "%s.%s" modname name
let compare q1 q2 =
Warp.Utils.compare_both
(Warp.Utils.compare_string q1.modname q2.modname)
(fun () -> Warp.Utils.compare_string q1.name q2.name)
TODO check that modname conformance .
let make ~modname ~name =
{
modname;
name;
}
|
03e2085e162725c85bea2870769e9b46c68179de7d399ef65da52e4fba862e28 | robert-strandh/Climacs | prolog2paiprolog.lisp | ;;; -*- Mode: Lisp; Package: CLIMACS-PROLOG-SYNTAX -*-
( c ) copyright 2005 by
( )
;;; This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
;;;
;;; This library is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details .
;;;
You should have received a copy of the GNU Library General Public
;;; License along with this library; if not, write to the
Free Software Foundation , Inc. , 59 Temple Place - Suite 330 ,
Boston , MA 02111 - 1307 USA .
(in-package #:climacs-prolog-syntax)
#+nil
(progn
(defclass prolog-buffer (drei-buffer:standard-buffer)
((filepath :initform nil :accessor filepath)
(syntax :accessor syntax)))
(defmethod initialize-instance :after ((buffer prolog-buffer) &rest args)
(declare (ignore args))
(with-slots (syntax) buffer
(setf syntax (make-instance 'prolog-syntax :buffer buffer))))
)
(defvar *loaded-files* nil "List of files loaded by ensure_loaded directive.")
#+nil
(progn
(defun eval-prolog-file (filepath)
(setf *loaded-files* nil)
(let ((*package*
(or (find-package :paiprolog) (error "Paiprolog not loaded."))))
(dolist (e (buffer->paiprolog (find-prolog-file filepath)))
(when e
(pprint e)
(eval e)))))
(defun find-prolog-file (filepath)
(let ((buffer (make-instance 'prolog-buffer)))
(when (probe-file filepath)
(with-open-file (stream filepath :direction :input)
(esa-buffer:save-buffer-to-stream stream buffer)))
(setf (filepath buffer) filepath
(drei-buffer:offset (low-mark buffer)) 0
(drei-buffer:offset (high-mark buffer)) (drei-buffer:size buffer))
(update-syntax-for-display buffer (syntax buffer) (low-mark buffer)
(high-mark buffer))
buffer))
)
(defun view->paiprolog (view)
(let ((lexemes (drei-syntax::lexemes (lexer (syntax view))))
(expressions '()))
(drei-syntax:update-parse (drei-syntax:syntax view))
(dotimes (i (flexichain:nb-elements lexemes) (nreverse expressions))
(let ((lexeme (flexichain:element* lexemes i)))
(when (typep lexeme 'end-lexeme)
(with-hash-table-iterator
(next-entry (drei-syntax::parse-trees (slot-value lexeme 'state)))
(loop
(multiple-value-bind (more from-state items)
(next-entry)
(declare (ignore from-state))
(cond ((null more) (return))
((typep (car items) 'clause-prolog-text)
(push (prolog->paiprolog (car items)) expressions)
(return))
((typep (car items) 'directive-prolog-text)
;; TODO: handle other directives
(let* ((dexpr (cadr (prolog->paiprolog (car items))))
(dsym (car dexpr)))
(case (intern (symbol-name dsym) :climacs-prolog-syntax)
(ensure-loaded
(unless (member (cadr dexpr) *loaded-files*
:test #'string=)
(dolist (e (view->paiprolog
(find-prolog-file (cadr dexpr))))
(push e expressions))
(push (cadr dexpr) *loaded-files*)))
(include
(dolist (e (view->paiprolog
(find-prolog-file (cadr dexpr))))
(push e expressions)))))
(return))
(t nil))))))))))
;;===========================================================================
;; ISO DIRECTIVES
;;
;; Properties of procedures
;; :- dynamic(PI).
;; PI is a predicate indicator, predicate indicator sequence, or
predicate indicator list . Each procedure identified by PI is
dynamic . The predicates abolish/1 , , ,
;; assertz/1 and retract/1 may be applied to these predicates
;; without raising a permission_error.
;; :- multifile(PI).
;; PI is a predicate indicator, predicate indicator sequence, or
predicate indicator list . Each procedure identified by PI may
be defined by clauses that are contained in more than one
Prolog text .
;; :- discontiguous(PI).
;; PI is a predicate indicator, predicate indicator sequence, or
predicate indicator list . Each procedure identified by PI may
be defined by clauses which are not consecutive in the Prolog
;; text.
;; :- set_prolog_flag(Flag, Value)
The Prolog flag Flag shall have its value set to Value .
;;
;; Format and syntax of read-terms
: - op(Priority , OpSpecifier , Operator ) .
The arguments Priority , , and Operator are as the
;; corresponding arguments of the builtin predicate op/3. The effect
;; on the operator table is to be the same.
: - char_conversion(InChar , ) . The arguments InChar and
;; OutChar are as for the builtin predicate char_conversion/2,
;; the effect on the character conversion table is the same.
char_conversion(In_char , ) is true , with the side
;; effect of adding the pair (In_char, Out_char) to the
;; character conversion table if In_char is not equal to
;; Out_char and removing any pair (In_char, _) from the table if
;; In_char is equal to Out_char. When the flag char_conversion
;; has the value true, the In_char will be replaced by Out_char
when a term is read using .
;;
A goal to be executed after the Prolog text has been prepared for execution
;; :- initialization(Goal).
The goal Goal is to be executed immediately after the Prolog
;; text has been prepared for execution. If there are several
;; such directives the order in which the goals is executed is
;; implementation defined.
;;
Another unit of Prolog text to be prepared for execution .
;; :- include(PrologText).
The Prolog text identified by PrologText is to be textually
included at this point of the current Prolog text .
;; :- ensure_loaded(PrologText)
The Prolog text identified by PrologText must be prepared for
execution ( exactly once ) with the current Prolog text . This
;; directive is idempotent.
;;
NB PrologText is implementation dependent .
;;
;;===========================================================================
(defgeneric prolog->paiprolog (prolog-parse-tree))
PROLOG - NONTERMINALs
(defmethod prolog->paiprolog ((n null))
nil)
(defmethod prolog->paiprolog ((n layout-text))
nil)
(defmethod prolog->paiprolog ((n empty-prolog-text))
nil)
(defmethod prolog->paiprolog ((n directive-prolog-text))
(prolog->paiprolog (directive n)))
(defmethod prolog->paiprolog ((n clause-prolog-text))
(let ((expr (prolog->paiprolog (clause n))))
(cond ((null expr)
nil)
((cl:atom expr)
(cons (intern-paiprolog "<-") (list expr)))
((eq (car expr) (intern-paiprolog "<-"))
expr)
(t (list (intern-paiprolog "<-") expr)))))
(defmethod prolog->paiprolog ((n directive))
(prolog->paiprolog (directive-term n)))
(defmethod prolog->paiprolog ((n directive-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n clause))
(prolog->paiprolog (clause-term n)))
(defmethod prolog->paiprolog ((n clause-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n lterm))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n bracketed-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n functional-compound-term))
`(,(functor->paiprolog (prolog->paiprolog (functor n)))
,@(prolog->paiprolog (arg-list n))))
(defun functor->paiprolog (functor)
"Consumes a string or symbol designating a prolog functor and
returns a symbol designating a paiprolog functor."
(etypecase functor
(symbol functor)
(string
(let ((id (identifier->paiprolog functor)))
(if id id (read-from-string functor))))))
(defmethod prolog->paiprolog ((n atom))
(prolog->paiprolog (value n)))
(defmethod prolog->paiprolog ((n arg-list))
(list (prolog->paiprolog (exp n))))
(defmethod prolog->paiprolog ((n arg-list-pair))
(cons (prolog->paiprolog (exp n)) (prolog->paiprolog (arg-list n))))
(defmethod prolog->paiprolog ((n exp-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n exp-atom))
(prolog->paiprolog (atom n)))
(defmethod prolog->paiprolog ((n variable-term))
(prolog->paiprolog (name n)))
(defmethod prolog->paiprolog ((n constant-term))
(let ((value (value n)))
(typecase value
(cons (- (prolog->paiprolog (cadr value))))
(t (prolog->paiprolog value)))))
(defmethod prolog->paiprolog ((n list-compound-term))
(prolog->paiprolog (items n)))
(defmethod prolog->paiprolog ((n items))
(list (prolog->paiprolog (exp n))))
(defmethod prolog->paiprolog ((n items-pair))
(cons (prolog->paiprolog (exp n))
(prolog->paiprolog (texp n))))
(defmethod prolog->paiprolog ((n items-list))
(cons (prolog->paiprolog (exp n))
(prolog->paiprolog (tlist n))))
(defmethod prolog->paiprolog ((n binary-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n prefix-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n postfix-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))))
(defmethod prolog->paiprolog ((n binary-operator-compound-term))
;; TODO: special-case AND
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n prefix-operator-compound-term))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n postfix-operator-compound-term))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))))
(defmethod prolog->paiprolog ((n op))
(prolog->paiprolog (name n)))
(defmethod prolog->paiprolog ((n empty-list))
'())
(defmethod prolog->paiprolog ((n char-code-list-compound-term))
(prolog->paiprolog (ccl n)))
(defmethod prolog->paiprolog ((n curly-compound-term))
;; TODO: what is a curly-compound-term?
(list (prolog->paiprolog (term n))))
(defmethod prolog->paiprolog ((n curly-brackets))
;; TODO: what are curly brackets?
(intern-paiprolog "{}"))
(defmethod prolog->paiprolog ((n char-code-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n float-number))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n integer))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n name))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n head-tail-separator))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n variable))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-ct))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n comma))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n end))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-curly))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close-curly))
(prolog->paiprolog (syntactic-lexeme n)))
;;; PROLOG-LEXEMEs
(defmethod prolog->paiprolog ((l prolog-lexeme))
  ;; Catch-all: lexemes with no semantic payload contribute nothing to
  ;; the generated paiprolog expression.  Subclasses that fall through
  ;; to this default include:
  ;; {start,end}-lexeme
  ;; comment-lexeme
  ;; error-lexeme
  ;; {open,open-ct,close}-lexeme
  ;; {open,close}-curly-lexeme
  ;; {open,close}-list-lexeme
  ;; head-tail-separator-lexeme
  nil)
(defmethod prolog->paiprolog ((lexeme char-code-list-lexeme))
  ;; The lexeme text is readable as a Lisp object; parse it directly.
  (read-from-string (lexeme-string lexeme)))
;;; Numbers
(defmethod prolog->paiprolog ((lexeme integer-lexeme))
  (read-from-string (lexeme-string lexeme)))
(defmethod prolog->paiprolog ((lexeme float-number-lexeme))
  (read-from-string (lexeme-string lexeme)))
;;; VARIABLE-LEXEMEs
(defmethod prolog->paiprolog ((lexeme named-lexeme))
  ;; A Prolog variable Foo becomes the paiprolog variable symbol ?FOO.
  (let ((variable-name (concatenate 'string "?" (lexeme-string lexeme))))
    (intern-paiprolog variable-name)))
(defmethod prolog->paiprolog ((lexeme anonymous-lexeme))
  ;; The anonymous variable _ maps to the bare ? symbol.
  (intern-paiprolog "?"))
;;; NAME-LEXEMEs
(defmethod prolog->paiprolog ((lexeme comma-lexeme))
  ;; Conjunction: , reads as AND in paiprolog.
  (intern-paiprolog "and"))
(defmethod prolog->paiprolog ((lexeme semicolon-lexeme))
  ;; Disjunction: ; reads as OR.
  (intern-paiprolog "or"))
(defmethod prolog->paiprolog ((lexeme cut-lexeme))
  ;; Cut is kept as the ! symbol.
  (intern-paiprolog "!"))
(defmethod prolog->paiprolog ((l quoted-lexeme))
  ;; A quoted atom: strip the surrounding single quotes and collapse the
  ;; ISO Prolog escape '' (a doubled quote inside a quoted atom) down to
  ;; a single ' character.  The original code only stripped the outer
  ;; quotes, leaving doubled quotes embedded in the atom text.
  ;; NOTE(review): backslash escape sequences (e.g. \n, \\) are still
  ;; passed through verbatim -- TODO confirm whether they should be
  ;; decoded here as well.
  (let* ((s (lexeme-string l))
         (inner (subseq s 1 (1- (length s))))
         (len (length inner)))
    (with-output-to-string (result)
      (do ((i 0))
          ((>= i len))
        (let ((c (char inner i)))
          (write-char c result)
          ;; A doubled quote encodes one literal quote: emit it once and
          ;; skip over the second character of the pair.
          (if (and (char= c #\')
                   (< (1+ i) len)
                   (char= (char inner (1+ i)) #\'))
              (incf i 2)
              (incf i 1)))))))
(defmethod prolog->paiprolog ((lexeme identifier-lexeme))
  ;; Lispify the identifier: underscores become hyphens before interning.
  (let ((id-text (lexeme-string lexeme)))
    (intern-paiprolog (substitute #\- #\_ id-text))))
(defmethod prolog->paiprolog ((l graphic-lexeme))
  ;; Graphic tokens (e.g. :- -> \+) first go through the special-case
  ;; operator translation table; anything unrecognized is interned
  ;; verbatim.  (IF ID ID X) is the non-idiomatic spelling of (OR ID X).
  (let* ((s (lexeme-string l))
         (id (identifier->paiprolog s)))
    (or id (intern-paiprolog s))))
(defun identifier->paiprolog (id-string)
  "Translate the Prolog operator ID-STRING to its paiprolog symbol.
Return NIL when ID-STRING is not one of the known special operators."
  (let ((translation (assoc id-string
                            '((":-"  . "<-")
                              (","   . "and")
                              (";"   . "or")
                              ("->"  . "if")
                              ("=:=" . "=:=")
                              ("\\+" . "fail-if"))
                            :test #'string=)))
    (when translation
      (intern-paiprolog (cdr translation)))))
(defun intern-paiprolog (name)
  "Intern NAME, upcased, into the PAIPROLOG package and return the symbol."
  (let ((upcased (string-upcase name)))
    (intern upcased :paiprolog)))
(define-command (com-export-paiprolog :name t :command-table prolog-table)
    ((pathname 'pathname))
  ;; Convert the current view to paiprolog expressions and write them,
  ;; one form per line, to PATHNAME (overwriting any existing file).
  ;; *PACKAGE* is bound so symbols print without package prefixes.
  (let* ((expressions (view->paiprolog (current-view)))
         (*package* (find-package :paiprolog)))
    (with-open-file (stream pathname :direction :output :if-exists :supersede)
      (dolist (expression expressions)
        (prin1 expression stream)
        (terpri stream)))))
| null | https://raw.githubusercontent.com/robert-strandh/Climacs/68ccba6a2a9cc78c1a84f1698c31c78a215f3d90/Syntax/Prolog/prolog2paiprolog.lisp | lisp | -*- Mode: Lisp; Package: CLIMACS-PROLOG-SYNTAX -*-
This library is free software; you can redistribute it and/or
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
License along with this library; if not, write to the
TODO: handle other directives
===========================================================================
ISO DIRECTIVES
Properties of procedures
:- dynamic(PI).
PI is a predicate indicator, predicate indicator sequence, or
assertz/1 and retract/1 may be applied to these predicates
without raising a permission_error.
:- multifile(PI).
PI is a predicate indicator, predicate indicator sequence, or
:- discontiguous(PI).
PI is a predicate indicator, predicate indicator sequence, or
text.
:- set_prolog_flag(Flag, Value)
Format and syntax of read-terms
corresponding arguments of the builtin predicate op/3. The effect
on the operator table is to be the same.
OutChar are as for the builtin predicate char_conversion/2,
the effect on the character conversion table is the same.
effect of adding the pair (In_char, Out_char) to the
character conversion table if In_char is not equal to
Out_char and removing any pair (In_char, _) from the table if
In_char is equal to Out_char. When the flag char_conversion
has the value true, the In_char will be replaced by Out_char
:- initialization(Goal).
text has been prepared for execution. If there are several
such directives the order in which the goals is executed is
implementation defined.
:- include(PrologText).
:- ensure_loaded(PrologText)
directive is idempotent.
===========================================================================
TODO: special-case AND
TODO: what is a curly-compound-term?
TODO: what are curly brackets?
PROLOG-LEXEMEs
{start,end}-lexeme
comment-lexeme
error-lexeme
{open,open-ct,close}-lexeme
{open,close}-curly-lexeme
{open,close}-list-lexeme
head-tail-separator-lexeme
Numbers
VARIABLE-LEXEMEs
NAME-LEXEMEs |
( c ) copyright 2005 by
( )
modify it under the terms of the GNU Library General Public
version 2 of the License , or ( at your option ) any later version .
Library General Public License for more details .
You should have received a copy of the GNU Library General Public
Free Software Foundation , Inc. , 59 Temple Place - Suite 330 ,
Boston , MA 02111 - 1307 USA .
(in-package #:climacs-prolog-syntax)
#+nil
(progn
(defclass prolog-buffer (drei-buffer:standard-buffer)
((filepath :initform nil :accessor filepath)
(syntax :accessor syntax)))
(defmethod initialize-instance :after ((buffer prolog-buffer) &rest args)
(declare (ignore args))
(with-slots (syntax) buffer
(setf syntax (make-instance 'prolog-syntax :buffer buffer))))
)
(defvar *loaded-files* nil "List of files loaded by ensure_loaded directive.")
#+nil
(progn
(defun eval-prolog-file (filepath)
(setf *loaded-files* nil)
(let ((*package*
(or (find-package :paiprolog) (error "Paiprolog not loaded."))))
(dolist (e (buffer->paiprolog (find-prolog-file filepath)))
(when e
(pprint e)
(eval e)))))
(defun find-prolog-file (filepath)
(let ((buffer (make-instance 'prolog-buffer)))
(when (probe-file filepath)
(with-open-file (stream filepath :direction :input)
(esa-buffer:save-buffer-to-stream stream buffer)))
(setf (filepath buffer) filepath
(drei-buffer:offset (low-mark buffer)) 0
(drei-buffer:offset (high-mark buffer)) (drei-buffer:size buffer))
(update-syntax-for-display buffer (syntax buffer) (low-mark buffer)
(high-mark buffer))
buffer))
)
(defun view->paiprolog (view)
(let ((lexemes (drei-syntax::lexemes (lexer (syntax view))))
(expressions '()))
(drei-syntax:update-parse (drei-syntax:syntax view))
(dotimes (i (flexichain:nb-elements lexemes) (nreverse expressions))
(let ((lexeme (flexichain:element* lexemes i)))
(when (typep lexeme 'end-lexeme)
(with-hash-table-iterator
(next-entry (drei-syntax::parse-trees (slot-value lexeme 'state)))
(loop
(multiple-value-bind (more from-state items)
(next-entry)
(declare (ignore from-state))
(cond ((null more) (return))
((typep (car items) 'clause-prolog-text)
(push (prolog->paiprolog (car items)) expressions)
(return))
((typep (car items) 'directive-prolog-text)
(let* ((dexpr (cadr (prolog->paiprolog (car items))))
(dsym (car dexpr)))
(case (intern (symbol-name dsym) :climacs-prolog-syntax)
(ensure-loaded
(unless (member (cadr dexpr) *loaded-files*
:test #'string=)
(dolist (e (view->paiprolog
(find-prolog-file (cadr dexpr))))
(push e expressions))
(push (cadr dexpr) *loaded-files*)))
(include
(dolist (e (view->paiprolog
(find-prolog-file (cadr dexpr))))
(push e expressions)))))
(return))
(t nil))))))))))
predicate indicator list . Each procedure identified by PI is
dynamic . The predicates abolish/1 , , ,
predicate indicator list . Each procedure identified by PI may
be defined by clauses that are contained in more than one
Prolog text .
predicate indicator list . Each procedure identified by PI may
be defined by clauses which are not consecutive in the Prolog
The Prolog flag Flag shall have its value set to Value .
: - op(Priority , OpSpecifier , Operator ) .
The arguments Priority , , and Operator are as the
: - char_conversion(InChar , ) . The arguments InChar and
char_conversion(In_char , ) is true , with the side
when a term is read using .
A goal to be executed after the Prolog text has been prepared for execution
The goal Goal is to be executed immediately after the Prolog
Another unit of Prolog text to be prepared for execution .
The Prolog text identified by PrologText is to be textually
included at this point of the current Prolog text .
The Prolog text identified by PrologText must be prepared for
execution ( exactly once ) with the current Prolog text . This
NB PrologText is implementation dependent .
(defgeneric prolog->paiprolog (prolog-parse-tree))
PROLOG - NONTERMINALs
(defmethod prolog->paiprolog ((n null))
nil)
(defmethod prolog->paiprolog ((n layout-text))
nil)
(defmethod prolog->paiprolog ((n empty-prolog-text))
nil)
(defmethod prolog->paiprolog ((n directive-prolog-text))
(prolog->paiprolog (directive n)))
(defmethod prolog->paiprolog ((n clause-prolog-text))
(let ((expr (prolog->paiprolog (clause n))))
(cond ((null expr)
nil)
((cl:atom expr)
(cons (intern-paiprolog "<-") (list expr)))
((eq (car expr) (intern-paiprolog "<-"))
expr)
(t (list (intern-paiprolog "<-") expr)))))
(defmethod prolog->paiprolog ((n directive))
(prolog->paiprolog (directive-term n)))
(defmethod prolog->paiprolog ((n directive-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n clause))
(prolog->paiprolog (clause-term n)))
(defmethod prolog->paiprolog ((n clause-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n lterm))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n bracketed-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n functional-compound-term))
`(,(functor->paiprolog (prolog->paiprolog (functor n)))
,@(prolog->paiprolog (arg-list n))))
(defun functor->paiprolog (functor)
"Consumes a string or symbol designating a prolog functor and
returns a symbol designating a paiprolog functor."
(etypecase functor
(symbol functor)
(string
(let ((id (identifier->paiprolog functor)))
(if id id (read-from-string functor))))))
(defmethod prolog->paiprolog ((n atom))
(prolog->paiprolog (value n)))
(defmethod prolog->paiprolog ((n arg-list))
(list (prolog->paiprolog (exp n))))
(defmethod prolog->paiprolog ((n arg-list-pair))
(cons (prolog->paiprolog (exp n)) (prolog->paiprolog (arg-list n))))
(defmethod prolog->paiprolog ((n exp-term))
(prolog->paiprolog (term n)))
(defmethod prolog->paiprolog ((n exp-atom))
(prolog->paiprolog (atom n)))
(defmethod prolog->paiprolog ((n variable-term))
(prolog->paiprolog (name n)))
(defmethod prolog->paiprolog ((n constant-term))
(let ((value (value n)))
(typecase value
(cons (- (prolog->paiprolog (cadr value))))
(t (prolog->paiprolog value)))))
(defmethod prolog->paiprolog ((n list-compound-term))
(prolog->paiprolog (items n)))
(defmethod prolog->paiprolog ((n items))
(list (prolog->paiprolog (exp n))))
(defmethod prolog->paiprolog ((n items-pair))
(cons (prolog->paiprolog (exp n))
(prolog->paiprolog (texp n))))
(defmethod prolog->paiprolog ((n items-list))
(cons (prolog->paiprolog (exp n))
(prolog->paiprolog (tlist n))))
(defmethod prolog->paiprolog ((n binary-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n prefix-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n postfix-operator-compound-lterm))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))))
(defmethod prolog->paiprolog ((n binary-operator-compound-term))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n prefix-operator-compound-term))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (right n))))
(defmethod prolog->paiprolog ((n postfix-operator-compound-term))
(list (prolog->paiprolog (operator n))
(prolog->paiprolog (left n))))
(defmethod prolog->paiprolog ((n op))
(prolog->paiprolog (name n)))
(defmethod prolog->paiprolog ((n empty-list))
'())
(defmethod prolog->paiprolog ((n char-code-list-compound-term))
(prolog->paiprolog (ccl n)))
(defmethod prolog->paiprolog ((n curly-compound-term))
(list (prolog->paiprolog (term n))))
(defmethod prolog->paiprolog ((n curly-brackets))
(intern-paiprolog "{}"))
(defmethod prolog->paiprolog ((n char-code-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n float-number))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n integer))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n name))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n head-tail-separator))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close-list))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n variable))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-ct))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n comma))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n end))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n open-curly))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((n close-curly))
(prolog->paiprolog (syntactic-lexeme n)))
(defmethod prolog->paiprolog ((l prolog-lexeme))
nil)
(defmethod prolog->paiprolog ((l char-code-list-lexeme))
(read-from-string (lexeme-string l)))
(defmethod prolog->paiprolog ((l integer-lexeme))
(read-from-string (lexeme-string l)))
(defmethod prolog->paiprolog ((l float-number-lexeme))
(read-from-string (lexeme-string l)))
(defmethod prolog->paiprolog ((l named-lexeme))
(intern-paiprolog (concatenate 'string "?" (lexeme-string l))))
(defmethod prolog->paiprolog ((l anonymous-lexeme))
(intern-paiprolog "?"))
(defmethod prolog->paiprolog ((l comma-lexeme))
(intern-paiprolog "and"))
(defmethod prolog->paiprolog ((l semicolon-lexeme))
(intern-paiprolog "or"))
(defmethod prolog->paiprolog ((l cut-lexeme))
(intern-paiprolog "!"))
(defmethod prolog->paiprolog ((l quoted-lexeme))
(let ((s (lexeme-string l)))
(subseq s 1 (1- (length s)))))
(defmethod prolog->paiprolog ((l identifier-lexeme))
(intern-paiprolog (substitute #\- #\_ (lexeme-string l))))
(defmethod prolog->paiprolog ((l graphic-lexeme))
(let* ((s (lexeme-string l))
(id (identifier->paiprolog s)))
(if id id (intern-paiprolog s))))
(defun identifier->paiprolog (id-string)
(cond ((string= id-string ":-")
(intern-paiprolog "<-"))
((string= id-string ",")
(intern-paiprolog "and"))
((string= id-string ";")
(intern-paiprolog "or"))
((string= id-string "->")
(intern-paiprolog "if"))
((string= id-string "=:=")
(intern-paiprolog "=:="))
((string= id-string "\\+")
(intern-paiprolog "fail-if"))
(t nil)))
(defun intern-paiprolog (name)
(intern (string-upcase name) :paiprolog))
(define-command (com-export-paiprolog :name t :command-table prolog-table)
((pathname 'pathname))
(let ((expressions (view->paiprolog (current-view))))
(let ((*package* (find-package :paiprolog)))
(with-open-file (s pathname :direction :output :if-exists :supersede)
(dolist (e expressions)
(prin1 e s)
(terpri s))))))
|
f26bfceed444349b9bfe4c21b0d736149213d4c8afbfdcf2c17d93a50c66fb5f | osfameron/words | expression.hs | data Expression = Number Int
| Add Expression Expression
| Subtract Expression Expression
deriving (Eq, Ord, Show)
calculate :: Expression -> Int
calculate (Number x) = x
calculate (Add x y) = (calculate x) + (calculate y)
calculate (Subtract x y) = (calculate x) - (calculate y)
| null | https://raw.githubusercontent.com/osfameron/words/2afc9513b75ef713083627d8e7102650f028a99f/s2/expression.hs | haskell | data Expression = Number Int
| Add Expression Expression
| Subtract Expression Expression
deriving (Eq, Ord, Show)
calculate :: Expression -> Int
calculate (Number x) = x
calculate (Add x y) = (calculate x) + (calculate y)
calculate (Subtract x y) = (calculate x) - (calculate y)
| |
49d279c70b9e46a01da763283c1813258badf03ea1ce883d9e398f76bcbc6aa4 | hidaris/thinking-dumps | parser.rkt | #lang typed/racket
(require "ast.rkt")
(provide parse)
;; easy version
(: string->sexp (-> String Any))
(define (string->sexp s)
(read (open-input-string s)))
(: parse-bool (-> Any BoolExp))
(define (parse-bool sexp)
(match sexp
[`(zero? ,n)
(IsZero (parse-sexp n))]
[_ (error 'parse
"bool exression expected")]))
(: parse-sexp (-> Any Expression))
(define (parse-sexp sexp)
(match sexp
[(? real? x) (Const x)]
[(? symbol? x) (Var x)]
[`(- ,n1 ,n2)
(Diff (parse-sexp n1)
(parse-sexp n2))]
[`(zero? ,x)
(parse-bool sexp)]
[`(if ,test ,then ,else)
(If (parse-bool test)
(parse-sexp then)
(parse-sexp else))]
[`(let ,(? symbol? var) ,val in ,body)
(Let var
(parse-sexp val)
(parse-sexp body))]
[`(- ,n)
(Minus (parse-sexp n))]
))
(: parse (-> String Program))
(define (parse str)
(let ([sexp (string->sexp str)])
(AProgram (parse-sexp sexp))))
| null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/eopl-solutions/chap3/3-14/parser.rkt | racket | easy version | #lang typed/racket
(require "ast.rkt")
(provide parse)
(: string->sexp (-> String Any))
(define (string->sexp s)
(read (open-input-string s)))
(: parse-bool (-> Any BoolExp))
(define (parse-bool sexp)
(match sexp
[`(zero? ,n)
(IsZero (parse-sexp n))]
[_ (error 'parse
"bool exression expected")]))
(: parse-sexp (-> Any Expression))
(define (parse-sexp sexp)
(match sexp
[(? real? x) (Const x)]
[(? symbol? x) (Var x)]
[`(- ,n1 ,n2)
(Diff (parse-sexp n1)
(parse-sexp n2))]
[`(zero? ,x)
(parse-bool sexp)]
[`(if ,test ,then ,else)
(If (parse-bool test)
(parse-sexp then)
(parse-sexp else))]
[`(let ,(? symbol? var) ,val in ,body)
(Let var
(parse-sexp val)
(parse-sexp body))]
[`(- ,n)
(Minus (parse-sexp n))]
))
(: parse (-> String Program))
(define (parse str)
(let ([sexp (string->sexp str)])
(AProgram (parse-sexp sexp))))
|
3b19452512d415bc9b3779e97a2bca204e98ccf8669694e27321b37b82a7732c | bragful/ephp | ephp_stack.erl | -module(ephp_stack).
-author('').
-compile([{no_auto_import, [get/1]}]).
-behaviour(gen_server).
-include("ephp.hrl").
-export([start_link/0, destroy/0, get/0, get_array/0, get_array/1, push/6, pop/0]).
-export([init/1, handle_cast/2, handle_call/3, handle_info/2, code_change/3,
terminate/2]).
start_link() ->
case erlang:get(stack) of
undefined ->
%% FIXME: generate this information under a supervisor with
%% the context.
{ok, PID} = gen_server:start_link(?MODULE, [self()], []),
erlang:put(stack, PID),
{ok, PID};
PID ->
case is_process_alive(PID) of
true ->
{ok, PID};
false ->
erlang:erase(stack),
start_link()
end
end.
destroy() ->
case erlang:get(stack) of
undefined ->
ok;
PID ->
gen_server:stop(PID)
end.
get() ->
PID = erlang:get(stack),
gen_server:call(PID, get).
get_array() ->
get_array(0).
get_array(PopElements) ->
PID = erlang:get(stack),
gen_server:call(PID, {get_array, PopElements}).
push(_File, undefined, _Fun, _Args, _Class, _Object) ->
ok;
push(File, {{line, Line}, _}, Fun, Args, Class, Object) ->
PID = erlang:get(stack),
Data = {push, File, Line, Fun, Args, Class, Object},
gen_server:cast(PID, Data).
pop() ->
PID = erlang:get(stack),
gen_server:call(PID, pop).
%% gen_server callbacks
init([Parent]) ->
monitor(process, Parent),
{ok, []}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
terminate(_Reason, _State) ->
erlang:erase(stack),
ok.
handle_cast({push, File, Line, Fun, Args, Class, Object}, Stack) ->
Type =
if Class =:= undefined ->
undefined;
Object =:= undefined ->
<<"::">>;
true ->
<<"->">>
end,
New = #stack_trace{function = Fun,
args = Args,
file = File,
line = Line,
object = Object,
class = Class,
type = Type},
NewStack =
case add_function(Fun) of
[] when File =:= undefined ->
Stack;
_ ->
[New | Stack]
end,
%% TODO: use filtering tables for tracing, implement ephp_tracer.
case ephp_config:get_bool(<<"tracer.enable">>) of
true ->
gen_server:cast(ephp_tracer, New);
false ->
ok
end,
{noreply, NewStack}.
handle_call({get_array, PopElements}, _From, Stack) ->
{_, GetStack} =
lists:foldl(fun (StackEl, {0, Array}) ->
#stack_trace{function = Fun,
file = File,
line = Line,
class = Class,
object = Object,
type = Type,
args = Args} =
StackEl,
Element =
ephp_array:from_list([{<<"file">>, File}, {<<"line">>, Line}]
++ add_function(Fun)
++ add_class(Class)
++ add_object(Object)
++ add_type(Type)
++ add_args(Fun, Args)),
{0, ephp_array:store(auto, Element, Array)};
(_Stack, {N, Array}) ->
{N - 1, Array}
end,
{PopElements, ephp_array:new()},
Stack),
{reply, GetStack, Stack};
handle_call(get, _From, Stack) ->
{reply, Stack, Stack};
handle_call(pop, _From, [] = Stack) ->
{reply, undefined, Stack};
handle_call(pop, _From, [Head | Stack]) ->
{reply, Head, Stack}.
handle_info({'DOWN', _Ref, process, _Pid, _Reason}, State) ->
{stop, normal, State}.
%% internal functions
add_function(Fun) ->
Incs = [<<"include">>, <<"include_once">>, <<"require">>, <<"require_once">>],
case lists:member(
ephp_string:to_lower(Fun), Incs)
of
true ->
[];
false ->
[{<<"function">>, Fun}]
end.
add_class(undefined) ->
[];
add_class(Class) ->
[{<<"class">>, Class}].
add_object(undefined) ->
[];
add_object(Object) ->
[{<<"object">>, Object}].
add_type(undefined) ->
[];
add_type(Type) ->
[{<<"type">>, Type}].
add_args(Fun, Args) ->
[{<<"args">>, ephp_array:from_list(Args)}]
++ case add_function(Fun) of
[] ->
[{<<"function">>, Fun}];
_ ->
[]
end.
| null | https://raw.githubusercontent.com/bragful/ephp/119f1760a5cdf4c8b6a01739e852c7183af7acff/src/ephp_stack.erl | erlang | FIXME: generate this information under a supervisor with
the context.
gen_server callbacks
TODO: use filtering tables for tracing, implement ephp_tracer.
internal functions | -module(ephp_stack).
-author('').
-compile([{no_auto_import, [get/1]}]).
-behaviour(gen_server).
-include("ephp.hrl").
-export([start_link/0, destroy/0, get/0, get_array/0, get_array/1, push/6, pop/0]).
-export([init/1, handle_cast/2, handle_call/3, handle_info/2, code_change/3,
terminate/2]).
start_link() ->
case erlang:get(stack) of
undefined ->
{ok, PID} = gen_server:start_link(?MODULE, [self()], []),
erlang:put(stack, PID),
{ok, PID};
PID ->
case is_process_alive(PID) of
true ->
{ok, PID};
false ->
erlang:erase(stack),
start_link()
end
end.
destroy() ->
case erlang:get(stack) of
undefined ->
ok;
PID ->
gen_server:stop(PID)
end.
get() ->
PID = erlang:get(stack),
gen_server:call(PID, get).
get_array() ->
get_array(0).
get_array(PopElements) ->
PID = erlang:get(stack),
gen_server:call(PID, {get_array, PopElements}).
push(_File, undefined, _Fun, _Args, _Class, _Object) ->
ok;
push(File, {{line, Line}, _}, Fun, Args, Class, Object) ->
PID = erlang:get(stack),
Data = {push, File, Line, Fun, Args, Class, Object},
gen_server:cast(PID, Data).
pop() ->
PID = erlang:get(stack),
gen_server:call(PID, pop).
init([Parent]) ->
monitor(process, Parent),
{ok, []}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
terminate(_Reason, _State) ->
erlang:erase(stack),
ok.
handle_cast({push, File, Line, Fun, Args, Class, Object}, Stack) ->
Type =
if Class =:= undefined ->
undefined;
Object =:= undefined ->
<<"::">>;
true ->
<<"->">>
end,
New = #stack_trace{function = Fun,
args = Args,
file = File,
line = Line,
object = Object,
class = Class,
type = Type},
NewStack =
case add_function(Fun) of
[] when File =:= undefined ->
Stack;
_ ->
[New | Stack]
end,
case ephp_config:get_bool(<<"tracer.enable">>) of
true ->
gen_server:cast(ephp_tracer, New);
false ->
ok
end,
{noreply, NewStack}.
handle_call({get_array, PopElements}, _From, Stack) ->
{_, GetStack} =
lists:foldl(fun (StackEl, {0, Array}) ->
#stack_trace{function = Fun,
file = File,
line = Line,
class = Class,
object = Object,
type = Type,
args = Args} =
StackEl,
Element =
ephp_array:from_list([{<<"file">>, File}, {<<"line">>, Line}]
++ add_function(Fun)
++ add_class(Class)
++ add_object(Object)
++ add_type(Type)
++ add_args(Fun, Args)),
{0, ephp_array:store(auto, Element, Array)};
(_Stack, {N, Array}) ->
{N - 1, Array}
end,
{PopElements, ephp_array:new()},
Stack),
{reply, GetStack, Stack};
handle_call(get, _From, Stack) ->
{reply, Stack, Stack};
handle_call(pop, _From, [] = Stack) ->
{reply, undefined, Stack};
handle_call(pop, _From, [Head | Stack]) ->
{reply, Head, Stack}.
handle_info({'DOWN', _Ref, process, _Pid, _Reason}, State) ->
{stop, normal, State}.
add_function(Fun) ->
Incs = [<<"include">>, <<"include_once">>, <<"require">>, <<"require_once">>],
case lists:member(
ephp_string:to_lower(Fun), Incs)
of
true ->
[];
false ->
[{<<"function">>, Fun}]
end.
add_class(undefined) ->
[];
add_class(Class) ->
[{<<"class">>, Class}].
add_object(undefined) ->
[];
add_object(Object) ->
[{<<"object">>, Object}].
add_type(undefined) ->
[];
add_type(Type) ->
[{<<"type">>, Type}].
add_args(Fun, Args) ->
[{<<"args">>, ephp_array:from_list(Args)}]
++ case add_function(Fun) of
[] ->
[{<<"function">>, Fun}];
_ ->
[]
end.
|
1be0e35d0027bfad48a9fc51bb74299e274a330a14e9a7ce85e65a5796b00dfd | backtracking/combine | matrix.mli |
type 'a matrix = 'a array array
val copy: 'a matrix -> 'a matrix
| null | https://raw.githubusercontent.com/backtracking/combine/d2d9276c99f6d09662900c348f062c618a1b4c95/src/lib/matrix.mli | ocaml |
type 'a matrix = 'a array array
val copy: 'a matrix -> 'a matrix
| |
e9132bfc84ac4bf8089ad15b41b6b0fa7379ed0f3c2a46352d0427bbadd4aae1 | tonyday567/mvc-todo | Arbitrary.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - type - defaults #
# OPTIONS_GHC -fno - warn - orphans #
module Todo.Arbitrary where
import Todo.Model
import Protolude hiding (Show(..), show)
import Control.Applicative
import Control.Monad
import Data . Monoid
import Data.Default (def)
import Test.QuickCheck
import qualified Data.Map as Map
import Data.String
import Data.List (nub)
import Prelude (Show(..))
import Data.Text (pack)
instance Arbitrary ItemStatus where
arbitrary =
elements
[ Active
, Completed
]
instance Arbitrary ItemId where
arbitrary = do
let maxI = 10
ItemId <$> choose (0,maxI)
instance Arbitrary Item where
arbitrary = Item <$> arbitrary <*> (pack . show <$> (arbitrary :: Gen TodoStatement))
instance Arbitrary Todos where
arbitrary =
Todos <$>
(pack . show <$> (arbitrary :: Gen HaskellVerb)) <*>
frequency
[ (8, pure Nothing)
, (2, Just <$> arbitrary)
] <*>
arbitrary <*>
frequency
[ (6, pure Nothing)
, (4, Just <$> arbitrary)
] <*>
(Map.fromList <$> (: []) <$> ((,) <$> arbitrary <*> arbitrary))
instance Arbitrary Action where
arbitrary = frequency
[ (10, Toggle <$> arbitrary)
, (2, pure ToggleAll)
, (6, NewItem <$> (pack . show <$> (arbitrary :: Gen TodoStatement)))
, (6, EditItem <$> arbitrary)
, (6, EditItemCancel <$> arbitrary)
, (6, EditItemDone <$> arbitrary <*> (pack . show <$> (arbitrary :: Gen TodoStatement)))
, (2, Filter <$> arbitrary)
, (4, DeleteItem <$> arbitrary)
, (1, pure ClearCompleted)
]
testApply :: IO [Todos]
testApply =
zipWith apply <$>
sample' arbitrary <*>
sample' arbitrary
data TodoStatement = TodoStatement HaskellVerb HaskellNoun
instance Show TodoStatement where
show (TodoStatement verb noun) = show verb <> " " <> show noun
newtype HaskellVerb = HaskellVerb { unVerb :: Text } deriving (Show, Eq)
instance IsString HaskellVerb where
fromString = HaskellVerb . fromString
newtype HaskellPrefix = HaskellPrefix { unPrefix :: Text } deriving (Show, Eq)
newtype Haskellism = Haskellism { unHaskellism :: Text }
deriving (Show, Eq)
newtype HaskellSuffix = HaskellSuffix { unSuffix :: Text } deriving (Show, Eq)
data HaskellNoun = HaskellNoun [HaskellPrefix] Haskellism [HaskellSuffix]
instance Show HaskellNoun where
show (HaskellNoun ps h ss) = show $ mconcat (unPrefix <$> ps) <> unHaskellism h <> mconcat (unSuffix <$> ss)
instance IsString HaskellNoun where
fromString s = HaskellNoun [] (Haskellism (pack s)) []
instance IsString Haskellism where
fromString = Haskellism . fromString
instance Arbitrary (TodoStatement) where
arbitrary = TodoStatement <$> arbitrary <*> arbitrary
instance Arbitrary (HaskellNoun) where
arbitrary = frequency $
[ (20, HaskellNoun <$> ((take 2 . nub) <$> arbitrary) <*> arbitrary <*> ((take 1) <$> arbitrary))
, (1, pure "cabal hell")
, (1, pure "ADTs")
, (1, pure "everything")
, (5, HaskellNoun <$> pure [] <*> arbitrary <*> pure [])
]
instance Arbitrary (HaskellVerb) where
arbitrary = frequency $ (\(x,y) -> (x, pure y)) <$>
[ (3, "invent")
, (3, "ponder")
, (5, "code")
, (1, "beta-reduce")
, (1, "lambdify")
, (3, "refactor")
, (2, "reduce")
, (1, "DeBruijnize")
, (2, "curry")
, (1, "howard-curry")
, (1, "simplify")
, (1, "complexificate")
, (2, "git the")
, (1, "build")
, (1, "prettify")
, (1, "compile")
, (1, "generalize")
, (1, "abstract")
, (1, "ignore")
, (1, "saturate")
, ( 3 , show < $ > ( arbitrary : : ) )
]
instance Arbitrary (HaskellPrefix) where
arbitrary = frequency $ (\(x,y) -> (x, pure (HaskellPrefix y))) <$>
[ (1, "homo-")
, (1, "functo-")
, (2, "contra-")
, (2, "bi-")
, (3, "iso-")
, (2, "pro-")
, (4, "co-")
, (4, "free-")
, (1, "endo-")
, (1, "morphic-")
, (10, "")
]
instance Arbitrary (HaskellSuffix) where
arbitrary = frequency $ (\(x,y) -> (x, pure (HaskellSuffix y))) <$>
[ (1, "-ism")
, (1, "-orial")
, (1, "-ic")
, (12, "")
]
instance Arbitrary (Haskellism) where
arbitrary = frequency $ (\(x,y) -> (x, pure (Haskellism y))) <$>
[ (6, "functor")
, (4, "monoid")
, (1, "dimap")
, (3, "applicative")
, (2, "arrow")
, (3, "monad")
, (1, "something")
]
| null | https://raw.githubusercontent.com/tonyday567/mvc-todo/13b2e52f1169c77caf528d3ce74901ad6f06eec6/src/Todo/Arbitrary.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - type - defaults #
# OPTIONS_GHC -fno - warn - orphans #
module Todo.Arbitrary where
import Todo.Model
import Protolude hiding (Show(..), show)
import Control.Applicative
import Control.Monad
import Data . Monoid
import Data.Default (def)
import Test.QuickCheck
import qualified Data.Map as Map
import Data.String
import Data.List (nub)
import Prelude (Show(..))
import Data.Text (pack)
instance Arbitrary ItemStatus where
arbitrary =
elements
[ Active
, Completed
]
instance Arbitrary ItemId where
arbitrary = do
let maxI = 10
ItemId <$> choose (0,maxI)
instance Arbitrary Item where
arbitrary = Item <$> arbitrary <*> (pack . show <$> (arbitrary :: Gen TodoStatement))
instance Arbitrary Todos where
arbitrary =
Todos <$>
(pack . show <$> (arbitrary :: Gen HaskellVerb)) <*>
frequency
[ (8, pure Nothing)
, (2, Just <$> arbitrary)
] <*>
arbitrary <*>
frequency
[ (6, pure Nothing)
, (4, Just <$> arbitrary)
] <*>
(Map.fromList <$> (: []) <$> ((,) <$> arbitrary <*> arbitrary))
instance Arbitrary Action where
arbitrary = frequency
[ (10, Toggle <$> arbitrary)
, (2, pure ToggleAll)
, (6, NewItem <$> (pack . show <$> (arbitrary :: Gen TodoStatement)))
, (6, EditItem <$> arbitrary)
, (6, EditItemCancel <$> arbitrary)
, (6, EditItemDone <$> arbitrary <*> (pack . show <$> (arbitrary :: Gen TodoStatement)))
, (2, Filter <$> arbitrary)
, (4, DeleteItem <$> arbitrary)
, (1, pure ClearCompleted)
]
testApply :: IO [Todos]
testApply =
zipWith apply <$>
sample' arbitrary <*>
sample' arbitrary
data TodoStatement = TodoStatement HaskellVerb HaskellNoun
instance Show TodoStatement where
show (TodoStatement verb noun) = show verb <> " " <> show noun
newtype HaskellVerb = HaskellVerb { unVerb :: Text } deriving (Show, Eq)
instance IsString HaskellVerb where
fromString = HaskellVerb . fromString
newtype HaskellPrefix = HaskellPrefix { unPrefix :: Text } deriving (Show, Eq)
newtype Haskellism = Haskellism { unHaskellism :: Text }
deriving (Show, Eq)
newtype HaskellSuffix = HaskellSuffix { unSuffix :: Text } deriving (Show, Eq)
data HaskellNoun = HaskellNoun [HaskellPrefix] Haskellism [HaskellSuffix]
instance Show HaskellNoun where
show (HaskellNoun ps h ss) = show $ mconcat (unPrefix <$> ps) <> unHaskellism h <> mconcat (unSuffix <$> ss)
instance IsString HaskellNoun where
fromString s = HaskellNoun [] (Haskellism (pack s)) []
instance IsString Haskellism where
fromString = Haskellism . fromString
instance Arbitrary (TodoStatement) where
arbitrary = TodoStatement <$> arbitrary <*> arbitrary
instance Arbitrary (HaskellNoun) where
arbitrary = frequency $
[ (20, HaskellNoun <$> ((take 2 . nub) <$> arbitrary) <*> arbitrary <*> ((take 1) <$> arbitrary))
, (1, pure "cabal hell")
, (1, pure "ADTs")
, (1, pure "everything")
, (5, HaskellNoun <$> pure [] <*> arbitrary <*> pure [])
]
instance Arbitrary (HaskellVerb) where
arbitrary = frequency $ (\(x,y) -> (x, pure y)) <$>
[ (3, "invent")
, (3, "ponder")
, (5, "code")
, (1, "beta-reduce")
, (1, "lambdify")
, (3, "refactor")
, (2, "reduce")
, (1, "DeBruijnize")
, (2, "curry")
, (1, "howard-curry")
, (1, "simplify")
, (1, "complexificate")
, (2, "git the")
, (1, "build")
, (1, "prettify")
, (1, "compile")
, (1, "generalize")
, (1, "abstract")
, (1, "ignore")
, (1, "saturate")
, ( 3 , show < $ > ( arbitrary : : ) )
]
instance Arbitrary (HaskellPrefix) where
arbitrary = frequency $ (\(x,y) -> (x, pure (HaskellPrefix y))) <$>
[ (1, "homo-")
, (1, "functo-")
, (2, "contra-")
, (2, "bi-")
, (3, "iso-")
, (2, "pro-")
, (4, "co-")
, (4, "free-")
, (1, "endo-")
, (1, "morphic-")
, (10, "")
]
instance Arbitrary (HaskellSuffix) where
arbitrary = frequency $ (\(x,y) -> (x, pure (HaskellSuffix y))) <$>
[ (1, "-ism")
, (1, "-orial")
, (1, "-ic")
, (12, "")
]
instance Arbitrary (Haskellism) where
arbitrary = frequency $ (\(x,y) -> (x, pure (Haskellism y))) <$>
[ (6, "functor")
, (4, "monoid")
, (1, "dimap")
, (3, "applicative")
, (2, "arrow")
, (3, "monad")
, (1, "something")
]
|
d264cb94715ff2d65fa98b62932fe5fb98ab1a6eee28984e16bf3a1bc2a2872d | iand675/hs-opentelemetry | TraceState.hs | -----------------------------------------------------------------------------
-----------------------------------------------------------------------------
|
Module : OpenTelemetry . Trace . TraceState
Copyright : ( c ) , 2021
License : BSD-3
Description : W3C - compliant way to provide additional vendor - specific trace identification information across different distributed tracing systems
Maintainer :
Stability : experimental
Portability : non - portable ( GHC extensions )
The main purpose of the tracestate HTTP header is to provide additional vendor - specific trace identification information across different distributed tracing systems and is a companion header for the traceparent field . It also conveys information about the request ’s position in multiple distributed tracing graphs .
The tracestate field may contain any opaque value in any of the keys . Tracestate MAY be sent or received as multiple header fields . Multiple tracestate header fields MUST be handled as specified by RFC7230 Section 3.2.2 Field Order . The tracestate header SHOULD be sent as a single field when possible , but MAY be split into multiple header fields . When sending tracestate as multiple header fields , it MUST be split according to RFC7230 . When receiving multiple tracestate header fields , they MUST be combined into a single header according to RFC7230 .
See the W3C specification -context/#tracestate-header
for more details .
Module : OpenTelemetry.Trace.TraceState
Copyright : (c) Ian Duncan, 2021
License : BSD-3
Description : W3C-compliant way to provide additional vendor-specific trace identification information across different distributed tracing systems
Maintainer : Ian Duncan
Stability : experimental
Portability : non-portable (GHC extensions)
The main purpose of the tracestate HTTP header is to provide additional vendor-specific trace identification information across different distributed tracing systems and is a companion header for the traceparent field. It also conveys information about the request’s position in multiple distributed tracing graphs.
The tracestate field may contain any opaque value in any of the keys. Tracestate MAY be sent or received as multiple header fields. Multiple tracestate header fields MUST be handled as specified by RFC7230 Section 3.2.2 Field Order. The tracestate header SHOULD be sent as a single field when possible, but MAY be split into multiple header fields. When sending tracestate as multiple header fields, it MUST be split according to RFC7230. When receiving multiple tracestate header fields, they MUST be combined into a single header according to RFC7230.
See the W3C specification -context/#tracestate-header
for more details.
-}
module OpenTelemetry.Trace.TraceState (
TraceState (TraceState),
Key (..),
Value (..),
empty,
insert,
update,
delete,
toList,
) where
import Data.Text (Text)
newtype Key = Key Text
deriving (Show, Eq, Ord)
newtype Value = Value Text
deriving (Show, Eq, Ord)
| Data structure compliant with the storage and serialization needs of
the W3C @tracestate@ header .
the W3C @tracestate@ header.
-}
newtype TraceState = TraceState [(Key, Value)]
deriving (Show, Eq, Ord)
| An empty ' TraceState ' key - value pair dictionary
empty :: TraceState
empty = TraceState []
| Add a key - value pair to a ' TraceState '
O(n )
O(n)
-}
insert :: Key -> Value -> TraceState -> TraceState
insert k v ts = case delete k ts of
(TraceState l) -> TraceState ((k, v) : l)
| Update a value in the ' TraceState ' . Does nothing if
the value associated with the given key does n't exist .
O(n )
the value associated with the given key doesn't exist.
O(n)
-}
update :: Key -> (Value -> Value) -> TraceState -> TraceState
update k f (TraceState ts) = case break (\(k', _v) -> k == k') ts of
(before, []) -> TraceState before
(before, (_, v) : kvs) -> TraceState ((k, f v) : (before ++ kvs))
{- | Remove a key-value pair for the given key.
O(n)
-}
delete :: Key -> TraceState -> TraceState
delete k (TraceState ts) = TraceState $ filter (\(k', _) -> k' /= k) ts
| Convert the ' TraceState ' to a list .
O(1 )
O(1)
-}
toList :: TraceState -> [(Key, Value)]
toList (TraceState ts) = ts
| null | https://raw.githubusercontent.com/iand675/hs-opentelemetry/1f0328eb59fec2a97aec7ef98fe4f1e0d5c8f2ac/api/src/OpenTelemetry/Trace/TraceState.hs | haskell | ---------------------------------------------------------------------------
---------------------------------------------------------------------------
| Remove a key-value pair for the given key.
O(n)
|
|
Module : OpenTelemetry . Trace . TraceState
Copyright : ( c ) , 2021
License : BSD-3
Description : W3C - compliant way to provide additional vendor - specific trace identification information across different distributed tracing systems
Maintainer :
Stability : experimental
Portability : non - portable ( GHC extensions )
The main purpose of the tracestate HTTP header is to provide additional vendor - specific trace identification information across different distributed tracing systems and is a companion header for the traceparent field . It also conveys information about the request ’s position in multiple distributed tracing graphs .
The tracestate field may contain any opaque value in any of the keys . Tracestate MAY be sent or received as multiple header fields . Multiple tracestate header fields MUST be handled as specified by RFC7230 Section 3.2.2 Field Order . The tracestate header SHOULD be sent as a single field when possible , but MAY be split into multiple header fields . When sending tracestate as multiple header fields , it MUST be split according to RFC7230 . When receiving multiple tracestate header fields , they MUST be combined into a single header according to RFC7230 .
See the W3C specification -context/#tracestate-header
for more details .
Module : OpenTelemetry.Trace.TraceState
Copyright : (c) Ian Duncan, 2021
License : BSD-3
Description : W3C-compliant way to provide additional vendor-specific trace identification information across different distributed tracing systems
Maintainer : Ian Duncan
Stability : experimental
Portability : non-portable (GHC extensions)
The main purpose of the tracestate HTTP header is to provide additional vendor-specific trace identification information across different distributed tracing systems and is a companion header for the traceparent field. It also conveys information about the request’s position in multiple distributed tracing graphs.
The tracestate field may contain any opaque value in any of the keys. Tracestate MAY be sent or received as multiple header fields. Multiple tracestate header fields MUST be handled as specified by RFC7230 Section 3.2.2 Field Order. The tracestate header SHOULD be sent as a single field when possible, but MAY be split into multiple header fields. When sending tracestate as multiple header fields, it MUST be split according to RFC7230. When receiving multiple tracestate header fields, they MUST be combined into a single header according to RFC7230.
See the W3C specification -context/#tracestate-header
for more details.
-}
module OpenTelemetry.Trace.TraceState (
TraceState (TraceState),
Key (..),
Value (..),
empty,
insert,
update,
delete,
toList,
) where
import Data.Text (Text)
newtype Key = Key Text
deriving (Show, Eq, Ord)
newtype Value = Value Text
deriving (Show, Eq, Ord)
| Data structure compliant with the storage and serialization needs of
the W3C @tracestate@ header .
the W3C @tracestate@ header.
-}
newtype TraceState = TraceState [(Key, Value)]
deriving (Show, Eq, Ord)
| An empty ' TraceState ' key - value pair dictionary
empty :: TraceState
empty = TraceState []
| Add a key - value pair to a ' TraceState '
O(n )
O(n)
-}
insert :: Key -> Value -> TraceState -> TraceState
insert k v ts = case delete k ts of
(TraceState l) -> TraceState ((k, v) : l)
| Update a value in the ' TraceState ' . Does nothing if
the value associated with the given key does n't exist .
O(n )
the value associated with the given key doesn't exist.
O(n)
-}
update :: Key -> (Value -> Value) -> TraceState -> TraceState
update k f (TraceState ts) = case break (\(k', _v) -> k == k') ts of
(before, []) -> TraceState before
(before, (_, v) : kvs) -> TraceState ((k, f v) : (before ++ kvs))
delete :: Key -> TraceState -> TraceState
delete k (TraceState ts) = TraceState $ filter (\(k', _) -> k' /= k) ts
| Convert the ' TraceState ' to a list .
O(1 )
O(1)
-}
toList :: TraceState -> [(Key, Value)]
toList (TraceState ts) = ts
|
e413993544dc77a4a1dacb53ea1d5da658eb1d9a96c91a01cde132f25c14cd52 | ocaml-flambda/flambda-backend | compiler_hooks.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
Copyright 2021 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Misc
open Compile_common
Hooks allow to inspect the IR produced by a pass without altering
the compilation pipeline .
Hooks are allowed to inspect the data but are prohibited from
altering it . If one hook were to mutate the data there 's no guarantee
of how the compiler would behave .
Several hooks can be registered for the same pass . There 's no guarantees
on the order of execution of hooks .
When one IR is the output of several passes , the hooks are usually called
on the latest version of the IR ( the exception being passes marked as " raw " ,
where corresponding hooks are called on the earliest version of the IR ) .
the compilation pipeline.
Hooks are allowed to inspect the data but are prohibited from
altering it. If one hook were to mutate the data there's no guarantee
of how the compiler would behave.
Several hooks can be registered for the same pass. There's no guarantees
on the order of execution of hooks.
When one IR is the output of several passes, the hooks are usually called
on the latest version of the IR (the exception being passes marked as "raw",
where corresponding hooks are called on the earliest version of the IR).
*)
type _ pass =
| Parse_tree_intf : Parsetree.signature pass
| Parse_tree_impl : Parsetree.structure pass
| Typed_tree_intf : Typedtree.signature pass
| Typed_tree_impl : Typedtree.implementation pass
| Raw_lambda : Lambda.program pass
| Lambda : Lambda.program pass
| Raw_flambda2 : Flambda2_terms.Flambda_unit.t pass
| Flambda2 : Flambda2_terms.Flambda_unit.t pass
| Raw_flambda1 : Flambda.program pass
| Flambda1 : Flambda.program pass
| Raw_clambda : Clambda.ulambda pass
| Clambda : Clambda.ulambda pass
| Mach_polling : Mach.fundecl pass
| Mach_combine : Mach.fundecl pass
| Mach_cse : Mach.fundecl pass
| Mach_spill : Mach.fundecl pass
| Mach_live : Mach.fundecl pass
| Mach_reload : Mach.fundecl pass
| Mach_sel : Mach.fundecl pass
| Mach_split : Mach.fundecl pass
| Linear : Linear.fundecl pass
| Cfg : Cfg_with_layout.t pass
| Cmm : Cmm.phrase list pass
| Inlining_tree : Flambda2_simplify_shared.Inlining_report.Inlining_tree.t pass
(* Register a new hook for [pass]. *)
val register : 'a pass -> ('a -> unit) -> unit
(* Execute the hooks registered for [pass]. *)
val execute : 'a pass -> 'a -> unit
val execute_and_pipe : 'a pass -> 'a -> 'a
(* Remove all hooks registered for [pass] *)
val clear : 'a pass -> unit
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/5efd9f1c6fa45fc85be47c668ed90d2aa95d5d74/driver/compiler_hooks.mli | ocaml | ************************************************************************
OCaml
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Register a new hook for [pass].
Execute the hooks registered for [pass].
Remove all hooks registered for [pass] | Copyright 2021 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Compile_common
Hooks allow to inspect the IR produced by a pass without altering
the compilation pipeline .
Hooks are allowed to inspect the data but are prohibited from
altering it . If one hook were to mutate the data there 's no guarantee
of how the compiler would behave .
Several hooks can be registered for the same pass . There 's no guarantees
on the order of execution of hooks .
When one IR is the output of several passes , the hooks are usually called
on the latest version of the IR ( the exception being passes marked as " raw " ,
where corresponding hooks are called on the earliest version of the IR ) .
the compilation pipeline.
Hooks are allowed to inspect the data but are prohibited from
altering it. If one hook were to mutate the data there's no guarantee
of how the compiler would behave.
Several hooks can be registered for the same pass. There's no guarantees
on the order of execution of hooks.
When one IR is the output of several passes, the hooks are usually called
on the latest version of the IR (the exception being passes marked as "raw",
where corresponding hooks are called on the earliest version of the IR).
*)
type _ pass =
| Parse_tree_intf : Parsetree.signature pass
| Parse_tree_impl : Parsetree.structure pass
| Typed_tree_intf : Typedtree.signature pass
| Typed_tree_impl : Typedtree.implementation pass
| Raw_lambda : Lambda.program pass
| Lambda : Lambda.program pass
| Raw_flambda2 : Flambda2_terms.Flambda_unit.t pass
| Flambda2 : Flambda2_terms.Flambda_unit.t pass
| Raw_flambda1 : Flambda.program pass
| Flambda1 : Flambda.program pass
| Raw_clambda : Clambda.ulambda pass
| Clambda : Clambda.ulambda pass
| Mach_polling : Mach.fundecl pass
| Mach_combine : Mach.fundecl pass
| Mach_cse : Mach.fundecl pass
| Mach_spill : Mach.fundecl pass
| Mach_live : Mach.fundecl pass
| Mach_reload : Mach.fundecl pass
| Mach_sel : Mach.fundecl pass
| Mach_split : Mach.fundecl pass
| Linear : Linear.fundecl pass
| Cfg : Cfg_with_layout.t pass
| Cmm : Cmm.phrase list pass
| Inlining_tree : Flambda2_simplify_shared.Inlining_report.Inlining_tree.t pass
val register : 'a pass -> ('a -> unit) -> unit
val execute : 'a pass -> 'a -> unit
val execute_and_pipe : 'a pass -> 'a -> 'a
val clear : 'a pass -> unit
|
dcdeb805f8ce801fcb389ad16d5bc738c3dfc8efcfbd47ea860c900afbf164e1 | huangjs/cl | browse-demo.lisp | Simple demo of MOP browsing using a toy memory .
;;;
;;; Load this file then call
;;;
;;; > (browse 'causal-1)
;;;
;;; Double-click items in the browser window that appears
;;; to browse those items.
;;;
;;; Note: if you want to add anything to memory,
;;; you need to be in the CLYDE-MEMORY package.
;;; MODULES
;;; -------
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "clyde")
(require "mop-browser")
)
;;; PACKAGES
;;; --------
(in-package "CL-USER")
(eval-when (:compile-toplevel :load-toplevel :execute)
(use-package "CLYDE-MEMORY")
(use-package "MOP-BROWSER")
) | null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/other-code/cs325/www.cs.northwestern.edu/academics/courses/325/programs/browse-demo.lisp | lisp |
Load this file then call
> (browse 'causal-1)
Double-click items in the browser window that appears
to browse those items.
Note: if you want to add anything to memory,
you need to be in the CLYDE-MEMORY package.
MODULES
-------
PACKAGES
--------
| Simple demo of MOP browsing using a toy memory .
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "clyde")
(require "mop-browser")
)
(in-package "CL-USER")
(eval-when (:compile-toplevel :load-toplevel :execute)
(use-package "CLYDE-MEMORY")
(use-package "MOP-BROWSER")
) |
5a7cf312389e97bf85da29d73d0280d43200504a95b266ed503a94508901e047 | fxfactorial/bs-web3 | types.ml | class type ['provider_impl] _module = object
method set_provider : unit -> unit [@@bs.set]
end [@bs]
type t
class type bzz = object
inherit [t] _module
end [@bs]
class type eth = object
inherit [t] _module
end [@bs]
type request = < > Js.t
(* class type batch = object
*
* end [@bs] *)
type batch =
<add:request -> unit [@bs.meth];
execute:unit -> unit [@bs.meth]> Js.t
| null | https://raw.githubusercontent.com/fxfactorial/bs-web3/816c42923cc8f82a5341a99c4b3ff8602ede5029/src/types.ml | ocaml | class type batch = object
*
* end [@bs] | class type ['provider_impl] _module = object
method set_provider : unit -> unit [@@bs.set]
end [@bs]
type t
class type bzz = object
inherit [t] _module
end [@bs]
class type eth = object
inherit [t] _module
end [@bs]
type request = < > Js.t
type batch =
<add:request -> unit [@bs.meth];
execute:unit -> unit [@bs.meth]> Js.t
|
19b28083d81aa3b5dccca1791aa462956c7d4fdb0809fe21f0849c14cf85a281 | hanshuebner/bos | boi-handlers.lisp | (in-package :bos.web)
(enable-interpol-syntax)
(defclass boi-handler (page-handler)
())
(defmethod authorized-p ((handler boi-handler))
(bos.m2:editor-p (bknr-session-user)))
(defclass create-contract-handler (boi-handler)
())
(defun find-sponsor (sponsor-id)
(let ((sponsor (store-object-with-id (parse-integer sponsor-id :junk-allowed t))))
(unless sponsor
(error "Invalid sponsor ID"))
(unless (subtypep (type-of sponsor) 'sponsor)
(error "Invalid sponsor ID (wrong type)"))
sponsor))
(defmethod handle ((handler create-contract-handler))
(with-xml-error-handler ()
(with-query-params (num-sqm country sponsor-id name paid expires)
(setf num-sqm (ignore-errors (parse-integer num-sqm :junk-allowed t)))
(unless num-sqm
(error "missing or invalid num-sqm parameter"))
(unless country
(error "missing country code"))
(setf expires (if expires
(or (parse-integer expires :junk-allowed t)
(error "invalid expires parameter"))
7))
(setf expires (+ (get-universal-time) (* expires 60 60 24)))
(let* ((sponsor (if sponsor-id
(find-sponsor sponsor-id)
(make-sponsor :full-name name)))
(contract (make-contract sponsor num-sqm :expires expires :paidp paid)))
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(if sponsor-id
(text "Contract has been created")
(text "Contract and sponsor have been created")))
(with-element "contract"
(attribute "id" (store-object-id contract)))
(unless sponsor-id
(with-element "sponsor"
(attribute "id" (store-object-id sponsor))
(attribute "master-code" (sponsor-master-code sponsor)))))))))
(defclass pay-contract-handler (boi-handler)
())
(defmethod handle ((handler pay-contract-handler))
(with-xml-error-handler ()
(with-query-params (contract-id name)
(unless contract-id
(error "missing contract-id parameter"))
(let ((contract (get-contract (or (ignore-errors (parse-integer contract-id))
(error "bad contract-id parameter")))))
(when (contract-paidp contract)
(error "contract has already been paid for"))
(with-transaction (:contract-paid)
(contract-set-paidp contract (format nil "~A: manually set paid by ~A"
(format-date-time)
(user-login (bknr.web:bknr-session-user))))
(when name
(setf (user-full-name (contract-sponsor contract)) name))))
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(text "Contract has been marked as paid for"))))))
(defclass cancel-contract-handler (boi-handler)
())
(defmethod handle ((handler cancel-contract-handler))
(with-xml-error-handler ()
(with-query-params (contract-id)
(unless contract-id
(error "missing contract-id parameter"))
(let ((contract (get-contract (or (ignore-errors (parse-integer contract-id))
(error "bad contract-id parameter")))))
(when (contract-paidp contract)
(error "contract has already been paid for"))
(delete-object contract)
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(text "Contract has been deleted"))))))) | null | https://raw.githubusercontent.com/hanshuebner/bos/ab5944cc46f4a5ff5a08fd8aa4d228c0f9cfc771/web/boi-handlers.lisp | lisp | (in-package :bos.web)
(enable-interpol-syntax)
(defclass boi-handler (page-handler)
())
(defmethod authorized-p ((handler boi-handler))
(bos.m2:editor-p (bknr-session-user)))
(defclass create-contract-handler (boi-handler)
())
(defun find-sponsor (sponsor-id)
(let ((sponsor (store-object-with-id (parse-integer sponsor-id :junk-allowed t))))
(unless sponsor
(error "Invalid sponsor ID"))
(unless (subtypep (type-of sponsor) 'sponsor)
(error "Invalid sponsor ID (wrong type)"))
sponsor))
(defmethod handle ((handler create-contract-handler))
(with-xml-error-handler ()
(with-query-params (num-sqm country sponsor-id name paid expires)
(setf num-sqm (ignore-errors (parse-integer num-sqm :junk-allowed t)))
(unless num-sqm
(error "missing or invalid num-sqm parameter"))
(unless country
(error "missing country code"))
(setf expires (if expires
(or (parse-integer expires :junk-allowed t)
(error "invalid expires parameter"))
7))
(setf expires (+ (get-universal-time) (* expires 60 60 24)))
(let* ((sponsor (if sponsor-id
(find-sponsor sponsor-id)
(make-sponsor :full-name name)))
(contract (make-contract sponsor num-sqm :expires expires :paidp paid)))
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(if sponsor-id
(text "Contract has been created")
(text "Contract and sponsor have been created")))
(with-element "contract"
(attribute "id" (store-object-id contract)))
(unless sponsor-id
(with-element "sponsor"
(attribute "id" (store-object-id sponsor))
(attribute "master-code" (sponsor-master-code sponsor)))))))))
(defclass pay-contract-handler (boi-handler)
())
(defmethod handle ((handler pay-contract-handler))
(with-xml-error-handler ()
(with-query-params (contract-id name)
(unless contract-id
(error "missing contract-id parameter"))
(let ((contract (get-contract (or (ignore-errors (parse-integer contract-id))
(error "bad contract-id parameter")))))
(when (contract-paidp contract)
(error "contract has already been paid for"))
(with-transaction (:contract-paid)
(contract-set-paidp contract (format nil "~A: manually set paid by ~A"
(format-date-time)
(user-login (bknr.web:bknr-session-user))))
(when name
(setf (user-full-name (contract-sponsor contract)) name))))
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(text "Contract has been marked as paid for"))))))
(defclass cancel-contract-handler (boi-handler)
())
(defmethod handle ((handler cancel-contract-handler))
(with-xml-error-handler ()
(with-query-params (contract-id)
(unless contract-id
(error "missing contract-id parameter"))
(let ((contract (get-contract (or (ignore-errors (parse-integer contract-id))
(error "bad contract-id parameter")))))
(when (contract-paidp contract)
(error "contract has already been paid for"))
(delete-object contract)
(with-xml-response (:root-element "response")
(with-element "status"
(attribute "success" 1)
(text "Contract has been deleted"))))))) | |
5a91e3b118bf76ba149e43f47878cde58ab13a5803452e8c97f274041faa5fa8 | naxels/youtube-channel-data | url.clj | (ns youtube-channel-data.youtube.url
"Build Youtube Data API URL's"
(:require [youtube-channel-data.utils :as u]
[youtube-channel-data.youtube.config :as ytc]))
; Define url paths
(def base-url "")
; Use v3
(def sub-path "/youtube/v3/")
; Main API string builder closure
(defn api
"Returns fn with route filled in.
Ultimately Returns https string encoded ready for slurp"
[route]
(fn
[query-params]
(let [query-string (u/query-params->query-string (conj query-params (ytc/api-key)))]
(str base-url sub-path route "?" query-string))))
; Caller helper methods
;;
(def videos (api "videos"))
(def channels (api "channels"))
;;
(def playlist-items (api "playlistItems"))
| null | https://raw.githubusercontent.com/naxels/youtube-channel-data/fd0534b26b9b131968332109fb94a35d53149d1f/src/youtube_channel_data/youtube/url.clj | clojure | Define url paths
Use v3
Main API string builder closure
Caller helper methods
| (ns youtube-channel-data.youtube.url
"Build Youtube Data API URL's"
(:require [youtube-channel-data.utils :as u]
[youtube-channel-data.youtube.config :as ytc]))
(def base-url "")
(def sub-path "/youtube/v3/")
(defn api
"Returns fn with route filled in.
Ultimately Returns https string encoded ready for slurp"
[route]
(fn
[query-params]
(let [query-string (u/query-params->query-string (conj query-params (ytc/api-key)))]
(str base-url sub-path route "?" query-string))))
(def videos (api "videos"))
(def channels (api "channels"))
(def playlist-items (api "playlistItems"))
|
2f63323ca732b9d58b01dfc72bdfd1e83c6d5414c3954a7de45721d0e1783739 | FlowForwarding/loom | tap_time.erl | %%------------------------------------------------------------------------------
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%%-----------------------------------------------------------------------------
%%
@author Infoblox Inc < >
2013 Infoblox Inc
%% @doc time utilities
-module(tap_time).
-export([now/0,
since/1,
diff/2,
diff_millis/2,
universal/1,
universal_now/0,
universal_time_diff/2,
rfc3339/1,
rfc3339_to_epoch/1]).
now() ->
os:timestamp().
since(A) ->
diff(tap_time:now(), A).
time difference ( B - A ) in seconds
diff(A, B) ->
diff_millis(A, B) div 1000.
diff_millis(A, B) ->
timer:now_diff(A, B) div 1000.
universal(T) ->
calendar:now_to_universal_time(T).
universal_now() ->
universal(tap_time:now()).
universal_time_diff(A, B) ->
calendar:datetime_to_gregorian_seconds(B) -
calendar:datetime_to_gregorian_seconds(A).
rfc3339({{Year, Month, Day}, {Hour, Minute, Second}})->
lists:flatten(
io_lib:format("~4.4.0w-~2.2.0w-~2.2.0wT~2.2.0w:~2.2.0w:~2.2.0wZ",
[Year, Month, Day, Hour, Minute, Second])).
rfc3339_to_epoch(Timestamp)->
{ok, [Year,Month,Day,Hour,Minute,Second],[]} =
io_lib:fread("~4d-~2d-~2dT~2d:~2d:~2dZ", Timestamp),
{{Year,Month,Day},{Hour, Minute, Second}}.
| null | https://raw.githubusercontent.com/FlowForwarding/loom/86a9c5aa8b7d4776062365716c9a3dbbf3330bc5/tapestry/apps/tapestry/src/tap_time.erl | erlang | ------------------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------------
@doc time utilities | Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author Infoblox Inc < >
2013 Infoblox Inc
-module(tap_time).
-export([now/0,
since/1,
diff/2,
diff_millis/2,
universal/1,
universal_now/0,
universal_time_diff/2,
rfc3339/1,
rfc3339_to_epoch/1]).
now() ->
os:timestamp().
since(A) ->
diff(tap_time:now(), A).
time difference ( B - A ) in seconds
diff(A, B) ->
diff_millis(A, B) div 1000.
diff_millis(A, B) ->
timer:now_diff(A, B) div 1000.
universal(T) ->
calendar:now_to_universal_time(T).
universal_now() ->
universal(tap_time:now()).
universal_time_diff(A, B) ->
calendar:datetime_to_gregorian_seconds(B) -
calendar:datetime_to_gregorian_seconds(A).
rfc3339({{Year, Month, Day}, {Hour, Minute, Second}})->
lists:flatten(
io_lib:format("~4.4.0w-~2.2.0w-~2.2.0wT~2.2.0w:~2.2.0w:~2.2.0wZ",
[Year, Month, Day, Hour, Minute, Second])).
rfc3339_to_epoch(Timestamp)->
{ok, [Year,Month,Day,Hour,Minute,Second],[]} =
io_lib:fread("~4d-~2d-~2dT~2d:~2d:~2dZ", Timestamp),
{{Year,Month,Day},{Hour, Minute, Second}}.
|
63a0749f63e4915a50e4c03142e05ac0b6fe2924719af2eac031f47bebe78a8a | magnetcoop/hydrogen-ce | user.clj | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
;; file, You can obtain one at /
(ns user)

;; `user` is the namespace a Clojure REPL starts in, so `dev` is available
;; immediately at the prompt as a shortcut into the dev tooling namespace.
(defn dev
  "Load and switch to the 'dev' namespace."
  []
  (require 'dev)
  (in-ns 'dev)
  :loaded)
| null | https://raw.githubusercontent.com/magnetcoop/hydrogen-ce/83450437eaeaaa47171e4de152b2951d32cf2c7a/dev/src/user.clj | clojure | file, You can obtain one at / | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
(ns user)
(defn dev
"Load and switch to the 'dev' namespace."
[]
(require 'dev)
(in-ns 'dev)
:loaded)
|
d09c78122fb7671b35df0e747909f9c3fddca9a4a13adbee43d88ecfa01d4994 | sfrank/minheap | violation-heap-test.lisp | (defpackage :heap-violation-test (:use :CL :violation-heap :lisp-unit))
(in-package :heap-violation-test)
;; End-to-end exercise of insert, decrease-key, peek-min, extract-min,
;; extract-node, heap-size, empty-p and clear-heap on a 100-element heap.
(define-test test-basic
  (let ((heap (make-instance 'violation-heap))
        (nodes (make-hash-table :test #'eql)))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))
    ;; Insert keys 100..199, remembering each node for later decrease-key.
    (loop for key from 100 below 200
          do (setf (gethash key nodes) (insert heap key key)))
    (assert-false (empty-p heap))
    (assert-eql 100 (heap-size heap))
    (decrease-key heap (gethash 120 nodes) 50)
    (decrease-key heap (gethash 140 nodes) 25)
    (decrease-key heap (gethash 160 nodes) 15)
    (assert-eql 160 (peek-min heap))
    (assert-eql 160 (extract-min heap))
    (assert-eql 140 (peek-min heap))
    (assert-eql 140 (extract-node heap (gethash 140 nodes)))
    (assert-eql 120 (extract-min heap))
    (assert-eql 100 (peek-min heap))
    (assert-eql 97 (heap-size heap))
    (clear-heap heap)
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))))
;; Duplicate keys must all be retained and come back out in
;; nondecreasing order.
(define-test test-multiple-keys
  (let ((heap (make-instance 'violation-heap)))
    (dolist (key '(2 3 2 4 1 2))
      (insert heap key key))
    (assert-eql 6 (heap-size heap))
    (dolist (expected '(1 2 2 2 3 4))
      (assert-eql expected (extract-min heap)))
    (assert-eql 0 (heap-size heap))))
;; Smaller duplicate-key ordering check.
(define-test test-mk
  (let ((heap (make-instance 'violation-heap)))
    (dolist (key '(2 4 2 3 1))
      (insert heap key key))
    (dolist (expected '(1 2 2 3 4))
      (assert-eql expected (extract-min heap)))))
;; Insert up to 75000 distinct random keys and check they extract in
;; strictly sorted order.
(define-test test-stress
  (let ((heap (make-instance 'violation-heap))
        (size 75000))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))
    (let ((keys (remove-duplicates
                 (loop repeat size
                       collect (random most-positive-fixnum)))))
      (dolist (key keys)
        (insert heap key key))
      (assert-false (empty-p heap))
      (dolist (key (sort keys #'<))
        (assert-eql key (extract-min heap))))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))))
;; Same shape as test-stress but with small sequential keys.
;; (remove-duplicates is a no-op on 0..149; kept for parity with
;; test-stress.)
(define-test test-stress-2
  (let ((heap (make-instance 'violation-heap))
        (size 150))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))
    (let ((keys (remove-duplicates
                 (loop for i from 0 below size collect i))))
      (dolist (key keys)
        (insert heap key key))
      (assert-false (empty-p heap))
      (dolist (key (sort keys #'<))
        (assert-eql key (extract-min heap))))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))))
;; Many random keys drawn from a narrow range, so duplicates are
;; guaranteed; extraction order must still be nondecreasing.
(define-test test-multiple-keys-stress
  (let ((heap (make-instance 'violation-heap))
        (size 350)
        (key-range 100))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))
    (let ((keys (loop repeat size collect (random key-range))))
      (dolist (key keys)
        (insert heap key key))
      (assert-false (empty-p heap))
      (dolist (key (sort keys #'<))
        (assert-eql key (extract-min heap))))
    (assert-true (empty-p heap))
    (assert-eql 0 (heap-size heap))))
;; Melding two disjoint heaps must empty the second argument and yield
;; a heap containing the union of both key sets.
(define-test test-meld
  (let ((left (make-instance 'violation-heap))
        (right (make-instance 'violation-heap)))
    (assert-true (empty-p left))
    (assert-eql 0 (heap-size left))
    (assert-true (empty-p right))
    (assert-eql 0 (heap-size right))
    (dolist (key '(1 2 3 4 5))
      (insert left key key))
    (assert-false (empty-p left))
    (assert-eql 5 (heap-size left))
    (dolist (key '(6 7 8 9))
      (insert right key key))
    (assert-false (empty-p right))
    (assert-eql 4 (heap-size right))
    (let ((melded (meld left right)))
      (assert-false (empty-p melded))
      (assert-true (empty-p right))
      (assert-eql 9 (heap-size melded))
      (dolist (expected '(1 2 3 4 5 6 7 8 9))
        (assert-eql expected (extract-min melded)))
      (assert-true (empty-p melded))
      (assert-eql 0 (heap-size melded)))))
;; As test-meld, but the smaller keys live in the heap passed as the
;; second argument to meld.
(define-test test-meld-2
  (let ((left (make-instance 'violation-heap))
        (right (make-instance 'violation-heap)))
    (assert-true (empty-p left))
    (assert-eql 0 (heap-size left))
    (assert-true (empty-p right))
    (assert-eql 0 (heap-size right))
    (dolist (key '(1 2 3 4 5))
      (insert right key key))
    (assert-false (empty-p right))
    (assert-eql 5 (heap-size right))
    (dolist (key '(6 7 8 9 10 11))
      (insert left key key))
    (assert-false (empty-p left))
    (assert-eql 6 (heap-size left))
    (let ((melded (meld left right)))
      (assert-false (empty-p melded))
      (assert-true (empty-p right))
      (assert-eql 11 (heap-size melded))
      (dolist (expected '(1 2 3 4 5 6 7 8 9 10 11))
        (assert-eql expected (extract-min melded)))
      (assert-true (empty-p melded))
      (assert-eql 0 (heap-size melded)))))
;; Cascading decrease-key test.  NOTE(review): the extraction process that
;; produced this file dropped the decrease-key forms, leaving only their
;; bare comment text (a syntax error).  They are reconstructed below;
;; the reconstruction reproduces exactly the asserted extraction order
;; (5 3 1 2 4 7).  `list' holds the surviving nodes sorted by key
;; (1 2 3 4 5 7) after the key-0 node is dropped by cdr.
(define-test test-cascading
  (let* ((heap (make-instance 'violation-heap))
         (list (cdr (sort (loop for i in '(5 3 0 2 4 1 7)
                                collect (insert heap i i))
                          #'< :key #'violation-heap::node-key))))
    (assert-eql 7 (heap-size heap))
    (extract-min heap)
    ;; set key of node 3 to 0
    (decrease-key heap (third list) 0)
    ;; set key of node 5 to 2
    (decrease-key heap (fifth list) 2)
    ;; set key of node 5 to -1
    (decrease-key heap (fifth list) -1)
    (dolist (value '(5 3 1 2 4 7))
      (assert-eql value (extract-min heap)))
    (assert-eql 0 (heap-size heap))))
;; Second cascading decrease-key test.  NOTE(review): as in test-cascading,
;; the decrease-key forms were stripped to bare comment text during
;; extraction; they are reconstructed here and reproduce the asserted
;; extraction order (4 5 7 2 3 6).  `list' holds the nodes in insertion
;; order, i.e. keyed (7 6 5 4 3 2 1).
(define-test test-cascading-2
  (let* ((heap (make-instance 'violation-heap))
         (list (loop for i in '(7 6 5 4 3 2 1)
                     collect (insert heap i i))))
    (assert-eql 7 (heap-size heap))
    (extract-min heap)
    ;; set key of node 7 to 1
    (decrease-key heap (first list) 1)
    ;; set key of node 5 to 0
    (decrease-key heap (third list) 0)
    ;; set key of node 4 to -1
    (decrease-key heap (fourth list) -1)
    (dolist (value '(4 5 7 2 3 6))
      (assert-eql value (extract-min heap)))
    (assert-eql 0 (heap-size heap))))
;; Rough benchmark, not a unit test: time inserting 100k random keys and
;; then extracting them all.
(defun timing ()
  (let ((heap (make-instance 'violation-heap))
        (size 100000)
        (keys '()))
    (dotimes (i size)
      (push (random most-positive-fixnum) keys))
    (time
     (progn
       (dolist (key keys)
         (insert heap key key))
       (dotimes (i size)
         (extract-min heap))))))
| null | https://raw.githubusercontent.com/sfrank/minheap/51cc9edcbbe13d9132fe12b0b197848f31513232/test/violation-heap-test.lisp | lisp | (defpackage :heap-violation-test (:use :CL :violation-heap :lisp-unit))
(in-package :heap-violation-test)
(define-test test-basic
(let ((heap (make-instance 'violation-heap))
(htable (make-hash-table :test #'eql)))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))
(dotimes (i 100)
(let ((ii (+ i 100)))
(setf (gethash ii htable)
(insert heap ii ii))))
(assert-false (empty-p heap))
(assert-eql 100 (heap-size heap))
(decrease-key heap (gethash 120 htable) 50)
(decrease-key heap (gethash 140 htable) 25)
(decrease-key heap (gethash 160 htable) 15)
(assert-eql 160 (peek-min heap))
(assert-eql 160 (extract-min heap))
(assert-eql 140 (peek-min heap))
(assert-eql 140 (extract-node heap (gethash 140 htable)))
(assert-eql 120 (extract-min heap))
(assert-eql 100 (peek-min heap))
(assert-eql 97 (heap-size heap))
(clear-heap heap)
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))))
(define-test test-multiple-keys
(let ((heap (make-instance 'violation-heap)))
(loop for i in '(2 3 2 4 1 2)
do (insert heap i i))
(assert-eql 6 (heap-size heap))
(assert-eql 1 (extract-min heap))
(assert-eql 2 (extract-min heap))
(assert-eql 2 (extract-min heap))
(assert-eql 2 (extract-min heap))
(assert-eql 3 (extract-min heap))
(assert-eql 4 (extract-min heap))
(assert-eql 0 (heap-size heap))))
(define-test test-mk
(let ((heap (make-instance 'violation-heap)))
(loop for i in '(2 4 2 3 1)
do (insert heap i i))
(assert-eql 1 (extract-min heap))
(assert-eql 2 (extract-min heap))
(assert-eql 2 (extract-min heap))
(assert-eql 3 (extract-min heap))
(assert-eql 4 (extract-min heap))))
(define-test test-stress
(let ((heap (make-instance 'violation-heap))
(size 75000))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))
(let ((list (remove-duplicates (loop for i below size
collect (random most-positive-fixnum)))))
(dolist (key list)
(insert heap key key))
(assert-false (empty-p heap))
(dolist (key (sort list #'<))
(assert-eql key (extract-min heap))))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))))
(define-test test-stress-2
(let ((heap (make-instance 'violation-heap))
(size 150))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))
(let ((list (remove-duplicates (loop for i below size
collect i))))
(dolist (key list)
(insert heap key key))
(assert-false (empty-p heap))
(dolist (key (sort list #'<))
(assert-eql key (extract-min heap))))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))))
(define-test test-multiple-keys-stress
(let ((heap (make-instance 'violation-heap))
(size 350)
(key-range 100))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))
(let ((list (loop for i below size
collect (random key-range))))
(dolist (key list)
(insert heap key key))
(assert-false (empty-p heap))
(dolist (key (sort list #'<))
(assert-eql key (extract-min heap))))
(assert-true (empty-p heap))
(assert-eql 0 (heap-size heap))))
(define-test test-meld
(let ((heap-1 (make-instance 'violation-heap))
(heap-2 (make-instance 'violation-heap)))
(assert-true (empty-p heap-1))
(assert-eql 0 (heap-size heap-1))
(assert-true (empty-p heap-2))
(assert-eql 0 (heap-size heap-2))
(loop for i in '(1 2 3 4 5)
do (insert heap-1 i i))
(assert-false (empty-p heap-1))
(assert-eql 5 (heap-size heap-1))
(loop for i in '(6 7 8 9)
do (insert heap-2 i i))
(assert-false (empty-p heap-2))
(assert-eql 4 (heap-size heap-2))
(let ((melded-heap (meld heap-1 heap-2)))
(assert-false (empty-p melded-heap))
(assert-true (empty-p heap-2))
(assert-eql 9 (heap-size melded-heap))
(loop for i in '(1 2 3 4 5 6 7 8 9)
do (assert-eql i (extract-min melded-heap)))
(assert-true (empty-p melded-heap))
(assert-eql 0 (heap-size melded-heap)))))
(define-test test-meld-2
(let ((heap-1 (make-instance 'violation-heap))
(heap-2 (make-instance 'violation-heap)))
(assert-true (empty-p heap-1))
(assert-eql 0 (heap-size heap-1))
(assert-true (empty-p heap-2))
(assert-eql 0 (heap-size heap-2))
(loop for i in '(1 2 3 4 5)
do (insert heap-2 i i))
(assert-false (empty-p heap-2))
(assert-eql 5 (heap-size heap-2))
(loop for i in '(6 7 8 9 10 11)
do (insert heap-1 i i))
(assert-false (empty-p heap-1))
(assert-eql 6 (heap-size heap-1))
(let ((melded-heap (meld heap-1 heap-2)))
(assert-false (empty-p melded-heap))
(assert-true (empty-p heap-2))
(assert-eql 11 (heap-size melded-heap))
(loop for i in '(1 2 3 4 5 6 7 8 9 10 11)
do (assert-eql i (extract-min melded-heap)))
(assert-true (empty-p melded-heap))
(assert-eql 0 (heap-size melded-heap)))))
(define-test test-cascading
(let* ((heap (make-instance 'violation-heap))
(list (cdr (sort (loop for i in '(5 3 0 2 4 1 7)
collect (insert heap i i))
#'< :key #'violation-heap::node-key))))
(assert-eql 7 (heap-size heap))
(extract-min heap)
set key of node 3 to 0
set key of node 5 to 2
set key of node 5 to -1
(dolist (value '(5 3 1 2 4 7))
(assert-eql value (extract-min heap)))
(assert-eql 0 (heap-size heap))))
(define-test test-cascading-2
(let* ((heap (make-instance 'violation-heap))
(list (loop for i in '(7 6 5 4 3 2 1)
collect (insert heap i i))))
(assert-eql 7 (heap-size heap))
(extract-min heap)
set key of node 7 to 1
set key of node 5 to 0
set key of node 4 to -1
(dolist (value '(4 5 7 2 3 6))
(assert-eql value (extract-min heap)))
(assert-eql 0 (heap-size heap))))
(defun timing ()
(let ((heap (make-instance 'violation-heap))
(size 100000)
list)
(dotimes (i size)
(let ((key (random most-positive-fixnum)))
(push key list)))
(time
(progn
(loop for i in list
do (insert heap i i))
(dotimes (i size)
(extract-min heap))))))
| |
97d413792346c571507d8b59526e8a803ef33a780e210fc98d6c1e79674d94c7 | kana/sicp | ex-4.45.scm | Exercise 4.45 . With the grammar given above , the following sentence can be
parsed in five different ways : ` ` The professor lectures to the student in
the class with the cat . '' Give the five parses and explain the differences
;;; in shades of meaning among them.
;;; Bring in the amb evaluator and the natural-language parser support
;;; code for section 4.3.
(load "./sec-4.3.2.scm")
(load "./sec-4.3.3.scm")

;;; Run the parser inside the amb evaluator.  parse backtracks on
;;; failure, so each successful parse of the ambiguous sentence is
;;; printed in turn (five in total; see the results below).
(ambtest `(begin
            ,@parser-definitions
            (define the-sentence
              '(the professor lectures to the student in the class with the cat))
            (print (parse the-sentence))
            ))
; Results are:
; (sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (simple-noun-phrase (article the) (noun student)))) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))
; (sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (simple-noun-phrase (article the) (noun student)))) (prep-phrase (prep in) (noun-phrase (simple-noun-phrase (article the) (noun class)) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))))
; (sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))
; (sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))))
; (sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (noun-phrase (simple-noun-phrase (article the) (noun class)) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))))))
| null | https://raw.githubusercontent.com/kana/sicp/912bda4276995492ffc2ec971618316701e196f6/ex-4.45.scm | scheme | in shades of meaning among them.
Results are:
(sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (simple-noun-phrase (article the) (noun student)))) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))
(sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (simple-noun-phrase (article the) (noun student)))) (prep-phrase (prep in) (noun-phrase (simple-noun-phrase (article the) (noun class)) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))))
(sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))
(sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (simple-noun-phrase (article the) (noun class)))) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat)))))))
(sentence (simple-noun-phrase (article the) (noun professor)) (verb-phrase (verb lectures) (prep-phrase (prep to) (noun-phrase (simple-noun-phrase (article the) (noun student)) (prep-phrase (prep in) (noun-phrase (simple-noun-phrase (article the) (noun class)) (prep-phrase (prep with) (simple-noun-phrase (article the) (noun cat))))))))) | Exercise 4.45 . With the grammar given above , the following sentence can be
parsed in five different ways : ` ` The professor lectures to the student in
the class with the cat . '' Give the five parses and explain the differences
(load "./sec-4.3.2.scm")
(load "./sec-4.3.3.scm")
(ambtest `(begin
,@parser-definitions
(define the-sentence
'(the professor lectures to the student in the class with the cat))
(print (parse the-sentence))
))
|
00a439660b5be42e6e0212c06ef08c78f751ba2699de049f301fbf48f9e5ea37 | liqula/react-hs | TodoViews.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

-- | The views for the TODO app
module TodoViews where
import Control.Monad (when)
import GHCJS.Types
import React.Flux
import qualified Data.JSString as JSS
import TodoDispatcher
import TodoStore
import TodoComponents
-- | The controller view and also the top level of the TODO app.  This controller view registers
-- with the store and will be re-rendered whenever the store changes.
-- Registered on the TodoState store via mkControllerView, so this view
-- (and everything it renders) is re-rendered whenever the store changes.
todoApp :: View ()
todoApp = mkControllerView @'[StoreArg TodoState] "todo app" $ \todoState () ->
    div_ $ do
      view_ todoHeader "header" ()
      mainSection_ todoState
      view_ todoFooter "footer" todoState
-- | The TODO header as a React view with no properties.
todoHeader :: View ()
todoHeader = mkView "header" $ \() ->
    header_ ["id" $= "header"] $ do
      h1_ "todos"
      -- Text input for entering new todos; tiaSaveAction = SACreate tags
      -- saves from this input as "create" (handled in TodoComponents).
      todoTextInput_ TextInputArgs
        { tiaId = Just "new-todo"
        , tiaClass = "new-todo"
        , tiaPlaceholder = "What needs to be done?"
        , tiaSaveAction = SACreate
        , tiaValue = Nothing
        }
-- | A view that does not use a ReactView and is instead just a function.
-- Note how we use an underscore to signal that this is directly a combinator that can be used
-- inside the rendering function.
-- Renders the "mark all as complete" checkbox plus the list of todo items.
mainSection_ :: TodoState -> ReactElementM 'EventHandlerCode ()
mainSection_ st = section_ ["id" $= "main"] $ do
    labeledInput_ "toggle-all" "Mark all as complete"
      [ "type" $= "checkbox"
      -- Checked only when every todo is complete; the empty string leaves
      -- the checkbox unchecked.
      , "checked" $= if all (todoComplete . snd) $ todoList st then "checked" else ""
      , onChange $ \_ -> handleTodo ToggleAllComplete
      ]
    ul_ [ "id" $= "todo-list" ] $ mapM_ todoItem_ $ todoList st
-- | A view for each todo item. We specifically use a ReactView here to take advantage of the
-- ability for React to only re-render the todo items that have changed.  Care is taken in the
-- transform function of the store to not change the object for the pair (Int, Todo), and
-- in this case will not re-render the todo item.  For more details, see the "Performance"
-- section of the React.Flux documentation.
todoItem :: View (Int, Todo)
todoItem = mkView "todo item" $ \(todoIdx, todo) ->
    -- "key" lets React match list items across renders; the CSS classes
    -- are toggled from the todo's completed/editing flags.
    li_ [ classNamesLast [("completed", todoComplete todo), ("editing", todoIsEditing todo)]
        , "key" @= todoIdx
        ] $ do

      cldiv_ "view" $ do
        input_ [ "className" $= "toggle"
               , "type" $= "checkbox"
               , "checked" @= todoComplete todo
                 -- Dispatches TodoSetComplete with the negated current flag.
               , onChange $ \_ -> handleTodo $ TodoSetComplete todoIdx $ not $ todoComplete todo
               ]

        -- Double-clicking the label switches this item into edit mode.
        label_ [ onDoubleClick $ \_ _ -> handleTodo $ TodoEdit todoIdx] $
          elemText $ todoText todo

        clbutton_ "destroy" (dispatchTodo $ TodoDelete todoIdx) mempty

      -- While editing, overlay the text input; SAUpdate routes the save
      -- back to this todo's index.
      when (todoIsEditing todo) $
        todoTextInput_ TextInputArgs
          { tiaId = Nothing
          , tiaClass = "edit"
          , tiaPlaceholder = ""
          , tiaSaveAction = SAUpdate todoIdx
          , tiaValue = Just $ todoText todo
          }
-- | A combinator for a todo item to use inside rendering functions
todoItem_ :: (Int, Todo) -> ReactElementM eventHandler ()
todoItem_ (idx, todo) = view_ todoItem (JSS.pack (show idx)) (idx, todo)
-- | A view for the footer, taking the entire state as the properties. This could alternatively
-- been modeled as a controller-view, attaching directly to the store.
todoFooter :: View TodoState
todoFooter = mkView "footer" $ \(TodoState todos) ->
    let completed = length (filter (todoComplete . snd) todos)
        itemsLeft = length todos - completed
    in footer_ [ "id" $= "footer"] $ do
        span_ [ "id" $= "todo-count" ] $ do
          strong_ $ elemShow itemsLeft
          -- Singular/plural agreement for the remaining-items label.
          elemText $ if itemsLeft == 1 then " item left" else " items left"
        -- The clear button is only rendered when something can be cleared.
        when (completed > 0) $ do
          button_ [ "id" $= "clear-completed"
                  , onClick $ \_ _ -> handleTodo ClearCompletedTodos
                  ] $
            elemString $ "Clear completed (" ++ show completed ++ ")"
-- FFI binding to the browser's window.alert, taking the message to show.
foreign import javascript unsafe
    "window.alert($1)"
    js_alert :: JSString -> IO ()
| null | https://raw.githubusercontent.com/liqula/react-hs/204c96ee3514b5ddec65378d37872266b05e8954/react-hs-examples/todo/src/TodoViews.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE EmptyDataDecls #
# LANGUAGE OverloadedStrings #
with the store and will be re-rendered whenever the store changes.
Note how we use an underscore to signal that this is directly a combinator that can be used
inside the rendering function.
| A view for each todo item. We specifically use a ReactView here to take advantage of the
section of the React.Flux documentation.
| A combinator for a todo item to use inside rendering functions
| A view for the footer, taking the entire state as the properties. This could alternatively
been modeled as a controller-view, attaching directly to the store. | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE LambdaCase #
# LANGUAGE MagicHash #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeFamilyDependencies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
| The views for the TODO app
module TodoViews where
import Control.Monad (when)
import GHCJS.Types
import React.Flux
import qualified Data.JSString as JSS
import TodoDispatcher
import TodoStore
import TodoComponents
| The controller view and also the top level of the TODO app . This controller view registers
todoApp :: View ()
todoApp = mkControllerView @'[StoreArg TodoState] "todo app" $ \todoState () ->
div_ $ do
view_ todoHeader "header" ()
mainSection_ todoState
view_ todoFooter "footer" todoState
| The TODO header as a React view with no properties .
todoHeader :: View ()
todoHeader = mkView "header" $ \() ->
header_ ["id" $= "header"] $ do
h1_ "todos"
todoTextInput_ TextInputArgs
{ tiaId = Just "new-todo"
, tiaClass = "new-todo"
, tiaPlaceholder = "What needs to be done?"
, tiaSaveAction = SACreate
, tiaValue = Nothing
}
| A view that does not use a ReactView and is instead just a function .
mainSection_ :: TodoState -> ReactElementM 'EventHandlerCode ()
mainSection_ st = section_ ["id" $= "main"] $ do
labeledInput_ "toggle-all" "Mark all as complete"
[ "type" $= "checkbox"
, "checked" $= if all (todoComplete . snd) $ todoList st then "checked" else ""
, onChange $ \_ -> handleTodo ToggleAllComplete
]
ul_ [ "id" $= "todo-list" ] $ mapM_ todoItem_ $ todoList st
ability for React to only re - render the todo items that have changed . Care is taken in the
transform function of the store to not change the object for the pair ( Int , Todo ) , and
in this case will not re - render the todo item . For more details , see the " Performance "
todoItem :: View (Int, Todo)
todoItem = mkView "todo item" $ \(todoIdx, todo) ->
li_ [ classNamesLast [("completed", todoComplete todo), ("editing", todoIsEditing todo)]
, "key" @= todoIdx
] $ do
cldiv_ "view" $ do
input_ [ "className" $= "toggle"
, "type" $= "checkbox"
, "checked" @= todoComplete todo
, onChange $ \_ -> handleTodo $ TodoSetComplete todoIdx $ not $ todoComplete todo
]
label_ [ onDoubleClick $ \_ _ -> handleTodo $ TodoEdit todoIdx] $
elemText $ todoText todo
clbutton_ "destroy" (dispatchTodo $ TodoDelete todoIdx) mempty
when (todoIsEditing todo) $
todoTextInput_ TextInputArgs
{ tiaId = Nothing
, tiaClass = "edit"
, tiaPlaceholder = ""
, tiaSaveAction = SAUpdate todoIdx
, tiaValue = Just $ todoText todo
}
todoItem_ :: (Int, Todo) -> ReactElementM eventHandler ()
todoItem_ (i, t) = view_ todoItem (JSS.pack $ show i) (i, t)
todoFooter :: View TodoState
todoFooter = mkView "footer" $ \(TodoState todos) ->
let completed = length (filter (todoComplete . snd) todos)
itemsLeft = length todos - completed
in footer_ [ "id" $= "footer"] $ do
span_ [ "id" $= "todo-count" ] $ do
strong_ $ elemShow itemsLeft
elemText $ if itemsLeft == 1 then " item left" else " items left"
when (completed > 0) $ do
button_ [ "id" $= "clear-completed"
, onClick $ \_ _ -> handleTodo ClearCompletedTodos
] $
elemString $ "Clear completed (" ++ show completed ++ ")"
foreign import javascript unsafe
"window.alert($1)"
js_alert :: JSString -> IO ()
|
337d324b2c848f1e56811c466752d51bb3668d603a4d2f24e265ccddd0b2f7c5 | ghollisjr/cl-ana | convert-units.lisp | cl - ana is a Common Lisp data analysis library .
Copyright 2013 , 2014
;;;;
This file is part of cl - ana .
;;;;
;;;; cl-ana is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; cl-ana is distributed in the hope that it will be useful, but
;;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; General Public License for more details.
;;;;
You should have received a copy of the GNU General Public License
;;;; along with cl-ana. If not, see </>.
;;;;
You may contact ( me ! ) via email at
;;;;
(in-package :cl-ana.quantity)
(defun convert-units (quantity new-units)
  "Gets the scale of quantity if expressed in new-units.

new-units can be either a product unit (usual unit, e.g. meter/second)
or a list of product units which is interpreted as a sum of units.

Using a list of units results in a list of unit-scales, one element
per unit in the sum.  The quantity value is the result of multiplying
the unit-scales with the corresponding product units and then summing.

Useful for expressing U.S. quantities like heights in feet and
inches."
  (if (not (listp new-units))
      (/ quantity new-units)
      ;; Sum-of-units case: peel off a whole-number factor for each unit
      ;; except the last, then express whatever remains in the final unit.
      ;; Only the package-local arithmetic operators are used so quantity
      ;; objects are handled the same way as in the rest of the package.
      (let ((remaining quantity)
            (factors '()))
        (dolist (unit (butlast new-units))
          (let ((whole (floor (/ remaining unit))))
            (setf remaining (- remaining (* unit whole)))
            (push whole factors)))
        (reverse (cons (/ remaining (first (last new-units))) factors)))))
| null | https://raw.githubusercontent.com/ghollisjr/cl-ana/5cb4c0b0c9c4957452ad2a769d6ff9e8d5df0b10/quantity/convert-units.lisp | lisp |
cl-ana is free software: you can redistribute it and/or modify it
(at your option) any later version.
cl-ana is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with cl-ana. If not, see </>.
| cl - ana is a Common Lisp data analysis library .
Copyright 2013 , 2014
This file is part of cl - ana .
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
You may contact ( me ! ) via email at
(in-package :cl-ana.quantity)
(defun convert-units (quantity new-units)
"Gets the scale of quantity if expressed in new-units.
new-units can be either a product unit (usual unit, e.g. meter/second)
or a list of product units which is interpreted as a sum of units.
Using a list of units results in a list of unit-scales, one element
per unit in the sum. The quantity value is the result of multiplying
the unit-scales with the corresponding product units and them summing.
Useful for expressing U.S. quantities like heights in feet and
inches."
(if (listp new-units)
(let ((units (butlast new-units))
(last-unit (first (last new-units)))
(result ())
(q quantity)
factor)
(dolist (unit units)
(setf factor (floor (/ q unit)))
(setf q (- q (* unit factor)))
(push factor result))
(push (/ q last-unit) result)
(nreverse result))
(/ quantity new-units)))
|
af6141e39aa465b1bbd7be77a0823a15c77e1bc602c566048c6ddca00725e1c1 | tomlokhorst/AwesomePrelude | Bool.hs | {-# LANGUAGE EmptyDataDecls #-}
module Generic.Data.Bool where
import Prelude ()
infixr 3 &&
infixr 2 ||
data Bool
class BoolC j where
false :: j Bool
true :: j Bool
bool :: j a -> j a -> j Bool -> j a
if' :: BoolC j => j Bool -> j a -> j a -> j a
if' b x y = bool y x b
(&&) :: BoolC j => j Bool -> j Bool -> j Bool
x && y = bool false y x
(||) :: BoolC j => j Bool -> j Bool -> j Bool
x || y = bool y true x
not :: BoolC j => j Bool -> j Bool
not = bool true false
| null | https://raw.githubusercontent.com/tomlokhorst/AwesomePrelude/c06fc73e1489aecdd3556b0395d427065245efee/src/Generic/Data/Bool.hs | haskell | # LANGUAGE EmptyDataDecls # |
module Generic.Data.Bool where
import Prelude ()
infixr 3 &&
infixr 2 ||
data Bool
class BoolC j where
false :: j Bool
true :: j Bool
bool :: j a -> j a -> j Bool -> j a
if' :: BoolC j => j Bool -> j a -> j a -> j a
if' b x y = bool y x b
(&&) :: BoolC j => j Bool -> j Bool -> j Bool
x && y = bool false y x
(||) :: BoolC j => j Bool -> j Bool -> j Bool
x || y = bool y true x
not :: BoolC j => j Bool -> j Bool
not = bool true false
|
65cadcc78b376e4157823c1900b54038d25e220a15763f9e1fae3d5b2c56403e | MattWindsor91/travesty | mappable.ml | This file is part of ' travesty ' .
Copyright ( c ) 2018 by
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation
the rights to use , copy , modify , merge , publish , distribute , sublicense ,
and/or sell copies of the Software , and to permit persons to whom the
Software is furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Copyright (c) 2018 by Matt Windsor
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE. *)
open Base
open Mappable_types
module Extend1 (S : S1_container) : Extensions1 with type 'a t := 'a S.t =
struct
include Container_exts.Extend1 (S)
let right_pad ~padding xs =
let maxlen = max_measure ~measure:List.length xs
and f = Fn.const padding in
S.map ~f:(fun p -> p @ List.init (maxlen - List.length p) ~f) xs
end
| null | https://raw.githubusercontent.com/MattWindsor91/travesty/3f4da33830cc928ad879077e690277088de1f836/src/mappable.ml | ocaml | This file is part of ' travesty ' .
Copyright ( c ) 2018 by
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation
the rights to use , copy , modify , merge , publish , distribute , sublicense ,
and/or sell copies of the Software , and to permit persons to whom the
Software is furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Copyright (c) 2018 by Matt Windsor
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE. *)
open Base
open Mappable_types
module Extend1 (S : S1_container) : Extensions1 with type 'a t := 'a S.t =
struct
include Container_exts.Extend1 (S)
let right_pad ~padding xs =
let maxlen = max_measure ~measure:List.length xs
and f = Fn.const padding in
S.map ~f:(fun p -> p @ List.init (maxlen - List.length p) ~f) xs
end
| |
5965582dab1395c6080acd4f9f445c95fdb3ee17c63a6ceacfad5b63013a46fd | dfinity-side-projects/winter | Winter.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
| This is a parser for Wast scripts , used by the WebAssembly specification .
module Wasm.Text.Winter where
import Control.Monad.Except
import Control.Monad.Primitive
import Data.Binary.Get
import Data.Bifunctor
import Data.Functor.Classes
import Data.Kind (Type)
import Wasm.Text.Wast
import qualified Wasm.Binary.Decode as Decode
import qualified Wasm.Exec.Eval as Eval
import qualified Wasm.Runtime.Instance as Instance
import qualified Wasm.Syntax.AST as AST
import qualified Wasm.Syntax.Values as Values
import Wasm.Util.Source
data Winter (f :: Type -> Type) = Winter
instance (PrimMonad m, Regioned f, Decode.Decodable f, Show1 f)
=> WasmEngine (Winter f) m where
type Value (Winter f) = Values.Value
type Module (Winter f) = AST.Module f
type ModuleInst (Winter f) m = Instance.ModuleInst f m
const_i32 = Values.I32
const_i64 = Values.I64
const_f32 = Values.F32
const_f64 = Values.F64
decodeModule = Right . runGet Decode.getModule
initializeModule m names mods =
fmap (bimap show (\(r,i,e) -> (r, i, fmap show e)))
$ runExceptT $ Eval.initialize m names mods
invokeByName mods inst name stack =
fmap (bimap show (,inst))
$ runExceptT $ Eval.invokeByName mods inst name stack
getByName inst name =
fmap (bimap show (,inst))
$ runExceptT $ Eval.getByName inst name
| null | https://raw.githubusercontent.com/dfinity-side-projects/winter/cca827ab9299146c0b3b51920e5ad63c4c6014c3/src/Wasm/Text/Winter.hs | haskell | # LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
| This is a parser for Wast scripts , used by the WebAssembly specification .
module Wasm.Text.Winter where
import Control.Monad.Except
import Control.Monad.Primitive
import Data.Binary.Get
import Data.Bifunctor
import Data.Functor.Classes
import Data.Kind (Type)
import Wasm.Text.Wast
import qualified Wasm.Binary.Decode as Decode
import qualified Wasm.Exec.Eval as Eval
import qualified Wasm.Runtime.Instance as Instance
import qualified Wasm.Syntax.AST as AST
import qualified Wasm.Syntax.Values as Values
import Wasm.Util.Source
data Winter (f :: Type -> Type) = Winter
instance (PrimMonad m, Regioned f, Decode.Decodable f, Show1 f)
=> WasmEngine (Winter f) m where
type Value (Winter f) = Values.Value
type Module (Winter f) = AST.Module f
type ModuleInst (Winter f) m = Instance.ModuleInst f m
const_i32 = Values.I32
const_i64 = Values.I64
const_f32 = Values.F32
const_f64 = Values.F64
decodeModule = Right . runGet Decode.getModule
initializeModule m names mods =
fmap (bimap show (\(r,i,e) -> (r, i, fmap show e)))
$ runExceptT $ Eval.initialize m names mods
invokeByName mods inst name stack =
fmap (bimap show (,inst))
$ runExceptT $ Eval.invokeByName mods inst name stack
getByName inst name =
fmap (bimap show (,inst))
$ runExceptT $ Eval.getByName inst name
| |
d3daf6ec0523b7f090800f2aa0e8d0e9f60dc9d4eb72af761cc63fff9954cb0c | racket/racket7 | reader.rkt | #lang racket/base
(require racket/contract
racket/list
racket/match
"structures.rkt")
(provide/contract
[read-xml (() (input-port?) . ->* . document?)]
[read-xml/document (() (input-port?) . ->* . document?)]
[read-xml/element (() (input-port?) . ->* . element?)]
[xml-count-bytes (parameter/c boolean?)]
[read-comments (parameter/c boolean?)]
[collapse-whitespace (parameter/c boolean?)]
[exn:xml? (any/c . -> . boolean?)])
;; Start-tag ::= (make-start-tag Location Location Symbol (listof Attribute))
(define-struct (start-tag source) (name attrs))
;; End-tag ::= (make-end-tag Location Location Symbol)
(define-struct (end-tag source) (name))
;; Token ::= Contents | Start-tag | End-tag | Eof
(define xml-count-bytes (make-parameter #f))
(define read-comments (make-parameter #f))
(define collapse-whitespace (make-parameter #f))
;; read-xml : [Input-port] -> Document
(define read-xml
(lambda ([in (current-input-port)])
(let*-values ([(in pos) (positionify in)]
[(misc0 start) (read-misc in pos)])
(make-document (make-prolog misc0 #f empty)
(read-xml-element-helper pos in start)
(let-values ([(misc1 end-of-file) (read-misc in pos)])
(unless (EOF? end-of-file)
(parse-error (list
(make-srcloc
(object-name in)
#f
#f
(location-offset (source-start end-of-file))
(- (location-offset (source-stop end-of-file))
(location-offset (source-start end-of-file)))))
"extra stuff at end of document ~e"
end-of-file))
misc1)))))
;; read-xml : [Input-port] -> Document
(define (read-xml/document [in (current-input-port)])
(let*-values ([(in pos) (positionify in)]
[(misc0 start) (read-misc in pos)])
(make-document (make-prolog misc0 #f empty)
(read-xml-element-helper pos in start)
empty)))
;; read-xml/element : [Input-port] -> Element
(define read-xml/element
(lambda ([in (current-input-port)])
(let-values ([(in pos) (positionify in)])
(skip-space in)
(read-xml-element-helper pos in (lex in pos)))))
read - xml - element - helper :
(define (read-xml-element-helper pos in start)
(cond
[(start-tag? start) (read-element start in pos)]
[(element? start) start]
[else
(parse-error
(list
(make-srcloc
(object-name in)
#f
#f
; XXX Some data structures should really be changed to be sources
(if (source? start)
(location-offset (source-start start))
#f)
(if (source? start)
(- (location-offset (source-stop start))
(location-offset (source-start start)))
#f)))
"expected root element - received ~e"
(cond
[(pcdata? start) (pcdata-string start)]
[(EOF? start) eof]
[else start]))]))
read - misc : Input - port ( - > Location ) - > ( ) Token
(define (read-misc in pos)
(let read-more ()
(let ([x (lex in pos)])
(cond
[(p-i? x)
(let-values ([(lst next) (read-more)])
(values (cons x lst) next))]
[(comment? x)
(let-values ([(lst next) (read-more)])
(if (read-comments)
(values (cons x lst) next)
(values lst next)))]
[(and (pcdata? x) (andmap char-whitespace? (string->list (pcdata-string x))))
(read-more)]
[else (values null x)]))))
;; read-element : Start-tag Input-port (-> Location) -> Element
(define (read-element start in pos)
(let ([name (start-tag-name start)]
[a (source-start start)]
[b (source-stop start)])
(let read-content ([k (lambda (body end-loc)
(make-element
a end-loc name (start-tag-attrs start)
body))])
(let ([x (lex in pos)])
(cond
[(EOF? x)
(parse-error (list
(make-srcloc
(object-name in)
#f
#f
(location-offset (source-start start))
(- (location-offset (source-stop start))
(location-offset (source-start start)))))
"unclosed `~a' tag at [~a ~a]"
name
(format-source a)
(format-source b))]
[(start-tag? x)
(let ([next-el (read-element x in pos)])
(read-content (lambda (body end-loc)
(k (cons next-el body)
end-loc))))]
[(end-tag? x)
(let ([end-loc (source-stop x)])
(unless (eq? name (end-tag-name x))
(parse-error
(list
(make-srcloc (object-name in)
#f
#f
(location-offset a)
(- (location-offset b) (location-offset a)))
(make-srcloc (object-name in)
#f
#f
(location-offset (source-start x))
(- (location-offset end-loc) (location-offset (source-start x)))))
"start tag `~a' at [~a ~a] doesn't match end tag `~a' at [~a ~a]"
name
(format-source a)
(format-source b)
(end-tag-name x)
(format-source (source-start x))
(format-source end-loc)))
(k null end-loc))]
[(entity? x) (read-content (lambda (body end-loc)
(k (cons (expand-entity x) body)
end-loc)))]
[(comment? x) (if (read-comments)
(read-content (lambda (body end-loc) (k (cons x body) end-loc)))
(read-content k))]
[else (read-content (lambda (body end-loc) (k (cons x body) end-loc)))])))))
expand - entity : Entity - > ( U Entity Pcdata )
;; more here - allow expansion of user defined entities
(define (expand-entity x)
(let ([expanded (default-entity-table (entity-text x))])
(if expanded
(make-pcdata (source-start x) (source-stop x) expanded)
x)))
;; default-entity-table : Symbol -> (U #f String)
(define (default-entity-table name)
(case name
[(amp) "&"]
[(lt) "<"]
[(gt) ">"]
[(quot) "\""]
[(apos) "'"]
[else #f]))
(define-struct (EOF source) ())
;; lex : Input-port (-> Location) -> (U Token special)
(define (lex in pos)
(let ([c (peek-char-or-special in)])
(cond
[(eof-object? c) (read-char in) (EOF (pos) (pos))]
[(eq? c #\&) (lex-entity in pos)]
[(eq? c #\<) (lex-tag-cdata-pi-comment in pos)]
[(not (char? c)) (read-char-or-special in)]
[else (lex-pcdata in pos)])))
; lex-entity : Input-port (-> Location) -> Entity
pre : the first char is a # \ &
(define (lex-entity in pos)
(let ([start (pos)])
(read-char in)
(let ([data (case (peek-char in)
[(#\#)
(read-char in)
(let ([n (case (peek-char in)
[(#\x) (read-char in)
in pos ) 16 ) ]
[else (string->number (read-until #\; in pos))])])
(unless (number? n)
(lex-error in pos "malformed numeric entity"))
(unless (valid-char? n)
(lex-error in pos "not a well-formed numeric entity (does not match the production for Char, see XML 4.1)"))
n)]
[else
(begin0
(lex-name in pos)
(unless (eq? (read-char in) #\;)
(lex-error in pos "expected ; at the end of an entity")))])])
(make-entity start (pos) data))))
; lex-tag-cdata-pi-comment : Input-port (-> Location) -> Start-tag | Element | End-tag | Cdata | p-i | Comment
pre : the first char is a # \ <
(define (lex-tag-cdata-pi-comment in pos)
(let ([start (pos)])
(read-char in)
(case (non-eof peek-char-or-special in pos)
[(#\!)
(read-char in)
(case (non-eof peek-char in pos)
[(#\-) (read-char in)
(unless (eq? (read-char-or-special in) #\-)
(lex-error in pos "expected second - after <!-"))
(let ([data (lex-comment-contents in pos)])
(unless (eq? (read-char in) #\>)
(lex-error in pos "expected > to end comment (\"--\" can't appear in comments)"))
;(make-comment start (pos) data)
(make-comment data))]
[(#\[) (read-char in)
(unless (string=? (read-string 6 in) "CDATA[")
(lex-error in pos "expected CDATA following <["))
(let ([data (lex-cdata-contents in pos)])
(make-cdata start (pos) (format "<![CDATA[~a]]>" data)))]
[else (skip-dtd in pos)
(skip-space in)
(unless (eq? (peek-char-or-special in) #\<)
(lex-error in pos "expected p-i, comment, or element after doctype"))
(lex-tag-cdata-pi-comment in pos)])]
[(#\?) (read-char in)
(let ([name (lex-name in pos)])
(skip-space in)
(let ([data (lex-pi-data in pos)])
(make-p-i start (pos) name data)))]
[(#\/) (read-char in)
(let ([name (lex-name in pos)])
(skip-space in)
(unless (eq? (read-char-or-special in) #\>)
(lex-error in pos "expected > to close ~a's end tag" name))
(make-end-tag start (pos) name))]
[else ; includes 'special, but lex-name will fail in that case
(let ([name (lex-name in pos)]
[attrs (lex-attributes in pos)])
(skip-space in)
(case (read-char-or-special in)
[(#\/)
(unless (eq? (read-char in) #\>)
(lex-error in pos "expected > to close empty element ~a" name))
(make-element start (pos) name attrs null)]
[(#\>) (make-start-tag start (pos) name attrs)]
[else (lex-error in pos "expected / or > to close tag `~a'" name)]))])))
lex - attributes : Input - port ( - > Location ) - > ( listof Attribute )
(define (lex-attributes in pos)
(let* ([result_list
(let loop ()
(skip-space in)
(cond [(name-start? (peek-char-or-special in))
(cons (lex-attribute in pos) (loop))]
[else null]))]
[check_dup (check-duplicates result_list (lambda (a b) (eq? (attribute-name a) (attribute-name b))))])
(if check_dup
(lex-error in pos "duplicated attribute name ~a" (attribute-name check_dup))
result_list)))
;; lex-attribute : Input-port (-> Location) -> Attribute
(define (lex-attribute in pos)
(let ([start (pos)]
[name (lex-name in pos)])
(skip-space in)
(unless (eq? (read-char in) #\=)
(lex-error in pos "expected = in attribute ~a" name))
(skip-space in)
;; more here - handle entites and disallow "<"
(let* ([delimiter (read-char-or-special in)]
[value (case delimiter
[(#\' #\")
(list->string
(let read-more ()
(let ([c (non-eof peek-char-or-special in pos)])
(cond
[(eq? c 'special)
(lex-error in pos "attribute values cannot contain non-text values")]
[(eq? c delimiter) (read-char in) null]
[(eq? c #\&)
(let ([entity (expand-entity (lex-entity in pos))])
(if (pcdata? entity)
(append (string->list (pcdata-string entity)) (read-more))
;; more here - do something with user defined entites
(read-more)))]
[else (read-char in) (cons c (read-more))]))))]
[else (if (char? delimiter)
(lex-error in pos "attribute values must be in ''s or in \"\"s")
delimiter)])])
(make-attribute start (pos) name value))))
;; skip-space : Input-port -> Void
deviation - should sometimes insist on at least one space
(define (skip-space in)
(let loop ()
(let ([c (peek-char-or-special in)])
(when (and (char? c)
(char-whitespace? c))
(read-char in)
(loop)))))
;; lex-pcdata : Input-port (-> Location) -> Pcdata
deviation - disallow ] ] > " for compatability " with SGML , sec 2.4 XML spec
(define (lex-pcdata in pos)
(let ([start (pos)]
[data (let loop ()
(let ([next (peek-char-or-special in)])
(cond
[(or (eof-object? next)
(not (char? next))
(eq? next #\&)
(eq? next #\<))
null]
[(and (char-whitespace? next) (collapse-whitespace))
(skip-space in)
(cons #\space (loop))]
[else (cons (read-char in) (loop))])))])
(make-pcdata start
(pos)
(list->string data))))
;; lex-name : Input-port (-> Location) -> Symbol
(define (lex-name in pos)
(let ([c (non-eof read-char-or-special in pos)])
(unless (name-start? c)
(lex-error in pos "expected name, received ~e" c))
(string->symbol
(list->string
(cons c (let lex-rest ()
(let ([c (non-eof peek-char-or-special in pos)])
(cond
[(eq? c 'special)
(lex-error in pos "names cannot contain non-text values")]
[(name-char? c)
(cons (read-char in) (lex-rest))]
[else null]))))))))
;; skip-dtd : Input-port (-> Location) -> Void
(define (skip-dtd in pos)
(let skip ()
(case (non-eof read-char in pos)
[(#\') (read-until #\' in pos) (skip)]
[(#\") (read-until #\" in pos) (skip)]
[(#\<)
(case (non-eof read-char in pos)
[(#\!) (case (non-eof read-char in pos)
[(#\-) (read-char in) (lex-comment-contents in pos) (read-char in) (skip)]
[else (skip) (skip)])]
[(#\?) (lex-pi-data in pos) (skip)]
[else (skip) (skip)])]
[(#\>) (void)]
[else (skip)])))
name - start ? : Bool
(define (name-start? ch)
(and (char? ch)
(or (char-alphabetic? ch)
(eq? ch #\_)
(eq? ch #\:))))
name - char ? : Bool
(define (name-char? ch)
(and (char? ch)
(or (name-start? ch)
(char-numeric? ch)
(eq? ch #\.)
(eq? ch #\-))))
read - until : ( - > Location ) - > String
;; discards the stop character, too
(define (read-until char in pos)
(list->string
(let read-more ()
(let ([c (non-eof read-char in pos)])
(cond
[(eq? c char) null]
[else (cons c (read-more))])))))
non - eof : ( Input - port - > ( U ) ) Input - port ( - > Location ) - >
(define (non-eof f in pos)
(let ([c (f in)])
(cond
[(eof-object? c) (lex-error in pos "unexpected eof")]
[else c])))
;; gen-read-until-string : String -> Input-port (-> Location) -> String
uses Knuth - Morris - Pratt from
Introduction to Algorithms , Cormen , , and Rivest , pages 869 - 876
;; discards stop from input
;; ---
Modified by to look more like the version on Wikipedia after discovering a bug when parsing CDATA
The use of the hasheq table and the purely numeric code trades hash efficiency for stack / ec capture efficiency
(struct hash-string (port pos ht))
(define (hash-string-ref hs k)
(match-define (hash-string port pos ht) hs)
(hash-ref! ht k (lambda () (non-eof read-char port pos))))
(define (gen-read-until-string W)
(define Wlen (string-length W))
(define T (make-vector Wlen #f))
(vector-set! T 0 -1)
(vector-set! T 1 0)
(let kmp-table ([pos 2] [cnd 0])
(when (pos . < . Wlen)
(cond
[(char=? (string-ref W (sub1 pos)) (string-ref W cnd))
(vector-set! T pos (add1 cnd))
(kmp-table (add1 pos) (add1 cnd))]
[(cnd . > . 0)
(kmp-table pos (vector-ref T cnd))]
[(zero? cnd)
(vector-set! T pos 0)
(kmp-table (add1 pos) 0)])))
(lambda (S-as-port S-pos)
(define S (hash-string S-as-port S-pos (make-hasheq)))
(define W-starts-at
(let kmp-search ([m 0] [i 0])
(if (char=? (string-ref W i) (hash-string-ref S (+ m i)))
(let ([i (add1 i)])
(if (= i Wlen)
m
(kmp-search m i)))
(let* ([Ti (vector-ref T i)]
[m (+ m i (* -1 Ti))])
(if (Ti . > . -1)
(let ([i Ti])
(kmp-search m i))
(let ([i 0])
(kmp-search m i)))))))
(list->string
(for/list ([i (in-range 0 W-starts-at)])
(hash-string-ref S i)))))
;; "-->" makes more sense, but "--" follows the spec.
(define lex-comment-contents (gen-read-until-string "--"))
(define lex-pi-data (gen-read-until-string "?>"))
(define lex-cdata-contents (gen-read-until-string "]]>"))
;; positionify : Input-port -> Input-port (-> Location)
; This function predates port-count-lines! and port-next-location.
; Otherwise I would have used those directly at the call sites.
(define (positionify in)
(unless (xml-count-bytes)
(port-count-lines! in))
(values
in
(lambda ()
(let-values ([(line column offset) (port-next-location in)])
(make-location line column offset)))))
locs : ( listof ( list number number ) )
(define-struct (exn:xml exn:fail:read) ())
;; lex-error : Input-port String (-> Location) TST* -> alpha
;; raises a lexer error, using exn:xml
(define (lex-error in pos str . rest)
(let* ([the-pos (pos)]
[offset (location-offset the-pos)])
(raise
(make-exn:xml
(format "read-xml: lex-error: at position ~a: ~a"
(format-source the-pos)
(apply format str rest))
(current-continuation-marks)
(list
(make-srcloc (object-name in) #f #f offset 1))))))
parse - error : ( listof srcloc ) ( listof TST ) * - > alpha
;; raises a parsing error, using exn:xml
(define (parse-error src fmt . args)
(raise (make-exn:xml (string-append "read-xml: parse-error: "
(apply format fmt args))
(current-continuation-marks)
src)))
;; format-source : Location -> string
;; to format the source location for an error message
(define (format-source loc)
(if (location? loc)
(format "~a.~a/~a" (location-line loc) (location-char loc) (location-offset loc))
(format "~a" loc)))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/collects/xml/private/reader.rkt | racket | Start-tag ::= (make-start-tag Location Location Symbol (listof Attribute))
End-tag ::= (make-end-tag Location Location Symbol)
Token ::= Contents | Start-tag | End-tag | Eof
read-xml : [Input-port] -> Document
read-xml : [Input-port] -> Document
read-xml/element : [Input-port] -> Element
XXX Some data structures should really be changed to be sources
read-element : Start-tag Input-port (-> Location) -> Element
more here - allow expansion of user defined entities
default-entity-table : Symbol -> (U #f String)
lex : Input-port (-> Location) -> (U Token special)
lex-entity : Input-port (-> Location) -> Entity
in pos))])])
)
lex-tag-cdata-pi-comment : Input-port (-> Location) -> Start-tag | Element | End-tag | Cdata | p-i | Comment
(make-comment start (pos) data)
includes 'special, but lex-name will fail in that case
lex-attribute : Input-port (-> Location) -> Attribute
more here - handle entites and disallow "<"
more here - do something with user defined entites
skip-space : Input-port -> Void
lex-pcdata : Input-port (-> Location) -> Pcdata
lex-name : Input-port (-> Location) -> Symbol
skip-dtd : Input-port (-> Location) -> Void
discards the stop character, too
gen-read-until-string : String -> Input-port (-> Location) -> String
discards stop from input
---
"-->" makes more sense, but "--" follows the spec.
positionify : Input-port -> Input-port (-> Location)
This function predates port-count-lines! and port-next-location.
Otherwise I would have used those directly at the call sites.
lex-error : Input-port String (-> Location) TST* -> alpha
raises a lexer error, using exn:xml
raises a parsing error, using exn:xml
format-source : Location -> string
to format the source location for an error message | #lang racket/base
(require racket/contract
racket/list
racket/match
"structures.rkt")
(provide/contract
[read-xml (() (input-port?) . ->* . document?)]
[read-xml/document (() (input-port?) . ->* . document?)]
[read-xml/element (() (input-port?) . ->* . element?)]
[xml-count-bytes (parameter/c boolean?)]
[read-comments (parameter/c boolean?)]
[collapse-whitespace (parameter/c boolean?)]
[exn:xml? (any/c . -> . boolean?)])
(define-struct (start-tag source) (name attrs))
(define-struct (end-tag source) (name))
(define xml-count-bytes (make-parameter #f))
(define read-comments (make-parameter #f))
(define collapse-whitespace (make-parameter #f))
(define read-xml
(lambda ([in (current-input-port)])
(let*-values ([(in pos) (positionify in)]
[(misc0 start) (read-misc in pos)])
(make-document (make-prolog misc0 #f empty)
(read-xml-element-helper pos in start)
(let-values ([(misc1 end-of-file) (read-misc in pos)])
(unless (EOF? end-of-file)
(parse-error (list
(make-srcloc
(object-name in)
#f
#f
(location-offset (source-start end-of-file))
(- (location-offset (source-stop end-of-file))
(location-offset (source-start end-of-file)))))
"extra stuff at end of document ~e"
end-of-file))
misc1)))))
(define (read-xml/document [in (current-input-port)])
(let*-values ([(in pos) (positionify in)]
[(misc0 start) (read-misc in pos)])
(make-document (make-prolog misc0 #f empty)
(read-xml-element-helper pos in start)
empty)))
(define read-xml/element
(lambda ([in (current-input-port)])
(let-values ([(in pos) (positionify in)])
(skip-space in)
(read-xml-element-helper pos in (lex in pos)))))
read - xml - element - helper :
(define (read-xml-element-helper pos in start)
(cond
[(start-tag? start) (read-element start in pos)]
[(element? start) start]
[else
(parse-error
(list
(make-srcloc
(object-name in)
#f
#f
(if (source? start)
(location-offset (source-start start))
#f)
(if (source? start)
(- (location-offset (source-stop start))
(location-offset (source-start start)))
#f)))
"expected root element - received ~e"
(cond
[(pcdata? start) (pcdata-string start)]
[(EOF? start) eof]
[else start]))]))
read - misc : Input - port ( - > Location ) - > ( ) Token
(define (read-misc in pos)
(let read-more ()
(let ([x (lex in pos)])
(cond
[(p-i? x)
(let-values ([(lst next) (read-more)])
(values (cons x lst) next))]
[(comment? x)
(let-values ([(lst next) (read-more)])
(if (read-comments)
(values (cons x lst) next)
(values lst next)))]
[(and (pcdata? x) (andmap char-whitespace? (string->list (pcdata-string x))))
(read-more)]
[else (values null x)]))))
(define (read-element start in pos)
(let ([name (start-tag-name start)]
[a (source-start start)]
[b (source-stop start)])
(let read-content ([k (lambda (body end-loc)
(make-element
a end-loc name (start-tag-attrs start)
body))])
(let ([x (lex in pos)])
(cond
[(EOF? x)
(parse-error (list
(make-srcloc
(object-name in)
#f
#f
(location-offset (source-start start))
(- (location-offset (source-stop start))
(location-offset (source-start start)))))
"unclosed `~a' tag at [~a ~a]"
name
(format-source a)
(format-source b))]
[(start-tag? x)
(let ([next-el (read-element x in pos)])
(read-content (lambda (body end-loc)
(k (cons next-el body)
end-loc))))]
[(end-tag? x)
(let ([end-loc (source-stop x)])
(unless (eq? name (end-tag-name x))
(parse-error
(list
(make-srcloc (object-name in)
#f
#f
(location-offset a)
(- (location-offset b) (location-offset a)))
(make-srcloc (object-name in)
#f
#f
(location-offset (source-start x))
(- (location-offset end-loc) (location-offset (source-start x)))))
"start tag `~a' at [~a ~a] doesn't match end tag `~a' at [~a ~a]"
name
(format-source a)
(format-source b)
(end-tag-name x)
(format-source (source-start x))
(format-source end-loc)))
(k null end-loc))]
[(entity? x) (read-content (lambda (body end-loc)
(k (cons (expand-entity x) body)
end-loc)))]
[(comment? x) (if (read-comments)
(read-content (lambda (body end-loc) (k (cons x body) end-loc)))
(read-content k))]
[else (read-content (lambda (body end-loc) (k (cons x body) end-loc)))])))))
expand - entity : Entity - > ( U Entity Pcdata )
(define (expand-entity x)
(let ([expanded (default-entity-table (entity-text x))])
(if expanded
(make-pcdata (source-start x) (source-stop x) expanded)
x)))
(define (default-entity-table name)
(case name
[(amp) "&"]
[(lt) "<"]
[(gt) ">"]
[(quot) "\""]
[(apos) "'"]
[else #f]))
(define-struct (EOF source) ())
(define (lex in pos)
(let ([c (peek-char-or-special in)])
(cond
[(eof-object? c) (read-char in) (EOF (pos) (pos))]
[(eq? c #\&) (lex-entity in pos)]
[(eq? c #\<) (lex-tag-cdata-pi-comment in pos)]
[(not (char? c)) (read-char-or-special in)]
[else (lex-pcdata in pos)])))
pre : the first char is a # \ &
(define (lex-entity in pos)
(let ([start (pos)])
(read-char in)
(let ([data (case (peek-char in)
[(#\#)
(read-char in)
(let ([n (case (peek-char in)
[(#\x) (read-char in)
in pos ) 16 ) ]
(unless (number? n)
(lex-error in pos "malformed numeric entity"))
(unless (valid-char? n)
(lex-error in pos "not a well-formed numeric entity (does not match the production for Char, see XML 4.1)"))
n)]
[else
(begin0
(lex-name in pos)
(lex-error in pos "expected ; at the end of an entity")))])])
(make-entity start (pos) data))))
pre : the first char is a # \ <
(define (lex-tag-cdata-pi-comment in pos)
(let ([start (pos)])
(read-char in)
(case (non-eof peek-char-or-special in pos)
[(#\!)
(read-char in)
(case (non-eof peek-char in pos)
[(#\-) (read-char in)
(unless (eq? (read-char-or-special in) #\-)
(lex-error in pos "expected second - after <!-"))
(let ([data (lex-comment-contents in pos)])
(unless (eq? (read-char in) #\>)
(lex-error in pos "expected > to end comment (\"--\" can't appear in comments)"))
(make-comment data))]
[(#\[) (read-char in)
(unless (string=? (read-string 6 in) "CDATA[")
(lex-error in pos "expected CDATA following <["))
(let ([data (lex-cdata-contents in pos)])
(make-cdata start (pos) (format "<![CDATA[~a]]>" data)))]
[else (skip-dtd in pos)
(skip-space in)
(unless (eq? (peek-char-or-special in) #\<)
(lex-error in pos "expected p-i, comment, or element after doctype"))
(lex-tag-cdata-pi-comment in pos)])]
[(#\?) (read-char in)
(let ([name (lex-name in pos)])
(skip-space in)
(let ([data (lex-pi-data in pos)])
(make-p-i start (pos) name data)))]
[(#\/) (read-char in)
(let ([name (lex-name in pos)])
(skip-space in)
(unless (eq? (read-char-or-special in) #\>)
(lex-error in pos "expected > to close ~a's end tag" name))
(make-end-tag start (pos) name))]
(let ([name (lex-name in pos)]
[attrs (lex-attributes in pos)])
(skip-space in)
(case (read-char-or-special in)
[(#\/)
(unless (eq? (read-char in) #\>)
(lex-error in pos "expected > to close empty element ~a" name))
(make-element start (pos) name attrs null)]
[(#\>) (make-start-tag start (pos) name attrs)]
[else (lex-error in pos "expected / or > to close tag `~a'" name)]))])))
lex - attributes : Input - port ( - > Location ) - > ( listof Attribute )
(define (lex-attributes in pos)
(let* ([result_list
(let loop ()
(skip-space in)
(cond [(name-start? (peek-char-or-special in))
(cons (lex-attribute in pos) (loop))]
[else null]))]
[check_dup (check-duplicates result_list (lambda (a b) (eq? (attribute-name a) (attribute-name b))))])
(if check_dup
(lex-error in pos "duplicated attribute name ~a" (attribute-name check_dup))
result_list)))
(define (lex-attribute in pos)
(let ([start (pos)]
[name (lex-name in pos)])
(skip-space in)
(unless (eq? (read-char in) #\=)
(lex-error in pos "expected = in attribute ~a" name))
(skip-space in)
(let* ([delimiter (read-char-or-special in)]
[value (case delimiter
[(#\' #\")
(list->string
(let read-more ()
(let ([c (non-eof peek-char-or-special in pos)])
(cond
[(eq? c 'special)
(lex-error in pos "attribute values cannot contain non-text values")]
[(eq? c delimiter) (read-char in) null]
[(eq? c #\&)
(let ([entity (expand-entity (lex-entity in pos))])
(if (pcdata? entity)
(append (string->list (pcdata-string entity)) (read-more))
(read-more)))]
[else (read-char in) (cons c (read-more))]))))]
[else (if (char? delimiter)
(lex-error in pos "attribute values must be in ''s or in \"\"s")
delimiter)])])
(make-attribute start (pos) name value))))
deviation - should sometimes insist on at least one space
(define (skip-space in)
(let loop ()
(let ([c (peek-char-or-special in)])
(when (and (char? c)
(char-whitespace? c))
(read-char in)
(loop)))))
deviation - disallow ] ] > " for compatability " with SGML , sec 2.4 XML spec
(define (lex-pcdata in pos)
(let ([start (pos)]
[data (let loop ()
(let ([next (peek-char-or-special in)])
(cond
[(or (eof-object? next)
(not (char? next))
(eq? next #\&)
(eq? next #\<))
null]
[(and (char-whitespace? next) (collapse-whitespace))
(skip-space in)
(cons #\space (loop))]
[else (cons (read-char in) (loop))])))])
(make-pcdata start
(pos)
(list->string data))))
(define (lex-name in pos)
(let ([c (non-eof read-char-or-special in pos)])
(unless (name-start? c)
(lex-error in pos "expected name, received ~e" c))
(string->symbol
(list->string
(cons c (let lex-rest ()
(let ([c (non-eof peek-char-or-special in pos)])
(cond
[(eq? c 'special)
(lex-error in pos "names cannot contain non-text values")]
[(name-char? c)
(cons (read-char in) (lex-rest))]
[else null]))))))))
;; skip-dtd : Input-port (-> Location) -> Void
;; Discards a DTD up to and including its closing >, skipping over
;; quoted strings, nested <...> declarations, comments (<!-- -->) and
;; processing instructions (<? ?>) so their > characters don't
;; terminate the scan early.
(define (skip-dtd in pos)
  (let skip ()
    (case (non-eof read-char in pos)
      [(#\') (read-until #\' in pos) (skip)]
      [(#\") (read-until #\" in pos) (skip)]
      [(#\<)
       (case (non-eof read-char in pos)
         [(#\!) (case (non-eof read-char in pos)
                  [(#\-) (read-char in) (lex-comment-contents in pos) (read-char in) (skip)]
                  ;; nested declaration: one (skip) for it, one to resume ours
                  [else (skip) (skip)])]
         [(#\?) (lex-pi-data in pos) (skip)]
         [else (skip) (skip)])]
      [(#\>) (void)]
      [else (skip)])))
;; name-start? : Char -> Bool
;; name-start? : Any -> Boolean
;; True iff ch is a character permitted to begin an XML name:
;; alphabetic, underscore, or colon.  Non-characters yield #f.
(define (name-start? ch)
  (cond
    [(not (char? ch)) #f]
    [(char-alphabetic? ch) #t]
    [else (or (eq? ch #\_)
              (eq? ch #\:))]))
;; name-char? : Char -> Bool
;; name-char? : Any -> Boolean
;; True iff ch may appear after the first character of an XML name:
;; any name-start character, a digit, a period, or a dash.
(define (name-char? ch)
  (cond
    [(not (char? ch)) #f]
    [(name-start? ch) #t]
    [(char-numeric? ch) #t]
    [else (or (eq? ch #\.)
              (eq? ch #\-))]))
;; read-until : Char Input-port (-> Location) -> String
;; read-until : Char Input-port (-> Location) -> String
;; Consumes input up to and including the first occurrence of char,
;; returning everything read before it.  Raises a lex error on EOF.
(define (read-until char in pos)
  (let collect ([acc null])
    (define c (non-eof read-char in pos))
    (if (eq? c char)
        (list->string (reverse acc))
        (collect (cons c acc)))))
;; non-eof : (Input-port -> (U Char Eof)) Input-port (-> Location) -> Char
;; non-eof : (Input-port -> (U X Eof)) Input-port (-> Location) -> X
;; Applies the reader f to the port, raising a lex error instead of
;; ever returning an eof object.
(define (non-eof f in pos)
  (define v (f in))
  (if (eof-object? v)
      (lex-error in pos "unexpected eof")
      v))
;; uses Knuth-Morris-Pratt from
;; Introduction to Algorithms, Cormen, Leiserson, and Rivest, pages 869-876
;; Modified to look more like the version on Wikipedia after discovering a bug when parsing CDATA
;; The use of the hasheq table and the purely numeric code trades hash efficiency for stack/ec capture efficiency
;; A lazily-read, memoized view of an input port as an indexable
;; string: ht caches index -> char so the KMP search can re-inspect
;; characters without re-reading the port.
(struct hash-string (port pos ht))
;; hash-string-ref : hash-string Nat -> Char
;; Character at index k, reading (and caching) from the port on first
;; access; raises a lex error if the port hits EOF.
(define (hash-string-ref hs k)
  (match-define (hash-string port pos ht) hs)
  (hash-ref! ht k (lambda () (non-eof read-char port pos))))
;; gen-read-until-string : String -> (Input-port (-> Location) -> String)
;; Builds a reader that consumes input up to and including the first
;; occurrence of W, returning the text that precedes it.  The KMP
;; failure table T is computed once per W; the returned closure does
;; the search against a memoized port view.
;; NOTE(review): assumes (string-length W) >= 2 — every use below
;; satisfies this, but (vector-set! T 1 0) would fail for shorter W.
(define (gen-read-until-string W)
  (define Wlen (string-length W))
  (define T (make-vector Wlen #f))
  (vector-set! T 0 -1)
  (vector-set! T 1 0)
  ;; standard KMP failure-function construction
  (let kmp-table ([pos 2] [cnd 0])
    (when (pos . < . Wlen)
      (cond
        [(char=? (string-ref W (sub1 pos)) (string-ref W cnd))
         (vector-set! T pos (add1 cnd))
         (kmp-table (add1 pos) (add1 cnd))]
        [(cnd . > . 0)
         (kmp-table pos (vector-ref T cnd))]
        [(zero? cnd)
         (vector-set! T pos 0)
         (kmp-table (add1 pos) 0)])))
  (lambda (S-as-port S-pos)
    (define S (hash-string S-as-port S-pos (make-hasheq)))
    ;; index into S at which W begins
    (define W-starts-at
      (let kmp-search ([m 0] [i 0])
        (if (char=? (string-ref W i) (hash-string-ref S (+ m i)))
          (let ([i (add1 i)])
            (if (= i Wlen)
              m
              (kmp-search m i)))
          (let* ([Ti (vector-ref T i)]
                 [m (+ m i (* -1 Ti))])
            (if (Ti . > . -1)
              (let ([i Ti])
                (kmp-search m i))
              (let ([i 0])
                (kmp-search m i)))))))
    ;; everything before the match; the match itself stays consumed
    (list->string
     (for/list ([i (in-range 0 W-starts-at)])
       (hash-string-ref S i)))))
;; Port readers that consume up to (and including) "--", "?>" and
;; "]]>" respectively, returning the preceding text.
(define lex-comment-contents (gen-read-until-string "--"))
(define lex-pi-data (gen-read-until-string "?>"))
(define lex-cdata-contents (gen-read-until-string "]]>"))
;; positionify : Input-port -> (values Input-port (-> Location))
;; Enables line counting on the port (unless xml-count-bytes is set)
;; and returns it together with a thunk producing its current Location.
(define (positionify in)
  (unless (xml-count-bytes)
    (port-count-lines! in))
  (values
   in
   (lambda ()
     (let-values ([(line column offset) (port-next-location in)])
       (make-location line column offset)))))
;; locs : (listof (list number number))
;; XML reader errors are a subtype of read failures.
(define-struct (exn:xml exn:fail:read) ())
;; lex-error : Input-port (-> Location) FormatString Any ... -> (escapes)
;; Raises exn:xml at the port's current location, with a one-element
;; srcloc list pointing at the offending offset.
(define (lex-error in pos str . rest)
  (let* ([the-pos (pos)]
         [offset (location-offset the-pos)])
    (raise
     (make-exn:xml
      (format "read-xml: lex-error: at position ~a: ~a"
              (format-source the-pos)
              (apply format str rest))
      (current-continuation-marks)
      (list
       (make-srcloc (object-name in) #f #f offset 1))))))
;; parse-error : (listof srcloc) (listof TST) * -> alpha
;; parse-error : (listof srcloc) FormatString Any ... -> (escapes)
;; Raises exn:xml carrying the supplied source locations.
(define (parse-error src fmt . args)
  (raise (make-exn:xml (string-append "read-xml: parse-error: "
                                      (apply format fmt args))
                       (current-continuation-marks)
                       src)))
;; format-source : (U Location Any) -> String
;; Renders a Location as "line.column/offset"; anything else is
;; rendered with ~a unchanged.
(define (format-source loc)
  (cond
    [(location? loc)
     (format "~a.~a/~a"
             (location-line loc)
             (location-char loc)
             (location-offset loc))]
    [else (format "~a" loc)]))
|
677f5c98057da7ecfa892683b72aff4c9e660e822abda5cdbf187a1b6d458b51 | bmeurer/ocamljit2 | syntax.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(*           Xavier Leroy, projet Cristal, INRIA Rocquencourt          *)
(* *)
(*  Copyright 1996 Institut National de Recherche en Informatique et   *)
(*  en Automatique.  All rights reserved.  This file is distributed    *)
(*  under the terms of the Q Public License version 1.0.               *)
(* *)
(***********************************************************************)
(* $Id$ *)
(* The shallow abstract syntax *)
(* A span in the source file, as start/end character offsets. *)
type location =
    Location of int * int
(* Regular expressions over characters, as written in lexer rules. *)
type regular_expression =
    Epsilon
  | Characters of char list
  | Sequence of regular_expression * regular_expression
  | Alternative of regular_expression * regular_expression
  | Repetition of regular_expression
(* A whole lexer definition: a header location plus, per entry point,
   the list of (pattern, action location) rules. *)
type lexer_definition =
  Lexdef of location * (string * (regular_expression * location) list) list
(* Representation of automata *)
(* A state either performs action number n, or shifts on input via a
   per-character move table. *)
type automata =
    Perform of int
  | Shift of automata_trans * automata_move array
(* Whether the state records position/action n before shifting;
   presumably used for longest-match backtracking — confirm in the
   generator that consumes this type. *)
and automata_trans =
    No_remember
  | Remember of int
(* A transition target: backtrack to the last remembered point, or go
   to state n. *)
and automata_move =
    Backtrack
  | Goto of int
| null | https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/testsuite/tests/tool-lexyacc/syntax.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
The shallow abstract syntax
Representation of automata | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
type location =
Location of int * int
type regular_expression =
Epsilon
| Characters of char list
| Sequence of regular_expression * regular_expression
| Alternative of regular_expression * regular_expression
| Repetition of regular_expression
type lexer_definition =
Lexdef of location * (string * (regular_expression * location) list) list
type automata =
Perform of int
| Shift of automata_trans * automata_move array
and automata_trans =
No_remember
| Remember of int
and automata_move =
Backtrack
| Goto of int
|
01b38baf55c865517dbef6a2d7513fcf7d2f79db419b64b82b255e375a057c40 | lspitzner/brittany | Decl.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Haskell.Brittany.Internal.Layouters.Decl where
import qualified Data.Data
import qualified Data.Foldable
import qualified Data.Maybe
import qualified Data.Semigroup as Semigroup
import qualified Data.Text as Text
import GHC (AnnKeywordId(..), GenLocated(L))
import GHC.Data.Bag (bagToList, emptyBag)
import qualified GHC.Data.FastString as FastString
import GHC.Hs
import qualified GHC.OldList as List
import GHC.Types.Basic
( Activation(..)
, InlinePragma(..)
, InlineSpec(..)
, LexicalFixity(..)
, RuleMatchInfo(..)
)
import GHC.Types.SrcLoc (Located, SrcSpan, getLoc, unLoc)
import Language.Haskell.Brittany.Internal.Config.Types
import Language.Haskell.Brittany.Internal.ExactPrintUtils
import Language.Haskell.Brittany.Internal.LayouterBasics
import Language.Haskell.Brittany.Internal.Layouters.DataDecl
import {-# SOURCE #-} Language.Haskell.Brittany.Internal.Layouters.Expr
import Language.Haskell.Brittany.Internal.Layouters.Pattern
import {-# SOURCE #-} Language.Haskell.Brittany.Internal.Layouters.Stmt
import Language.Haskell.Brittany.Internal.Layouters.Type
import Language.Haskell.Brittany.Internal.Prelude
import Language.Haskell.Brittany.Internal.PreludeUtils
import Language.Haskell.Brittany.Internal.Types
import qualified Language.Haskell.GHC.ExactPrint as ExactPrint
import Language.Haskell.GHC.ExactPrint.Types (mkAnnKey)
import qualified Language.Haskell.GHC.ExactPrint.Utils as ExactPrint
-- | Dispatch a top-level declaration to its specific layouter;
-- anything not handled explicitly is reproduced via exact-print.
layoutDecl :: ToBriDoc HsDecl
layoutDecl d@(L loc decl) = case decl of
  SigD _ sig -> withTransformedAnns d $ layoutSig (L loc sig)
  ValD _ bind -> withTransformedAnns d $ layoutBind (L loc bind) >>= \case
    Left ns -> docLines $ return <$> ns
    Right n -> return n
  TyClD _ tycl -> withTransformedAnns d $ layoutTyCl (L loc tycl)
  InstD _ (TyFamInstD _ tfid) ->
    withTransformedAnns d $ layoutTyFamInstDecl False d tfid
  InstD _ (ClsInstD _ inst) ->
    withTransformedAnns d $ layoutClsInst (L loc inst)
  _ -> briDocByExactNoComment d
--------------------------------------------------------------------------------
-- Sig
--------------------------------------------------------------------------------
-- | Layout a signature declaration: plain type signatures, INLINE
-- pragmas, class method signatures and pattern synonym signatures.
-- Anything else falls back to exact-printing.
layoutSig :: ToBriDoc Sig
layoutSig lsig@(L _loc sig) = case sig of
  TypeSig _ names (HsWC _ (HsIB _ typ)) -> layoutNamesAndType Nothing names typ
  InlineSig _ name (InlinePragma _ spec _arity phaseAct conlike) ->
    docWrapNode lsig $ do
      nameStr <- lrdrNameToTextAnn name
      specStr <- specStringCompat lsig spec
      let
        phaseStr = case phaseAct of
          NeverActive -> "" -- not [] - for NOINLINE NeverActive is
          -- in fact the default
          AlwaysActive -> ""
          ActiveBefore _ i -> "[~" ++ show i ++ "] "
          ActiveAfter _ i -> "[" ++ show i ++ "] "
          FinalActive -> error "brittany internal error: FinalActive"
      let
        conlikeStr = case conlike of
          FunLike -> ""
          ConLike -> "CONLIKE "
      docLit
        $ Text.pack ("{-# " ++ specStr ++ conlikeStr ++ phaseStr)
        <> nameStr
        <> Text.pack " #-}"
  ClassOpSig _ False names (HsIB _ typ) -> layoutNamesAndType Nothing names typ
  PatSynSig _ names (HsIB _ typ) ->
    layoutNamesAndType (Just "pattern") names typ
  _ -> briDocByExactNoComment lsig -- TODO
  where
  -- shared worker: "[keyword] name1, name2 :: type", hanging or not
  layoutNamesAndType mKeyword names typ = docWrapNode lsig $ do
    let
      keyDoc = case mKeyword of
        Just key -> [appSep . docLit $ Text.pack key]
        Nothing -> []
    nameStrs <- names `forM` lrdrNameToTextAnn
    let nameStr = Text.intercalate (Text.pack ", ") $ nameStrs
    typeDoc <- docSharedWrapper layoutType typ
    hasComments <- hasAnyCommentsBelow lsig
    shouldBeHanging <-
      mAsk <&> _conf_layout .> _lconfig_hangingTypeSignature .> confUnpack
    if shouldBeHanging
      then
        docSeq
        $ [ appSep
            $ docWrapNodeRest lsig
            $ docSeq
            $ keyDoc
            <> [docLit nameStr]
          , docSetBaseY $ docLines
            [ docCols
                ColTyOpPrefix
                [ docLit $ Text.pack ":: "
                , docAddBaseY (BrIndentSpecial 3) $ typeDoc
                ]
            ]
          ]
      else layoutLhsAndType
        hasComments
        (appSep . docWrapNodeRest lsig . docSeq $ keyDoc <> [docLit nameStr])
        "::"
        typeDoc
-- | Render an 'InlineSpec' to its pragma keyword (with a trailing
-- space); 'NoUserInline' is reported as an unknown node and rendered
-- as the empty string.
specStringCompat
  :: MonadMultiWriter [BrittanyError] m => LSig GhcPs -> InlineSpec -> m String
specStringCompat ast = \case
  NoUserInline -> mTell [ErrorUnknownNode "NoUserInline" ast] $> ""
  Inline -> pure "INLINE "
  Inlinable -> pure "INLINABLE "
  NoInline -> pure "NOINLINE "
-- | Layout a single guard statement: a boolean guard or a pattern
-- guard (@pat <- expr@).  Anything else is reported via
-- 'unknownNodeError'.
layoutGuardLStmt :: ToBriDoc' (Stmt GhcPs (LHsExpr GhcPs))
layoutGuardLStmt lgstmt@(L _ stmtLR) = docWrapNode lgstmt $ case stmtLR of
  BodyStmt _ body _ _ -> layoutExpr body
  BindStmt _ lPat expr -> do
    patDoc <- docSharedWrapper layoutPat lPat
    expDoc <- docSharedWrapper layoutExpr expr
    docCols
      ColBindStmt
      [ appSep $ colsWrapPat =<< patDoc
      , docSeq [appSep $ docLit $ Text.pack "<-", expDoc]
      ]
  -- catch-all so the case is total; mirrors layoutBind's fallback
  _ -> unknownNodeError "" lgstmt -- TODO
--------------------------------------------------------------------------------
-- HsBind
--------------------------------------------------------------------------------
-- | Layout a binding.  Function bindings return a list of laid-out
-- equations (Left); pattern and pattern-synonym bindings return a
-- single doc (Right).
layoutBind
  :: ToBriDocC (HsBindLR GhcPs GhcPs) (Either [BriDocNumbered] BriDocNumbered)
layoutBind lbind@(L _ bind) = case bind of
  FunBind _ fId (MG _ lmatches@(L _ matches) _) [] -> do
    idStr <- lrdrNameToTextAnn fId
    binderDoc <- docLit $ Text.pack "="
    funcPatDocs <-
      docWrapNode lbind
      $ docWrapNode lmatches
      $ layoutPatternBind (Just idStr) binderDoc
      `mapM` matches
    return $ Left $ funcPatDocs
  PatBind _ pat (GRHSs _ grhss whereBinds) ([], []) -> do
    patDocs <- colsWrapPat =<< layoutPat pat
    clauseDocs <- layoutGrhs `mapM` grhss
    mWhereDocs <- layoutLocalBinds whereBinds
    -- pair the where docs with this bind's AnnKey, as layoutPatternBind
    -- does for matches
    let mWhereArg = mWhereDocs <&> (,) (mkAnnKey lbind) -- TODO: is this the right AnnKey?
    binderDoc <- docLit $ Text.pack "="
    hasComments <- hasAnyCommentsBelow lbind
    fmap Right $ docWrapNode lbind $ layoutPatternBindFinal
      Nothing
      binderDoc
      (Just patDocs)
      clauseDocs
      mWhereArg
      hasComments
  PatSynBind _ (PSB _ patID lpat rpat dir) -> do
    fmap Right $ docWrapNode lbind $ layoutPatSynBind patID lpat dir rpat
  _ -> Right <$> unknownNodeError "" lbind
-- | Layout an implicit-parameter binding, e.g. @?name = expr@.
layoutIPBind :: ToBriDoc IPBind
layoutIPBind lipbind@(L _ bind) = case bind of
  IPBind _ (Right _) _ -> error "brittany internal error: IPBind Right"
  IPBind _ (Left (L _ (HsIPName name))) expr -> do
    ipName <- docLit $ Text.pack $ '?' : FastString.unpackFS name
    binderDoc <- docLit $ Text.pack "="
    exprDoc <- layoutExpr expr
    hasComments <- hasAnyCommentsBelow lipbind
    layoutPatternBindFinal
      Nothing
      binderDoc
      (Just ipName)
      [([], exprDoc, expr)]
      Nothing
      hasComments
-- | A bind or a signature from the same local-binds group, kept in one
-- type so both can be sorted into original source order together.
data BagBindOrSig = BagBind (LHsBindLR GhcPs GhcPs)
                  | BagSig (LSig GhcPs)
-- | The source span of either alternative, used as the sort key.
bindOrSigtoSrcSpan :: BagBindOrSig -> SrcSpan
bindOrSigtoSrcSpan x = case x of
  BagBind (L l _) -> l
  BagSig (L l _) -> l
-- | Layout a local binding group (@where@/@let@), interleaving binds
-- and signatures in their original source order.  Returns Nothing for
-- empty binding groups.
layoutLocalBinds
  :: ToBriDocC (HsLocalBindsLR GhcPs GhcPs) (Maybe [BriDocNumbered])
layoutLocalBinds lbinds@(L _ binds) = case binds of
  -- HsValBinds (ValBindsIn lhsBindsLR []) ->
  -- Just . (>>= either id return) . Data.Foldable.toList <$> mapBagM layoutBind lhsBindsLR -- TODO: fix ordering
  -- x@(HsValBinds (ValBindsIn{})) ->
  --   Just . (:[]) <$> unknownNodeError "HsValBinds (ValBindsIn _ (_:_))" x
  HsValBinds _ (ValBinds _ bindlrs sigs) -> do
    let
      unordered =
        [ BagBind b | b <- Data.Foldable.toList bindlrs ]
        ++ [ BagSig s | s <- sigs ]
      -- restore source order lost by the bind/sig split
      ordered = List.sortOn (ExactPrint.rs . bindOrSigtoSrcSpan) unordered
    docs <- docWrapNode lbinds $ join <$> ordered `forM` \case
      BagBind b -> either id return <$> layoutBind b
      BagSig s -> return <$> layoutSig s
    return $ Just $ docs
  -- x@(HsValBinds (ValBindsOut _binds _lsigs)) ->
  HsValBinds _ (XValBindsLR{}) -> error "brittany internal error: XValBindsLR"
  HsIPBinds _ (IPBinds _ bb) -> Just <$> mapM layoutIPBind bb
  EmptyLocalBinds{} -> return $ Nothing
-- TODO: we don't need the `LHsExpr GhcPs` anymore, now that there is
-- parSpacing stuff.
-- | Layout one guarded right-hand side: returns the laid-out guards,
-- the laid-out body, and the raw body expression (kept for later
-- spacing decisions).
layoutGrhs
  :: LGRHS GhcPs (LHsExpr GhcPs)
  -> ToBriDocM ([BriDocNumbered], BriDocNumbered, LHsExpr GhcPs)
layoutGrhs lgrhs@(L _ (GRHS _ guards body)) = do
  guardDocs <- docWrapNode lgrhs $ layoutStmt `mapM` guards
  bodyDoc <- layoutExpr body
  return (guardDocs, bodyDoc, body)
-- | Layout one equation of a function binding: the pattern part
-- (including the prefix or infix function name, if any), all guarded
-- right-hand sides, and the where block.
layoutPatternBind
  :: Maybe Text
  -> BriDocNumbered
  -> LMatch GhcPs (LHsExpr GhcPs)
  -> ToBriDocM BriDocNumbered
layoutPatternBind funId binderDoc lmatch@(L _ match) = do
  let pats = m_pats match
  let (GRHSs _ grhss whereBinds) = m_grhss match
  patDocs <- pats `forM` \p -> fmap return $ colsWrapPat =<< layoutPat p
  let isInfix = isInfixMatch match
  mIdStr <- case match of
    Match _ (FunRhs matchId _ _) _ _ -> Just <$> lrdrNameToTextAnn matchId
    _ -> pure Nothing
  -- re-attach the ~/! prefix the parser stores in the match context
  let mIdStr' = fixPatternBindIdentifier match <$> mIdStr
  patDoc <- docWrapNodePrior lmatch $ case (mIdStr', patDocs) of
    (Just idStr, p1 : p2 : pr) | isInfix -> if null pr
      then docCols
        ColPatternsFuncInfix
        [ appSep $ docForceSingleline p1
        , appSep $ docLit $ idStr
        , docForceSingleline p2
        ]
      else docCols
        ColPatternsFuncInfix
        ([ docCols
             ColPatterns
             [ docParenL
             , appSep $ docForceSingleline p1
             , appSep $ docLit $ idStr
             , docForceSingleline p2
             , appSep $ docParenR
             ]
         ]
        ++ (spacifyDocs $ docForceSingleline <$> pr)
        )
    (Just idStr, []) -> docLit idStr
    (Just idStr, ps) ->
      docCols ColPatternsFuncPrefix
        $ appSep (docLit $ idStr)
        : (spacifyDocs $ docForceSingleline <$> ps)
    (Nothing, ps) ->
      docCols ColPatterns
        $ (List.intersperse docSeparator $ docForceSingleline <$> ps)
  clauseDocs <- docWrapNodeRest lmatch $ layoutGrhs `mapM` grhss
  mWhereDocs <- layoutLocalBinds whereBinds
  let mWhereArg = mWhereDocs <&> (,) (mkAnnKey lmatch)
  -- only align equations that actually have patterns
  let alignmentToken = if null pats then Nothing else funId
  hasComments <- hasAnyCommentsBelow lmatch
  layoutPatternBindFinal
    alignmentToken
    binderDoc
    (Just patDoc)
    clauseDocs
    mWhereArg
    hasComments
-- | Re-attach strictness\/laziness prefixes (@~@ \/ @!@) that the
-- parser records in the match context rather than in the identifier
-- text itself.
fixPatternBindIdentifier :: Match GhcPs (LHsExpr GhcPs) -> Text -> Text
fixPatternBindIdentifier match idStr = fromCtx (m_ctxt match)
  where
  fromCtx ctx = case ctx of
    FunRhs _ _ SrcLazy -> Text.cons '~' idStr
    FunRhs _ _ SrcStrict -> Text.cons '!' idStr
    FunRhs _ _ NoSrcStrict -> idStr
    StmtCtxt inner -> fromStmtCtx inner
    _ -> idStr
  -- I have really no idea if this path ever occurs, but better safe
  -- than risking another "drop bangpatterns" bug.
  fromStmtCtx ctx = case ctx of
    PatGuard c -> fromCtx c
    ParStmtCtxt c -> fromStmtCtx c
    TransStmtCtxt c -> fromStmtCtx c
    _ -> idStr
-- | Assemble the final layout for one binding equation from its
-- already-laid-out pieces, offering the alternative arrangements
-- (one-line, hanging body, separate where block, per-clause lines)
-- to the alternative-resolution machinery.
layoutPatternBindFinal
  :: Maybe Text
  -> BriDocNumbered
  -> Maybe BriDocNumbered
  -> [([BriDocNumbered], BriDocNumbered, LHsExpr GhcPs)]
  -> Maybe (ExactPrint.AnnKey, [BriDocNumbered])
  -- ^ AnnKey for the node that contains the AnnWhere position annotation
  -> Bool
  -> ToBriDocM BriDocNumbered
layoutPatternBindFinal alignmentToken binderDoc mPatDoc clauseDocs mWhereDocs hasComments
  = do
    let
      patPartInline = case mPatDoc of
        Nothing -> []
        Just patDoc -> [appSep $ docForceSingleline $ return patDoc]
      patPartParWrap = case mPatDoc of
        Nothing -> id
        Just patDoc -> docPar (return patDoc)
    whereIndent <- do
      shouldSpecial <-
        mAsk <&> _conf_layout .> _lconfig_indentWhereSpecial .> confUnpack
      regularIndentAmount <-
        mAsk <&> _conf_layout .> _lconfig_indentAmount .> confUnpack
      pure $ if shouldSpecial
        then BrIndentSpecial (max 1 (regularIndentAmount `div` 2))
        else BrIndentRegular
    -- TODO: apart from this, there probably are more nodes below which could
    -- be shared between alternatives.
    wherePartMultiLine :: [ToBriDocM BriDocNumbered] <- case mWhereDocs of
      Nothing -> return $ []
      Just (annKeyWhere, [w]) -> pure . pure <$> docAlt
        [ docEnsureIndent BrIndentRegular
          $ docSeq
            [ docLit $ Text.pack "where"
            , docSeparator
            , docForceSingleline $ return w
            ]
        , docMoveToKWDP annKeyWhere AnnWhere False
          $ docEnsureIndent whereIndent
          $ docLines
            [ docLit $ Text.pack "where"
            , docEnsureIndent whereIndent
              $ docSetIndentLevel
              $ docNonBottomSpacing
              $ return w
            ]
        ]
      Just (annKeyWhere, ws) ->
        fmap (pure . pure)
          $ docMoveToKWDP annKeyWhere AnnWhere False
          $ docEnsureIndent whereIndent
          $ docLines
            [ docLit $ Text.pack "where"
            , docEnsureIndent whereIndent
            $ docSetIndentLevel
            $ docNonBottomSpacing
            $ docLines
            $ return
            <$> ws
            ]
    let
      singleLineGuardsDoc guards = appSep $ case guards of
        [] -> docEmpty
        [g] -> docSeq
          [appSep $ docLit $ Text.pack "|", docForceSingleline $ return g]
        gs ->
          docSeq
            $ [appSep $ docLit $ Text.pack "|"]
            ++ (List.intersperse
                 docCommaSep
                 (docForceSingleline . return <$> gs)
               )
      wherePart = case mWhereDocs of
        Nothing -> Just docEmpty
        Just (_, [w]) -> Just $ docSeq
          [ docSeparator
          , appSep $ docLit $ Text.pack "where"
          , docSetIndentLevel $ docForceSingleline $ return w
          ]
        _ -> Nothing
    indentPolicy <- mAsk <&> _conf_layout .> _lconfig_indentPolicy .> confUnpack
    runFilteredAlternative $ do
      case clauseDocs of
        [(guards, body, _bodyRaw)] -> do
          let guardPart = singleLineGuardsDoc guards
          forM_ wherePart $ \wherePart' ->
            -- one-line solution
            addAlternativeCond (not hasComments) $ docCols
              (ColBindingLine alignmentToken)
              [ docSeq (patPartInline ++ [guardPart])
              , docSeq
                [ appSep $ return binderDoc
                , docForceSingleline $ return body
                , wherePart'
                ]
              ]
          -- one-line solution + where in next line(s)
          addAlternativeCond (Data.Maybe.isJust mWhereDocs)
            $ docLines
            $ [ docCols
                  (ColBindingLine alignmentToken)
                  [ docSeq (patPartInline ++ [guardPart])
                  , docSeq
                    [ appSep $ return binderDoc
                    , docForceParSpacing $ docAddBaseY BrIndentRegular $ return
                      body
                    ]
                  ]
              ]
            ++ wherePartMultiLine
          -- two-line solution + where in next line(s)
          addAlternative
            $ docLines
            $ [ docForceSingleline
                $ docSeq (patPartInline ++ [guardPart, return binderDoc])
              , docEnsureIndent BrIndentRegular $ docForceSingleline $ return
                body
              ]
            ++ wherePartMultiLine
          -- pattern and exactly one clause in single line, body as par;
          -- where in following lines
          addAlternative
            $ docLines
            $ [ docCols
                  (ColBindingLine alignmentToken)
                  [ docSeq (patPartInline ++ [guardPart])
                  , docSeq
                    [ appSep $ return binderDoc
                    , docForceParSpacing $ docAddBaseY BrIndentRegular $ return
                      body
                    ]
                  ]
              ]
            -- , lineMod $ docAlt
            -- [ docSetBaseY $ return body
            -- , docAddBaseY BrIndentRegular $ return body
            -- ]
            ++ wherePartMultiLine
          -- pattern and exactly one clause in single line, body in new line.
          addAlternative
            $ docLines
            $ [ docSeq (patPartInline ++ [guardPart, return binderDoc])
              , docNonBottomSpacing
              $ docEnsureIndent BrIndentRegular
              $ docAddBaseY BrIndentRegular
              $ return body
              ]
            ++ wherePartMultiLine
        _ -> return () -- no alternatives exclusively when `length clauseDocs /= 1`
      case mPatDoc of
        Nothing -> return ()
        Just patDoc ->
          -- multiple clauses added in-paragraph, each in a single line
          -- example: foo | bar = baz
          --              | lll = asd
          addAlternativeCond (indentPolicy == IndentPolicyFree)
            $ docLines
            $ [ docSeq
                [ appSep $ docForceSingleline $ return patDoc
                , docSetBaseY
                  $ docLines
                  $ clauseDocs
                  <&> \(guardDocs, bodyDoc, _) -> do
                    let guardPart = singleLineGuardsDoc guardDocs
                    -- the docForceSingleline might seems superflous, but it
                    -- helps the alternative resolving impl.
                    docForceSingleline $ docCols
                      ColGuardedBody
                      [ guardPart
                      , docSeq
                        [ appSep $ return binderDoc
                        , docForceSingleline $ return bodyDoc
                        -- i am not sure if there is a benefit to using
                        -- docForceParSpacing additionally here:
                        , docAddBaseY BrIndentRegular $ return bodyDoc
                        ]
                      ]
                ]
              ]
            ++ wherePartMultiLine
      -- multiple clauses, each in a separate, single line
      addAlternative
        $ docLines
        $ [ docAddBaseY BrIndentRegular
            $ patPartParWrap
            $ docLines
            $ map docSetBaseY
            $ clauseDocs
            <&> \(guardDocs, bodyDoc, _) -> do
              let guardPart = singleLineGuardsDoc guardDocs
              -- the docForceSingleline might seems superflous, but it
              -- helps the alternative resolving impl.
              docForceSingleline $ docCols
                ColGuardedBody
                [ guardPart
                , docSeq
                  [ appSep $ return binderDoc
                  , docForceSingleline $ return bodyDoc
                  -- i am not sure if there is a benefit to using
                  -- docForceParSpacing additionally here:
                  , docAddBaseY BrIndentRegular $ return bodyDoc
                  ]
                ]
          ]
        ++ wherePartMultiLine
      -- multiple clauses, each with the guard(s) in a single line, body
      -- as a paragraph
      addAlternative
        $ docLines
        $ [ docAddBaseY BrIndentRegular
            $ patPartParWrap
            $ docLines
            $ map docSetBaseY
            $ clauseDocs
            <&> \(guardDocs, bodyDoc, _) ->
              docSeq
                $ (case guardDocs of
                    [] -> []
                    [g] ->
                      [ docForceSingleline $ docSeq
                          [appSep $ docLit $ Text.pack "|", return g]
                      ]
                    gs ->
                      [ docForceSingleline
                        $ docSeq
                        $ [appSep $ docLit $ Text.pack "|"]
                        ++ List.intersperse docCommaSep (return <$> gs)
                      ]
                  )
                ++ [ docSeparator
                   , docCols
                     ColOpPrefix
                     [ appSep $ return binderDoc
                     , docAddBaseY BrIndentRegular
                       $ docForceParSpacing
                       $ return bodyDoc
                     ]
                   ]
          ]
        ++ wherePartMultiLine
      -- multiple clauses, each with the guard(s) in a single line, body
      -- in a new line as a paragraph
      addAlternative
        $ docLines
        $ [ docAddBaseY BrIndentRegular
            $ patPartParWrap
            $ docLines
            $ map docSetBaseY
            $ clauseDocs
            >>= \(guardDocs, bodyDoc, _) ->
              (case guardDocs of
                [] -> []
                [g] ->
                  [ docForceSingleline
                      $ docSeq [appSep $ docLit $ Text.pack "|", return g]
                  ]
                gs ->
                  [ docForceSingleline
                    $ docSeq
                    $ [appSep $ docLit $ Text.pack "|"]
                    ++ List.intersperse docCommaSep (return <$> gs)
                  ]
              )
              ++ [ docCols
                   ColOpPrefix
                   [ appSep $ return binderDoc
                   , docAddBaseY BrIndentRegular
                     $ docForceParSpacing
                     $ return bodyDoc
                   ]
                 ]
          ]
        ++ wherePartMultiLine
      -- conservative approach: everything starts on the left.
      addAlternative
        $ docLines
        $ [ docAddBaseY BrIndentRegular
            $ patPartParWrap
            $ docLines
            $ map docSetBaseY
            $ clauseDocs
            >>= \(guardDocs, bodyDoc, _) ->
              (case guardDocs of
                [] -> []
                [g] -> [docSeq [appSep $ docLit $ Text.pack "|", return g]]
                (g1 : gr) ->
                  (docSeq [appSep $ docLit $ Text.pack "|", return g1]
                  : (gr <&> \g ->
                      docSeq [appSep $ docLit $ Text.pack ",", return g]
                    )
                  )
              )
              ++ [ docCols
                   ColOpPrefix
                   [ appSep $ return binderDoc
                   , docAddBaseY BrIndentRegular $ return bodyDoc
                   ]
                 ]
          ]
        ++ wherePartMultiLine
-- | Layout a pattern synonym binding.  Chooses between keeping the
-- body on the head line (with an optional trailing where block) and
-- placing the body on its own indented line.
layoutPatSynBind
  :: Located (IdP GhcPs)
  -> HsPatSynDetails (Located (IdP GhcPs))
  -> HsPatSynDir GhcPs
  -> LPat GhcPs
  -> ToBriDocM BriDocNumbered
layoutPatSynBind name patSynDetails patDir rpat = do
  let
    patDoc = docLit $ Text.pack "pattern"
    -- implicit bidirectional synonyms use "=", the others "<-"
    binderDoc = case patDir of
      ImplicitBidirectional -> docLit $ Text.pack "="
      _ -> docLit $ Text.pack "<-"
    body = colsWrapPat =<< layoutPat rpat
    whereDoc = docLit $ Text.pack "where"
  mWhereDocs <- layoutPatSynWhere patDir
  headDoc <-
    fmap pure
    $ docSeq
    $ [ patDoc
      , docSeparator
      , layoutLPatSyn name patSynDetails
      , docSeparator
      , binderDoc
      ]
  runFilteredAlternative $ do
    addAlternative
      $
      -- pattern .. where
      --   ..
      --   ..
        docAddBaseY BrIndentRegular
      $ docSeq
        ([headDoc, docSeparator, body] ++ case mWhereDocs of
          Just ds -> [docSeparator, docPar whereDoc (docLines ds)]
          Nothing -> []
        )
    addAlternative
      $
      -- pattern .. =
      --   ..
      -- pattern .. <-
      --   .. where
      --   ..
      --   ..
        docAddBaseY BrIndentRegular
      $ docPar
          headDoc
          (case mWhereDocs of
            Nothing -> body
            Just ds -> docLines ([docSeq [body, docSeparator, whereDoc]] ++ ds)
          )
-- | Helper method for the left hand side of a pattern synonym:
-- prefix (@P a b@), infix (@a `P` b@ \/ @a :+: b@) or record
-- (@P { x, y }@) form.
layoutLPatSyn
  :: Located (IdP GhcPs)
  -> HsPatSynDetails (Located (IdP GhcPs))
  -> ToBriDocM BriDocNumbered
layoutLPatSyn name (PrefixCon vars) = do
  docName <- lrdrNameToTextAnn name
  names <- mapM lrdrNameToTextAnn vars
  docSeq . fmap appSep $ docLit docName : (docLit <$> names)
layoutLPatSyn name (InfixCon left right) = do
  leftDoc <- lrdrNameToTextAnn left
  docName <- lrdrNameToTextAnn name
  rightDoc <- lrdrNameToTextAnn right
  docSeq . fmap (appSep . docLit) $ [leftDoc, docName, rightDoc]
layoutLPatSyn name (RecCon recArgs) = do
  docName <- lrdrNameToTextAnn name
  args <- mapM (lrdrNameToTextAnn . recordPatSynSelectorId) recArgs
  docSeq
    . fmap docLit
    $ [docName, Text.pack " { "]
    <> intersperse (Text.pack ", ") args
    <> [Text.pack " }"]
-- | Helper method to get the where clause from of explicitly bidirectional
-- pattern synonyms: lays out each binding of the @where@ group, or
-- Nothing for uni-/implicitly-directional synonyms.
layoutPatSynWhere
  :: HsPatSynDir GhcPs -> ToBriDocM (Maybe [ToBriDocM BriDocNumbered])
layoutPatSynWhere hs = case hs of
  ExplicitBidirectional (MG _ (L _ lbinds) _) -> do
    binderDoc <- docLit $ Text.pack "="
    Just
      <$> mapM (docSharedWrapper $ layoutPatternBind Nothing binderDoc) lbinds
  _ -> pure Nothing
--------------------------------------------------------------------------------
-- TyClDecl
--------------------------------------------------------------------------------
-- | Layout a type\/class declaration.  Type synonyms and data
-- declarations are handled specially; everything else falls back to
-- exact-printing.
layoutTyCl :: ToBriDoc TyClDecl
layoutTyCl ltycl@(L _loc tycl) = case tycl of
  SynDecl _ name vars fixity typ -> do
    let
      isInfix = case fixity of
        Prefix -> False
        Infix -> True
    -- hasTrailingParen <- hasAnnKeywordComment ltycl AnnCloseP
    -- let parenWrapper = if hasTrailingParen
    --       then appSep . docWrapNodeRest ltycl
    --       else id
    let wrapNodeRest = docWrapNodeRest ltycl
    docWrapNodePrior ltycl
      $ layoutSynDecl isInfix wrapNodeRest name (hsq_explicit vars) typ
  DataDecl _ext name tyVars _ dataDefn ->
    layoutDataDecl ltycl name tyVars dataDefn
  _ -> briDocByExactNoComment ltycl
-- | Layout a type synonym declaration, @type Name vars = typ@, with
-- support for infix type names (@type a :+: b = ...@).
layoutSynDecl
  :: Bool
  -> (ToBriDocM BriDocNumbered -> ToBriDocM BriDocNumbered)
  -> Located (IdP GhcPs)
  -> [LHsTyVarBndr () GhcPs]
  -> LHsType GhcPs
  -> ToBriDocM BriDocNumbered
layoutSynDecl isInfix wrapNodeRest name vars typ = do
  nameStr <- lrdrNameToTextAnn name
  let
    lhs = appSep . wrapNodeRest $ if isInfix
      then do
        let (a : b : rest) = vars
        hasOwnParens <- hasAnnKeywordComment a AnnOpenP
        -- This isn't quite right, but does give syntactically valid results
        let needsParens = not (null rest) || hasOwnParens
        docSeq
          $ [docLit $ Text.pack "type", docSeparator]
          ++ [ docParenL | needsParens ]
          ++ [ layoutTyVarBndr False a
             , docSeparator
             , docLit nameStr
             , docSeparator
             , layoutTyVarBndr False b
             ]
          ++ [ docParenR | needsParens ]
          ++ fmap (layoutTyVarBndr True) rest
      else
        docSeq
          $ [ docLit $ Text.pack "type"
            , docSeparator
            , docWrapNode name $ docLit nameStr
            ]
          ++ fmap (layoutTyVarBndr True) vars
  sharedLhs <- docSharedWrapper id lhs
  typeDoc <- docSharedWrapper layoutType typ
  hasComments <- hasAnyCommentsConnected typ
  layoutLhsAndType hasComments sharedLhs "=" typeDoc
-- | Layout one type variable binder, plain (@a@) or kinded
-- (@(a :: k)@); needsSep prepends a separator before the binder.
layoutTyVarBndr :: Bool -> ToBriDoc (HsTyVarBndr ())
layoutTyVarBndr needsSep lbndr@(L _ bndr) = do
  docWrapNodePrior lbndr $ case bndr of
    UserTyVar _ _ name -> do
      nameStr <- lrdrNameToTextAnn name
      docSeq $ [ docSeparator | needsSep ] ++ [docLit nameStr]
    KindedTyVar _ _ name kind -> do
      nameStr <- lrdrNameToTextAnn name
      docSeq
        $ [ docSeparator | needsSep ]
        ++ [ docLit $ Text.pack "("
           , appSep $ docLit nameStr
           , appSep . docLit $ Text.pack "::"
           , docForceSingleline $ layoutType kind
           , docLit $ Text.pack ")"
           ]
--------------------------------------------------------------------------------
-- TyFamInstDecl
--------------------------------------------------------------------------------
-- | Layout a type family instance equation; inClass selects between
-- @type ...@ (inside a class\/instance body) and @type instance ...@
-- (top level).
layoutTyFamInstDecl
  :: Data.Data.Data a
  => Bool
  -> Located a
  -> TyFamInstDecl GhcPs
  -> ToBriDocM BriDocNumbered
layoutTyFamInstDecl inClass outerNode tfid = do
  let
    FamEqn _ name bndrsMay pats _fixity typ = hsib_body $ tfid_eqn tfid
    -- bndrsMay isJust e.g. with
    --   type instance forall a . F (Maybe a) = Either () a
    innerNode = outerNode
  docWrapNodePrior outerNode $ do
    nameStr <- lrdrNameToTextAnn name
    needsParens <- hasAnnKeyword outerNode AnnOpenP
    let
      instanceDoc = if inClass
        then docLit $ Text.pack "type"
        else docSeq
          [appSep . docLit $ Text.pack "type", docLit $ Text.pack "instance"]
      makeForallDoc :: [LHsTyVarBndr () GhcPs] -> ToBriDocM BriDocNumbered
      makeForallDoc bndrs = do
        bndrDocs <- layoutTyVarBndrs bndrs
        docSeq
          ([docLit (Text.pack "forall")] ++ processTyVarBndrsSingleline bndrDocs
          )
      lhs =
        docWrapNode innerNode
          . docSeq
          $ [appSep instanceDoc]
          ++ [ makeForallDoc foralls | Just foralls <- [bndrsMay] ]
          ++ [ docParenL | needsParens ]
          ++ [appSep $ docWrapNode name $ docLit nameStr]
          ++ intersperse docSeparator (layoutHsTyPats pats)
          ++ [ docParenR | needsParens ]
    hasComments <-
      (||)
      <$> hasAnyRegularCommentsConnected outerNode
      <*> hasAnyRegularCommentsRest innerNode
    typeDoc <- docSharedWrapper layoutType typ
    layoutLhsAndType hasComments lhs "=" typeDoc
-- | Layout the argument patterns of a type-family equation: plain type
-- arguments as-is, visible kind applications with a @\@@ prefix.
layoutHsTyPats
  :: [HsArg (LHsType GhcPs) (LHsKind GhcPs)] -> [ToBriDocM BriDocNumbered]
layoutHsTyPats pats = pats <&> \case
  HsValArg tm -> layoutType tm
  HsTypeArg _l ty -> docSeq [docLit $ Text.pack "@", layoutType ty]
    -- we ignore the SourceLoc here.. this LPat not being (L _ {}) change
    -- is a bit strange. Hopefully this does not ignore any important
    -- annotations.
  HsArgPar _l -> error "brittany internal error: HsArgPar{}"
--------------------------------------------------------------------------------
-- ClsInstDecl
--------------------------------------------------------------------------------
-- | Layout an @instance@ declaration
--
-- Layout signatures and bindings using the corresponding layouters from the
-- top-level. Layout the instance head, type family instances, and data family
-- instances using ExactPrint.
layoutClsInst :: ToBriDoc ClsInstDecl
layoutClsInst lcid@(L _ cid) = docLines
  [ layoutInstanceHead
  , docEnsureIndent BrIndentRegular
  $ docSetIndentLevel
  $ docSortedLines
  $ fmap layoutAndLocateSig (cid_sigs cid)
  ++ fmap layoutAndLocateBind (bagToList $ cid_binds cid)
  ++ fmap layoutAndLocateTyFamInsts (cid_tyfam_insts cid)
  ++ fmap layoutAndLocateDataFamInsts (cid_datafam_insts cid)
  ]
 where
  -- | Render the @instance … where@ head via ExactPrint, with all member
  -- declarations stripped (they are layouted separately above).
  layoutInstanceHead :: ToBriDocM BriDocNumbered
  layoutInstanceHead =
    briDocByExactNoComment
      $ InstD NoExtField
      . ClsInstD NoExtField
      . removeChildren
      <$> lcid
  -- Drop all member declarations so ExactPrint reproduces only the head.
  removeChildren :: ClsInstDecl GhcPs -> ClsInstDecl GhcPs
  removeChildren c = c
    { cid_binds = emptyBag
    , cid_sigs = []
    , cid_tyfam_insts = []
    , cid_datafam_insts = []
    }
  -- | Like 'docLines', but sorts the lines based on location
  docSortedLines
    :: [ToBriDocM (Located BriDocNumbered)] -> ToBriDocM BriDocNumbered
  docSortedLines l =
    allocateNode
      . BDFLines
      . fmap unLoc
      . List.sortOn (ExactPrint.rs . getLoc)
      =<< sequence l
  layoutAndLocateSig :: ToBriDocC (Sig GhcPs) (Located BriDocNumbered)
  layoutAndLocateSig lsig@(L loc _) = L loc <$> layoutSig lsig
  layoutAndLocateBind :: ToBriDocC (HsBind GhcPs) (Located BriDocNumbered)
  layoutAndLocateBind lbind@(L loc _) =
    L loc <$> (joinBinds =<< layoutBind lbind)
  joinBinds
    :: Either [BriDocNumbered] BriDocNumbered -> ToBriDocM BriDocNumbered
  joinBinds = \case
    Left ns -> docLines $ return <$> ns
    Right n -> return n
  layoutAndLocateTyFamInsts
    :: ToBriDocC (TyFamInstDecl GhcPs) (Located BriDocNumbered)
  layoutAndLocateTyFamInsts ltfid@(L loc tfid) =
    L loc <$> layoutTyFamInstDecl True ltfid tfid
  layoutAndLocateDataFamInsts
    :: ToBriDocC (DataFamInstDecl GhcPs) (Located BriDocNumbered)
  layoutAndLocateDataFamInsts ldfid@(L loc _) =
    L loc <$> layoutDataFamInstDecl ldfid
  -- | Send to ExactPrint then remove unnecessary whitespace
  layoutDataFamInstDecl :: ToBriDoc DataFamInstDecl
  layoutDataFamInstDecl ldfid =
    fmap stripWhitespace <$> briDocByExactNoComment ldfid
  -- | ExactPrint adds indentation/newlines to declarations
  stripWhitespace :: BriDocF f -> BriDocF f
  stripWhitespace (BDFExternal ann anns b t) =
    BDFExternal ann anns b $ stripWhitespace' t
  stripWhitespace b = b
  -- | This fixes two issues of output coming from Exactprinting
  -- associated (data) type decls. Firstly we place the output into docLines,
  -- so one newline coming from ExactPrint is superfluous, so we drop the
  -- first (empty) line. The second issue is Exactprint indents the first
  -- member in a strange fashion:
  --
  -- input:
  --
  -- > instance MyClass Int where   -- NOTE(review): example names reconstructed
  -- >   -- | This data is very important
  -- >   data MyData = MyData
  -- >     { intData  :: String
  -- >     , intData2 :: Int
  -- >     }
  --
  -- output of just exactprinting the associated data type syntax node
  --
  -- >
  -- >   -- | This data is very important
  -- >   data MyData = MyData
  -- >     { intData  :: String
  -- >     , intData2 :: Int
  -- >     }
  --
  -- To fix this, we strip whitespace from the start of the comments and the
  -- first line of the declaration, stopping when we see "data" or "type" at
  -- the start of a line. I.e., this function yields
  --
  -- > -- | This data is very important
  -- > data MyData = MyData
  -- >   { intData :: String
  -- >   , intData2 :: Int
  -- >   }
  --
  -- Downside apart from being a hacky and brittle fix is that this removes
  -- possible additional indentation from comments before the first member.
  --
  -- But the whole thing is just a temporary measure until brittany learns
  -- to layout data/type decls.
  stripWhitespace' :: Text -> Text
  stripWhitespace' t =
    Text.intercalate (Text.pack "\n") $ go $ List.drop 1 $ Text.lines t
   where
    go [] = []
    go (line1 : lineR) = case Text.stripStart line1 of
      st
        | isTypeOrData st -> st : lineR
        | otherwise -> st : go lineR
    isTypeOrData t' =
      (Text.pack "type" `Text.isPrefixOf` t')
        || (Text.pack "newtype" `Text.isPrefixOf` t')
        || (Text.pack "data" `Text.isPrefixOf` t')
--------------------------------------------------------------------------------
-- Common Helpers
--------------------------------------------------------------------------------
-- | Layout a left-hand side and a right-hand-side type, joined by the given
-- separator (e.g. @"="@ for type synonyms, @"::"@ for signatures): on one
-- line when comment-free and it fits, otherwise with the type hanging in a
-- prefix-operator column on the next line.
layoutLhsAndType
  :: Bool
  -> ToBriDocM BriDocNumbered
  -> String
  -> ToBriDocM BriDocNumbered
  -> ToBriDocM BriDocNumbered
layoutLhsAndType hasComments lhs sep typeDoc = do
  runFilteredAlternative $ do
    -- (separators probably are "=" or "::")
    -- lhs = type
    -- lhs :: type
    addAlternativeCond (not hasComments) $ docSeq
      [lhs, docSeparator, docLitS sep, docSeparator, docForceSingleline typeDoc]
    -- lhs
    --   :: typeA
    --   -> typeB
    -- lhs
    --   = typeA
    --   -> typeB
    addAlternative $ docAddBaseY BrIndentRegular $ docPar lhs $ docCols
      ColTyOpPrefix
      [ appSep $ docLitS sep
      , docAddBaseY (BrIndentSpecial (length sep + 1)) typeDoc
      ]
| null | https://raw.githubusercontent.com/lspitzner/brittany/7399b7538835411727e025e1480ea96b5496416c/source/library/Language/Haskell/Brittany/Internal/Layouters/Decl.hs | haskell | # SOURCE #
# SOURCE #
------------------------------------------------------------------------------
Sig
------------------------------------------------------------------------------
in fact the default
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Just . (>>= either id return) . Data.Foldable.toList <$> mapBagM layoutBind lhsBindsLR -- TODO: fix ordering
x@(HsValBinds (ValBindsIn{})) ->
Just . (:[]) <$> unknownNodeError "HsValBinds (ValBindsIn _ (_:_))" x
x@(HsValBinds (ValBindsOut _binds _lsigs)) ->
TODO: we don't need the `LHsExpr GhcPs` anymore, now that there is
I have really no idea if this path ever occurs, but better safe than
TODO: apart from this, there probably are more nodes below which could
be shared between alternatives.
where in following lines
[ docSetBaseY $ return body
]
no alternatives exclusively when `length clauseDocs /= 1`
multiple clauses added in-paragraph, each in a single line
example: foo | bar = baz
| lll = asd
the docForceSingleline might seems superflous, but it
helps the alternative resolving impl.
i am not sure if there is a benefit to using
docForceParSpacing additionally here:
multiple clauses, each in a separate, single line
the docForceSingleline might seems superflous, but it
helps the alternative resolving impl.
i am not sure if there is a benefit to using
docForceParSpacing additionally here:
multiple clauses, each with the guard(s) in a single line, body
as a paragraph
multiple clauses, each with the guard(s) in a single line, body
in a new line as a paragraph
conservative approach: everything starts on the left.
| Layout a pattern synonym binding
pattern .. where
..
..
pattern .. =
..
pattern .. <-
.. where
..
..
| Helper method for the left hand side of a pattern synonym
| Helper method to get the where clause from of explicitly bidirectional
pattern synonyms
------------------------------------------------------------------------------
TyClDecl
------------------------------------------------------------------------------
hasTrailingParen <- hasAnnKeywordComment ltycl AnnCloseP
let parenWrapper = if hasTrailingParen
then appSep . docWrapNodeRest ltycl
else id
This isn't quite right, but does give syntactically valid results
------------------------------------------------------------------------------
TyFamInstDecl
------------------------------------------------------------------------------
bndrsMay isJust e.g. with
is a bit strange. Hopefully this does not ignore any important
annotations.
------------------------------------------------------------------------------
ClsInstDecl
------------------------------------------------------------------------------
| Layout an @instance@ declaration
top-level. Layout the instance head, type family instances, and data family
instances using ExactPrint.
| Like 'docLines', but sorts the lines based on location
| Send to ExactPrint then remove unecessary whitespace
member in a strange fashion:
input:
> -- | This data is very important
> { intData :: String
> }
output of just exactprinting the associated data type syntax node
>
> -- | This data is very important
> { intData :: String
> }
To fix this, we strip whitespace from the start of the comments and the
the start of a line. I.e., this function yields
> -- | This data is very important
> { intData :: String
> }
Downside apart from being a hacky and brittle fix is that this removes
to layout data/type decls.
------------------------------------------------------------------------------
Common Helpers
------------------------------------------------------------------------------
(separators probably are "=" or "::")
:: typeA
-> typeB
= typeA
-> typeB | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
module Language.Haskell.Brittany.Internal.Layouters.Decl where
import qualified Data.Data
import qualified Data.Foldable
import qualified Data.Maybe
import qualified Data.Semigroup as Semigroup
import qualified Data.Text as Text
import GHC (AnnKeywordId(..), GenLocated(L))
import GHC.Data.Bag (bagToList, emptyBag)
import qualified GHC.Data.FastString as FastString
import GHC.Hs
import qualified GHC.OldList as List
import GHC.Types.Basic
( Activation(..)
, InlinePragma(..)
, InlineSpec(..)
, LexicalFixity(..)
, RuleMatchInfo(..)
)
import GHC.Types.SrcLoc (Located, SrcSpan, getLoc, unLoc)
import Language.Haskell.Brittany.Internal.Config.Types
import Language.Haskell.Brittany.Internal.ExactPrintUtils
import Language.Haskell.Brittany.Internal.LayouterBasics
import Language.Haskell.Brittany.Internal.Layouters.DataDecl
import Language.Haskell.Brittany.Internal.Layouters.Pattern
import Language.Haskell.Brittany.Internal.Layouters.Type
import Language.Haskell.Brittany.Internal.Prelude
import Language.Haskell.Brittany.Internal.PreludeUtils
import Language.Haskell.Brittany.Internal.Types
import qualified Language.Haskell.GHC.ExactPrint as ExactPrint
import Language.Haskell.GHC.ExactPrint.Types (mkAnnKey)
import qualified Language.Haskell.GHC.ExactPrint.Utils as ExactPrint
layoutDecl :: ToBriDoc HsDecl
layoutDecl d@(L loc decl) = case decl of
SigD _ sig -> withTransformedAnns d $ layoutSig (L loc sig)
ValD _ bind -> withTransformedAnns d $ layoutBind (L loc bind) >>= \case
Left ns -> docLines $ return <$> ns
Right n -> return n
TyClD _ tycl -> withTransformedAnns d $ layoutTyCl (L loc tycl)
InstD _ (TyFamInstD _ tfid) ->
withTransformedAnns d $ layoutTyFamInstDecl False d tfid
InstD _ (ClsInstD _ inst) ->
withTransformedAnns d $ layoutClsInst (L loc inst)
_ -> briDocByExactNoComment d
layoutSig :: ToBriDoc Sig
layoutSig lsig@(L _loc sig) = case sig of
TypeSig _ names (HsWC _ (HsIB _ typ)) -> layoutNamesAndType Nothing names typ
InlineSig _ name (InlinePragma _ spec _arity phaseAct conlike) ->
docWrapNode lsig $ do
nameStr <- lrdrNameToTextAnn name
specStr <- specStringCompat lsig spec
let
phaseStr = case phaseAct of
not [ ] - for NOINLINE NeverActive is
AlwaysActive -> ""
ActiveBefore _ i -> "[~" ++ show i ++ "] "
ActiveAfter _ i -> "[" ++ show i ++ "] "
FinalActive -> error "brittany internal error: FinalActive"
let
conlikeStr = case conlike of
FunLike -> ""
ConLike -> "CONLIKE "
docLit
$ Text.pack ("{-# " ++ specStr ++ conlikeStr ++ phaseStr)
<> nameStr
<> Text.pack " #-}"
ClassOpSig _ False names (HsIB _ typ) -> layoutNamesAndType Nothing names typ
PatSynSig _ names (HsIB _ typ) ->
layoutNamesAndType (Just "pattern") names typ
TODO
where
layoutNamesAndType mKeyword names typ = docWrapNode lsig $ do
let
keyDoc = case mKeyword of
Just key -> [appSep . docLit $ Text.pack key]
Nothing -> []
nameStrs <- names `forM` lrdrNameToTextAnn
let nameStr = Text.intercalate (Text.pack ", ") $ nameStrs
typeDoc <- docSharedWrapper layoutType typ
hasComments <- hasAnyCommentsBelow lsig
shouldBeHanging <-
mAsk <&> _conf_layout .> _lconfig_hangingTypeSignature .> confUnpack
if shouldBeHanging
then
docSeq
$ [ appSep
$ docWrapNodeRest lsig
$ docSeq
$ keyDoc
<> [docLit nameStr]
, docSetBaseY $ docLines
[ docCols
ColTyOpPrefix
[ docLit $ Text.pack ":: "
, docAddBaseY (BrIndentSpecial 3) $ typeDoc
]
]
]
else layoutLhsAndType
hasComments
(appSep . docWrapNodeRest lsig . docSeq $ keyDoc <> [docLit nameStr])
"::"
typeDoc
specStringCompat
:: MonadMultiWriter [BrittanyError] m => LSig GhcPs -> InlineSpec -> m String
specStringCompat ast = \case
NoUserInline -> mTell [ErrorUnknownNode "NoUserInline" ast] $> ""
Inline -> pure "INLINE "
Inlinable -> pure "INLINABLE "
NoInline -> pure "NOINLINE "
layoutGuardLStmt :: ToBriDoc' (Stmt GhcPs (LHsExpr GhcPs))
layoutGuardLStmt lgstmt@(L _ stmtLR) = docWrapNode lgstmt $ case stmtLR of
BodyStmt _ body _ _ -> layoutExpr body
BindStmt _ lPat expr -> do
patDoc <- docSharedWrapper layoutPat lPat
expDoc <- docSharedWrapper layoutExpr expr
docCols
ColBindStmt
[ appSep $ colsWrapPat =<< patDoc
, docSeq [appSep $ docLit $ Text.pack "<-", expDoc]
]
TODO
HsBind
layoutBind
:: ToBriDocC (HsBindLR GhcPs GhcPs) (Either [BriDocNumbered] BriDocNumbered)
layoutBind lbind@(L _ bind) = case bind of
FunBind _ fId (MG _ lmatches@(L _ matches) _) [] -> do
idStr <- lrdrNameToTextAnn fId
binderDoc <- docLit $ Text.pack "="
funcPatDocs <-
docWrapNode lbind
$ docWrapNode lmatches
$ layoutPatternBind (Just idStr) binderDoc
`mapM` matches
return $ Left $ funcPatDocs
PatBind _ pat (GRHSs _ grhss whereBinds) ([], []) -> do
patDocs <- colsWrapPat =<< layoutPat pat
clauseDocs <- layoutGrhs `mapM` grhss
mWhereDocs <- layoutLocalBinds whereBinds
TODO : is this the right AnnKey ?
binderDoc <- docLit $ Text.pack "="
hasComments <- hasAnyCommentsBelow lbind
fmap Right $ docWrapNode lbind $ layoutPatternBindFinal
Nothing
binderDoc
(Just patDocs)
clauseDocs
mWhereArg
hasComments
PatSynBind _ (PSB _ patID lpat rpat dir) -> do
fmap Right $ docWrapNode lbind $ layoutPatSynBind patID lpat dir rpat
_ -> Right <$> unknownNodeError "" lbind
layoutIPBind :: ToBriDoc IPBind
layoutIPBind lipbind@(L _ bind) = case bind of
IPBind _ (Right _) _ -> error "brittany internal error: IPBind Right"
IPBind _ (Left (L _ (HsIPName name))) expr -> do
ipName <- docLit $ Text.pack $ '?' : FastString.unpackFS name
binderDoc <- docLit $ Text.pack "="
exprDoc <- layoutExpr expr
hasComments <- hasAnyCommentsBelow lipbind
layoutPatternBindFinal
Nothing
binderDoc
(Just ipName)
[([], exprDoc, expr)]
Nothing
hasComments
data BagBindOrSig = BagBind (LHsBindLR GhcPs GhcPs)
| BagSig (LSig GhcPs)
bindOrSigtoSrcSpan :: BagBindOrSig -> SrcSpan
bindOrSigtoSrcSpan (BagBind (L l _)) = l
bindOrSigtoSrcSpan (BagSig (L l _)) = l
layoutLocalBinds
:: ToBriDocC (HsLocalBindsLR GhcPs GhcPs) (Maybe [BriDocNumbered])
layoutLocalBinds lbinds@(L _ binds) = case binds of
HsValBinds ( [ ] ) - >
HsValBinds _ (ValBinds _ bindlrs sigs) -> do
let
unordered =
[ BagBind b | b <- Data.Foldable.toList bindlrs ]
++ [ BagSig s | s <- sigs ]
ordered = List.sortOn (ExactPrint.rs . bindOrSigtoSrcSpan) unordered
docs <- docWrapNode lbinds $ join <$> ordered `forM` \case
BagBind b -> either id return <$> layoutBind b
BagSig s -> return <$> layoutSig s
return $ Just $ docs
HsValBinds _ (XValBindsLR{}) -> error "brittany internal error: XValBindsLR"
HsIPBinds _ (IPBinds _ bb) -> Just <$> mapM layoutIPBind bb
EmptyLocalBinds{} -> return $ Nothing
parSpacing stuff . B
layoutGrhs
:: LGRHS GhcPs (LHsExpr GhcPs)
-> ToBriDocM ([BriDocNumbered], BriDocNumbered, LHsExpr GhcPs)
layoutGrhs lgrhs@(L _ (GRHS _ guards body)) = do
guardDocs <- docWrapNode lgrhs $ layoutStmt `mapM` guards
bodyDoc <- layoutExpr body
return (guardDocs, bodyDoc, body)
layoutPatternBind
:: Maybe Text
-> BriDocNumbered
-> LMatch GhcPs (LHsExpr GhcPs)
-> ToBriDocM BriDocNumbered
layoutPatternBind funId binderDoc lmatch@(L _ match) = do
let pats = m_pats match
let (GRHSs _ grhss whereBinds) = m_grhss match
patDocs <- pats `forM` \p -> fmap return $ colsWrapPat =<< layoutPat p
let isInfix = isInfixMatch match
mIdStr <- case match of
Match _ (FunRhs matchId _ _) _ _ -> Just <$> lrdrNameToTextAnn matchId
_ -> pure Nothing
let mIdStr' = fixPatternBindIdentifier match <$> mIdStr
patDoc <- docWrapNodePrior lmatch $ case (mIdStr', patDocs) of
(Just idStr, p1 : p2 : pr) | isInfix -> if null pr
then docCols
ColPatternsFuncInfix
[ appSep $ docForceSingleline p1
, appSep $ docLit $ idStr
, docForceSingleline p2
]
else docCols
ColPatternsFuncInfix
([ docCols
ColPatterns
[ docParenL
, appSep $ docForceSingleline p1
, appSep $ docLit $ idStr
, docForceSingleline p2
, appSep $ docParenR
]
]
++ (spacifyDocs $ docForceSingleline <$> pr)
)
(Just idStr, []) -> docLit idStr
(Just idStr, ps) ->
docCols ColPatternsFuncPrefix
$ appSep (docLit $ idStr)
: (spacifyDocs $ docForceSingleline <$> ps)
(Nothing, ps) ->
docCols ColPatterns
$ (List.intersperse docSeparator $ docForceSingleline <$> ps)
clauseDocs <- docWrapNodeRest lmatch $ layoutGrhs `mapM` grhss
mWhereDocs <- layoutLocalBinds whereBinds
let mWhereArg = mWhereDocs <&> (,) (mkAnnKey lmatch)
let alignmentToken = if null pats then Nothing else funId
hasComments <- hasAnyCommentsBelow lmatch
layoutPatternBindFinal
alignmentToken
binderDoc
(Just patDoc)
clauseDocs
mWhereArg
hasComments
fixPatternBindIdentifier :: Match GhcPs (LHsExpr GhcPs) -> Text -> Text
fixPatternBindIdentifier match idStr = go $ m_ctxt match
where
go = \case
(FunRhs _ _ SrcLazy) -> Text.cons '~' idStr
(FunRhs _ _ SrcStrict) -> Text.cons '!' idStr
(FunRhs _ _ NoSrcStrict) -> idStr
(StmtCtxt ctx1) -> goInner ctx1
_ -> idStr
risking another " drop bangpatterns " bugs .
goInner = \case
(PatGuard ctx1) -> go ctx1
(ParStmtCtxt ctx1) -> goInner ctx1
(TransStmtCtxt ctx1) -> goInner ctx1
_ -> idStr
layoutPatternBindFinal
:: Maybe Text
-> BriDocNumbered
-> Maybe BriDocNumbered
-> [([BriDocNumbered], BriDocNumbered, LHsExpr GhcPs)]
-> Maybe (ExactPrint.AnnKey, [BriDocNumbered])
^ AnnKey for the node that contains the AnnWhere position annotation
-> Bool
-> ToBriDocM BriDocNumbered
layoutPatternBindFinal alignmentToken binderDoc mPatDoc clauseDocs mWhereDocs hasComments
= do
let
patPartInline = case mPatDoc of
Nothing -> []
Just patDoc -> [appSep $ docForceSingleline $ return patDoc]
patPartParWrap = case mPatDoc of
Nothing -> id
Just patDoc -> docPar (return patDoc)
whereIndent <- do
shouldSpecial <-
mAsk <&> _conf_layout .> _lconfig_indentWhereSpecial .> confUnpack
regularIndentAmount <-
mAsk <&> _conf_layout .> _lconfig_indentAmount .> confUnpack
pure $ if shouldSpecial
then BrIndentSpecial (max 1 (regularIndentAmount `div` 2))
else BrIndentRegular
wherePartMultiLine :: [ToBriDocM BriDocNumbered] <- case mWhereDocs of
Nothing -> return $ []
Just (annKeyWhere, [w]) -> pure . pure <$> docAlt
[ docEnsureIndent BrIndentRegular
$ docSeq
[ docLit $ Text.pack "where"
, docSeparator
, docForceSingleline $ return w
]
, docMoveToKWDP annKeyWhere AnnWhere False
$ docEnsureIndent whereIndent
$ docLines
[ docLit $ Text.pack "where"
, docEnsureIndent whereIndent
$ docSetIndentLevel
$ docNonBottomSpacing
$ return w
]
]
Just (annKeyWhere, ws) ->
fmap (pure . pure)
$ docMoveToKWDP annKeyWhere AnnWhere False
$ docEnsureIndent whereIndent
$ docLines
[ docLit $ Text.pack "where"
, docEnsureIndent whereIndent
$ docSetIndentLevel
$ docNonBottomSpacing
$ docLines
$ return
<$> ws
]
let
singleLineGuardsDoc guards = appSep $ case guards of
[] -> docEmpty
[g] -> docSeq
[appSep $ docLit $ Text.pack "|", docForceSingleline $ return g]
gs ->
docSeq
$ [appSep $ docLit $ Text.pack "|"]
++ (List.intersperse
docCommaSep
(docForceSingleline . return <$> gs)
)
wherePart = case mWhereDocs of
Nothing -> Just docEmpty
Just (_, [w]) -> Just $ docSeq
[ docSeparator
, appSep $ docLit $ Text.pack "where"
, docSetIndentLevel $ docForceSingleline $ return w
]
_ -> Nothing
indentPolicy <- mAsk <&> _conf_layout .> _lconfig_indentPolicy .> confUnpack
runFilteredAlternative $ do
case clauseDocs of
[(guards, body, _bodyRaw)] -> do
let guardPart = singleLineGuardsDoc guards
forM_ wherePart $ \wherePart' ->
one - line solution
addAlternativeCond (not hasComments) $ docCols
(ColBindingLine alignmentToken)
[ docSeq (patPartInline ++ [guardPart])
, docSeq
[ appSep $ return binderDoc
, docForceSingleline $ return body
, wherePart'
]
]
one - line solution + where in next line(s )
addAlternativeCond (Data.Maybe.isJust mWhereDocs)
$ docLines
$ [ docCols
(ColBindingLine alignmentToken)
[ docSeq (patPartInline ++ [guardPart])
, docSeq
[ appSep $ return binderDoc
, docForceParSpacing $ docAddBaseY BrIndentRegular $ return
body
]
]
]
++ wherePartMultiLine
two - line solution + where in next line(s )
addAlternative
$ docLines
$ [ docForceSingleline
$ docSeq (patPartInline ++ [guardPart, return binderDoc])
, docEnsureIndent BrIndentRegular $ docForceSingleline $ return
body
]
++ wherePartMultiLine
pattern and exactly one clause in single line , body as par ;
addAlternative
$ docLines
$ [ docCols
(ColBindingLine alignmentToken)
[ docSeq (patPartInline ++ [guardPart])
, docSeq
[ appSep $ return binderDoc
, docForceParSpacing $ docAddBaseY BrIndentRegular $ return
body
]
]
]
, lineMod $ docAlt
, docAddBaseY BrIndentRegular $ return body
++ wherePartMultiLine
pattern and exactly one clause in single line , body in new line .
addAlternative
$ docLines
$ [ docSeq (patPartInline ++ [guardPart, return binderDoc])
, docNonBottomSpacing
$ docEnsureIndent BrIndentRegular
$ docAddBaseY BrIndentRegular
$ return body
]
++ wherePartMultiLine
case mPatDoc of
Nothing -> return ()
Just patDoc ->
addAlternativeCond (indentPolicy == IndentPolicyFree)
$ docLines
$ [ docSeq
[ appSep $ docForceSingleline $ return patDoc
, docSetBaseY
$ docLines
$ clauseDocs
<&> \(guardDocs, bodyDoc, _) -> do
let guardPart = singleLineGuardsDoc guardDocs
docForceSingleline $ docCols
ColGuardedBody
[ guardPart
, docSeq
[ appSep $ return binderDoc
, docForceSingleline $ return bodyDoc
, docAddBaseY BrIndentRegular $ return bodyDoc
]
]
]
]
++ wherePartMultiLine
addAlternative
$ docLines
$ [ docAddBaseY BrIndentRegular
$ patPartParWrap
$ docLines
$ map docSetBaseY
$ clauseDocs
<&> \(guardDocs, bodyDoc, _) -> do
let guardPart = singleLineGuardsDoc guardDocs
docForceSingleline $ docCols
ColGuardedBody
[ guardPart
, docSeq
[ appSep $ return binderDoc
, docForceSingleline $ return bodyDoc
, docAddBaseY BrIndentRegular $ return bodyDoc
]
]
]
++ wherePartMultiLine
addAlternative
$ docLines
$ [ docAddBaseY BrIndentRegular
$ patPartParWrap
$ docLines
$ map docSetBaseY
$ clauseDocs
<&> \(guardDocs, bodyDoc, _) ->
docSeq
$ (case guardDocs of
[] -> []
[g] ->
[ docForceSingleline $ docSeq
[appSep $ docLit $ Text.pack "|", return g]
]
gs ->
[ docForceSingleline
$ docSeq
$ [appSep $ docLit $ Text.pack "|"]
++ List.intersperse docCommaSep (return <$> gs)
]
)
++ [ docSeparator
, docCols
ColOpPrefix
[ appSep $ return binderDoc
, docAddBaseY BrIndentRegular
$ docForceParSpacing
$ return bodyDoc
]
]
]
++ wherePartMultiLine
addAlternative
$ docLines
$ [ docAddBaseY BrIndentRegular
$ patPartParWrap
$ docLines
$ map docSetBaseY
$ clauseDocs
>>= \(guardDocs, bodyDoc, _) ->
(case guardDocs of
[] -> []
[g] ->
[ docForceSingleline
$ docSeq [appSep $ docLit $ Text.pack "|", return g]
]
gs ->
[ docForceSingleline
$ docSeq
$ [appSep $ docLit $ Text.pack "|"]
++ List.intersperse docCommaSep (return <$> gs)
]
)
++ [ docCols
ColOpPrefix
[ appSep $ return binderDoc
, docAddBaseY BrIndentRegular
$ docForceParSpacing
$ return bodyDoc
]
]
]
++ wherePartMultiLine
addAlternative
$ docLines
$ [ docAddBaseY BrIndentRegular
$ patPartParWrap
$ docLines
$ map docSetBaseY
$ clauseDocs
>>= \(guardDocs, bodyDoc, _) ->
(case guardDocs of
[] -> []
[g] -> [docSeq [appSep $ docLit $ Text.pack "|", return g]]
(g1 : gr) ->
(docSeq [appSep $ docLit $ Text.pack "|", return g1]
: (gr <&> \g ->
docSeq [appSep $ docLit $ Text.pack ",", return g]
)
)
)
++ [ docCols
ColOpPrefix
[ appSep $ return binderDoc
, docAddBaseY BrIndentRegular $ return bodyDoc
]
]
]
++ wherePartMultiLine
layoutPatSynBind
:: Located (IdP GhcPs)
-> HsPatSynDetails (Located (IdP GhcPs))
-> HsPatSynDir GhcPs
-> LPat GhcPs
-> ToBriDocM BriDocNumbered
layoutPatSynBind name patSynDetails patDir rpat = do
let
patDoc = docLit $ Text.pack "pattern"
binderDoc = case patDir of
ImplicitBidirectional -> docLit $ Text.pack "="
_ -> docLit $ Text.pack "<-"
body = colsWrapPat =<< layoutPat rpat
whereDoc = docLit $ Text.pack "where"
mWhereDocs <- layoutPatSynWhere patDir
headDoc <-
fmap pure
$ docSeq
$ [ patDoc
, docSeparator
, layoutLPatSyn name patSynDetails
, docSeparator
, binderDoc
]
runFilteredAlternative $ do
addAlternative
$
docAddBaseY BrIndentRegular
$ docSeq
([headDoc, docSeparator, body] ++ case mWhereDocs of
Just ds -> [docSeparator, docPar whereDoc (docLines ds)]
Nothing -> []
)
addAlternative
$
docAddBaseY BrIndentRegular
$ docPar
headDoc
(case mWhereDocs of
Nothing -> body
Just ds -> docLines ([docSeq [body, docSeparator, whereDoc]] ++ ds)
)
layoutLPatSyn
:: Located (IdP GhcPs)
-> HsPatSynDetails (Located (IdP GhcPs))
-> ToBriDocM BriDocNumbered
layoutLPatSyn name (PrefixCon vars) = do
docName <- lrdrNameToTextAnn name
names <- mapM lrdrNameToTextAnn vars
docSeq . fmap appSep $ docLit docName : (docLit <$> names)
layoutLPatSyn name (InfixCon left right) = do
leftDoc <- lrdrNameToTextAnn left
docName <- lrdrNameToTextAnn name
rightDoc <- lrdrNameToTextAnn right
docSeq . fmap (appSep . docLit) $ [leftDoc, docName, rightDoc]
layoutLPatSyn name (RecCon recArgs) = do
docName <- lrdrNameToTextAnn name
args <- mapM (lrdrNameToTextAnn . recordPatSynSelectorId) recArgs
docSeq
. fmap docLit
$ [docName, Text.pack " { "]
<> intersperse (Text.pack ", ") args
<> [Text.pack " }"]
layoutPatSynWhere
:: HsPatSynDir GhcPs -> ToBriDocM (Maybe [ToBriDocM BriDocNumbered])
layoutPatSynWhere hs = case hs of
ExplicitBidirectional (MG _ (L _ lbinds) _) -> do
binderDoc <- docLit $ Text.pack "="
Just
<$> mapM (docSharedWrapper $ layoutPatternBind Nothing binderDoc) lbinds
_ -> pure Nothing
layoutTyCl :: ToBriDoc TyClDecl
layoutTyCl ltycl@(L _loc tycl) = case tycl of
SynDecl _ name vars fixity typ -> do
let
isInfix = case fixity of
Prefix -> False
Infix -> True
let wrapNodeRest = docWrapNodeRest ltycl
docWrapNodePrior ltycl
$ layoutSynDecl isInfix wrapNodeRest name (hsq_explicit vars) typ
DataDecl _ext name tyVars _ dataDefn ->
layoutDataDecl ltycl name tyVars dataDefn
_ -> briDocByExactNoComment ltycl
layoutSynDecl
:: Bool
-> (ToBriDocM BriDocNumbered -> ToBriDocM BriDocNumbered)
-> Located (IdP GhcPs)
-> [LHsTyVarBndr () GhcPs]
-> LHsType GhcPs
-> ToBriDocM BriDocNumbered
layoutSynDecl isInfix wrapNodeRest name vars typ = do
nameStr <- lrdrNameToTextAnn name
let
lhs = appSep . wrapNodeRest $ if isInfix
then do
let (a : b : rest) = vars
hasOwnParens <- hasAnnKeywordComment a AnnOpenP
let needsParens = not (null rest) || hasOwnParens
docSeq
$ [docLit $ Text.pack "type", docSeparator]
++ [ docParenL | needsParens ]
++ [ layoutTyVarBndr False a
, docSeparator
, docLit nameStr
, docSeparator
, layoutTyVarBndr False b
]
++ [ docParenR | needsParens ]
++ fmap (layoutTyVarBndr True) rest
else
docSeq
$ [ docLit $ Text.pack "type"
, docSeparator
, docWrapNode name $ docLit nameStr
]
++ fmap (layoutTyVarBndr True) vars
sharedLhs <- docSharedWrapper id lhs
typeDoc <- docSharedWrapper layoutType typ
hasComments <- hasAnyCommentsConnected typ
layoutLhsAndType hasComments sharedLhs "=" typeDoc
layoutTyVarBndr :: Bool -> ToBriDoc (HsTyVarBndr ())
layoutTyVarBndr needsSep lbndr@(L _ bndr) = do
docWrapNodePrior lbndr $ case bndr of
UserTyVar _ _ name -> do
nameStr <- lrdrNameToTextAnn name
docSeq $ [ docSeparator | needsSep ] ++ [docLit nameStr]
KindedTyVar _ _ name kind -> do
nameStr <- lrdrNameToTextAnn name
docSeq
$ [ docSeparator | needsSep ]
++ [ docLit $ Text.pack "("
, appSep $ docLit nameStr
, appSep . docLit $ Text.pack "::"
, docForceSingleline $ layoutType kind
, docLit $ Text.pack ")"
]
layoutTyFamInstDecl
:: Data.Data.Data a
=> Bool
-> Located a
-> TyFamInstDecl GhcPs
-> ToBriDocM BriDocNumbered
layoutTyFamInstDecl inClass outerNode tfid = do
let
FamEqn _ name bndrsMay pats _fixity typ = hsib_body $ tfid_eqn tfid
type instance forall a . ( Maybe a ) = Either ( ) a
innerNode = outerNode
docWrapNodePrior outerNode $ do
nameStr <- lrdrNameToTextAnn name
needsParens <- hasAnnKeyword outerNode AnnOpenP
let
instanceDoc = if inClass
then docLit $ Text.pack "type"
else docSeq
[appSep . docLit $ Text.pack "type", docLit $ Text.pack "instance"]
makeForallDoc :: [LHsTyVarBndr () GhcPs] -> ToBriDocM BriDocNumbered
makeForallDoc bndrs = do
bndrDocs <- layoutTyVarBndrs bndrs
docSeq
([docLit (Text.pack "forall")] ++ processTyVarBndrsSingleline bndrDocs
)
lhs =
docWrapNode innerNode
. docSeq
$ [appSep instanceDoc]
++ [ makeForallDoc foralls | Just foralls <- [bndrsMay] ]
++ [ docParenL | needsParens ]
++ [appSep $ docWrapNode name $ docLit nameStr]
++ intersperse docSeparator (layoutHsTyPats pats)
++ [ docParenR | needsParens ]
hasComments <-
(||)
<$> hasAnyRegularCommentsConnected outerNode
<*> hasAnyRegularCommentsRest innerNode
typeDoc <- docSharedWrapper layoutType typ
layoutLhsAndType hasComments lhs "=" typeDoc
layoutHsTyPats
:: [HsArg (LHsType GhcPs) (LHsKind GhcPs)] -> [ToBriDocM BriDocNumbered]
layoutHsTyPats pats = pats <&> \case
HsValArg tm -> layoutType tm
HsTypeArg _l ty -> docSeq [docLit $ Text.pack "@", layoutType ty]
we ignore the SourceLoc here .. this LPat not being ( L _ { } ) change
HsArgPar _l -> error "brittany internal error: HsArgPar{}"
Layout signatures and bindings using the corresponding layouters from the
layoutClsInst :: ToBriDoc ClsInstDecl
layoutClsInst lcid@(L _ cid) = docLines
[ layoutInstanceHead
, docEnsureIndent BrIndentRegular
$ docSetIndentLevel
$ docSortedLines
$ fmap layoutAndLocateSig (cid_sigs cid)
++ fmap layoutAndLocateBind (bagToList $ cid_binds cid)
++ fmap layoutAndLocateTyFamInsts (cid_tyfam_insts cid)
++ fmap layoutAndLocateDataFamInsts (cid_datafam_insts cid)
]
where
layoutInstanceHead :: ToBriDocM BriDocNumbered
layoutInstanceHead =
briDocByExactNoComment
$ InstD NoExtField
. ClsInstD NoExtField
. removeChildren
<$> lcid
removeChildren :: ClsInstDecl GhcPs -> ClsInstDecl GhcPs
removeChildren c = c
{ cid_binds = emptyBag
, cid_sigs = []
, cid_tyfam_insts = []
, cid_datafam_insts = []
}
docSortedLines
:: [ToBriDocM (Located BriDocNumbered)] -> ToBriDocM BriDocNumbered
docSortedLines l =
allocateNode
. BDFLines
. fmap unLoc
. List.sortOn (ExactPrint.rs . getLoc)
=<< sequence l
layoutAndLocateSig :: ToBriDocC (Sig GhcPs) (Located BriDocNumbered)
layoutAndLocateSig lsig@(L loc _) = L loc <$> layoutSig lsig
layoutAndLocateBind :: ToBriDocC (HsBind GhcPs) (Located BriDocNumbered)
layoutAndLocateBind lbind@(L loc _) =
L loc <$> (joinBinds =<< layoutBind lbind)
joinBinds
:: Either [BriDocNumbered] BriDocNumbered -> ToBriDocM BriDocNumbered
joinBinds = \case
Left ns -> docLines $ return <$> ns
Right n -> return n
layoutAndLocateTyFamInsts
:: ToBriDocC (TyFamInstDecl GhcPs) (Located BriDocNumbered)
layoutAndLocateTyFamInsts ltfid@(L loc tfid) =
L loc <$> layoutTyFamInstDecl True ltfid tfid
layoutAndLocateDataFamInsts
:: ToBriDocC (DataFamInstDecl GhcPs) (Located BriDocNumbered)
layoutAndLocateDataFamInsts ldfid@(L loc _) =
L loc <$> layoutDataFamInstDecl ldfid
layoutDataFamInstDecl :: ToBriDoc DataFamInstDecl
layoutDataFamInstDecl ldfid =
fmap stripWhitespace <$> briDocByExactNoComment ldfid
| ExactPrint adds indentation / newlines to declarations
stripWhitespace :: BriDocF f -> BriDocF f
stripWhitespace (BDFExternal ann anns b t) =
BDFExternal ann anns b $ stripWhitespace' t
stripWhitespace b = b
| This fixes two issues of output coming from Exactprinting
associated ( data ) type decls . Firstly we place the output into docLines ,
so one newline coming from is superfluous , so we drop the
first ( empty ) line . The second issue is Exactprint indents the first
> instance where
> data
> , intData2 : : Int
> data
> , intData2 : : Int
first line of the declaration , stopping when we see " data " or " type " at
> data
> , intData2 : : Int
possible additional indentation from comments before the first member .
But the whole thing is just a temporary measure until learns
stripWhitespace' :: Text -> Text
stripWhitespace' t =
Text.intercalate (Text.pack "\n") $ go $ List.drop 1 $ Text.lines t
where
go [] = []
go (line1 : lineR) = case Text.stripStart line1 of
st
| isTypeOrData st -> st : lineR
| otherwise -> st : go lineR
isTypeOrData t' =
(Text.pack "type" `Text.isPrefixOf` t')
|| (Text.pack "newtype" `Text.isPrefixOf` t')
|| (Text.pack "data" `Text.isPrefixOf` t')
layoutLhsAndType
:: Bool
-> ToBriDocM BriDocNumbered
-> String
-> ToBriDocM BriDocNumbered
-> ToBriDocM BriDocNumbered
layoutLhsAndType hasComments lhs sep typeDoc = do
runFilteredAlternative $ do
lhs = type
lhs : : type
addAlternativeCond (not hasComments) $ docSeq
[lhs, docSeparator, docLitS sep, docSeparator, docForceSingleline typeDoc]
lhs
lhs
addAlternative $ docAddBaseY BrIndentRegular $ docPar lhs $ docCols
ColTyOpPrefix
[ appSep $ docLitS sep
, docAddBaseY (BrIndentSpecial (length sep + 1)) typeDoc
]
|
5bece90416886fdcf33215a29c385fd72541c29a102521d09ca30b0ca0155456 | haskell-repa/repa | Combine.hs |
module Data.Array.Repa.Stream.Combine
( combine2
, combineSegs2 )
where
import Data.Array.Repa.Stream.Base
import Prelude hiding (map, zipWith)
-- | Combine two streams, using a tag stream to tell us which of the data
-- streams to take the next element from.
--
-- If there are insufficient elements in the data streams for the provided
-- tag stream then `error`.
--
-- @
-- combine2 [F T T F F T] [1 2 3] [4 5 6]
--  = [1 4 5 2 3 6]
-- @
--
combine2 :: Stream Bool -> Stream a -> Stream a -> Stream a
combine2 (Stream size tagS0 nextTag)
         (Stream _ as0 nextA) (Stream _ bs0 nextB)
 = Stream size (Nothing, tagS0, as0, bs0) step
 where
        -- No tag buffered yet: pull the next one from the tag stream.
        step (Nothing, st, sa, sb)
         = case nextTag st of
                Yield st' tag -> Update (Just tag, st', sa, sb)
                Update st'    -> Update (Nothing,  st', sa, sb)
                Done          -> Done

        -- Tag False: emit the next element of the first data stream.
        step (Just False, st, sa, sb)
         = case nextA sa of
                Yield sa' x   -> Yield  (Nothing,    st, sa', sb) x
                Update sa'    -> Update (Just False, st, sa', sb)
                Done          -> error "combine2ByTagS: stream 1 too short"

        -- Tag True: emit the next element of the second data stream.
        step (Just True, st, sa, sb)
         = case nextB sb of
                Yield sb' x   -> Yield  (Nothing,   st, sa, sb') x
                Update sb'    -> Update (Just True, st, sa, sb')
                Done          -> error "combine2ByTagS: stream 2 too short"
{-# INLINE [1] combine2 #-}
-- | Segmented Stream combine. Like `combine2ByTag`, except that the tags select
--   entire segments of each data stream, instead of selecting one element at a time.
--
-- @
-- combineSegs2
--      [F, F, T, F, T, T]
--      [2,1,3] [10,20,30,40,50,60]
--      [1,2,3] [11,22,33,44,55,66]
--  = [10,20,30,11,40,50,60,22,33,44,55,66]
-- @
--
--   This says take two elements from the first stream, then another one element
--   from the first stream, then one element from the second stream, then three
--   elements from the first stream ...
--
combineSegs2
        :: Stream Bool          -- ^ tag values
        -> Stream Int           -- ^ segment lengths for first data stream
        -> Stream a             -- ^ first data stream
        -> Stream Int           -- ^ segment lengths for second data stream
        -> Stream a             -- ^ second data stream
        -> Stream a
combineSegs2
        (Stream _ sf0 nextf)
        (Stream _ ss10 nexts1) (Stream nv1 vs10 nextv1)
        (Stream _ ss20 nexts2) (Stream nv2 vs20 nextv2)
 = Stream (nv1 `addSize` nv2)
          (Nothing, True, sf0, ss10, vs10, ss20, vs2Initial)
          next
 where  vs2Initial = vs20

        -- No segment in progress: read the next tag, then the matching
        -- segment length. (Just n, flag, ...) counts down n elements from
        -- the data stream selected by flag.
        next (Nothing, f, sf, ss1, vs1, ss2, vs2)
         = case nextf sf of
            Done       -> Done
            Update sf' -> Update (Nothing, f, sf', ss1, vs1, ss2, vs2)
            Yield sf' False
             -> case nexts1 ss1 of
                 Done         -> Done
                 Update ss1'  -> Update (Nothing, f,     sf,  ss1', vs1, ss2, vs2)
                 Yield ss1' n -> Update (Just n,  False, sf', ss1', vs1, ss2, vs2)
            Yield sf' True
             -> case nexts2 ss2 of
                 Done         -> Done
                 Update ss2'  -> Update (Nothing, f,    sf,  ss1, vs1, ss2', vs2)
                 Yield ss2' n -> Update (Just n,  True, sf', ss1, vs1, ss2', vs2)

        -- Segment exhausted: go back to reading the tag stream.
        next (Just 0, _, sf, ss1, vs1, ss2, vs2)
         = Update (Nothing, True, sf, ss1, vs1, ss2, vs2)

        -- Copy the next element of the current segment from stream 1.
        next (Just n, False, sf, ss1, vs1, ss2, vs2)
         = case nextv1 vs1 of
            Done         -> Done
            Update vs1'  -> Update (Just n,     False, sf, ss1, vs1', ss2, vs2)
            Yield vs1' x -> Yield  (Just (n-1), False, sf, ss1, vs1', ss2, vs2) x

        -- Copy the next element of the current segment from stream 2.
        next (Just n, True, sf, ss1, vs1, ss2, vs2)
         = case nextv2 vs2 of
            Done         -> Done
            Update vs2'  -> Update (Just n,     True, sf, ss1, vs1, ss2, vs2')
            Yield vs2' x -> Yield  (Just (n-1), True, sf, ss1, vs1, ss2, vs2') x
{-# INLINE [1] combineSegs2 #-}
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/icebox/abandoned/repa-stream/Data/Array/Repa/Stream/Combine.hs | haskell | streams to take the next element from.
If there are insufficient elements in the data streams for the provided
tag stream then `error`.
@
combine2 [F T T F F T] [1 2 3] [4 5 6]
@
# INLINE [1] combine2 #
| Segmented Stream combine. Like `combine2ByTag`, except that the tags select
@
combineSegs2
[F, F, T, F, T, T]
[2,1,3] [10,20,30,40,50,60]
[1,2,3] [11,22,33,44,55,66]
@
^ tag values |
module Data.Array.Repa.Stream.Combine
( combine2
, combineSegs2 )
where
import Data.Array.Repa.Stream.Base
import Prelude hiding (map, zipWith)
| Combine two streams , using a tag stream to tell us which of the data
= [ 1 4 5 2 3 6 ]
combine2 :: Stream Bool -> Stream a -> Stream a -> Stream a
combine2 (Stream size sT0 nextT)
(Stream _ sA0 nextA) (Stream _ sB0 nextB)
= Stream size (Nothing, sT0, sA0, sB0) next
where
next (Nothing, sT, sA, sB)
= case nextT sT of
Yield s' t -> Update (Just t, s', sA, sB)
Update s' -> Update (Nothing, s', sA, sB)
Done -> Done
next (Just False, sT, sA, sB)
= case nextA sA of
Yield sA' x -> Yield (Nothing, sT, sA', sB) x
Update sA' -> Update (Just False, sT, sA', sB)
Done -> error "combine2ByTagS: stream 1 too short"
next (Just True, sT, sA, sB)
= case nextB sB of
Yield sB' x -> Yield (Nothing, sT, sA, sB') x
Update sB' -> Update (Just True, sT, sA, sB')
Done -> error "combine2ByTagS: stream 2 too short"
entire segments of each data stream , instead of selecting one element at a time .
= [ 10,20,30,11,40,50,60,22,33,44,55,66 ]
This says take two elements from the first stream , then another one element
from the first stream , then one element from the second stream , then three
elements from the first stream ...
combineSegs2
^ segment lengths for first data stream
^ first data stream
^ segment lengths for second data stream
^ second data stream
-> Stream a
combineSegs2
(Stream _ sf0 nextf)
(Stream _ ss10 nexts1) (Stream nv1 vs10 nextv1)
(Stream _ ss20 nexts2) (Stream nv2 vs20 nextv2)
= Stream (nv1 `addSize` nv2)
(Nothing, True, sf0, ss10, vs10, ss20, vs20)
next
where next (Nothing, f, sf, ss1, vs1, ss2, vs2)
= case nextf sf of
Done -> Done
Update sf' -> Update (Nothing, f, sf', ss1, vs1, ss2, vs2)
Yield sf' False
-> case nexts1 ss1 of
Done -> Done
Update ss1' -> Update (Nothing, f, sf, ss1', vs1, ss2, vs2)
Yield ss1' n -> Update (Just n, False, sf',ss1', vs1, ss2, vs2)
Yield sf' True
-> case nexts2 ss2 of
Done -> Done
Update ss2' -> Update (Nothing, f, sf, ss1, vs1, ss2', vs2)
Yield ss2' n -> Update (Just n, True, sf',ss1, vs1, ss2', vs2)
next (Just 0, _, sf, ss1, vs1, ss2, vs2)
= Update (Nothing, True, sf, ss1, vs1, ss2, vs2)
next (Just n, False, sf, ss1, vs1, ss2, vs2)
= case nextv1 vs1 of
Done -> Done
Update vs1' -> Update (Just n, False, sf, ss1, vs1', ss2, vs2)
Yield vs1' x -> Yield (Just (n-1), False, sf, ss1, vs1', ss2, vs2) x
next (Just n, True, sf, ss1, vs1, ss2, vs2)
= case nextv2 vs2 of
Done -> Done
Update vs2' -> Update (Just n, True, sf, ss1, vs1, ss2, vs2')
Yield vs2' x -> Yield (Just (n-1), True, sf, ss1, vs1, ss2, vs2') x
# INLINE [ 1 ] combineSegs2 #
|
41a90f762b7dd03f4cce5af9a100921d38c1d22297299d18d01374dff95ff394 | vernemq/vmq-discovery | vmq_discovery_sup.erl | Copyright 2018 Octavo Labs AG Zurich Switzerland ( )
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_discovery_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%====================================================================
%% API functions
%%====================================================================
%% @doc Start the supervisor, registered locally as ?SERVER (= ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%%====================================================================
%% Supervisor callbacks
%%====================================================================
%% @doc Supervisor callback.
%% Child :: {Id, StartFunc, Restart, Shutdown, Type, Modules}
%% Starts with an empty child list; `one_for_all' with at most 0 restarts
%% within 1 second means any later child crash terminates the supervisor.
%% NOTE(review): children are presumably attached dynamically elsewhere —
%% confirm against the callers of this supervisor.
init([]) -> {ok, {{one_for_all, 0, 1}, []}}.
| null | https://raw.githubusercontent.com/vernemq/vmq-discovery/6d25de3c0923c49bf6704543f537b1da56e8588d/src/vmq_discovery_sup.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
API
Supervisor callbacks
====================================================================
API functions
====================================================================
====================================================================
Supervisor callbacks
==================================================================== | Copyright 2018 Octavo Labs AG Zurich Switzerland ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(vmq_discovery_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) -> {ok, {{one_for_all, 0, 1}, []}}.
|
b533e1068769042406e5133a792def13fd7b81b87b7f5dfd3fb416bc97d79c90 | disteph/cdsat | interfaces_basic.ml | (******************************************************************)
(* This file contains basic module types that specify how the kernel
interacts with the other components of Psyche *)
(******************************************************************)
(** Generic interface for printable hashed types *)
module type PH = sig
(* printable, hashable type; eq/ord/show/hash derived via ppx_deriving *)
type t [@@deriving eq,ord,show,hash]
end
(** Generic interface for printable hconsed types *)
module type PHCons = sig
include PH
(* unique integer id of the hash-consed value *)
val id: t -> int
(* NOTE(review): presumably resets the hash-consing tables — confirm
   against implementations *)
val clear: unit->unit
end
(** Collection interface that a Theory needs to provide *)
module type Collection = sig
(* element type *)
type e
(* collection type *)
type t [@@deriving eq,show,hash]
val empty: t
val singleton: e -> t
val add : e -> t -> t
val remove: e -> t -> t
(* usual set-theoretic operations *)
val union: t -> t -> t
val inter: t -> t -> t
val diff: t -> t -> t
val filter: (e -> bool) -> t -> t
val is_empty : t -> bool
val mem : e -> t -> bool
val equal : t -> t -> bool
val subset : t -> t -> bool
(* [choose] picks some element; [next] picks one and also returns the rest.
   NOTE(review): behaviour on an empty collection is not specified here —
   confirm with implementations. *)
val choose : t -> e
val next : t -> e*t
val fold : (e -> 'a -> 'a) -> t -> 'a -> 'a
end
module type Assign = sig
type term
type v
(* an assignment is a collection of (term, value) pairs *)
include Collection with type e = term*v
(* map-like view of the assignment, keyed by term *)
module Map : sig
val mem : term -> t -> bool
(* [find] returns every value associated with the term *)
val find : term -> t -> v list
val remove: term -> t -> t
end
end
(* Type of Monads *)
module type MonadType = sig
type 'a t
(* [return] wraps a value into the monad *)
val return : 'a -> 'a t
(* [bind f m] sequences computations; note the function argument comes first *)
val bind : ('a -> 'b t) -> 'a t -> 'b t
end
| null | https://raw.githubusercontent.com/disteph/cdsat/1b569f3eae59802148f4274186746a9ed3e667ed/src/kernel/kernel.mld/top.mld/interfaces_basic.ml | ocaml | ****************************************************************
This file contains basic module types that specify how the kernel
interacts with the other components of Psyche
****************************************************************
Type of Monads |
Generic interface for printable hashed types
module type PH = sig
type t [@@deriving eq,ord,show,hash]
end
Generic interface for printable hconsed types
module type PHCons = sig
include PH
val id: t -> int
val clear: unit->unit
end
Collection Interface that Theory needs to provide for
module type Collection = sig
type e
type t [@@deriving eq,show,hash]
val empty: t
val singleton: e -> t
val add : e -> t -> t
val remove: e -> t -> t
val union: t -> t -> t
val inter: t -> t -> t
val diff: t -> t -> t
val filter: (e -> bool) -> t -> t
val is_empty : t -> bool
val mem : e -> t -> bool
val equal : t -> t -> bool
val subset : t -> t -> bool
val choose : t -> e
val next : t -> e*t
val fold : (e -> 'a -> 'a) -> t -> 'a -> 'a
end
module type Assign = sig
type term
type v
include Collection with type e = term*v
module Map : sig
val mem : term -> t -> bool
val find : term -> t -> v list
val remove: term -> t -> t
end
end
module type MonadType = sig
type 'a t
val return : 'a -> 'a t
val bind : ('a -> 'b t) -> 'a t -> 'b t
end
|
2ac7294231f86d62902231fb4a955e02d58b241ef113d7ff42746ad1bae081b6 | nibbula/yew | package.lisp | ;;;
;;; package.lisp - Package definition for UNICODE.
;;;
;; Defect fixed: the "Generic / compatibility" section label had lost its
;; ";;" marker, so three bogus symbols would have been read into the
;; :export list.
(defpackage :unicode
  (:documentation "Package definition for UNICODE.")
  (:use :cl :dlib)
  (:export
   ;; char-width
   #:char-grid-width
   ;; utf8
   #:get-utf8-char #:%get-utf8-char
   #:length-in-utf8-bytes
   #:put-utf8-char #:%put-utf8-char
   #:string-to-utf8-bytes
   #:utf8-bytes-to-string
   ;; utf8b
   #:get-utf8b-char #:%get-utf8b-char
   #:length-in-utf8b-bytes
   #:put-utf8b-char #:%put-utf8b-char
   #:string-to-utf8b-bytes
   #:utf8b-bytes-to-string
   ;; generic / compatibility
   #:list-character-encodings
   #:string-to-octets
   #:octets-to-string
   #:string-size-in-octets
   #:vector-size-in-chars
   ;; encoding construction
   #:define-string-converters
   #:*encodings*
   #:register-encoding
   ))
(in-package :unicode)
;; End
| null | https://raw.githubusercontent.com/nibbula/yew/e7fbc900e92b1295c4ee640e89ed8c4a60a4b84f/unicode/package.lisp | lisp |
char-width
utf8
utf8b
encoding construction
End | package.lisp - Package definition for UNICODE .
(defpackage :unicode
(:documentation "Package definition for UNICODE.")
(:use :cl :dlib)
(:export
#:char-grid-width
#:get-utf8-char #:%get-utf8-char
#:length-in-utf8-bytes
#:put-utf8-char #:%put-utf8-char
#:string-to-utf8-bytes
#:utf8-bytes-to-string
#:get-utf8b-char #:%get-utf8b-char
#:length-in-utf8b-bytes
#:put-utf8b-char #:%put-utf8b-char
#:string-to-utf8b-bytes
#:utf8b-bytes-to-string
Generic / compatibility
#:list-character-encodings
#:string-to-octets
#:octets-to-string
#:string-size-in-octets
#:vector-size-in-chars
#:define-string-converters
#:*encodings*
#:register-encoding
))
(in-package :unicode)
|
0c37dcc0934f08805b6523a4b139420657e63698a94c2f0e57cb6a6ab8134a3d | amnh/poy5 | gz.mli | (** Zlib interface *)
(** The module [Gz] redefines most of the I/O functions of
[Pervasives] to allow I/O on compressed files (using the
[zlib] library). It doesn't use [zlib] lower-level
functions, so there might be some way to write a more efficient
interface ; this one is however, very small (and didn't take much
time to write!). *)
(** {2 Datatypes & exceptions} *)
type in_channel
type out_channel
(** When exception [Error] is raised, the channel is automatically closed. *)
exception Error of string
val version : string
(** {2 Output functions} *)
type zstrategy = | Default | Filtered | Huffman_only
(** [open_out] opens the given filename for writing.
    @param compression specifies the level of compression: 0
    is no compression, 1 is fastest, 9 is best but slowest. The default
    is a compromise (6, I think).
    @param strategy refer to the [zlib] manual (i.e. the header
    file [zlib.h]) *)
val open_out : ?compression:int -> ?strategy:zstrategy ->
string -> out_channel
(** [setparams] modifies the two parameters of an opened channel *)
external setparams : out_channel -> compression:int -> strategy:zstrategy -> unit
= "mlgz_gzsetparams"
(** These functions output substrings, strings, char or char value
    ([int] argument). The [external] ones use the [zlib]
    functions, the usual Caml ones are only wrappers around
    those. [output_string] and [write] will correctly handle
    null characters embedded in Caml strings. [output_value] uses
    [Marshal] module.
*)
external write : out_channel -> buf:string -> pos:int -> len:int -> unit
= "mlgz_gzwrite"
external output_string : out_channel -> string -> unit
= "mlgz_gzputs"
external output_char : out_channel -> char -> unit
= "mlgz_gzputc"
external output_byte : out_channel -> int -> unit
= "mlgz_gzputc"
val output_newline : out_channel -> unit
val output_endline : out_channel -> string -> unit
val output_value : out_channel -> 'a -> unit
type flush = | Sync_flush | Full_flush | Finish_flush
val flush : ?flush:flush -> out_channel -> unit
(** The [flush] function should be used with caution because it can
degrade compression.
@param flush defaults to [Sync_flush].
*)
(** [seek_out] sets the position of the next write operation on the
    channel. Only forward seeks are supported; [seek_out] then
    compresses a sequence of zeroes up to the new starting position.
    @raise Invalid_argument if called with a negative offset.
*)
val seek_out : out_channel -> offset:int -> unit
val pos_out : out_channel -> int
(** [close_out] flushes all pending output if necessary, closes the
compressed file and deallocates all the (de)compression state. Any
subsequent use of the channel will raise an [Error] exception. *)
external close_out : out_channel -> unit
= "mlgz_gzclose"
(** {2 Input functions} *)
val open_in : string -> in_channel
(** [read] reads characters from the stream and returns the number
    of bytes actually read; it does not raise [End_of_file].
    [input_char] and [input_line] should
    appropriately raise [End_of_file] if necessary.
    [input_value] uses [Marshal] module.
*)
external read : in_channel -> buf:string -> pos:int -> len:int -> int
= "mlgz_gzread"
external input_char : in_channel -> char
= "mlgz_gzgetc"
val input_line : in_channel -> string
val input_value : in_channel -> 'a
external rewind : in_channel -> unit
= "mlgz_gzrewind"
val seek_in : in_channel -> offset:int -> unit
(** The [seek_in] function is emulated but can be extremely slow. *)
val pos_in : in_channel -> int
external close_in : in_channel -> unit
= "mlgz_gzclose"
(** {2 In-memory compression} *)
(** These functions compress and uncompress from a string to another
string. *)
external compress : ?compression:int -> string -> pos:int -> len:int -> string
= "mlgz_compress"
external uncompress : string -> pos:int -> len:int -> string
= "mlgz_uncompress"
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/gz-0.5.7/gz.mli | ocaml | * Zlib interface
* The module [Gz] redefines most of the I/O functions of
[Pervasives] to allow I/O on compressed files (using the
[zlib] library). It doesn't use [zlib] lower-level
functions, so there might be some way to write a more efficient
interface ; this one is however, very small (and didn't take much
time to write!).
* When exception [Error] is raised, the channel is automatically closed.
* The [flush] function should be used with caution because it can
degrade compression.
@param flush defaults to [Sync_flush].
* [close_out] flushes all pending output if necessary, closes the
compressed file and deallocates all the (de)compression state. Any
subsequent use of the channel will raise an [Error] exception.
* The [seek_in] function is emulated but can be extremely slow.
* These functions compress and uncompress from a string to another
string. |
* { 2 Datatypes & exceptions }
type in_channel
type out_channel
exception Error of string
val version : string
* { 2 Output funcions }
type zstrategy = | Default | Filtered | Huffman_only
* [ open_out ] opens the given filename for writing .
@param compression specifies the level of compression : 0
is no compression , 1 is fastest , 9 is best but slowest . The default
is a compromise ( 6 , I think ) .
@param strategy refer to the [ zlib ] manual ( i.e the header
file [ zlib.h ] )
@param compression specifies the level of compression : 0
is no compression, 1 is fastest , 9 is best but slowest. The default
is a compromise (6, I think).
@param strategy refer to the [zlib] manual (i.e the header
file [zlib.h]) *)
val open_out : ?compression:int -> ?strategy:zstrategy ->
string -> out_channel
* [ setparams ] modifies the two parameters of an opened channel
external setparams : out_channel -> compression:int -> strategy:zstrategy -> unit
= "mlgz_gzsetparams"
* These functions output substrings , strings , char or char value
( [ int ] argument ) . The [ external ] ones use the [ zlib ]
functions , the usual ones are only wrappers around
those . [ output_string ] and [ write ] will correctly handle
null characters embedded in strings . [ output_value ] uses
[ Marshal ] module .
([int] argument). The [external] ones use the [zlib]
functions, the usual Caml ones are only wrappers around
those. [output_string] and [write] will correctly handle
null characters embedded in Caml strings. [output_value] uses
[Marshal] module.
*)
external write : out_channel -> buf:string -> pos:int -> len:int -> unit
= "mlgz_gzwrite"
external output_string : out_channel -> string -> unit
= "mlgz_gzputs"
external output_char : out_channel -> char -> unit
= "mlgz_gzputc"
external output_byte : out_channel -> int -> unit
= "mlgz_gzputc"
val output_newline : out_channel -> unit
val output_endline : out_channel -> string -> unit
val output_value : out_channel -> 'a -> unit
type flush = | Sync_flush | Full_flush | Finish_flush
val flush : ?flush:flush -> out_channel -> unit
* [ seek_out ] set the position of the next write operation on the
channel . Only forward seeks are supported ; [ seek_out ] then
compresses a sequence of zeroes up to the new starting position . It
@raise Invalid_argument if called with a negative offset .
channel. Only forward seeks are supported; [seek_out] then
compresses a sequence of zeroes up to the new starting position. It
@raise Invalid_argument if called with a negative offset.
*)
val seek_out : out_channel -> offset:int -> unit
val pos_out : out_channel -> int
external close_out : out_channel -> unit
= "mlgz_gzclose"
* { 2 Input functions }
val open_in : string -> in_channel
* [ read ] reads characters from the stream and returns the number
of bytes actually read ; it does not raise [ End_of_file ] .
[ input_char ] and [ input_line ] should
appropriately raise [ End_of_file ] if necessary .
[ input_value ] uses [ Marshal ] module .
of bytes actually read ; it does not raise [End_of_file].
[input_char] and [input_line] should
appropriately raise [End_of_file] if necessary.
[input_value] uses [Marshal] module.
*)
external read : in_channel -> buf:string -> pos:int -> len:int -> int
= "mlgz_gzread"
external input_char : in_channel -> char
= "mlgz_gzgetc"
val input_line : in_channel -> string
val input_value : in_channel -> 'a
external rewind : in_channel -> unit
= "mlgz_gzrewind"
val seek_in : in_channel -> offset:int -> unit
val pos_in : in_channel -> int
external close_in : in_channel -> unit
= "mlgz_gzclose"
* { 2 In - memory compression }
external compress : ?compression:int -> string -> pos:int -> len:int -> string
= "mlgz_compress"
external uncompress : string -> pos:int -> len:int -> string
= "mlgz_uncompress"
|
e8877ad70a3f3f037c55e41ccfd5f5a9ed7a11cc1d01a8903a1ae5b41f2e8790 | csabahruska/jhc-components | Infix.hs | module FrontEnd.Infix (
buildFixityMap, infixHsModule, FixityMap,size,
infixStatement, restrictFixityMap, dumpFixityMap) where
import Data.Binary
import Data.Monoid
import qualified Data.Map as Map
import FrontEnd.HsSyn
import FrontEnd.Lex.ParseMonad
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import Name.Names
import Support.MapBinaryInstance
import Util.HasSize
import qualified FrontEnd.Lex.Fixity as F
type FixityInfo = (Int, HsAssoc)
type SymbolMap = Map.Map Name FixityInfo
newtype FixityMap = FixityMap SymbolMap
deriving(Monoid,HasSize)
-- Delegate (de)serialisation to the underlying Map; putMap/getMap
-- presumably come from Support.MapBinaryInstance — confirm.
instance Binary FixityMap where
    put (FixityMap ts) = putMap ts
    get = fmap FixityMap getMap
-- Keep only the fixity entries whose name satisfies the predicate.
restrictFixityMap :: (Name -> Bool) -> FixityMap -> FixityMap
restrictFixityMap keep (FixityMap fm) =
    FixityMap (Map.filterWithKey (\name _fixity -> keep name) fm)
-- Debug helper: print each (name, (precedence, associativity)) entry,
-- one per line (FixityInfo = (Int, HsAssoc)).
dumpFixityMap :: FixityMap -> IO ()
dumpFixityMap (FixityMap ts) = do
    mapM_ print (Map.toList ts)
infixHsModule :: FixityMap -> HsModule -> HsModule
infixHsModule (FixityMap ism) m = domod m where
(expShuntSpec,pexpShuntSpec) = (expShuntSpec,pexpShuntSpec) where
pexpShuntSpec = expShuntSpec {
F.operator = paren_operator, F.trailingOps }
expShuntSpec = F.shuntSpec {
F.lookupToken,
F.application ,
F.operator,
F.lookupUnary }
lookupToken (HsBackTick bt) = backtick bt
lookupToken (HsAsPat x v) = mr (HsAsPat x) v
lookupToken (HsLocatedExp (Located sl v)) = mr (HsLocatedExp . Located sl) v
lookupToken t = return (Left t)
lookupUnary t = return Nothing
application e1 e2 = return $ HsApp e1 (hsParen e2)
operator (HsBackTick t) as = operator t as
operator (HsVar v) [e] | v == v_sub = return $ HsNegApp (hsParen e)
operator t as = return $ foldl HsApp t (map hsParen as)
paren_operator (HsBackTick t) as = paren_operator t as
paren_operator (HsVar v) [e] | v == v_sub = return $ HsNegApp (hsParen e)
paren_operator t [e] = return $ HsRightSection (hsParen e) t
paren_operator t as = operator t as
trailingOps e (HsBackTick t) = trailingOps e t
trailingOps e t = return $ HsLeftSection t (hsParen e)
backtick bt = f bt where
f (HsVar v) = g v
f ~(HsCon v) = g v
f ( HsAsPat _ v ) = v
g v = return $ case Map.lookup v ism of
Just (n,HsAssocLeft) -> Right (F.L,n)
Just (n,HsAssocRight) -> Right (F.R,n)
Just (n,HsAssocNone) -> Right (F.N,n)
Just (n,HsAssocPrefix) -> Right (F.Prefix,n)
Just (n,HsAssocPrefixy) -> Right (F.Prefixy,n)
Nothing -> Right (F.L,9)
mr x v = do
n <- lookupToken v
case n of
Left v -> return $ Left $ x v
Right {} -> return n
patShuntSpec = F.shuntSpec {
F.lookupToken,
F.application,
F.operator,
F.lookupUnary } where
lookupToken (HsPatBackTick bt) = backtick bt
lookupToken t = return (Left t)
lookupUnary t = return Nothing
application (HsPApp t es) y = return $ HsPApp t (es ++ [y])
application x y = do
parseErrorK $ "weird application: " ++ show (x,y)
return HsPWildCard
operator ~(HsPatBackTick t) as = f t as where
f (HsPVar v) [e] | v == u_Bang = do sl <- getSrcSpan; return $ HsPBangPat (Located sl e)
f (HsPVar v) [e] | v == u_Twiddle = do sl <- getSrcSpan; return $ HsPIrrPat (Located sl e)
f (HsPVar v) [HsPVar ap, e] | v == u_At = do sl <- getSrcSpan; return $ HsPAsPat ap e
f (HsPVar v) [HsPWildCard, e] | v == u_At = do return e
f (HsPVar v) [e] | originalUnqualifiedName v == vu_sub = return $ HsPNeg e
f (HsPApp t xs) y = return $ HsPApp t (xs ++ y)
f x@(HsPVar v) y = do
parseErrorK $ "weird operator: " ++ show (v,originalUnqualifiedName v,x,y)
return HsPWildCard
f x y = do
parseErrorK $ "weird operator: " ++ show (x,y)
return HsPWildCard
backtick bt = f bt where
f (HsPVar v) | v == u_Bang = return (Right (F.Prefix,11))
f (HsPVar v) | v == u_Twiddle = return (Right (F.Prefix,11))
f (HsPVar v) | v == u_At = return (Right (F.R,12))
f (HsPVar v) = g v
f (HsPApp v []) = g v
f z = parseErrorK $ "infix.f: " ++ show z
g v = return $ case Map.lookup v ism of
Just (n,HsAssocLeft) -> Right (F.L,n)
Just (n,HsAssocRight) -> Right (F.R,n)
Just (n,HsAssocNone) -> Right (F.N,n)
Just (n,HsAssocPrefix) -> Right (F.Prefix,n)
Just (n,HsAssocPrefixy) -> Right (F.Prefixy,n)
Nothing -> Right (F.L,9)
domod m = case runP (traverseHsOps ops m) (hsModuleOpt m) of
(ws,~(Just v)) -> if null ws then v else error $ unlines (map show ws)
ops = (hsOpsDefault ops) { opHsExp, opHsPat } where
opHsExp (HsParen (HsWords es)) = F.shunt pexpShuntSpec es >>= applyHsOps ops
opHsExp (HsWords es) = F.shunt expShuntSpec es >>= applyHsOps ops
opHsExp (HsBackTick t) = parseErrorK "unexpected binary operator."
opHsExp e = traverseHsOps ops e
opHsPat (HsPatWords ws) = F.shunt patShuntSpec ws >>= applyHsOps ops
opHsPat p = traverseHsOps ops p
buildFixityMap :: [HsDecl] -> FixityMap
buildFixityMap ds = FixityMap (Map.fromList $ concatMap f ds) where
f (HsInfixDecl _ assoc strength names) = zip (map make_key names) $ repeat (strength,assoc)
f _ = []
make_key = fromValishHsName
--make_key a_name = case a_name of
-- (Qual a_module name) -> (a_module, name)
( UnQual name ) - > ( unqualModule , name )
-- TODO: interactive
infixStatement :: FixityMap -> HsStmt -> HsStmt
infixStatement (FixityMap ism) m = m
infixStatement ( FixityMap ism ) m = processStmt ism m
| null | https://raw.githubusercontent.com/csabahruska/jhc-components/a7dace481d017f5a83fbfc062bdd2d099133adf1/jhc-frontend/src/FrontEnd/Infix.hs | haskell | make_key a_name = case a_name of
(Qual a_module name) -> (a_module, name)
TODO: interactive | module FrontEnd.Infix (
buildFixityMap, infixHsModule, FixityMap,size,
infixStatement, restrictFixityMap, dumpFixityMap) where
import Data.Binary
import Data.Monoid
import qualified Data.Map as Map
import FrontEnd.HsSyn
import FrontEnd.Lex.ParseMonad
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import Name.Names
import Support.MapBinaryInstance
import Util.HasSize
import qualified FrontEnd.Lex.Fixity as F
type FixityInfo = (Int, HsAssoc)
type SymbolMap = Map.Map Name FixityInfo
newtype FixityMap = FixityMap SymbolMap
deriving(Monoid,HasSize)
instance Binary FixityMap where
put (FixityMap ts) = putMap ts
get = fmap FixityMap getMap
restrictFixityMap :: (Name -> Bool) -> FixityMap -> FixityMap
restrictFixityMap f (FixityMap fm) = FixityMap (Map.filterWithKey (\k _ -> f k) fm)
dumpFixityMap :: FixityMap -> IO ()
dumpFixityMap (FixityMap ts) = do
mapM_ print (Map.toList ts)
infixHsModule :: FixityMap -> HsModule -> HsModule
infixHsModule (FixityMap ism) m = domod m where
(expShuntSpec,pexpShuntSpec) = (expShuntSpec,pexpShuntSpec) where
pexpShuntSpec = expShuntSpec {
F.operator = paren_operator, F.trailingOps }
expShuntSpec = F.shuntSpec {
F.lookupToken,
F.application ,
F.operator,
F.lookupUnary }
lookupToken (HsBackTick bt) = backtick bt
lookupToken (HsAsPat x v) = mr (HsAsPat x) v
lookupToken (HsLocatedExp (Located sl v)) = mr (HsLocatedExp . Located sl) v
lookupToken t = return (Left t)
lookupUnary t = return Nothing
application e1 e2 = return $ HsApp e1 (hsParen e2)
operator (HsBackTick t) as = operator t as
operator (HsVar v) [e] | v == v_sub = return $ HsNegApp (hsParen e)
operator t as = return $ foldl HsApp t (map hsParen as)
paren_operator (HsBackTick t) as = paren_operator t as
paren_operator (HsVar v) [e] | v == v_sub = return $ HsNegApp (hsParen e)
paren_operator t [e] = return $ HsRightSection (hsParen e) t
paren_operator t as = operator t as
trailingOps e (HsBackTick t) = trailingOps e t
trailingOps e t = return $ HsLeftSection t (hsParen e)
backtick bt = f bt where
f (HsVar v) = g v
f ~(HsCon v) = g v
f ( HsAsPat _ v ) = v
g v = return $ case Map.lookup v ism of
Just (n,HsAssocLeft) -> Right (F.L,n)
Just (n,HsAssocRight) -> Right (F.R,n)
Just (n,HsAssocNone) -> Right (F.N,n)
Just (n,HsAssocPrefix) -> Right (F.Prefix,n)
Just (n,HsAssocPrefixy) -> Right (F.Prefixy,n)
Nothing -> Right (F.L,9)
mr x v = do
n <- lookupToken v
case n of
Left v -> return $ Left $ x v
Right {} -> return n
patShuntSpec = F.shuntSpec {
F.lookupToken,
F.application,
F.operator,
F.lookupUnary } where
lookupToken (HsPatBackTick bt) = backtick bt
lookupToken t = return (Left t)
lookupUnary t = return Nothing
application (HsPApp t es) y = return $ HsPApp t (es ++ [y])
application x y = do
parseErrorK $ "weird application: " ++ show (x,y)
return HsPWildCard
operator ~(HsPatBackTick t) as = f t as where
f (HsPVar v) [e] | v == u_Bang = do sl <- getSrcSpan; return $ HsPBangPat (Located sl e)
f (HsPVar v) [e] | v == u_Twiddle = do sl <- getSrcSpan; return $ HsPIrrPat (Located sl e)
f (HsPVar v) [HsPVar ap, e] | v == u_At = do sl <- getSrcSpan; return $ HsPAsPat ap e
f (HsPVar v) [HsPWildCard, e] | v == u_At = do return e
f (HsPVar v) [e] | originalUnqualifiedName v == vu_sub = return $ HsPNeg e
f (HsPApp t xs) y = return $ HsPApp t (xs ++ y)
f x@(HsPVar v) y = do
parseErrorK $ "weird operator: " ++ show (v,originalUnqualifiedName v,x,y)
return HsPWildCard
f x y = do
parseErrorK $ "weird operator: " ++ show (x,y)
return HsPWildCard
backtick bt = f bt where
f (HsPVar v) | v == u_Bang = return (Right (F.Prefix,11))
f (HsPVar v) | v == u_Twiddle = return (Right (F.Prefix,11))
f (HsPVar v) | v == u_At = return (Right (F.R,12))
f (HsPVar v) = g v
f (HsPApp v []) = g v
f z = parseErrorK $ "infix.f: " ++ show z
g v = return $ case Map.lookup v ism of
Just (n,HsAssocLeft) -> Right (F.L,n)
Just (n,HsAssocRight) -> Right (F.R,n)
Just (n,HsAssocNone) -> Right (F.N,n)
Just (n,HsAssocPrefix) -> Right (F.Prefix,n)
Just (n,HsAssocPrefixy) -> Right (F.Prefixy,n)
Nothing -> Right (F.L,9)
domod m = case runP (traverseHsOps ops m) (hsModuleOpt m) of
(ws,~(Just v)) -> if null ws then v else error $ unlines (map show ws)
ops = (hsOpsDefault ops) { opHsExp, opHsPat } where
opHsExp (HsParen (HsWords es)) = F.shunt pexpShuntSpec es >>= applyHsOps ops
opHsExp (HsWords es) = F.shunt expShuntSpec es >>= applyHsOps ops
opHsExp (HsBackTick t) = parseErrorK "unexpected binary operator."
opHsExp e = traverseHsOps ops e
opHsPat (HsPatWords ws) = F.shunt patShuntSpec ws >>= applyHsOps ops
opHsPat p = traverseHsOps ops p
buildFixityMap :: [HsDecl] -> FixityMap
buildFixityMap ds = FixityMap (Map.fromList $ concatMap f ds) where
f (HsInfixDecl _ assoc strength names) = zip (map make_key names) $ repeat (strength,assoc)
f _ = []
make_key = fromValishHsName
( UnQual name ) - > ( unqualModule , name )
infixStatement :: FixityMap -> HsStmt -> HsStmt
infixStatement (FixityMap ism) m = m
infixStatement ( FixityMap ism ) m = processStmt ism m
|
5f798bf64d177f412a548df50f70eac0bcef786432cc66fee348474e63405de6 | ahf/ircd-scylla | log.ml |
 * Copyright (c) 2015 Alexander Færøy. All rights reserved.
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file.
 *)
open Sexplib.Std
module Make (C : V1_LWT.CONSOLE) =
struct
module Level =
struct
type t =
| Debug
| Info
| Notice
| Warning
| Error
let to_string log_level =
match log_level with
| Debug -> "debug"
| Info -> "info"
| Notice -> "notice"
| Warning -> "warning"
| Error -> "error"
exception LogLevelError of string
let from_string s =
match (String.lowercase s) with
| "debug" -> Debug
| "info" -> Info
| "notice" -> Notice
| "warning" -> Warning
| "error" -> Error
| _ -> raise (LogLevelError s)
let to_colour log_level =
match log_level with
| Debug -> Colour.Gray
| Info -> Colour.Cyan
| Notice -> Colour.Purple
| Warning -> Colour.Yellow
| Error -> Colour.Red
let to_integer log_level =
match log_level with
| Debug -> 0
| Info -> 1
| Notice -> 2
| Warning -> 3
| Error -> 4
end
open Clock
type t =
{
min_level : Level.t;
console : C.t;
}
let create min_level console = {
min_level;
console
}
let log log level fmt =
let f = fun s ->
let level_int = Level.to_integer level in
let min_level_int = Level.to_integer log.min_level in
if level_int >= min_level_int then
let t = Clock.gmtime (Clock.time ()) in
let timestamp = Printf.sprintf "%02d/%02d/%d %02d:%02d:%02d" t.tm_mday (t.tm_mon + 1) (t.tm_year + 1900) t.tm_hour t.tm_min t.tm_sec in
let colour = Level.to_colour level in
let message = Printf.sprintf "%s [%s] %s" timestamp (Level.to_string level) (Colour.format colour s) in
C.log log.console message in
Printf.ksprintf f fmt
end
| null | https://raw.githubusercontent.com/ahf/ircd-scylla/d06bfa61cd75a44c4f5493f316fba23143c32a78/log.ml | ocaml |
* Copyright ( c ) 2015 . All rights reserved .
* Use of this source code is governed by a BSD - style
* license that can be found in the LICENSE file .
* Copyright (c) 2015 Alexander Færøy. All rights reserved.
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file.
*)
open Sexplib.Std
module Make (C : V1_LWT.CONSOLE) =
struct
module Level =
struct
type t =
| Debug
| Info
| Notice
| Warning
| Error
let to_string log_level =
match log_level with
| Debug -> "debug"
| Info -> "info"
| Notice -> "notice"
| Warning -> "warning"
| Error -> "error"
exception LogLevelError of string
let from_string s =
match (String.lowercase s) with
| "debug" -> Debug
| "info" -> Info
| "notice" -> Notice
| "warning" -> Warning
| "error" -> Error
| _ -> raise (LogLevelError s)
let to_colour log_level =
match log_level with
| Debug -> Colour.Gray
| Info -> Colour.Cyan
| Notice -> Colour.Purple
| Warning -> Colour.Yellow
| Error -> Colour.Red
let to_integer log_level =
match log_level with
| Debug -> 0
| Info -> 1
| Notice -> 2
| Warning -> 3
| Error -> 4
end
open Clock
type t =
{
min_level : Level.t;
console : C.t;
}
let create min_level console = {
min_level;
console
}
let log log level fmt =
let f = fun s ->
let level_int = Level.to_integer level in
let min_level_int = Level.to_integer log.min_level in
if level_int >= min_level_int then
let t = Clock.gmtime (Clock.time ()) in
let timestamp = Printf.sprintf "%02d/%02d/%d %02d:%02d:%02d" t.tm_mday (t.tm_mon + 1) (t.tm_year + 1900) t.tm_hour t.tm_min t.tm_sec in
let colour = Level.to_colour level in
let message = Printf.sprintf "%s [%s] %s" timestamp (Level.to_string level) (Colour.format colour s) in
C.log log.console message in
Printf.ksprintf f fmt
end
| |
50109cf603e3986ccf7cbbef816c40d7287d44b9b8af1793914e1402525a5a44 | racket/gui | init.rkt | #lang racket/base
(require ffi/unsafe
"utils.rkt"
"types.rkt"
"queue.rkt")
(define-gtk gtk_rc_parse_string (_fun _string -> _void))
(define-gtk gtk_rc_add_default_file (_fun _path -> _void))
(when (eq? 'windows (system-type))
(let ([dir (simplify-path (build-path (collection-path "racket") 'up 'up "lib"))])
(gtk_rc_parse_string (format "module_path \"~a\"\n" dir))
(gtk_rc_add_default_file (build-path dir "gtkrc"))))
(define pump-thread (gtk-start-event-pump))
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-lib/mred/private/wx/gtk/init.rkt | racket | #lang racket/base
(require ffi/unsafe
"utils.rkt"
"types.rkt"
"queue.rkt")
(define-gtk gtk_rc_parse_string (_fun _string -> _void))
(define-gtk gtk_rc_add_default_file (_fun _path -> _void))
(when (eq? 'windows (system-type))
(let ([dir (simplify-path (build-path (collection-path "racket") 'up 'up "lib"))])
(gtk_rc_parse_string (format "module_path \"~a\"\n" dir))
(gtk_rc_add_default_file (build-path dir "gtkrc"))))
(define pump-thread (gtk-start-event-pump))
| |
945a8fa0c8a6f37b699cbab39df04e50f0624bede60cc09e0aaeccd56cf725c5 | fulcrologic/fulcro | server_render.cljc | (ns com.fulcrologic.fulcro.algorithms.server-render
(:require
[com.fulcrologic.fulcro.algorithms.transit :as transit]
[com.fulcrologic.fulcro.algorithms.merge :as merge]
[com.fulcrologic.fulcro.algorithms.normalize :refer [tree->db]]
[com.fulcrologic.fulcro.algorithms.do-not-use :refer [base64-encode base64-decode]]))
(defn initial-state->script-tag
"Returns a *string* containing an HTML script tag that that sets js/window.INITIAL_APP_STATE to a transit-encoded string version of initial-state.
`opts` is a map to be passed to the transit writer.
`string-transform` should be a function with 1 argument. The stringified app-state is passed to it.
This is the place to perform additional string replacement operations to escape special characters,
as in the case of encoded polylines."
([initial-state] (initial-state->script-tag initial-state {} identity))
([initial-state opts] (initial-state->script-tag initial-state opts identity))
([initial-state opts string-transform]
(let [state-string (-> (transit/transit-clj->str initial-state opts)
(string-transform)
(base64-encode))
assignment (str "window.INITIAL_APP_STATE = '" state-string "'")]
(str
"<script type='text/javascript'>\n"
assignment
"\n</script>\n"))))
#?(:cljs
(defn get-SSR-initial-state
"Obtain the value of the INITIAL_APP_STATE set from server-side rendering. Use initial-state->script-tag on the server to embed the state."
([] (get-SSR-initial-state {}))
([opts]
(when-let [state-string (some-> js/window .-INITIAL_APP_STATE base64-decode)]
(transit/transit-str->clj state-string opts)))))
(defn build-initial-state
"This function normalizes the given state-tree using the root-component's query into standard client db format,
it then walks the query and adds any missing data from union branches that are not the 'default' branch
on the union itself. E.g. A union with initial state can only point to one thing, but you need the other branches
in the normalized application database. Assumes all components (except possibly root-class) that need initial state
use `:initial-state`.
Useful for building a pre-populated db for server-side rendering.
Returns a normalized client db with all union alternates initialized to their InitialAppState."
[state-tree root-class]
(let [base-state (tree->db root-class state-tree true (merge/pre-merge-transform {}))
base-state (merge/merge-alternate-union-elements base-state root-class)]
base-state))
| null | https://raw.githubusercontent.com/fulcrologic/fulcro/71ed79c650222567ac4a566513365a95f12657e3/src/main/com/fulcrologic/fulcro/algorithms/server_render.cljc | clojure | (ns com.fulcrologic.fulcro.algorithms.server-render
(:require
[com.fulcrologic.fulcro.algorithms.transit :as transit]
[com.fulcrologic.fulcro.algorithms.merge :as merge]
[com.fulcrologic.fulcro.algorithms.normalize :refer [tree->db]]
[com.fulcrologic.fulcro.algorithms.do-not-use :refer [base64-encode base64-decode]]))
(defn initial-state->script-tag
"Returns a *string* containing an HTML script tag that that sets js/window.INITIAL_APP_STATE to a transit-encoded string version of initial-state.
`opts` is a map to be passed to the transit writer.
`string-transform` should be a function with 1 argument. The stringified app-state is passed to it.
This is the place to perform additional string replacement operations to escape special characters,
as in the case of encoded polylines."
([initial-state] (initial-state->script-tag initial-state {} identity))
([initial-state opts] (initial-state->script-tag initial-state opts identity))
([initial-state opts string-transform]
(let [state-string (-> (transit/transit-clj->str initial-state opts)
(string-transform)
(base64-encode))
assignment (str "window.INITIAL_APP_STATE = '" state-string "'")]
(str
"<script type='text/javascript'>\n"
assignment
"\n</script>\n"))))
#?(:cljs
(defn get-SSR-initial-state
"Obtain the value of the INITIAL_APP_STATE set from server-side rendering. Use initial-state->script-tag on the server to embed the state."
([] (get-SSR-initial-state {}))
([opts]
(when-let [state-string (some-> js/window .-INITIAL_APP_STATE base64-decode)]
(transit/transit-str->clj state-string opts)))))
(defn build-initial-state
"This function normalizes the given state-tree using the root-component's query into standard client db format,
it then walks the query and adds any missing data from union branches that are not the 'default' branch
on the union itself. E.g. A union with initial state can only point to one thing, but you need the other branches
in the normalized application database. Assumes all components (except possibly root-class) that need initial state
use `:initial-state`.
Useful for building a pre-populated db for server-side rendering.
Returns a normalized client db with all union alternates initialized to their InitialAppState."
[state-tree root-class]
(let [base-state (tree->db root-class state-tree true (merge/pre-merge-transform {}))
base-state (merge/merge-alternate-union-elements base-state root-class)]
base-state))
| |
663028a40363f4454b9f562f2c74fe5e3926fe6e78ea9ea34874ff2b7a1439b4 | dsorokin/aivika | Specs.hs |
-- |
-- Module : Simulation.Aivika.Specs
Copyright : Copyright ( c ) 2009 - 2017 , < >
-- License : BSD3
Maintainer : < >
-- Stability : experimental
Tested with : GHC 8.0.1
--
-- It defines the simulation specs and functions for this data type.
module Simulation.Aivika.Specs
(-- * Simulation Specs
Specs(..),
Method(..),
-- * Auxiliary Functions
basicTime,
integIterationBnds,
integIterationHiBnd,
integIterationLoBnd,
integPhaseBnds,
integPhaseHiBnd,
integPhaseLoBnd,
integTimes,
timeGrid) where
import Simulation.Aivika.Internal.Specs
| null | https://raw.githubusercontent.com/dsorokin/aivika/7a14f460ab114b0f8cdfcd05d5cc889fdc2db0a4/Simulation/Aivika/Specs.hs | haskell | |
Module : Simulation.Aivika.Specs
License : BSD3
Stability : experimental
It defines the simulation specs and functions for this data type.
* Simulation Specs
* Auxiliary Functions |
Copyright : Copyright ( c ) 2009 - 2017 , < >
Maintainer : < >
Tested with : GHC 8.0.1
module Simulation.Aivika.Specs
Specs(..),
Method(..),
basicTime,
integIterationBnds,
integIterationHiBnd,
integIterationLoBnd,
integPhaseBnds,
integPhaseHiBnd,
integPhaseLoBnd,
integTimes,
timeGrid) where
import Simulation.Aivika.Internal.Specs
|
536ca1b2b4732eb03b961a46627d45f666e14f85653efbca7acff096f9f55e46 | static-analysis-engineering/codehawk | jCHSimplify.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHConstantPropagationNoArrays
open CHLanguage
(* jchlib *)
open JCHCopyPropagationNoArrays
class skip_remover_t :
object
method walkBoolExp : boolean_exp_t -> unit
method walkCmd : (code_int, cfg_int) command_t -> unit
method walkCode : code_int -> unit
method walkNumExp : numerical_exp_t -> unit
method walkSymExp : symbolic_exp_t -> unit
method walkVar : variable_t -> unit
end
class simplifier_t :
system_int ->
object
method copy_propagation : copy_propagation_no_arrays_t
method cst_propagation : constant_propagation_no_arrays_t
method remove_skips : code_int -> unit
method remove_unused_vars : procedure_int -> unit
method remove_useless_commands : procedure_int -> unit
method simplify_procedure : procedure_int -> unit
end
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchlib/jCHSimplify.mli | ocaml | jchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHConstantPropagationNoArrays
open CHLanguage
open JCHCopyPropagationNoArrays
class skip_remover_t :
object
method walkBoolExp : boolean_exp_t -> unit
method walkCmd : (code_int, cfg_int) command_t -> unit
method walkCode : code_int -> unit
method walkNumExp : numerical_exp_t -> unit
method walkSymExp : symbolic_exp_t -> unit
method walkVar : variable_t -> unit
end
class simplifier_t :
system_int ->
object
method copy_propagation : copy_propagation_no_arrays_t
method cst_propagation : constant_propagation_no_arrays_t
method remove_skips : code_int -> unit
method remove_unused_vars : procedure_int -> unit
method remove_useless_commands : procedure_int -> unit
method simplify_procedure : procedure_int -> unit
end
|
fb52c6bbdcd24a3c1f240dde79a91f721c9d15e24eba0d90e6ba0bd1596643cf | layerware/pgqueue | core.clj | (ns pgqueue.core
(:refer-clojure :exclude [take count])
(:require [clojure.string :as string]
[clojure.java.jdbc :as jdbc]
[pgqueue.serializer.protocol :as s]
[pgqueue.serializer.nippy :as nippy-serializer]))
(defrecord PGQueue [name config])
(defrecord PGQueueItem [queue id name priority data deleted])
(defrecord PGQueueLock [queue lock-id-1 lock-id-2])
(defrecord PGQueueLockedItem [item lock])
Track the current locks held by a JVM such that
;; worker threads sharing a queue can take multiple
;; items across a postgresql session. Postgresql's
;; advisory locks handle locking for separate processes.
;; We also want to execute unlocks on the same connection
;; a lock was made (if that connection is still open), so
;; we store the db connection used for each lock
;; Each *qlocks* entry looks like:
{ ' qname ' [ { : lock - id 123 : db - id < db pool i d > } , ... ] }
(def ^:private ^:dynamic *qlocks* (atom {}))
;; If :analyze_threshold is > 0, we track put count
;; and run a vacuum analyze when threshold is met
(def ^:private ^:dynamic *analyze-hits* (atom 0))
(defn- get-qlocks
[qname]
(get @*qlocks* qname))
(defn- get-qlocks-ids
[qname]
(map :lock-id (get-qlocks qname)))
(def ^:private db-pool-size (+ 2 (.. Runtime getRuntime availableProcessors)))
(def ^:private ^:dynamic *db-pool* (atom (into [] (repeat db-pool-size nil))))
(defn- new-db-pool-conn
[db-spec db-pool-id]
(let [conn (merge db-spec
{:connection (jdbc/get-connection db-spec)})]
(swap! *db-pool* assoc db-pool-id conn)
conn))
(defn- get-db-and-id
"Get random db connection and its db-pool-id from pool.
Returns [db-conn db-pool-id] vector."
([db-spec] (get-db-and-id db-spec (rand-int db-pool-size)))
([db-spec db-pool-id]
(let [id (or db-pool-id (rand-int db-pool-size))
db (or (get @*db-pool* id)
(new-db-pool-conn db-spec id))]
(try
(jdbc/query db ["select 1"])
[db id]
(catch java.sql.SQLException e
[(new-db-pool-conn db-spec id) id])))))
(defn- get-db
"Get random db connection from pool"
([db-spec] (first (get-db-and-id db-spec (rand-int db-pool-size))))
([db-spec db-pool-id] (first (get-db-and-id db-spec db-pool-id))))
(def ^:private default-config
{:db {:classname "org.postgresql.Driver"}
:schema "public"
:table "pgqueues"
:delete true
:default-priority 100
:analyze-threshold 0
:serializer (nippy-serializer/nippy-serializer)})
(defn- merge-with-default-config
[config]
(merge (assoc default-config
:db (:db default-config)) config))
(defn- qt
"Quote name for pg"
[name]
((jdbc/quoted \") name))
(defn- sql-not-in
"Create an sql \"not in\" clause based on the count of things.
Returns nil for count = 0"
([field things]
(when (> (clojure.core/count things) 0)
(str " " field " not in ("
(string/join "," (repeat (clojure.core/count things) "?")) ") "))))
(defn- sql-values
"Prep sql and values for jdbc/query,
flattening to accommodate sql in clause "
[sql & values]
(apply vector (remove nil? (flatten [sql values]))))
(defn- schema-exists?
[db schema]
(let [rs (jdbc/query db
["select 1
from pg_namespace
where nspname = ?" schema])]
(> (clojure.core/count rs) 0)))
(defn- table-exists?
[db schema table]
(let [rs (jdbc/query db
["select 1
from pg_catalog.pg_class c
join pg_catalog.pg_namespace n
on n.oid = c.relnamespace
where n.nspname = ?
and c.relname = ?" schema table])]
(> (clojure.core/count rs) 0)))
(defn- table-oid
[db schema table]
(let [rs (jdbc/query db
["SELECT c.oid as table_oid
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
WHERE n.nspname = ?
AND c.relname = ?" schema table])]
(:table_oid (first rs))))
(defn- qt-table
"fully qualified and quoted schema.table"
[schema table]
(str (qt schema) "." (qt table)))
(defn- validate-config
[{:keys [db schema default-priority serializer]}]
(when (empty? db)
(throw (ex-info "config requires a :db key containing a clojure.java.jdbc db-spec" {})))
(when (not (schema-exists? (get-db db) schema))
(throw (ex-info (str ":schema \"" schema "\" does not exist") {})))
(when (not (integer? default-priority))
(throw (ex-info ":default-priority must be an integer" {})))
(when (not (satisfies? s/Serializer serializer))
(throw (ex-info ":serializer must satisfy pgqueue.serializer.protocol.Serializer protocol"))))
(defn- create-queue-table!
"Create the queue table if it does not exist"
[q]
(let [{:keys [db schema table default-priority]} (:config q)]
(when (not (table-exists? (get-db db) schema table))
(jdbc/execute! (get-db db)
[(str "create table " (qt-table schema table) " (\n"
" id bigserial,\n"
" name text not null,\n"
" priority integer not null default " default-priority ", \n"
" data bytea,\n"
" deleted boolean not null default false,\n"
" constraint " (qt (str table "_pkey")) "\n"
" primary key (name, priority, id, deleted));")]))))
(defn- delete-queue!
"Delete the rows for the given queue"
[q]
(let [{:keys [db schema table]} (:config q)
qname (name (:name q))]
(jdbc/with-db-transaction [tx (get-db db)]
(> (first (jdbc/delete! tx (qt-table schema table)
["name = ?", qname])) 0))))
(defn- drop-queue-table!
"Drop the queue table for the given queue's config"
[{:keys [db schema table]}]
(jdbc/execute! (get-db db)
[(str "drop table if exists " (qt-table schema table))]))
(defn- unlock-queue-locks!
"Unlock all advisory locks for the queue"
[q]
(let [{:keys [db schema table]} (:config q)
db (get-db db)
qname (name (:name q))
table-oid (table-oid db schema table)
locks (jdbc/query db
["select classid, objid
from pg_locks where classid = ?" table-oid])]
(swap! *qlocks* assoc qname [])
(doseq [lock locks]
(jdbc/query db
[(str "select pg_advisory_unlock(cast(? as int),cast(q.id as int)) \n"
"from " (qt-table schema table) " as q\n"
"where name = ?")
table-oid
qname]))))
(defn- unlock-queue-table-locks!
"Unlock all advisory locks for all queues in queue table"
[{:keys [db schema table]}]
(let [db (get-db db)
table-oid (table-oid db schema table)
locks (jdbc/query db
["select classid, objid
from pg_locks where classid = ?" table-oid])]
(swap! *qlocks* {})
(doseq [lock locks]
(jdbc/query db
["select pg_advisory_unlock(?,?)"
(:classid lock)
(:objid lock)]))))
(defn destroy-queue!
"Unlocks any existing advisory locks for rows of this queue's
table and deletes all rows for the queue from the queue table."
[q]
(delete-queue! q)
(unlock-queue-locks! q))
(defn destroy-all-queues!
"Drop the queue table, then unlock any existing advisory locks.
This function takes the same config hashmap used in pgqueue/queue."
[config]
(let [config (merge-with-default-config config)]
(validate-config config)
(drop-queue-table! config)
(unlock-queue-table-locks! config)))
(defn- analyze-hit!
"If :analzye-threshold is active (> 0),
increment the *analyze-hits* counter,
and run a vacuum analyze on table if
the threshold has been reached."
([q] (analyze-hit! q 1))
([q n]
(let [{:keys [db schema table analyze-threshold]} (:config q)]
(when (not (= 0 analyze-threshold))
(if (> (+ n @*analyze-hits*) analyze-threshold)
(do
(jdbc/execute! (get-db db)
[(str "vacuum analyze " (qt-table schema table))]
{:transaction? false})
(swap! *analyze-hits* 0))
(swap! *analyze-hits* inc))))))
(defn queue
"Specify a queue with a name and a config.
Creates the underlying queue table if it
does not yet exist.
- name can be a keyword or string
- config is a hashmap with the following keys:
:db - clojure.java.jdbc db-spec
and optional keys:
:schema - schema name (default is \"public\"
:table - table name (default is \"pgqueues\")
:delete - delete behavior upon successful take:
- true (default) deletes queue item row
- false sets a deleted_flag to true
(persists all queue items; see pgqueue/purge-deleted)
:default-priority - default priority for puts not specifying
a priority; must be an integer
where a lower value = higher priority; negative integers
are also accepted
:analyze-threshold - run a 'vacuum analyze' on queue table when this number
of put/put-batch items is hit. 0 disables this feature.
default value is 0 (disabled).
:serializer - instance of a type that implements
pgqueue.serializer.Serializer protocol
default is instance of pgqueue.serializer.nippy/NippySerializer
(pgqueue.serializer.fressian/FressianSerializer is available, too)"
[name config]
(let [config (merge-with-default-config config)]
(validate-config config)
(let [q (->PGQueue name config)]
(create-queue-table! q)
q)))
(defn put
"Put item onto queue.
usage: (put q item)
(put q priority item)
Returns true on success, false on failure, nil on no-op.
item can be any serializable Clojure data.
When item is nil, put is a no-op and returns nil.
For arity of 2, a default priority is used.
For arity of 3, the second argument is a priority integer
where a lower value = higher priority; negative integers
are also accepted.
Examples:
(pgq/put q -10 \"urgent\")
(pgq/put q 1 \"high\")
(pgq/put q 100 \"medium/default\")
(pgq/put q 200 \"low\")
(pgq/put q 500 \"least\")"
([q item]
(put q (get-in q [:config :default-priority]) item))
([q priority item]
(when (not (nil? item))
(let [{:keys [db schema table serializer]} (:config q)]
(try
(jdbc/insert! (get-db db) (qt-table schema table)
{:name (name (:name q))
:priority priority
:data (s/serialize serializer item)})
(analyze-hit! q)
true
(catch java.sql.SQLException _ false))))))
(defn put-batch
"Put batch of items onto queue.
usage: (put q batch)
(put q priority batch)
Returns true on success, false on failure.
batch is a sequence items.
An item can be any serializable Clojure data.
When an item in the batch is nil, it is removed from
the batch. (put q nil) is a no-op, so put-batch does
likewise.
For arity of 2, a default priority is used.
For arity of 3, the second argument is a priority integer
where a lower value = higher priority; negative integers
are also accepted. All items of the batch will have the
same priority."
([q batch]
(put-batch q (get-in q [:config :default-priority]) batch))
([q priority batch]
(let [{:keys [db schema table serializer]} (:config q)
analyze-threshold 5000
batch-parts (partition-all 500 (doall batch))]
(try
(jdbc/with-db-transaction [tx (get-db db)]
(doseq [batch-part batch-parts]
(jdbc/insert-multi! tx (qt-table schema table)
(map (fn [item]
{:name (name (:name q))
:priority priority
:data (s/serialize serializer item)})
(remove nil? batch-part)))))
(analyze-hit! q (clojure.core/count batch))
true
(catch java.sql.SQLException _ false)))))
(defn locking-take
"Lock and take item, returning a PGQueueLockedItem.
usage: (locking-take q)
example: (let [locked-item (pgqueue/locking-take q)]
; do some work here with item
(pgqueue/delete-and-unlock locked-item))
It is expected that pgqueue/delete and pgqueue/unlock
will later be called on the returned item and lock,
respectively.
See the pgqueue/take-with macro, which wraps up the
use case for takers doing work and then deleting
the item only after the work is safely completed."
[q]
(let [{:keys [db schema table serializer]} (:config q)
[db db-pool-id] (get-db-and-id db)
qtable (qt-table schema table)
qname (name (:name q))
table-oid (table-oid db schema table)
qlocks (get-qlocks-ids qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))]
(jdbc/execute! db ["set enable_seqscan=off"] {:transaction? false})
(let [rs (jdbc/query db
(sql-values
(str
"with recursive queued as ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from (select q from " qtable " as q \n"
"where name = ? and deleted is false order by priority, id limit 1) as t1 \n"
"union all ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from ( \n"
" select ( \n"
" select q from " qtable " as q \n"
" where name = ? and deleted is false \n"
qlocks-not-in-str
" and (priority, id) > (q2.priority, q2.id) \n"
" order by priority, id limit 1) as q \n"
" from " qtable " as q2 where q2.id is not null \n"
" limit 1) AS t1)) \n"
"select id, name, priority, data, deleted \n"
"from queued where locked \n"
qlocks-not-in-str
"limit 1") qname qname qlocks qlocks))
item (first rs)]
(when item
(swap! *qlocks* assoc qname
(conj (get-qlocks qname) {:lock-id (:id item)
:db-id db-pool-id}))
(->PGQueueLockedItem
(->PGQueueItem q (:id item) (:name item) (:priority item)
(s/deserialize serializer (:data item)) (:deleted item))
(->PGQueueLock q table-oid (:id item)))))))
(defn locking-take-batch
"Lock and take a batch of up to n items from queue.
Returns a sequence of PGQueueLockedItem instances.
usage: (locking-take-batch q n)
It is expected that pgqueue/delete and pgqueue/unlock
will later be called on each of the items and locks
in the PGQeueuLockedItems returned."
[q n]
(let [{:keys [db schema table serializer]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))
table-oid (table-oid db schema table)
internal-batch-size 100]
(mapcat
(fn [internal-n]
(let [[db db-pool-id] (get-db-and-id db)
qlocks (get-qlocks-ids qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))
_ (jdbc/execute! db ["set enable_seqscan=off"] {:transaction? false})
batch (jdbc/query db
(sql-values
(str
"with recursive queued as ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from (select q from " qtable " as q \n"
"where name = ? and deleted is false order by priority, id) as t1 \n"
"union all ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from ( \n"
" select ( \n"
" select q from " qtable " as q \n"
" where name = ? and deleted is false \n"
qlocks-not-in-str
" and (priority, id) > (q2.priority, q2.id) \n"
" order by priority, id limit 1) as q \n"
" from " qtable " as q2 where q2.id is not null \n"
" limit 1) AS t1)) \n"
"select id, name, priority, data, deleted \n"
"from queued where locked \n"
qlocks-not-in-str
"limit ?") qname qname qlocks qlocks internal-n))]
(doseq [item batch]
(swap! *qlocks* assoc qname
(conj (get-qlocks qname) {:lock-id (:id item)
:db-id db-pool-id})))
(map (fn [item]
(->PGQueueLockedItem
(->PGQueueItem q (:id item) (:name item) (:priority item)
(s/deserialize serializer (:data item)) (:deleted item))
(->PGQueueLock q table-oid (:id item)))) batch)))
(remove zero?
(conj
(clojure.core/take (quot n internal-batch-size)
(repeat internal-batch-size))
(mod n internal-batch-size))))))
(defn delete
"Delete a PGQueueItem item from queue.
Delete behavior is controlled by the
queue config option :delete in pgqueue/queue.
If true, this actually deletes rows,
otherwise, it sets the \"deleted\" flag to true.
Returns boolean if a row was deleted.
usage: (delete item)"
[item]
(let [q (:queue item)
{:keys [db schema table delete]} (:config q)
db (get-db db)
qname (name (:name q))
qtable (qt-table schema table)]
(if delete
(> (first (jdbc/delete! db qtable ["name = ? and id = ?" qname (:id item)])) 0)
(> (first (jdbc/update! db qtable {:deleted true}
["name = ? and id = ? and deleted is false" qname (:id item)])) 0))))
(defn unlock
"Unlock a PGQueueLock.
Returns boolean.
usage: (unlock lock)"
[lock]
(let [qname (name (get-in lock [:queue :name]))
lock-id-1 (:lock-id-1 lock)
lock-id-2 (:lock-id-2 lock)
qlock (first (filter #(= (:lock-id %) lock-id-2) (get-qlocks qname)))
qlock-db (get-db (get-in lock [:queue :config :db]) (:db-id qlock))]
(swap! *qlocks* assoc qname (remove #(= (:lock-id %) lock-id-2) (doall (get-qlocks qname))))
(:unlocked
(first (jdbc/query qlock-db
["select pg_advisory_unlock(cast(? as int),cast(? as int)) as unlocked"
lock-id-1 lock-id-2])))))
(defn delete-and-unlock
"Delete and unlock a PGQueueLockedItem.
This is a convenience function wrapping
pgqueue/delete and pgqueue/unlock.
Returns boolean \"and\" of above functions.
usage: (delete-and-unlock locked-item)"
[locked-item]
(and
(delete (:item locked-item))
(unlock (:lock locked-item))))
(defn take
"Take item off queue.
Returns nil if no item available.
usage: (take q)
item is retrieved from the queue with the sort order:
- priority (low number = high priority)
- inserted order
This function uses Postgresql's advisory locks
to ensure that only one taker gains access to the item,
such that multiple takers can pull items from the queue
without the fear of another taker pulling the same item.
The item is retrieved from the queue with an advisory lock,
deleted (see pgqueue/queue for delete behavior), unlocked,
and returned.
Also see pgqueue/take-with for use cases requiring the
item to only be removed from the queue after successfully
completing work."
[q]
(when-let [locked-item (locking-take q)]
(delete-and-unlock locked-item)
(get-in locked-item [:item :data])))
(defn take-batch
"Take batch up to n items off queue.
Returns seq of items.
usage: (take-batch q n)
item in batch are retrieved from the queue with the sort order:
- priority (low number = high priority)
- inserted order"
[q n]
(let [locked-items (locking-take-batch q n)]
(doseq [locked-item locked-items]
(delete-and-unlock locked-item))
(map (fn [locked-item]
(get-in locked-item [:item :data]))
locked-items)))
(defmacro take-with
"Lock and take an item off queue, bind the taken item,
execute the body, and ensure delete and unlock after body.
usage: (take-with [binding & body])
binding takes the form [item q], where
item is the binding name, and q is the queue.
This macro uses Postgresql's advisory locks
to ensure that only one taker gains access to the item,
such that multiple takers can pull items from the queue
without the fear of another taker pulling the same item."
[binding & body]
`(let [locked-item# (locking-take ~(second binding))
~(first binding) (get-in locked-item# [:item :data])]
(try
(let [body-return# (do ~@body)]
(when locked-item# (delete (:item locked-item#)))
body-return#)
(finally (when locked-item# (unlock (:lock locked-item#)))))))
(defn count
"Count the items in queue."
[q]
(let [{:keys [db schema table]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))
qlocks (get-qlocks qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))]
(:count
(first
(jdbc/query (get-db db)
(sql-values
(str "select count(*) from " qtable "\n"
"where name = ? and deleted is false \n"
qlocks-not-in-str) qname qlocks))) 0)))
(defn count-deleted
"Count the deleted items in queue.
These rows only exist when the :delete
behavior in pgqueue/queue's config is set
to false."
[q]
(let [{:keys [db schema table]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))]
(:count
(first
(jdbc/query (get-db db)
(sql-values
(str "select count(*) from " qtable "\n"
"where name = ? and deleted is true") qname))) 0)))
(defn purge-deleted
"Purge deleted rows for the given queue.
These rows only exist when the :delete
behavior in pgqueue/queue's config is set
to false.
Returns number of rows deleted."
[q]
(let [{:keys [db schema table]} (:config q)
qname (name (:name q))]
(jdbc/with-db-transaction [tx (get-db db)]
(first (jdbc/delete! tx (qt-table schema table)
["name = ? and deleted", qname])))))
| null | https://raw.githubusercontent.com/layerware/pgqueue/2af3f578338d25332e48ee30d9aea2c83eaf05c8/src/pgqueue/core.clj | clojure | worker threads sharing a queue can take multiple
items across a postgresql session. Postgresql's
advisory locks handle locking for separate processes.
We also want to execute unlocks on the same connection
a lock was made (if that connection is still open), so
we store the db connection used for each lock
Each *qlocks* entry looks like:
If :analyze_threshold is > 0, we track put count
and run a vacuum analyze when threshold is met
see pgqueue/purge-deleted)
must be an integer
negative integers
negative integers
negative integers
do some work here with item | (ns pgqueue.core
(:refer-clojure :exclude [take count])
(:require [clojure.string :as string]
[clojure.java.jdbc :as jdbc]
[pgqueue.serializer.protocol :as s]
[pgqueue.serializer.nippy :as nippy-serializer]))
(defrecord PGQueue [name config])
(defrecord PGQueueItem [queue id name priority data deleted])
(defrecord PGQueueLock [queue lock-id-1 lock-id-2])
(defrecord PGQueueLockedItem [item lock])
Track the current locks held by a JVM such that
{ ' qname ' [ { : lock - id 123 : db - id < db pool i d > } , ... ] }
(def ^:private ^:dynamic *qlocks* (atom {}))
(def ^:private ^:dynamic *analyze-hits* (atom 0))
(defn- get-qlocks
[qname]
(get @*qlocks* qname))
(defn- get-qlocks-ids
[qname]
(map :lock-id (get-qlocks qname)))
(def ^:private db-pool-size (+ 2 (.. Runtime getRuntime availableProcessors)))
(def ^:private ^:dynamic *db-pool* (atom (into [] (repeat db-pool-size nil))))
(defn- new-db-pool-conn
[db-spec db-pool-id]
(let [conn (merge db-spec
{:connection (jdbc/get-connection db-spec)})]
(swap! *db-pool* assoc db-pool-id conn)
conn))
(defn- get-db-and-id
"Get random db connection and its db-pool-id from pool.
Returns [db-conn db-pool-id] vector."
([db-spec] (get-db-and-id db-spec (rand-int db-pool-size)))
([db-spec db-pool-id]
(let [id (or db-pool-id (rand-int db-pool-size))
db (or (get @*db-pool* id)
(new-db-pool-conn db-spec id))]
(try
(jdbc/query db ["select 1"])
[db id]
(catch java.sql.SQLException e
[(new-db-pool-conn db-spec id) id])))))
(defn- get-db
"Get random db connection from pool"
([db-spec] (first (get-db-and-id db-spec (rand-int db-pool-size))))
([db-spec db-pool-id] (first (get-db-and-id db-spec db-pool-id))))
(def ^:private default-config
{:db {:classname "org.postgresql.Driver"}
:schema "public"
:table "pgqueues"
:delete true
:default-priority 100
:analyze-threshold 0
:serializer (nippy-serializer/nippy-serializer)})
(defn- merge-with-default-config
[config]
(merge (assoc default-config
:db (:db default-config)) config))
(defn- qt
"Quote name for pg"
[name]
((jdbc/quoted \") name))
(defn- sql-not-in
"Create an sql \"not in\" clause based on the count of things.
Returns nil for count = 0"
([field things]
(when (> (clojure.core/count things) 0)
(str " " field " not in ("
(string/join "," (repeat (clojure.core/count things) "?")) ") "))))
(defn- sql-values
"Prep sql and values for jdbc/query,
flattening to accommodate sql in clause "
[sql & values]
(apply vector (remove nil? (flatten [sql values]))))
(defn- schema-exists?
[db schema]
(let [rs (jdbc/query db
["select 1
from pg_namespace
where nspname = ?" schema])]
(> (clojure.core/count rs) 0)))
(defn- table-exists?
[db schema table]
(let [rs (jdbc/query db
["select 1
from pg_catalog.pg_class c
join pg_catalog.pg_namespace n
on n.oid = c.relnamespace
where n.nspname = ?
and c.relname = ?" schema table])]
(> (clojure.core/count rs) 0)))
(defn- table-oid
[db schema table]
(let [rs (jdbc/query db
["SELECT c.oid as table_oid
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
WHERE n.nspname = ?
AND c.relname = ?" schema table])]
(:table_oid (first rs))))
(defn- qt-table
"fully qualified and quoted schema.table"
[schema table]
(str (qt schema) "." (qt table)))
(defn- validate-config
[{:keys [db schema default-priority serializer]}]
(when (empty? db)
(throw (ex-info "config requires a :db key containing a clojure.java.jdbc db-spec" {})))
(when (not (schema-exists? (get-db db) schema))
(throw (ex-info (str ":schema \"" schema "\" does not exist") {})))
(when (not (integer? default-priority))
(throw (ex-info ":default-priority must be an integer" {})))
(when (not (satisfies? s/Serializer serializer))
(throw (ex-info ":serializer must satisfy pgqueue.serializer.protocol.Serializer protocol"))))
(defn- create-queue-table!
"Create the queue table if it does not exist"
[q]
(let [{:keys [db schema table default-priority]} (:config q)]
(when (not (table-exists? (get-db db) schema table))
(jdbc/execute! (get-db db)
[(str "create table " (qt-table schema table) " (\n"
" id bigserial,\n"
" name text not null,\n"
" priority integer not null default " default-priority ", \n"
" data bytea,\n"
" deleted boolean not null default false,\n"
" constraint " (qt (str table "_pkey")) "\n"
" primary key (name, priority, id, deleted));")]))))
(defn- delete-queue!
"Delete the rows for the given queue"
[q]
(let [{:keys [db schema table]} (:config q)
qname (name (:name q))]
(jdbc/with-db-transaction [tx (get-db db)]
(> (first (jdbc/delete! tx (qt-table schema table)
["name = ?", qname])) 0))))
(defn- drop-queue-table!
"Drop the queue table for the given queue's config"
[{:keys [db schema table]}]
(jdbc/execute! (get-db db)
[(str "drop table if exists " (qt-table schema table))]))
(defn- unlock-queue-locks!
"Unlock all advisory locks for the queue"
[q]
(let [{:keys [db schema table]} (:config q)
db (get-db db)
qname (name (:name q))
table-oid (table-oid db schema table)
locks (jdbc/query db
["select classid, objid
from pg_locks where classid = ?" table-oid])]
(swap! *qlocks* assoc qname [])
(doseq [lock locks]
(jdbc/query db
[(str "select pg_advisory_unlock(cast(? as int),cast(q.id as int)) \n"
"from " (qt-table schema table) " as q\n"
"where name = ?")
table-oid
qname]))))
(defn- unlock-queue-table-locks!
"Unlock all advisory locks for all queues in queue table"
[{:keys [db schema table]}]
(let [db (get-db db)
table-oid (table-oid db schema table)
locks (jdbc/query db
["select classid, objid
from pg_locks where classid = ?" table-oid])]
(swap! *qlocks* {})
(doseq [lock locks]
(jdbc/query db
["select pg_advisory_unlock(?,?)"
(:classid lock)
(:objid lock)]))))
(defn destroy-queue!
"Unlocks any existing advisory locks for rows of this queue's
table and deletes all rows for the queue from the queue table."
[q]
(delete-queue! q)
(unlock-queue-locks! q))
(defn destroy-all-queues!
"Drop the queue table, then unlock any existing advisory locks.
This function takes the same config hashmap used in pgqueue/queue."
[config]
(let [config (merge-with-default-config config)]
(validate-config config)
(drop-queue-table! config)
(unlock-queue-table-locks! config)))
(defn- analyze-hit!
"If :analzye-threshold is active (> 0),
increment the *analyze-hits* counter,
and run a vacuum analyze on table if
the threshold has been reached."
([q] (analyze-hit! q 1))
([q n]
(let [{:keys [db schema table analyze-threshold]} (:config q)]
(when (not (= 0 analyze-threshold))
(if (> (+ n @*analyze-hits*) analyze-threshold)
(do
(jdbc/execute! (get-db db)
[(str "vacuum analyze " (qt-table schema table))]
{:transaction? false})
(swap! *analyze-hits* 0))
(swap! *analyze-hits* inc))))))
(defn queue
"Specify a queue with a name and a config.
Creates the underlying queue table if it
does not yet exist.
- name can be a keyword or string
- config is a hashmap with the following keys:
:db - clojure.java.jdbc db-spec
and optional keys:
:schema - schema name (default is \"public\"
:table - table name (default is \"pgqueues\")
:delete - delete behavior upon successful take:
- true (default) deletes queue item row
- false sets a deleted_flag to true
:default-priority - default priority for puts not specifying
are also accepted
:analyze-threshold - run a 'vacuum analyze' on queue table when this number
of put/put-batch items is hit. 0 disables this feature.
default value is 0 (disabled).
:serializer - instance of a type that implements
pgqueue.serializer.Serializer protocol
default is instance of pgqueue.serializer.nippy/NippySerializer
(pgqueue.serializer.fressian/FressianSerializer is available, too)"
[name config]
(let [config (merge-with-default-config config)]
(validate-config config)
(let [q (->PGQueue name config)]
(create-queue-table! q)
q)))
(defn put
"Put item onto queue.
usage: (put q item)
(put q priority item)
Returns true on success, false on failure, nil on no-op.
item can be any serializable Clojure data.
When item is nil, put is a no-op and returns nil.
For arity of 2, a default priority is used.
For arity of 3, the second argument is a priority integer
are also accepted.
Examples:
(pgq/put q -10 \"urgent\")
(pgq/put q 1 \"high\")
(pgq/put q 100 \"medium/default\")
(pgq/put q 200 \"low\")
(pgq/put q 500 \"least\")"
([q item]
(put q (get-in q [:config :default-priority]) item))
([q priority item]
(when (not (nil? item))
(let [{:keys [db schema table serializer]} (:config q)]
(try
(jdbc/insert! (get-db db) (qt-table schema table)
{:name (name (:name q))
:priority priority
:data (s/serialize serializer item)})
(analyze-hit! q)
true
(catch java.sql.SQLException _ false))))))
(defn put-batch
"Put batch of items onto queue.
usage: (put q batch)
(put q priority batch)
Returns true on success, false on failure.
batch is a sequence items.
An item can be any serializable Clojure data.
When an item in the batch is nil, it is removed from
the batch. (put q nil) is a no-op, so put-batch does
likewise.
For arity of 2, a default priority is used.
For arity of 3, the second argument is a priority integer
are also accepted. All items of the batch will have the
same priority."
([q batch]
(put-batch q (get-in q [:config :default-priority]) batch))
([q priority batch]
(let [{:keys [db schema table serializer]} (:config q)
analyze-threshold 5000
batch-parts (partition-all 500 (doall batch))]
(try
(jdbc/with-db-transaction [tx (get-db db)]
(doseq [batch-part batch-parts]
(jdbc/insert-multi! tx (qt-table schema table)
(map (fn [item]
{:name (name (:name q))
:priority priority
:data (s/serialize serializer item)})
(remove nil? batch-part)))))
(analyze-hit! q (clojure.core/count batch))
true
(catch java.sql.SQLException _ false)))))
(defn locking-take
"Lock and take item, returning a PGQueueLockedItem.
usage: (locking-take q)
example: (let [locked-item (pgqueue/locking-take q)]
(pgqueue/delete-and-unlock locked-item))
It is expected that pgqueue/delete and pgqueue/unlock
will later be called on the returned item and lock,
respectively.
See the pgqueue/take-with macro, which wraps up the
use case for takers doing work and then deleting
the item only after the work is safely completed."
[q]
(let [{:keys [db schema table serializer]} (:config q)
[db db-pool-id] (get-db-and-id db)
qtable (qt-table schema table)
qname (name (:name q))
table-oid (table-oid db schema table)
qlocks (get-qlocks-ids qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))]
(jdbc/execute! db ["set enable_seqscan=off"] {:transaction? false})
(let [rs (jdbc/query db
(sql-values
(str
"with recursive queued as ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from (select q from " qtable " as q \n"
"where name = ? and deleted is false order by priority, id limit 1) as t1 \n"
"union all ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from ( \n"
" select ( \n"
" select q from " qtable " as q \n"
" where name = ? and deleted is false \n"
qlocks-not-in-str
" and (priority, id) > (q2.priority, q2.id) \n"
" order by priority, id limit 1) as q \n"
" from " qtable " as q2 where q2.id is not null \n"
" limit 1) AS t1)) \n"
"select id, name, priority, data, deleted \n"
"from queued where locked \n"
qlocks-not-in-str
"limit 1") qname qname qlocks qlocks))
item (first rs)]
(when item
(swap! *qlocks* assoc qname
(conj (get-qlocks qname) {:lock-id (:id item)
:db-id db-pool-id}))
(->PGQueueLockedItem
(->PGQueueItem q (:id item) (:name item) (:priority item)
(s/deserialize serializer (:data item)) (:deleted item))
(->PGQueueLock q table-oid (:id item)))))))
(defn locking-take-batch
"Lock and take a batch of up to n items from queue.
Returns a sequence of PGQueueLockedItem instances.
usage: (locking-take-batch q n)
It is expected that pgqueue/delete and pgqueue/unlock
will later be called on each of the items and locks
in the PGQeueuLockedItems returned."
[q n]
(let [{:keys [db schema table serializer]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))
table-oid (table-oid db schema table)
internal-batch-size 100]
(mapcat
(fn [internal-n]
(let [[db db-pool-id] (get-db-and-id db)
qlocks (get-qlocks-ids qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))
_ (jdbc/execute! db ["set enable_seqscan=off"] {:transaction? false})
batch (jdbc/query db
(sql-values
(str
"with recursive queued as ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from (select q from " qtable " as q \n"
"where name = ? and deleted is false order by priority, id) as t1 \n"
"union all ( \n"
"select (q).*, pg_try_advisory_lock(" table-oid ", cast((q).id as int)) as locked \n"
"from ( \n"
" select ( \n"
" select q from " qtable " as q \n"
" where name = ? and deleted is false \n"
qlocks-not-in-str
" and (priority, id) > (q2.priority, q2.id) \n"
" order by priority, id limit 1) as q \n"
" from " qtable " as q2 where q2.id is not null \n"
" limit 1) AS t1)) \n"
"select id, name, priority, data, deleted \n"
"from queued where locked \n"
qlocks-not-in-str
"limit ?") qname qname qlocks qlocks internal-n))]
(doseq [item batch]
(swap! *qlocks* assoc qname
(conj (get-qlocks qname) {:lock-id (:id item)
:db-id db-pool-id})))
(map (fn [item]
(->PGQueueLockedItem
(->PGQueueItem q (:id item) (:name item) (:priority item)
(s/deserialize serializer (:data item)) (:deleted item))
(->PGQueueLock q table-oid (:id item)))) batch)))
(remove zero?
(conj
(clojure.core/take (quot n internal-batch-size)
(repeat internal-batch-size))
(mod n internal-batch-size))))))
(defn delete
"Delete a PGQueueItem item from queue.
Delete behavior is controlled by the
queue config option :delete in pgqueue/queue.
If true, this actually deletes rows,
otherwise, it sets the \"deleted\" flag to true.
Returns boolean if a row was deleted.
usage: (delete item)"
[item]
(let [q (:queue item)
{:keys [db schema table delete]} (:config q)
db (get-db db)
qname (name (:name q))
qtable (qt-table schema table)]
(if delete
(> (first (jdbc/delete! db qtable ["name = ? and id = ?" qname (:id item)])) 0)
(> (first (jdbc/update! db qtable {:deleted true}
["name = ? and id = ? and deleted is false" qname (:id item)])) 0))))
(defn unlock
"Unlock a PGQueueLock.
Returns boolean.
usage: (unlock lock)"
[lock]
(let [qname (name (get-in lock [:queue :name]))
lock-id-1 (:lock-id-1 lock)
lock-id-2 (:lock-id-2 lock)
qlock (first (filter #(= (:lock-id %) lock-id-2) (get-qlocks qname)))
qlock-db (get-db (get-in lock [:queue :config :db]) (:db-id qlock))]
(swap! *qlocks* assoc qname (remove #(= (:lock-id %) lock-id-2) (doall (get-qlocks qname))))
(:unlocked
(first (jdbc/query qlock-db
["select pg_advisory_unlock(cast(? as int),cast(? as int)) as unlocked"
lock-id-1 lock-id-2])))))
(defn delete-and-unlock
"Delete and unlock a PGQueueLockedItem.
This is a convenience function wrapping
pgqueue/delete and pgqueue/unlock.
Returns boolean \"and\" of above functions.
usage: (delete-and-unlock locked-item)"
[locked-item]
(and
(delete (:item locked-item))
(unlock (:lock locked-item))))
(defn take
"Take item off queue.
Returns nil if no item available.
usage: (take q)
item is retrieved from the queue with the sort order:
- priority (low number = high priority)
- inserted order
This function uses Postgresql's advisory locks
to ensure that only one taker gains access to the item,
such that multiple takers can pull items from the queue
without the fear of another taker pulling the same item.
The item is retrieved from the queue with an advisory lock,
deleted (see pgqueue/queue for delete behavior), unlocked,
and returned.
Also see pgqueue/take-with for use cases requiring the
item to only be removed from the queue after successfully
completing work."
[q]
(when-let [locked-item (locking-take q)]
(delete-and-unlock locked-item)
(get-in locked-item [:item :data])))
(defn take-batch
"Take batch up to n items off queue.
Returns seq of items.
usage: (take-batch q n)
item in batch are retrieved from the queue with the sort order:
- priority (low number = high priority)
- inserted order"
[q n]
(let [locked-items (locking-take-batch q n)]
(doseq [locked-item locked-items]
(delete-and-unlock locked-item))
(map (fn [locked-item]
(get-in locked-item [:item :data]))
locked-items)))
(defmacro take-with
"Lock and take an item off queue, bind the taken item,
execute the body, and ensure delete and unlock after body.
usage: (take-with [binding & body])
binding takes the form [item q], where
item is the binding name, and q is the queue.
This macro uses Postgresql's advisory locks
to ensure that only one taker gains access to the item,
such that multiple takers can pull items from the queue
without the fear of another taker pulling the same item."
[binding & body]
`(let [locked-item# (locking-take ~(second binding))
~(first binding) (get-in locked-item# [:item :data])]
(try
(let [body-return# (do ~@body)]
(when locked-item# (delete (:item locked-item#)))
body-return#)
(finally (when locked-item# (unlock (:lock locked-item#)))))))
(defn count
"Count the items in queue."
[q]
(let [{:keys [db schema table]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))
qlocks (get-qlocks qname)
qlocks-not-in (sql-not-in "id" qlocks)
qlocks-not-in-str (when qlocks-not-in (str " and " qlocks-not-in))]
(:count
(first
(jdbc/query (get-db db)
(sql-values
(str "select count(*) from " qtable "\n"
"where name = ? and deleted is false \n"
qlocks-not-in-str) qname qlocks))) 0)))
(defn count-deleted
"Count the deleted items in queue.
These rows only exist when the :delete
behavior in pgqueue/queue's config is set
to false."
[q]
(let [{:keys [db schema table]} (:config q)
qtable (qt-table schema table)
qname (name (:name q))]
(:count
(first
(jdbc/query (get-db db)
(sql-values
(str "select count(*) from " qtable "\n"
"where name = ? and deleted is true") qname))) 0)))
(defn purge-deleted
"Purge deleted rows for the given queue.
These rows only exist when the :delete
behavior in pgqueue/queue's config is set
to false.
Returns number of rows deleted."
[q]
(let [{:keys [db schema table]} (:config q)
qname (name (:name q))]
(jdbc/with-db-transaction [tx (get-db db)]
(first (jdbc/delete! tx (qt-table schema table)
["name = ? and deleted", qname])))))
|
d38d09cdee25d3a5618ed61ef34d9334845e111d1a5e2ea019d02c328fa73ab3 | weavejester/crypto-equality | project.clj | (defproject crypto-equality "1.0.1"
:description "Securely check equality of strings or byte sequences"
:url "-equality"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.7.0"]]
:aliases {"test-all" ["with-profile" "default:+1.8:+1.9:+1.10" "test"]}
:profiles
{:1.8 {:dependencies [[org.clojure/clojure "1.8.0"]]}
:1.9 {:dependencies [[org.clojure/clojure "1.9.0"]]}
:1.10 {:dependencies [[org.clojure/clojure "1.10.3"]]}})
| null | https://raw.githubusercontent.com/weavejester/crypto-equality/e62f63882c9d25694ba8c53ed6b108b2813175ea/project.clj | clojure | (defproject crypto-equality "1.0.1"
:description "Securely check equality of strings or byte sequences"
:url "-equality"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.7.0"]]
:aliases {"test-all" ["with-profile" "default:+1.8:+1.9:+1.10" "test"]}
:profiles
{:1.8 {:dependencies [[org.clojure/clojure "1.8.0"]]}
:1.9 {:dependencies [[org.clojure/clojure "1.9.0"]]}
:1.10 {:dependencies [[org.clojure/clojure "1.10.3"]]}})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.